Dataset schema (one record per function): repo (string), path (string), func_name (string), original_string (string), language (string, 1 class: python), code (string), code_tokens (list), docstring (string), docstring_tokens (list), sha (string, 40 characters), url (string), partition (string, 1 class: train).

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan._save_potentials
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L409-L426
partition: train

```python
def _save_potentials(self, directory):
    """save potentials to a directory"""
    print('saving potentials')
    digits = int(np.ceil(np.log10(self.configs.configs.shape[0])))
    for i in range(0, self.configs.configs.shape[0]):
        pot_data = self.get_potential(i)
        filename_raw = 'pot{0:0' + '{0}'.format(digits) + '}.dat'
        filename = directory + os.sep + filename_raw.format(i + 1)

        nodes = self.grid.nodes['sorted'][:, 1:3]
        all_data = np.hstack((
            nodes,
            pot_data[0][:, np.newaxis],
            pot_data[1][:, np.newaxis],
        ))
        with open(filename, 'wb') as fid:
            np.savetxt(fid, all_data)
```
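
A minimal, self-contained sketch of the zero-padded filename scheme used above; the configuration count is made up for illustration:

```python
import numpy as np

# Standalone sketch of the zero-padded filename scheme in _save_potentials.
n_configs = 120
digits = int(np.ceil(np.log10(n_configs)))            # -> 3
filename_raw = 'pot{0:0' + '{0}'.format(digits) + '}.dat'
print(filename_raw.format(1))                          # -> pot001.dat
print(filename_raw.format(n_configs))                  # -> pot120.dat
```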

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.clear_measurements
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L428-L435
partition: train

```python
def clear_measurements(self):
    """Forget any previous measurements"""
    mid_list = self.assignments.get('measurements', None)
    if mid_list is not None:
        for mid in mid_list:
            self.configs.delete_measurements(mid=mid)
        self.assignments['measurements'] = None
```

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.measurements
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L437-L461
partition: train

```python
def measurements(self):
    """Return the measurements associated with this instance.

    if measurements are not present, check if we can model, and then
    run CRMod to load the measurements.
    """
    # check if we have measurements
    mid = self.assignments.get('measurements', None)
    if mid is None:
        return_value = self.model(
            voltages=True,
            sensitivities=False,
            potentials=False,
        )
        if return_value is None:
            print('cannot model')
            return

    # retrieve measurements
    cids = self.assignments['measurements']
    measurements = np.vstack((
        self.configs.measurements[cids[0]],
        self.configs.measurements[cids[1]],
    )).T
    return measurements
```
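
A hypothetical usage sketch of the returned shape; `td` stands for a fully configured tdMan instance (grid, configurations, and forward model already assigned) and is not constructed here:

```python
# Hypothetical usage; `td` is assumed to be a configured tdMan instance.
data = td.measurements()
if data is not None:
    magnitudes = data[:, 0]   # first column: magnitude measurements
    phases = data[:, 1]       # second column: phase measurements
```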

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan._read_sensitivities
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L500-L532
partition: train

```python
def _read_sensitivities(self, sens_dir):
    """import sensitivities from a directory

    Note
    ----
    * check that signs are correct in case CRMod switches potential
      electrodes
    """
    if self.assignments['sensitivities'] is not None:
        print('Sensitivities already imported. Will not overwrite!')
        return
    else:
        self.assignments['sensitivities'] = {}

    sens_files = sorted(glob(sens_dir + os.sep + 'sens*.dat'))
    for nr, filename in enumerate(sens_files):
        with open(filename, 'r') as fid:
            metadata = np.fromstring(
                fid.readline().strip(), sep=' ', count=2
            )
            meta_re = metadata[0]
            meta_im = metadata[1]
            sens_data = np.loadtxt(fid)
            cids = self.parman.add_data(
                sens_data[:, 2:4],
                [meta_re, meta_im],
            )
            # store cids for later retrieval
            self.assignments['sensitivities'][nr] = cids
```
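
The two-value header parsing can be checked in isolation. A small sketch with a made-up first line of a sens*.dat file:

```python
import numpy as np

# Sketch of the header parsing used above: the first line of a sens*.dat
# file carries two floats (real and imaginary metadata). Values are made up.
first_line = '  1.2345e-03   4.5678e-05\n'
metadata = np.fromstring(first_line.strip(), sep=' ', count=2)
meta_re, meta_im = metadata
print(meta_re, meta_im)   # -> 0.0012345 4.5678e-05
```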

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan._read_potentials
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L534-L552
partition: train

```python
def _read_potentials(self, pot_dir):
    """import potentials from a directory"""
    if self.assignments['potentials'] is not None:
        print('Potentials already imported. Will not overwrite!')
        return
    else:
        self.assignments['potentials'] = {}

    pot_files = sorted(glob(pot_dir + os.sep + 'pot*.dat'))
    for nr, filename in enumerate(pot_files):
        with open(filename, 'r') as fid:
            pot_data = np.loadtxt(fid)
            nids = self.nodeman.add_data(
                pot_data[:, 2:4],
            )
            # store nids for later retrieval
            self.assignments['potentials'][nr] = nids
```

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.get_potential
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L554-L575
partition: train

```python
def get_potential(self, config_nr):
    """Return potential data for a given measurement configuration.

    Parameters
    ----------
    config_nr : int
        Number of the configurations. Starts at 0

    Returns
    -------
    pot_data : list with two numpy.ndarrays
        First array: magnitude potentials, second array: phase potentials
    """
    if self.assignments['potentials'] is None:
        self._check_state()
        if self.can_model:
            self.model(potentials=True)

    nids = self.assignments['potentials'][config_nr]
    pot_data = [self.nodeman.nodevals[nid] for nid in nids]
    return pot_data
```
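
A hypothetical usage sketch, again assuming a modeling-ready tdMan instance `td` that is not constructed here:

```python
# Hypothetical usage; `td` is assumed to be a tdMan instance that can model.
pot_mag, pot_pha = td.get_potential(0)   # node potentials: magnitude, phase
```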

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.get_sensitivity
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L577-L589
partition: train

```python
def get_sensitivity(self, config_nr):
    """return a sensitivity, as well as corresponding metadata, for a
    given measurement configuration. Indices start at zero.
    """
    if self.assignments['sensitivities'] is None:
        self._check_state()
        if self.can_model:
            self.model(sensitivities=True)

    cids = self.assignments['sensitivities'][config_nr]
    sens_data = [self.parman.parsets[cid] for cid in cids]
    meta_data = [self.parman.metadata[cid] for cid in cids]
    return sens_data, meta_data
```
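
A hypothetical usage sketch under the same assumption of a modeling-ready `td` instance:

```python
# Hypothetical usage; `td` is assumed to be a tdMan instance that can model.
sens_data, meta_data = td.get_sensitivity(0)
sens_mag, sens_pha = sens_data   # element-wise magnitude/phase sensitivities
```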

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.read_voltages
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L769-L834
partition: train

```python
def read_voltages(self, voltage_file):
    """import voltages from a volt.dat file

    Parameters
    ----------
    voltage_file : string
        Path to volt.dat file
    """
    measurements_raw = np.loadtxt(
        voltage_file,
        skiprows=1,
    )
    measurements = np.atleast_2d(measurements_raw)

    # extract measurement configurations
    A = (measurements[:, 0] / 1e4).astype(int)
    B = (measurements[:, 0] % 1e4).astype(int)
    M = (measurements[:, 1] / 1e4).astype(int)
    N = (measurements[:, 1] % 1e4).astype(int)
    ABMN = np.vstack((A, B, M, N)).T

    if self.configs.configs is None:
        self.configs.configs = ABMN
    else:
        # configurations don't match
        if not np.all(ABMN == self.configs.configs):
            for nr, (old_config, new_config) in enumerate(zip(
                    self.configs.configs, ABMN)):
                if np.all(old_config == new_config):
                    continue
                # check polarity
                current_electrodes_are_equal = np.all(
                    old_config[0:2] == new_config[0:2]
                )
                voltage_electrodes_are_switched = np.all(
                    old_config[2:4] == new_config[4:1:-1]
                )

                if (current_electrodes_are_equal and
                        voltage_electrodes_are_switched):
                    if len(self.configs.measurements.keys()) > 0:
                        raise Exception(
                            'need to switch electrode polarity, but ' +
                            'there are already measurements stored for ' +
                            'the old configuration!')
                    else:
                        # switch M/N in configurations
                        self.configs.configs[nr, :] = new_config
                else:
                    raise Exception(
                        'There was an error matching configurations of ' +
                        'voltages with configurations already imported'
                    )

    # add measurements to the config instance
    mid_mag = self.configs.add_measurements(
        measurements[:, 2]
    )
    mid_pha = self.configs.add_measurements(
        measurements[:, 3]
    )
    self.assignments['measurements'] = [mid_mag, mid_pha]
```
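
The AB/MN decoding arithmetic can be verified on its own. A standalone sketch with made-up packed values:

```python
import numpy as np

# Standalone check of the decoding used above: each electrode pair is
# packed into one column value as first * 1e4 + second.
packed = np.array([10002.0, 30004.0])    # encodes (1, 2) and (3, 4)
first = (packed / 1e4).astype(int)       # -> array([1, 3])
second = (packed % 1e4).astype(int)      # -> array([2, 4])
```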

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.model
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L875-L910
partition: train

```python
def model(self,
          voltages=True,
          sensitivities=False,
          potentials=False,
          output_directory=None,
          silent=False,
          ):
    """Forward model the tomodir and read in the results"""
    self._check_state()
    if self.can_model:
        if output_directory is not None:
            if not os.path.isdir(output_directory):
                os.makedirs(output_directory)
                tempdir = output_directory
                self._model(voltages, sensitivities, potentials, tempdir)
            else:
                raise IOError(
                    'output directory already exists: {0}'.format(
                        output_directory
                    )
                )
        else:
            with tempfile.TemporaryDirectory(dir=self.tempdir) as tempdir:
                self._model(
                    voltages, sensitivities, potentials, tempdir,
                    silent=silent
                )
        return 1
    else:
        print('Sorry, not all required information to model is present')
        print('Check:')
        print('1) configurations present: self.configs.configs')
        print('2) is a model present')
        return None
```
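
A hypothetical usage sketch of the return-code convention (1 on success, None when prerequisites are missing); the directory name is made up and `td` is assumed to be a configured tdMan instance:

```python
# Hypothetical usage: forward model voltages into a fresh output directory.
ret = td.model(voltages=True, output_directory='tomodir_fwd')
if ret is None:
    print('prerequisites for modeling are missing')
```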

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan._invert
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L912-L954
partition: train

```python
def _invert(self, tempdir, catch_output=True, **kwargs):
    """Internal function that runs an inversion using CRTomo.

    Parameters
    ----------
    tempdir : string
        directory which to use as a tomodir
    catch_output : bool, optional
        if True, catch all outputs of the CRTomo call (default: True)
    cores : int, optional
        how many cores to use (default: 2)
    """
    nr_cores = kwargs.get('cores', 2)
    print('attempting inversion in directory: {0}'.format(tempdir))
    pwd = os.getcwd()
    os.chdir(tempdir)

    self.save_to_tomodir('.')
    os.chdir('exe')
    binary = CRBin.get('CRTomo')
    print('Using binary: {0}'.format(binary))
    print('calling CRTomo')
    # store env variable
    env_omp = os.environ.get('OMP_NUM_THREADS', '')
    os.environ['OMP_NUM_THREADS'] = '{0}'.format(nr_cores)
    if catch_output:
        subprocess.check_output(
            binary,
            shell=True,
            stderr=subprocess.STDOUT,
        )
    else:
        subprocess.call(
            binary,
            shell=True,
        )
    # reset environment variable
    os.environ['OMP_NUM_THREADS'] = env_omp

    print('finished')
    os.chdir(pwd)
    self.read_inversion_results(tempdir)
```
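
The environment save/restore around the subprocess call is a reusable pattern. A runnable sketch with a placeholder POSIX command standing in for the CRTomo binary (the try/finally guard is an addition here; the original restores the variable without one):

```python
import os
import subprocess

# Save, override, and restore OMP_NUM_THREADS around a child process.
# 'true' is a placeholder POSIX command, not the CRTomo binary.
env_omp = os.environ.get('OMP_NUM_THREADS', '')
os.environ['OMP_NUM_THREADS'] = '2'
try:
    subprocess.check_output('true', shell=True, stderr=subprocess.STDOUT)
finally:
    os.environ['OMP_NUM_THREADS'] = env_omp
```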

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.invert
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L956-L1001
partition: train

```python
def invert(self, output_directory=None, catch_output=True, **kwargs):
    """Invert this instance, and import the result files

    No directories/files will be overwritten. Raise an IOError if the
    output directory exists.

    Parameters
    ----------
    output_directory : string, optional
        use this directory as output directory for the generated tomodir.
        If None, then a temporary directory will be used that is deleted
        after import.
    catch_output : bool, optional
        Do not show CRTomo output
    cores : int, optional
        how many cores to use for CRTomo

    Returns
    -------
    return_code : int
        Return 0 if the inversion completed successfully. Return 1 if no
        measurements are present.
    """
    self._check_state()
    if self.can_invert:
        if output_directory is not None:
            if not os.path.isdir(output_directory):
                os.makedirs(output_directory)
                tempdir = output_directory
                self._invert(tempdir, catch_output, **kwargs)
            else:
                raise IOError(
                    'output directory already exists: {0}'.format(
                        output_directory
                    )
                )
        else:
            with tempfile.TemporaryDirectory(dir=self.tempdir) as tempdir:
                self._invert(tempdir, catch_output, **kwargs)
        return 0
    else:
        print('Sorry, no measurements present, cannot model yet')
        return 1
```
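
A hypothetical usage sketch; `td` is assumed to be a tdMan instance that already holds measurements, and `cores` is forwarded to _invert via **kwargs:

```python
# Hypothetical usage: invert into a temporary directory, check the result.
if td.invert(catch_output=True, cores=4) == 0:
    print('inversion finished and results imported')
```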

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.read_inversion_results
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1003-L1011
partition: train

```python
def read_inversion_results(self, tomodir):
    """Import inversion results from a tomodir into this instance

    WARNING: Not finished!
    """
    self._read_inversion_results(tomodir)
    self._read_inv_ctr(tomodir)
    self._read_resm_m(tomodir)
    self._read_eps_ctr(tomodir)
```

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.plot_eps_data_hist
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1038-L1140
partition: train

```python
def plot_eps_data_hist(self, dfs):
    """Plot histograms of data residuals and data error weighting

    TODO:
    * add percentage of data below/above the RMS value
    """
    # check if this is a DC inversion
    if 'datum' in dfs[0]:
        dc_inv = True
    else:
        dc_inv = False

    nr_y = len(dfs)
    size_y = 5 / 2.54 * nr_y
    if dc_inv:
        nr_x = 1
    else:
        nr_x = 3
    size_x = 15 / 2.54

    fig, axes = plt.subplots(nr_y, nr_x, figsize=(size_x, size_y))
    axes = np.atleast_2d(axes)

    # plot initial data errors
    df = dfs[0]
    if dc_inv:
        ax = axes[0, 0]
        ax.hist(
            df['datum'] / df['eps_r'],
            100,
        )
        ax.set_xlabel(r'$-log(|R|) / \epsilon_r$')
        ax.set_ylabel(r'count')
    else:
        # complex inversion
        ax = axes[0, 0]
        ax.hist(
            df['-log(|R|)'] / df['eps'],
            100,
        )
        ax.set_xlabel(r'$-log(|R|)$')
        ax.set_ylabel(r'count')

        ax = axes[0, 1]
        ax.hist(
            df['-log(|R|)'] / df['eps_r'],
            100,
        )
        ax.set_xlabel(r'$-log(|R|) / \epsilon_r$')
        ax.set_ylabel(r'count')

        ax = axes[0, 2]
        phase_data = df['-Phase(rad)'] / df['eps_p']
        if not np.all(np.isinf(phase_data) | np.isnan(phase_data)):
            ax.hist(
                phase_data,
                100,
            )
            ax.set_xlabel(r'$-\phi[rad] / \epsilon_p$')
            ax.set_ylabel(r'count')

    # iterations
    for it, df in enumerate(dfs[1:]):
        ax = axes[1 + it, 0]
        ax.hist(
            df['psi'],
            100
        )
        rms = np.sqrt(
            1 / df['psi'].shape[0] *
            np.sum(
                df['psi'] ** 2
            )
        )
        ax.axvline(rms, color='k', linestyle='dashed')
        ax.set_title('iteration: {0}'.format(it))
        ax.set_xlabel('psi')
        ax.set_ylabel(r'count')

        ax = axes[1 + it, 1]
        Rdat = df['Re(d)']
        Rmod = df['Re(f(m))']
        ax.scatter(
            Rdat,
            Rmod,
        )
        ax.set_xlabel(r'$log(R_{data}~[\Omega])$')
        ax.set_ylabel(r'$log(R_{mod}~[\Omega])$')

        ax = axes[1 + it, 2]
        phidat = df['Im(d)']
        phimod = df['Im(f(m))']
        ax.scatter(
            phidat,
            phimod,
        )
        ax.set_xlabel(r'$\phi_{data}~[mrad]$')
        ax.set_ylabel(r'$\phi_{mod}~[mrad]$')

    fig.tight_layout()
    fig.savefig('eps_plot_hist.png', dpi=300)
```
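
The RMS drawn as a dashed line in the psi histograms can be checked standalone (the residuals here are made up):

```python
import numpy as np

# Standalone check of the RMS computation used above.
psi = np.array([0.5, -1.0, 2.0])
rms = np.sqrt(np.sum(psi ** 2) / psi.shape[0])
print(rms)   # -> ~1.3229
```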

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan._read_eps_ctr
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1218-L1258
partition: train

```python
def _read_eps_ctr(tomodir):
    """Parse a CRTomo eps.ctr file.

    TODO: change parameters to only provide eps.ctr file

    Parameters
    ----------
    tomodir : string
        Path to directory path

    Returns
    -------
    dfs : list of pandas.DataFrame
        one data frame per block of the eps.ctr file
    """
    epsctr_file = tomodir + os.sep + 'inv' + os.sep + 'eps.ctr'
    if not os.path.isfile(epsctr_file):
        print('eps.ctr not found: {0}'.format(epsctr_file))
        print(os.getcwd())
        return 1

    with open(epsctr_file, 'r') as fid:
        lines = fid.readlines()

    group = itertools.groupby(lines, lambda x: x == '\n')
    dfs = []
    for x in group:
        if not x[0]:
            data = [y for y in x[1]]
            if data[0].startswith('IT') or data[0].startswith('PIT'):
                del data[0]
            data[0] = data[0].replace('-Phase (rad)', '-Phase(rad)')
            tfile = StringIO(''.join(data))
            df = pd.read_csv(
                tfile,
                delim_whitespace=True,
                na_values=['Infinity'],
            )
            dfs.append(df)
    return dfs
```
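
The blank-line grouping is the core of this parser and is easy to illustrate on its own (the lines below are made up):

```python
import itertools

# Minimal illustration of the blank-line grouping used above: consecutive
# non-blank lines form one block per inversion iteration.
lines = ['IT 1\n', 'a b\n', '1 2\n', '\n', 'PIT 1\n', 'a b\n', '3 4\n']
blocks = [
    list(content)
    for is_blank, content in itertools.groupby(lines, lambda x: x == '\n')
    if not is_blank
]
print(len(blocks))   # -> 2, one block per 'IT'/'PIT' section
```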

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan._read_resm_m
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1622-L1648
partition: train

```python
def _read_resm_m(self, tomodir):
    """Read in the resolution matrix of an inversion

    Parameters
    ----------
    tomodir : string
        directory path to a tomodir
    """
    resm_file = tomodir + os.sep + 'inv' + os.sep + 'res_m.diag'
    if not os.path.isfile(resm_file):
        print('res_m.diag not found: {0}'.format(resm_file))
        print(os.getcwd())
        return 1

    # read header
    with open(resm_file, 'rb') as fid:
        first_line = fid.readline().strip()
        header_raw = np.fromstring(first_line, count=4, sep=' ')
        # nr_cells = int(header_raw[0])
        # lam = float(header_raw[1])

        subdata = np.genfromtxt(fid)
        print(subdata.shape)
        pid = self.parman.add_data(subdata[:, 0])
        self.assignments['resm'] = pid
```

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.register_forward_model
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1686-L1698
partition: train

```python
def register_forward_model(self, pid_mag, pid_pha):
    """Register parameter sets as the forward models for magnitude and
    phase

    Parameters
    ----------
    pid_mag : int
        parameter id corresponding to the magnitude model
    pid_pha : int
        parameter id corresponding to the phase model
    """
    self.register_magnitude_model(pid_mag)
    self.register_phase_model(pid_pha)
```

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.register_magnitude_model
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1700-L1706
partition: train

```python
def register_magnitude_model(self, pid):
    """Set a given parameter model to the forward magnitude model"""
    if self.assignments['forward_model'] is None:
        self.assignments['forward_model'] = [None, None]

    self.assignments['forward_model'][0] = pid
```

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.register_phase_model
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1708-L1714
partition: train

```python
def register_phase_model(self, pid):
    """Set a given parameter model to the forward phase model"""
    if self.assignments['forward_model'] is None:
        self.assignments['forward_model'] = [None, None]

    self.assignments['forward_model'][1] = pid
```

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.add_homogeneous_model
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1716-L1748
partition: train

```python
def add_homogeneous_model(self, magnitude, phase=0):
    """Add a homogeneous resistivity model to the tomodir. This is useful
    for synthetic measurements.

    Parameters
    ----------
    magnitude : float
        magnitude [Ohm m] value of the homogeneous model
    phase : float, optional
        phase [mrad] value of the homogeneous model

    Returns
    -------
    pid_mag : int
        ID value of the parameter set of the magnitude model
    pid_pha : int
        ID value of the parameter set of the phase model

    Note that the parameter sets are automatically registered as the
    forward models for magnitude and phase values.
    """
    if self.assignments['forward_model'] is not None:
        print('model already set, will overwrite')

    # generate distributions
    magnitude_model = np.ones(self.grid.nr_of_elements) * magnitude
    phase_model = np.ones(self.grid.nr_of_elements) * phase
    pid_mag = self.parman.add_data(magnitude_model)
    pid_pha = self.parman.add_data(phase_model)

    self.assignments['forward_model'] = [pid_mag, pid_pha]
    return pid_mag, pid_pha
```
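
A hypothetical usage sketch; `td` is assumed to be a tdMan instance with a grid attached, and the resistivity/phase values are made up:

```python
# Hypothetical usage: a 100 Ohm m half-space with a -5 mrad phase.
pid_mag, pid_pha = td.add_homogeneous_model(magnitude=100, phase=-5)
```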

repo: geophysics-ubonn/crtomo_tools
path: lib/crtomo/tdManager.py
func_name: tdMan.show_parset
language: python
sha: 27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
url: https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/tdManager.py#L1882-L1887
partition: train

```python
def show_parset(self, pid):
    """Plot a given parameter set"""
    fig, ax = plt.subplots()
    self.plot.plot_elements_to_ax(pid, ax=ax)
    return fig, ax
```
wdv4758h/python-everywhere
setup.py
cythonize
def cythonize(*args, **kwargs): ''' Dirty hack: only import cythonize at the time you use it. If you don't write a Cython extension, you won't fail even if you don't install Cython. ''' global cythonize from Cython.Build import cythonize return cythonize(*args, **kwargs)
python
def cythonize(*args, **kwargs): ''' Dirty hack: only import cythonize at the time you use it. If you don't write a Cython extension, you won't fail even if you don't install Cython. ''' global cythonize from Cython.Build import cythonize return cythonize(*args, **kwargs)
[ "def", "cythonize", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "global", "cythonize", "from", "Cython", ".", "Build", "import", "cythonize", "return", "cythonize", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Dirty hack: only import cythonize at the time you use it. If you don't write a Cython extension, you won't fail even if you don't install Cython.
[ "dirty", "hack", "only", "import", "cythonize", "at", "the", "time", "you", "use", "it", "." ]
1d1bafd9f908b08c7bdb0470c6e54181c928f32f
https://github.com/wdv4758h/python-everywhere/blob/1d1bafd9f908b08c7bdb0470c6e54181c928f32f/setup.py#L4-L13
train
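The `global cythonize` line is the core of the trick: the first call rebinds the name to the real Cython.Build.cythonize, so later calls bypass the wrapper entirely. A sketch of the conditional use this enables in a setup.py (the module name and flag are illustrative, not from this repository):

from setuptools import setup, Extension

USE_CYTHON = True  # hypothetical flag, e.g. derived from an environment check
ext_modules = []
if USE_CYTHON:
    # Cython gets imported only if this branch actually runs
    ext_modules = cythonize([Extension('mypkg.fast', ['mypkg/fast.pyx'])])

setup(name='mypkg', ext_modules=ext_modules)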
nhfruchter/pgh-bustime
pghbustime/interface.py
BustimeAPI.response
def response(self, url): """Grab an API response.""" resp = requests.get(url).content return self.parseresponse(resp)
python
def response(self, url): """Grab an API response.""" resp = requests.get(url).content return self.parseresponse(resp)
[ "def", "response", "(", "self", ",", "url", ")", ":", "resp", "=", "requests", ".", "get", "(", "url", ")", ".", "content", "return", "self", ".", "parseresponse", "(", "resp", ")" ]
Grab an API response.
[ "Grab", "an", "API", "response", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/interface.py#L84-L88
train
nhfruchter/pgh-bustime
pghbustime/interface.py
BustimeAPI.errorhandle
def errorhandle(self, resp): """Parse API error responses and raise appropriate exceptions.""" if self.format == 'json': parsed = xmltodict.parse(resp) errors = parsed[self.RESPONSE_TOKEN][self.ERROR_TOKEN] # Create list of errors if more than one error response is given if type(errors) is list and len(errors) > 1: messages = ", ".join([" ".join(["{}: {}".format(k,v) for k, v in e.items()]) for e in errors]) else: overlimit = any('transaction limit' in msg.lower() for msg in errors.values()) if overlimit: raise APILimitExceeded("This API key has used up its daily quota of calls.") else: messages = " ".join(["{}: {}".format(k,v) for k, v in errors.items()]) elif self.format == 'xml': import xml.etree.ElementTree as ET errors = ET.fromstring(resp).findall(self.ERROR_TOKEN) messages = ", ".join(err.find('msg').text for err in errors) else: raise ValueError("Invalid API response format specified: {}.".format(self.format)) raise BustimeError("API returned: {}".format(messages))
python
def errorhandle(self, resp): """Parse API error responses and raise appropriate exceptions.""" if self.format == 'json': parsed = xmltodict.parse(resp) errors = parsed[self.RESPONSE_TOKEN][self.ERROR_TOKEN] # Create list of errors if more than one error response is given if type(errors) is list and len(errors) > 1: messages = ", ".join([" ".join(["{}: {}".format(k,v) for k, v in e.items()]) for e in errors]) else: overlimit = any('transaction limit' in msg.lower() for msg in errors.values()) if overlimit: raise APILimitExceeded("This API key has used up its daily quota of calls.") else: messages = " ".join(["{}: {}".format(k,v) for k, v in errors.items()]) elif self.format == 'xml': import xml.etree.ElementTree as ET errors = ET.fromstring(resp).findall(self.ERROR_TOKEN) messages = ", ".join(err.find('msg').text for err in errors) else: raise ValueError("Invalid API response format specified: {}.".format(self.format)) raise BustimeError("API returned: {}".format(messages))
[ "def", "errorhandle", "(", "self", ",", "resp", ")", ":", "if", "self", ".", "format", "==", "'json'", ":", "parsed", "=", "xmltodict", ".", "parse", "(", "resp", ")", "errors", "=", "parsed", "[", "self", ".", "RESPONSE_TOKEN", "]", "[", "self", ".", "ERROR_TOKEN", "]", "# Create list of errors if more than one error response is given", "if", "type", "(", "errors", ")", "is", "list", "and", "len", "(", "errors", ")", ">", "1", ":", "messages", "=", "\", \"", ".", "join", "(", "[", "\" \"", ".", "join", "(", "[", "\"{}: {}\"", ".", "format", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "e", ".", "items", "(", ")", "]", ")", "for", "e", "in", "errors", "]", ")", "else", ":", "overlimit", "=", "any", "(", "'transaction limit'", "in", "msg", ".", "lower", "(", ")", "for", "msg", "in", "errors", ".", "values", "(", ")", ")", "if", "overlimit", ":", "raise", "APILimitExceeded", "(", "\"This API key has used up its daily quota of calls.\"", ")", "else", ":", "messages", "=", "\" \"", ".", "join", "(", "[", "\"{}: {}\"", ".", "format", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "errors", ".", "items", "(", ")", "]", ")", "elif", "self", ".", "format", "==", "'xml'", ":", "import", "xml", ".", "etree", ".", "ElementTree", "as", "ET", "errors", "=", "ET", ".", "fromstring", "(", "resp", ")", ".", "findall", "(", "self", ".", "ERROR_TOKEN", ")", "messages", "=", "\", \"", ".", "join", "(", "err", ".", "find", "(", "'msg'", ")", ".", "text", "for", "err", "in", "errors", ")", "else", ":", "raise", "ValueError", "(", "\"Invalid API response format specified: {}.\"", "%", "self", ".", "format", ")", "raise", "BustimeError", "(", "\"API returned: {}\"", ".", "format", "(", "messages", ")", ")" ]
Parse API error responses and raise appropriate exceptions.
[ "Parse", "API", "error", "responses", "and", "raise", "appropriate", "exceptions", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/interface.py#L90-L111
train
nhfruchter/pgh-bustime
pghbustime/interface.py
BustimeAPI.parseresponse
def parseresponse(self, resp): """Parse an API response.""" # Support Python 3's bytes type from socket responses if sys.version_info.major > 2: resp = resp.decode('utf-8') if self.RESPONSE_TOKEN not in resp: raise BustimeError("The Bustime API returned an invalid response: {}".format(resp)) elif self.ERROR_TOKEN in resp: return self.errorhandle(resp) else: if self.format == 'json': return xmltodict.parse(resp)[self.RESPONSE_TOKEN] elif self.format == 'xml': return resp
python
def parseresponse(self, resp): """Parse an API response.""" # Support Python 3's bytes type from socket responses if sys.version_info.major > 2: resp = resp.decode('utf-8') if self.RESPONSE_TOKEN not in resp: raise BustimeError("The Bustime API returned an invalid response: {}".format(resp)) elif self.ERROR_TOKEN in resp: return self.errorhandle(resp) else: if self.format == 'json': return xmltodict.parse(resp)[self.RESPONSE_TOKEN] elif self.format == 'xml': return resp
[ "def", "parseresponse", "(", "self", ",", "resp", ")", ":", "# Support Python 3's bytes type from socket repsonses", "if", "sys", ".", "version_info", ".", "major", ">", "2", ":", "resp", "=", "resp", ".", "decode", "(", "'utf-8'", ")", "if", "self", ".", "RESPONSE_TOKEN", "not", "in", "resp", ":", "raise", "BustimeError", "(", "\"The Bustime API returned an invalid response: {}\"", ".", "format", "(", "resp", ")", ")", "elif", "self", ".", "ERROR_TOKEN", "in", "resp", ":", "return", "self", ".", "errorhandle", "(", "resp", ")", "else", ":", "if", "self", ".", "format", "==", "'json'", ":", "return", "xmltodict", ".", "parse", "(", "resp", ")", "[", "self", ".", "RESPONSE_TOKEN", "]", "elif", "self", ".", "format", "==", "'xml'", ":", "return", "resp" ]
Parse an API response.
[ "Parse", "an", "API", "response", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/interface.py#L113-L127
train
zalando-stups/lizzy-client
lizzy_client/utils.py
get_stack_refs
def get_stack_refs(refs: list): # copy pasted from Senza """ Returns a list of stack references with name and version. """ refs = list(refs) refs.reverse() stack_refs = [] last_stack = None while refs: ref = refs.pop() if last_stack is not None and re.compile(r'v[0-9][a-zA-Z0-9-]*$').match(ref): stack_refs.append(StackReference(last_stack, ref)) else: try: with open(ref) as fd: data = yaml.safe_load(fd) ref = data['SenzaInfo']['StackName'] except (OSError, IOError): # It's still possible that the ref is a regex pass if refs: version = refs.pop() else: version = None stack_refs.append(StackReference(ref, version)) last_stack = ref return stack_refs
python
def get_stack_refs(refs: list): # copy pasted from Senza """ Returns a list of stack references with name and version. """ refs = list(refs) refs.reverse() stack_refs = [] last_stack = None while refs: ref = refs.pop() if last_stack is not None and re.compile(r'v[0-9][a-zA-Z0-9-]*$').match(ref): stack_refs.append(StackReference(last_stack, ref)) else: try: with open(ref) as fd: data = yaml.safe_load(fd) ref = data['SenzaInfo']['StackName'] except (OSError, IOError): # It's still possible that the ref is a regex pass if refs: version = refs.pop() else: version = None stack_refs.append(StackReference(ref, version)) last_stack = ref return stack_refs
[ "def", "get_stack_refs", "(", "refs", ":", "list", ")", ":", "# copy pasted from Senza", "refs", "=", "list", "(", "refs", ")", "refs", ".", "reverse", "(", ")", "stack_refs", "=", "[", "]", "last_stack", "=", "None", "while", "refs", ":", "ref", "=", "refs", ".", "pop", "(", ")", "if", "last_stack", "is", "not", "None", "and", "re", ".", "compile", "(", "r'v[0-9][a-zA-Z0-9-]*$'", ")", ".", "match", "(", "ref", ")", ":", "stack_refs", ".", "append", "(", "StackReference", "(", "last_stack", ",", "ref", ")", ")", "else", ":", "try", ":", "with", "open", "(", "ref", ")", "as", "fd", ":", "data", "=", "yaml", ".", "safe_load", "(", "fd", ")", "ref", "=", "data", "[", "'SenzaInfo'", "]", "[", "'StackName'", "]", "except", "(", "OSError", ",", "IOError", ")", ":", "# It's still possible that the ref is a regex", "pass", "if", "refs", ":", "version", "=", "refs", ".", "pop", "(", ")", "else", ":", "version", "=", "None", "stack_refs", ".", "append", "(", "StackReference", "(", "ref", ",", "version", ")", ")", "last_stack", "=", "ref", "return", "stack_refs" ]
Returns a list of stack references with name and version.
[ "Returns", "a", "list", "of", "stack", "references", "with", "name", "and", "version", "." ]
0af9733ca5a25ebd0a9dc1453f2a7592efcee56a
https://github.com/zalando-stups/lizzy-client/blob/0af9733ca5a25ebd0a9dc1453f2a7592efcee56a/lizzy_client/utils.py#L35-L62
train
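Tracing the loop above with illustrative arguments (the stack names are hypothetical): a bare name yields a reference with version None, while trailing tokens matching v[0-9]... reuse the last stack name.

get_stack_refs(['hello-world'])
# -> [StackReference('hello-world', None)]
get_stack_refs(['hello-world', 'v1', 'v2'])
# -> [StackReference('hello-world', 'v1'), StackReference('hello-world', 'v2')]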
rhayes777/PyAutoFit
autofit/mapper/model_mapper.py
ModelMapper.instance_for_arguments
def instance_for_arguments(self, arguments): """ Creates a ModelInstance, which has an attribute and class instance corresponding to every PriorModel attributed to this instance. Parameters ---------- arguments : dict The dictionary representation of prior and parameter values. This is created in the model_instance_from_* routines. Returns ------- model_instance : autofit.mapper.model.ModelInstance An object containing reconstructed model_mapper instances """ model_instance = ModelInstance() for prior_model_tuple in self.prior_model_tuples: setattr(model_instance, prior_model_tuple.name, prior_model_tuple.prior_model.instance_for_arguments(arguments)) return model_instance
python
def instance_for_arguments(self, arguments): """ Creates a ModelInstance, which has an attribute and class instance corresponding to every PriorModel attributed to this instance. Parameters ---------- arguments : dict The dictionary representation of prior and parameter values. This is created in the model_instance_from_* routines. Returns ------- model_instance : autofit.mapper.model.ModelInstance An object containing reconstructed model_mapper instances """ model_instance = ModelInstance() for prior_model_tuple in self.prior_model_tuples: setattr(model_instance, prior_model_tuple.name, prior_model_tuple.prior_model.instance_for_arguments(arguments)) return model_instance
[ "def", "instance_for_arguments", "(", "self", ",", "arguments", ")", ":", "model_instance", "=", "ModelInstance", "(", ")", "for", "prior_model_tuple", "in", "self", ".", "prior_model_tuples", ":", "setattr", "(", "model_instance", ",", "prior_model_tuple", ".", "name", ",", "prior_model_tuple", ".", "prior_model", ".", "instance_for_arguments", "(", "arguments", ")", ")", "return", "model_instance" ]
Creates a ModelInstance, which has an attribute and class instance corresponding to every PriorModel attributed to this instance. Parameters ---------- arguments : dict The dictionary representation of prior and parameter values. This is created in the model_instance_from_* routines. Returns ------- model_instance : autofit.mapper.model.ModelInstance An object containing reconstructed model_mapper instances
[ "Creates", "a", "ModelInstance", "which", "has", "an", "attribute", "and", "class", "instance", "corresponding", "to", "every", "PriorModel", "attributed", "to", "this", "instance", "." ]
a9e6144abb08edfc6a6906c4030d7119bf8d3e14
https://github.com/rhayes777/PyAutoFit/blob/a9e6144abb08edfc6a6906c4030d7119bf8d3e14/autofit/mapper/model_mapper.py#L378-L402
train
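A sketch of the calling convention implied by the docstring; `mapper` is a hypothetical ModelMapper, and the only assumption beyond the code above is that its priors are reachable via mapper.priors (the attribute used by mapper_from_partial_prior_arguments below):

arguments = {prior: 0.5 for prior in mapper.priors}  # Prior -> concrete value
instance = mapper.instance_for_arguments(arguments)
# each registered PriorModel name is now an attribute of `instance`,
# holding a constructed object of the wrapped class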
rhayes777/PyAutoFit
autofit/mapper/model_mapper.py
ModelMapper.mapper_from_partial_prior_arguments
def mapper_from_partial_prior_arguments(self, arguments): """ Creates a new model mapper from a dictionary mapping existing priors to new priors, keeping existing priors where no mapping is provided. Parameters ---------- arguments: {Prior: Prior} A dictionary mapping priors to priors Returns ------- model_mapper: ModelMapper A new model mapper with updated priors. """ original_prior_dict = {prior: prior for prior in self.priors} return self.mapper_from_prior_arguments({**original_prior_dict, **arguments})
python
def mapper_from_partial_prior_arguments(self, arguments): """ Creates a new model mapper from a dictionary mapping existing priors to new priors, keeping existing priors where no mapping is provided. Parameters ---------- arguments: {Prior: Prior} A dictionary mapping priors to priors Returns ------- model_mapper: ModelMapper A new model mapper with updated priors. """ original_prior_dict = {prior: prior for prior in self.priors} return self.mapper_from_prior_arguments({**original_prior_dict, **arguments})
[ "def", "mapper_from_partial_prior_arguments", "(", "self", ",", "arguments", ")", ":", "original_prior_dict", "=", "{", "prior", ":", "prior", "for", "prior", "in", "self", ".", "priors", "}", "return", "self", ".", "mapper_from_prior_arguments", "(", "{", "*", "*", "original_prior_dict", ",", "*", "*", "arguments", "}", ")" ]
Creates a new model mapper from a dictionary mapping existing priors to new priors, keeping existing priors where no mapping is provided. Parameters ---------- arguments: {Prior: Prior} A dictionary mapping priors to priors Returns ------- model_mapper: ModelMapper A new model mapper with updated priors.
[ "Creates", "a", "new", "model", "mapper", "from", "a", "dictionary", "mapping_matrix", "existing", "priors", "to", "new", "priors", "keeping", "existing", "priors", "where", "no", "mapping", "is", "provided", "." ]
a9e6144abb08edfc6a6906c4030d7119bf8d3e14
https://github.com/rhayes777/PyAutoFit/blob/a9e6144abb08edfc6a6906c4030d7119bf8d3e14/autofit/mapper/model_mapper.py#L404-L420
train
rhayes777/PyAutoFit
autofit/mapper/model_mapper.py
ModelMapper.mapper_from_prior_arguments
def mapper_from_prior_arguments(self, arguments): """ Creates a new model mapper from a dictionary mapping existing priors to new priors. Parameters ---------- arguments: {Prior: Prior} A dictionary mapping priors to priors Returns ------- model_mapper: ModelMapper A new model mapper with updated priors. """ mapper = copy.deepcopy(self) for prior_model_tuple in self.prior_model_tuples: setattr(mapper, prior_model_tuple.name, prior_model_tuple.prior_model.gaussian_prior_model_for_arguments(arguments)) return mapper
python
def mapper_from_prior_arguments(self, arguments): """ Creates a new model mapper from a dictionary mapping existing priors to new priors. Parameters ---------- arguments: {Prior: Prior} A dictionary mapping priors to priors Returns ------- model_mapper: ModelMapper A new model mapper with updated priors. """ mapper = copy.deepcopy(self) for prior_model_tuple in self.prior_model_tuples: setattr(mapper, prior_model_tuple.name, prior_model_tuple.prior_model.gaussian_prior_model_for_arguments(arguments)) return mapper
[ "def", "mapper_from_prior_arguments", "(", "self", ",", "arguments", ")", ":", "mapper", "=", "copy", ".", "deepcopy", "(", "self", ")", "for", "prior_model_tuple", "in", "self", ".", "prior_model_tuples", ":", "setattr", "(", "mapper", ",", "prior_model_tuple", ".", "name", ",", "prior_model_tuple", ".", "prior_model", ".", "gaussian_prior_model_for_arguments", "(", "arguments", ")", ")", "return", "mapper" ]
Creates a new model mapper from a dictionary mapping existing priors to new priors. Parameters ---------- arguments: {Prior: Prior} A dictionary mapping priors to priors Returns ------- model_mapper: ModelMapper A new model mapper with updated priors.
[ "Creates", "a", "new", "model", "mapper", "from", "a", "dictionary", "mapping_matrix", "existing", "priors", "to", "new", "priors", "." ]
a9e6144abb08edfc6a6906c4030d7119bf8d3e14
https://github.com/rhayes777/PyAutoFit/blob/a9e6144abb08edfc6a6906c4030d7119bf8d3e14/autofit/mapper/model_mapper.py#L422-L442
train
rhayes777/PyAutoFit
autofit/mapper/model_mapper.py
ModelMapper.mapper_from_gaussian_tuples
def mapper_from_gaussian_tuples(self, tuples, a=None, r=None): """ Creates a new model mapper from a list of floats describing the mean values of gaussian priors. The widths \ of the new priors are taken from the width_config. The new gaussian priors must be provided in the same \ order as the priors associated with model. If a is not None then all priors are created with an absolute width of a. If r is not None then all priors are created with a relative width of r. Parameters ---------- r The relative width to be assigned to gaussian priors a The absolute width to be assigned to gaussian priors tuples A list of tuples each containing the mean and width of a prior Returns ------- mapper: ModelMapper A new model mapper with all priors replaced by gaussian priors. """ prior_tuples = self.prior_tuples_ordered_by_id prior_class_dict = self.prior_class_dict arguments = {} for i, prior_tuple in enumerate(prior_tuples): prior = prior_tuple.prior cls = prior_class_dict[prior] mean = tuples[i][0] if a is not None and r is not None: raise exc.PriorException("Width of new priors cannot be both relative and absolute.") if a is not None: width_type = "a" value = a elif r is not None: width_type = "r" value = r else: width_type, value = conf.instance.prior_width.get_for_nearest_ancestor(cls, prior_tuple.name) if width_type == "r": width = value * mean elif width_type == "a": width = value else: raise exc.PriorException("Prior widths must be relative 'r' or absolute 'a' e.g. a, 1.0") if isinstance(prior, GaussianPrior): limits = (prior.lower_limit, prior.upper_limit) else: limits = conf.instance.prior_limit.get_for_nearest_ancestor(cls, prior_tuple.name) arguments[prior] = GaussianPrior(mean, max(tuples[i][1], width), *limits) return self.mapper_from_prior_arguments(arguments)
python
def mapper_from_gaussian_tuples(self, tuples, a=None, r=None): """ Creates a new model mapper from a list of floats describing the mean values of gaussian priors. The widths \ of the new priors are taken from the width_config. The new gaussian priors must be provided in the same \ order as the priors associated with model. If a is not None then all priors are created with an absolute width of a. If r is not None then all priors are created with a relative width of r. Parameters ---------- r The relative width to be assigned to gaussian priors a The absolute width to be assigned to gaussian priors tuples A list of tuples each containing the mean and width of a prior Returns ------- mapper: ModelMapper A new model mapper with all priors replaced by gaussian priors. """ prior_tuples = self.prior_tuples_ordered_by_id prior_class_dict = self.prior_class_dict arguments = {} for i, prior_tuple in enumerate(prior_tuples): prior = prior_tuple.prior cls = prior_class_dict[prior] mean = tuples[i][0] if a is not None and r is not None: raise exc.PriorException("Width of new priors cannot be both relative and absolute.") if a is not None: width_type = "a" value = a elif r is not None: width_type = "r" value = r else: width_type, value = conf.instance.prior_width.get_for_nearest_ancestor(cls, prior_tuple.name) if width_type == "r": width = value * mean elif width_type == "a": width = value else: raise exc.PriorException("Prior widths must be relative 'r' or absolute 'a' e.g. a, 1.0") if isinstance(prior, GaussianPrior): limits = (prior.lower_limit, prior.upper_limit) else: limits = conf.instance.prior_limit.get_for_nearest_ancestor(cls, prior_tuple.name) arguments[prior] = GaussianPrior(mean, max(tuples[i][1], width), *limits) return self.mapper_from_prior_arguments(arguments)
[ "def", "mapper_from_gaussian_tuples", "(", "self", ",", "tuples", ",", "a", "=", "None", ",", "r", "=", "None", ")", ":", "prior_tuples", "=", "self", ".", "prior_tuples_ordered_by_id", "prior_class_dict", "=", "self", ".", "prior_class_dict", "arguments", "=", "{", "}", "for", "i", ",", "prior_tuple", "in", "enumerate", "(", "prior_tuples", ")", ":", "prior", "=", "prior_tuple", ".", "prior", "cls", "=", "prior_class_dict", "[", "prior", "]", "mean", "=", "tuples", "[", "i", "]", "[", "0", "]", "if", "a", "is", "not", "None", "and", "r", "is", "not", "None", ":", "raise", "exc", ".", "PriorException", "(", "\"Width of new priors cannot be both relative and absolute.\"", ")", "if", "a", "is", "not", "None", ":", "width_type", "=", "\"a\"", "value", "=", "a", "elif", "r", "is", "not", "None", ":", "width_type", "=", "\"r\"", "value", "=", "r", "else", ":", "width_type", ",", "value", "=", "conf", ".", "instance", ".", "prior_width", ".", "get_for_nearest_ancestor", "(", "cls", ",", "prior_tuple", ".", "name", ")", "if", "width_type", "==", "\"r\"", ":", "width", "=", "value", "*", "mean", "elif", "width_type", "==", "\"a\"", ":", "width", "=", "value", "else", ":", "raise", "exc", ".", "PriorException", "(", "\"Prior widths must be relative 'r' or absolute 'a' e.g. a, 1.0\"", ")", "if", "isinstance", "(", "prior", ",", "GaussianPrior", ")", ":", "limits", "=", "(", "prior", ".", "lower_limit", ",", "prior", ".", "upper_limit", ")", "else", ":", "limits", "=", "conf", ".", "instance", ".", "prior_limit", ".", "get_for_nearest_ancestor", "(", "cls", ",", "prior_tuple", ".", "name", ")", "arguments", "[", "prior", "]", "=", "GaussianPrior", "(", "mean", ",", "max", "(", "tuples", "[", "i", "]", "[", "1", "]", ",", "width", ")", ",", "*", "limits", ")", "return", "self", ".", "mapper_from_prior_arguments", "(", "arguments", ")" ]
Creates a new model mapper from a list of floats describing the mean values of gaussian priors. The widths \ of the new priors are taken from the width_config. The new gaussian priors must be provided in the same \ order as the priors associated with model. If a is not None then all priors are created with an absolute width of a. If r is not None then all priors are created with a relative width of r. Parameters ---------- r The relative width to be assigned to gaussian priors a The absolute width to be assigned to gaussian priors tuples A list of tuples each containing the mean and width of a prior Returns ------- mapper: ModelMapper A new model mapper with all priors replaced by gaussian priors.
[ "Creates", "a", "new", "model", "mapper", "from", "a", "list", "of", "floats", "describing", "the", "mean", "values", "of", "gaussian", "priors", ".", "The", "widths", "\\", "of", "the", "new", "priors", "are", "taken", "from", "the", "width_config", ".", "The", "new", "gaussian", "priors", "must", "be", "provided", "in", "the", "same", "\\", "order", "as", "the", "priors", "associated", "with", "model", "." ]
a9e6144abb08edfc6a6906c4030d7119bf8d3e14
https://github.com/rhayes777/PyAutoFit/blob/a9e6144abb08edfc6a6906c4030d7119bf8d3e14/autofit/mapper/model_mapper.py#L444-L499
train
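One worked case of the width logic above (the single-prior mapper is hypothetical): with r given, the floor is relative to the new mean, and the width estimated in `tuples` wins only if it is larger.

new_mapper = mapper.mapper_from_gaussian_tuples([(2.0, 0.1)], r=0.5)
# floor = r * mean = 0.5 * 2.0 = 1.0, so sigma = max(0.1, 1.0) = 1.0
# passing both a=... and r=... raises exc.PriorException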
rhayes777/PyAutoFit
autofit/mapper/model_mapper.py
ModelMapper.info
def info(self): """ Use the priors that make up the model_mapper to generate information on each parameter of the overall model. This information is extracted from each prior's *model_info* property. """ info = [] for prior_model_name, prior_model in self.prior_model_tuples: info.append(prior_model.name + '\n') info.extend([f"{prior_model_name}_{item}" for item in prior_model.info]) return '\n'.join(info)
python
def info(self): """ Use the priors that make up the model_mapper to generate information on each parameter of the overall model. This information is extracted from each prior's *model_info* property. """ info = [] for prior_model_name, prior_model in self.prior_model_tuples: info.append(prior_model.name + '\n') info.extend([f"{prior_model_name}_{item}" for item in prior_model.info]) return '\n'.join(info)
[ "def", "info", "(", "self", ")", ":", "info", "=", "[", "]", "for", "prior_model_name", ",", "prior_model", "in", "self", ".", "prior_model_tuples", ":", "info", ".", "append", "(", "prior_model", ".", "name", "+", "'\\n'", ")", "info", ".", "extend", "(", "[", "f\"{prior_model_name}_{item}\"", "for", "item", "in", "prior_model", ".", "info", "]", ")", "return", "'\\n'", ".", "join", "(", "info", ")" ]
Use the priors that make up the model_mapper to generate information on each parameter of the overall model. This information is extracted from each prior's *model_info* property.
[ "Use", "the", "priors", "that", "make", "up", "the", "model_mapper", "to", "generate", "information", "on", "each", "parameter", "of", "the", "overall", "model", "." ]
a9e6144abb08edfc6a6906c4030d7119bf8d3e14
https://github.com/rhayes777/PyAutoFit/blob/a9e6144abb08edfc6a6906c4030d7119bf8d3e14/autofit/mapper/model_mapper.py#L520-L532
train
peterbe/gg
gg/builtins/push/gg_push.py
push
def push(config, force=False): """Push the current branch.""" repo = config.repo active_branch = repo.active_branch if active_branch.name == "master": error_out( "Can't commit when on the master branch. " "You really ought to do work in branches." ) state = read(config.configfile) if not state.get("FORK_NAME"): info_out("Can't help you push the commit. Please run: gg config --help") return 0 try: repo.remotes[state["FORK_NAME"]] except IndexError: error_out("There is no remote called '{}'".format(state["FORK_NAME"])) destination = repo.remotes[state["FORK_NAME"]] if force: pushed, = destination.push(force=True) info_out(pushed.summary) else: pushed, = destination.push() # Was it rejected? if ( pushed.flags & git.remote.PushInfo.REJECTED or pushed.flags & git.remote.PushInfo.REMOTE_REJECTED ): error_out('The push was rejected ("{}")'.format(pushed.summary), False) try_force_push = input("Try to force push? [Y/n] ").lower().strip() if try_force_push not in ("no", "n"): pushed, = destination.push(force=True) info_out(pushed.summary) else: return 0
python
def push(config, force=False): """Push the current branch.""" repo = config.repo active_branch = repo.active_branch if active_branch.name == "master": error_out( "Can't commit when on the master branch. " "You really ought to do work in branches." ) state = read(config.configfile) if not state.get("FORK_NAME"): info_out("Can't help you push the commit. Please run: gg config --help") return 0 try: repo.remotes[state["FORK_NAME"]] except IndexError: error_out("There is no remote called '{}'".format(state["FORK_NAME"])) destination = repo.remotes[state["FORK_NAME"]] if force: pushed, = destination.push(force=True) info_out(pushed.summary) else: pushed, = destination.push() # Was it rejected? if ( pushed.flags & git.remote.PushInfo.REJECTED or pushed.flags & git.remote.PushInfo.REMOTE_REJECTED ): error_out('The push was rejected ("{}")'.format(pushed.summary), False) try_force_push = input("Try to force push? [Y/n] ").lower().strip() if try_force_push not in ("no", "n"): pushed, = destination.push(force=True) info_out(pushed.summary) else: return 0
[ "def", "push", "(", "config", ",", "force", "=", "False", ")", ":", "repo", "=", "config", ".", "repo", "active_branch", "=", "repo", ".", "active_branch", "if", "active_branch", ".", "name", "==", "\"master\"", ":", "error_out", "(", "\"Can't commit when on the master branch. \"", "\"You really ought to do work in branches.\"", ")", "state", "=", "read", "(", "config", ".", "configfile", ")", "if", "not", "state", ".", "get", "(", "\"FORK_NAME\"", ")", ":", "info_out", "(", "\"Can't help you push the commit. Please run: gg config --help\"", ")", "return", "0", "try", ":", "repo", ".", "remotes", "[", "state", "[", "\"FORK_NAME\"", "]", "]", "except", "IndexError", ":", "error_out", "(", "\"There is no remote called '{}'\"", ".", "format", "(", "state", "[", "\"FORK_NAME\"", "]", ")", ")", "destination", "=", "repo", ".", "remotes", "[", "state", "[", "\"FORK_NAME\"", "]", "]", "if", "force", ":", "pushed", ",", "=", "destination", ".", "push", "(", "force", "=", "True", ")", "info_out", "(", "pushed", ".", "summary", ")", "else", ":", "pushed", ",", "=", "destination", ".", "push", "(", ")", "# Was it rejected?", "if", "(", "pushed", ".", "flags", "&", "git", ".", "remote", ".", "PushInfo", ".", "REJECTED", "or", "pushed", ".", "flags", "&", "git", ".", "remote", ".", "PushInfo", ".", "REMOTE_REJECTED", ")", ":", "error_out", "(", "'The push was rejected (\"{}\")'", ".", "format", "(", "pushed", ".", "summary", ")", ",", "False", ")", "try_force_push", "=", "input", "(", "\"Try to force push? [Y/n] \"", ")", ".", "lower", "(", ")", ".", "strip", "(", ")", "if", "try_force_push", "not", "in", "(", "\"no\"", ",", "\"n\"", ")", ":", "pushed", ",", "=", "destination", ".", "push", "(", "force", "=", "True", ")", "info_out", "(", "pushed", ".", "summary", ")", "else", ":", "return", "0" ]
Push the current branch.
[ "Create", "push", "the", "current", "branch", "." ]
2aace5bdb4a9b1cb65bea717784edf54c63b7bad
https://github.com/peterbe/gg/blob/2aace5bdb4a9b1cb65bea717784edf54c63b7bad/gg/builtins/push/gg_push.py#L12-L52
train
tslight/treepick
treepick/__main__.py
chkpath
def chkpath(path): """ Checks if a path exists. """ if os.path.exists(path): return path else: msg = "{0} does not exist.".format(path) raise argparse.ArgumentTypeError(msg)
python
def chkpath(path): """ Checks if a path exists. """ if os.path.exists(path): return path else: msg = "{0} does not exist.".format(path) raise argparse.ArgumentTypeError(msg)
[ "def", "chkpath", "(", "path", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "return", "path", "else", ":", "msg", "=", "\"{0} does not exist.\"", ".", "format", "(", "path", ")", "raise", "argparse", ".", "ArgumentTypeError", "(", "msg", ")" ]
Checks if a path exists.
[ "Checks", "if", "a", "path", "exists", "." ]
7adf838900f11e8845e17d8c79bb2b23617aec2c
https://github.com/tslight/treepick/blob/7adf838900f11e8845e17d8c79bb2b23617aec2c/treepick/__main__.py#L14-L22
train
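The ArgumentTypeError makes this usable directly as an argparse type callback, which turns the exception into a clean usage error (the parser below is illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('path', type=chkpath)
args = parser.parse_args(['/tmp'])  # passes if /tmp exists
# parser.parse_args(['/no/such/dir']) exits with
# "error: argument path: /no/such/dir does not exist."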
geophysics-ubonn/crtomo_tools
src/volt_correct_temperature.py
readin_volt
def readin_volt(filename): """Read in measurement data from a volt.dat file and return electrodes and measured resistance. """ with open(filename, 'r') as fid: content = np.loadtxt(fid, skiprows=1, usecols=[0, 1, 2]) volt = content[:, 2] elecs = content[:, 0:2] return elecs, volt
python
def readin_volt(filename): """Read in measurement data from a volt.dat file and return electrodes and measured resistance. """ with open(filename, 'r') as fid: content = np.loadtxt(fid, skiprows=1, usecols=[0, 1, 2]) volt = content[:, 2] elecs = content[:, 0:2] return elecs, volt
[ "def", "readin_volt", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "fid", ":", "content", "=", "np", ".", "loadtxt", "(", "fid", ",", "skiprows", "=", "1", ",", "usecols", "=", "[", "0", ",", "1", ",", "2", "]", ")", "volt", "=", "content", "[", ":", ",", "2", "]", "elecs", "=", "content", "[", ":", ",", "0", ":", "2", "]", "return", "elecs", ",", "volt" ]
Read in measurement data from a volt.dat file and return electrodes and measured resistance.
[ "Read", "in", "measurement", "data", "from", "a", "volt", ".", "dat", "file", "and", "return", "electrodes", "and", "measured", "resistance", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/volt_correct_temperature.py#L56-L64
train
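The loadtxt call (skiprows=1, usecols=[0, 1, 2]) implies this file layout: a header line holding the row count, then at least three columns A B R. An illustrative file and call:

# volt.dat (values illustrative):
# 2
# 1 2 0.123
# 2 3 0.456
elecs, volt = readin_volt('volt.dat')
# elecs -> array([[1., 2.], [2., 3.]]); volt -> array([0.123, 0.456])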
geophysics-ubonn/crtomo_tools
src/volt_correct_temperature.py
save_volt
def save_volt(elecs, volt, filename): """Save the values in volt-format. """ # bring data in shape content = np.column_stack((elecs, volt, np.zeros(len(volt)))) # save datapoints with open(filename, 'w') as fid: fid.write('{0}\n'.format(content.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(content), fmt='%i %i %f %f')
python
def save_volt(elecs, volt, filename): """Save the values in volt-format. """ # bring data in shape content = np.column_stack((elecs, volt, np.zeros(len(volt)))) # save datapoints with open(filename, 'w') as fid: fid.write('{0}\n'.format(content.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(content), fmt='%i %i %f %f')
[ "def", "save_volt", "(", "elecs", ",", "volt", ",", "filename", ")", ":", "# bring data in shape", "content", "=", "np", ".", "column_stack", "(", "(", "elecs", ",", "volt", ",", "np", ".", "zeros", "(", "len", "(", "volt", ")", ")", ")", ")", "# save datapoints", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fid", ":", "fid", ".", "write", "(", "'{0}\\n'", ".", "format", "(", "content", ".", "shape", "[", "0", "]", ")", ")", "with", "open", "(", "filename", ",", "'ab'", ")", "as", "fid", ":", "np", ".", "savetxt", "(", "fid", ",", "np", ".", "array", "(", "content", ")", ",", "fmt", "=", "'%i %i %f %f'", ")" ]
Save the values in volt-format.
[ "Save", "the", "values", "in", "volt", "-", "format", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/volt_correct_temperature.py#L90-L100
train
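This writer round-trips with readin_volt above: a count header, then 'A B R 0' rows via fmt='%i %i %f %f'. For example:

import numpy as np

elecs = np.array([[1, 2], [2, 3]])
volt = np.array([0.123, 0.456])
save_volt(elecs, volt, 'volt_corrected.dat')
# volt_corrected.dat now reads:
# 2
# 1 2 0.123000 0.000000
# 2 3 0.456000 0.000000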
geophysics-ubonn/crtomo_tools
src/volt_correct_temperature.py
main
def main(): """Function to remove temperature effect from field data """ options = handle_options() # read in observed and synthetic data elecs, d_obs = readin_volt(options.d_obs) elecs, d_est = readin_volt(options.d_est) elecs, d_estTC = readin_volt(options.d_estTC) # calculate corrected data volt_corr = calc_correction(d_obs, d_est, d_estTC, ) # save data save_volt(elecs, volt_corr, options.output, )
python
def main(): """Function to remove temperature effect from field data """ options = handle_options() # read in observed and synthetic data elecs, d_obs = readin_volt(options.d_obs) elecs, d_est = readin_volt(options.d_est) elecs, d_estTC = readin_volt(options.d_estTC) # calculate corrected data volt_corr = calc_correction(d_obs, d_est, d_estTC, ) # save data save_volt(elecs, volt_corr, options.output, )
[ "def", "main", "(", ")", ":", "options", "=", "handle_options", "(", ")", "# read in observed and synthetic data", "elecs", ",", "d_obs", "=", "readin_volt", "(", "options", ".", "d_obs", ")", "elecs", ",", "d_est", "=", "readin_volt", "(", "options", ".", "d_est", ")", "elecs", ",", "d_estTC", "=", "readin_volt", "(", "options", ".", "d_estTC", ")", "# calculate corrected data", "volt_corr", "=", "calc_correction", "(", "d_obs", ",", "d_est", ",", "d_estTC", ",", ")", "# save data", "save_volt", "(", "elecs", ",", "volt_corr", ",", "options", ".", "output", ",", ")" ]
Function to remove temperature effect from field data
[ "Function", "to", "remove", "temperature", "effect", "from", "field", "data" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/volt_correct_temperature.py#L103-L121
train
Maples7/dict-recursive-update
dict_recursive_update/__init__.py
recursive_update
def recursive_update(default, custom): '''Return a dict merged from default and custom >>> recursive_update('a', 'b') Traceback (most recent call last): ... TypeError: Params of recursive_update should be dicts >>> recursive_update({'a': [1]}, {'a': [2], 'c': {'d': {'c': 3}}}) {'a': [2], 'c': {'d': {'c': 3}}} >>> recursive_update({'a': {'c': 1, 'd': {}}, 'b': 4}, {'b': 5}) {'a': {'c': 1, 'd': {}}, 'b': 5} >>> recursive_update({'a': {'c': 1, 'd': {}}, 'b': 4}, {'a': 2}) {'a': 2, 'b': 4} ''' if not isinstance(default, dict) or not isinstance(custom, dict): raise TypeError('Params of recursive_update should be dicts') for key in custom: if isinstance(custom[key], dict) and isinstance( default.get(key), dict): default[key] = recursive_update(default[key], custom[key]) else: default[key] = custom[key] return default
python
def recursive_update(default, custom): '''Return a dict merged from default and custom >>> recursive_update('a', 'b') Traceback (most recent call last): ... TypeError: Params of recursive_update should be dicts >>> recursive_update({'a': [1]}, {'a': [2], 'c': {'d': {'c': 3}}}) {'a': [2], 'c': {'d': {'c': 3}}} >>> recursive_update({'a': {'c': 1, 'd': {}}, 'b': 4}, {'b': 5}) {'a': {'c': 1, 'd': {}}, 'b': 5} >>> recursive_update({'a': {'c': 1, 'd': {}}, 'b': 4}, {'a': 2}) {'a': 2, 'b': 4} ''' if not isinstance(default, dict) or not isinstance(custom, dict): raise TypeError('Params of recursive_update should be dicts') for key in custom: if isinstance(custom[key], dict) and isinstance( default.get(key), dict): default[key] = recursive_update(default[key], custom[key]) else: default[key] = custom[key] return default
[ "def", "recursive_update", "(", "default", ",", "custom", ")", ":", "if", "not", "isinstance", "(", "default", ",", "dict", ")", "or", "not", "isinstance", "(", "custom", ",", "dict", ")", ":", "raise", "TypeError", "(", "'Params of recursive_update should be dicts'", ")", "for", "key", "in", "custom", ":", "if", "isinstance", "(", "custom", "[", "key", "]", ",", "dict", ")", "and", "isinstance", "(", "default", ".", "get", "(", "key", ")", ",", "dict", ")", ":", "default", "[", "key", "]", "=", "recursive_update", "(", "default", "[", "key", "]", ",", "custom", "[", "key", "]", ")", "else", ":", "default", "[", "key", "]", "=", "custom", "[", "key", "]", "return", "default" ]
Return a dict merged from default and custom >>> recursive_update('a', 'b') Traceback (most recent call last): ... TypeError: Params of recursive_update should be dicts >>> recursive_update({'a': [1]}, {'a': [2], 'c': {'d': {'c': 3}}}) {'a': [2], 'c': {'d': {'c': 3}}} >>> recursive_update({'a': {'c': 1, 'd': {}}, 'b': 4}, {'b': 5}) {'a': {'c': 1, 'd': {}}, 'b': 5} >>> recursive_update({'a': {'c': 1, 'd': {}}, 'b': 4}, {'a': 2}) {'a': 2, 'b': 4}
[ "Return", "a", "dict", "merged", "from", "default", "and", "custom" ]
07204cdab891ac4123b19fe3fa148c3dd1c93992
https://github.com/Maples7/dict-recursive-update/blob/07204cdab891ac4123b19fe3fa148c3dd1c93992/dict_recursive_update/__init__.py#L11-L38
train
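A design note beyond the doctests: the function mutates `default` in place and returns it, so pass a copy when the original dict must survive.

import copy

base = {'a': {'b': 1}}
merged = recursive_update(copy.deepcopy(base), {'a': {'c': 2}})
# merged == {'a': {'b': 1, 'c': 2}} and base is untouched;
# recursive_update(base, ...) would have modified base itself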
peterbe/gg
gg/builtins/cleanup/gg_cleanup.py
cleanup
def cleanup(config, searchstring, force=False): """Deletes a found branch locally and remotely.""" repo = config.repo branches_ = list(find(repo, searchstring)) if not branches_: error_out("No branches found") elif len(branches_) > 1: error_out( "More than one branch found.{}".format( "\n\t".join([""] + [x.name for x in branches_]) ) ) assert len(branches_) == 1 branch_name = branches_[0].name active_branch = repo.active_branch if branch_name == active_branch.name: error_out("Can't clean up the current active branch.") # branch_name = active_branch.name upstream_remote = None fork_remote = None state = read(config.configfile) origin_name = state.get("ORIGIN_NAME", "origin") for remote in repo.remotes: if remote.name == origin_name: # remote.pull() upstream_remote = remote break if not upstream_remote: error_out("No remote called {!r} found".format(origin_name)) # Check out master repo.heads.master.checkout() upstream_remote.pull(repo.heads.master) # Is this one of the merged branches?! # XXX I don't know how to do this "natively" with GitPython. merged_branches = [ x.strip() for x in repo.git.branch("--merged").splitlines() if x.strip() and not x.strip().startswith("*") ] was_merged = branch_name in merged_branches certain = was_merged or force if not certain: # Need to ask the user. # XXX This is where we could get smart and compare this branch # with the master. certain = ( input("Are you certain {} is actually merged? [Y/n] ".format(branch_name)) .lower() .strip() != "n" ) if not certain: return 1 if was_merged: repo.git.branch("-d", branch_name) else: repo.git.branch("-D", branch_name) fork_remote = None state = read(config.configfile) for remote in repo.remotes: if remote.name == state.get("FORK_NAME"): fork_remote = remote break if fork_remote: fork_remote.push(":" + branch_name) info_out("Remote branch on fork deleted too.")
python
def cleanup(config, searchstring, force=False): """Deletes a found branch locally and remotely.""" repo = config.repo branches_ = list(find(repo, searchstring)) if not branches_: error_out("No branches found") elif len(branches_) > 1: error_out( "More than one branch found.{}".format( "\n\t".join([""] + [x.name for x in branches_]) ) ) assert len(branches_) == 1 branch_name = branches_[0].name active_branch = repo.active_branch if branch_name == active_branch.name: error_out("Can't clean up the current active branch.") # branch_name = active_branch.name upstream_remote = None fork_remote = None state = read(config.configfile) origin_name = state.get("ORIGIN_NAME", "origin") for remote in repo.remotes: if remote.name == origin_name: # remote.pull() upstream_remote = remote break if not upstream_remote: error_out("No remote called {!r} found".format(origin_name)) # Check out master repo.heads.master.checkout() upstream_remote.pull(repo.heads.master) # Is this one of the merged branches?! # XXX I don't know how to do this "natively" with GitPython. merged_branches = [ x.strip() for x in repo.git.branch("--merged").splitlines() if x.strip() and not x.strip().startswith("*") ] was_merged = branch_name in merged_branches certain = was_merged or force if not certain: # Need to ask the user. # XXX This is where we could get smart and compare this branch # with the master. certain = ( input("Are you certain {} is actually merged? [Y/n] ".format(branch_name)) .lower() .strip() != "n" ) if not certain: return 1 if was_merged: repo.git.branch("-d", branch_name) else: repo.git.branch("-D", branch_name) fork_remote = None state = read(config.configfile) for remote in repo.remotes: if remote.name == state.get("FORK_NAME"): fork_remote = remote break if fork_remote: fork_remote.push(":" + branch_name) info_out("Remote branch on fork deleted too.")
[ "def", "cleanup", "(", "config", ",", "searchstring", ",", "force", "=", "False", ")", ":", "repo", "=", "config", ".", "repo", "branches_", "=", "list", "(", "find", "(", "repo", ",", "searchstring", ")", ")", "if", "not", "branches_", ":", "error_out", "(", "\"No branches found\"", ")", "elif", "len", "(", "branches_", ")", ">", "1", ":", "error_out", "(", "\"More than one branch found.{}\"", ".", "format", "(", "\"\\n\\t\"", ".", "join", "(", "[", "\"\"", "]", "+", "[", "x", ".", "name", "for", "x", "in", "branches_", "]", ")", ")", ")", "assert", "len", "(", "branches_", ")", "==", "1", "branch_name", "=", "branches_", "[", "0", "]", ".", "name", "active_branch", "=", "repo", ".", "active_branch", "if", "branch_name", "==", "active_branch", ".", "name", ":", "error_out", "(", "\"Can't clean up the current active branch.\"", ")", "# branch_name = active_branch.name", "upstream_remote", "=", "None", "fork_remote", "=", "None", "state", "=", "read", "(", "config", ".", "configfile", ")", "origin_name", "=", "state", ".", "get", "(", "\"ORIGIN_NAME\"", ",", "\"origin\"", ")", "for", "remote", "in", "repo", ".", "remotes", ":", "if", "remote", ".", "name", "==", "origin_name", ":", "# remote.pull()", "upstream_remote", "=", "remote", "break", "if", "not", "upstream_remote", ":", "error_out", "(", "\"No remote called {!r} found\"", ".", "format", "(", "origin_name", ")", ")", "# Check out master", "repo", ".", "heads", ".", "master", ".", "checkout", "(", ")", "upstream_remote", ".", "pull", "(", "repo", ".", "heads", ".", "master", ")", "# Is this one of the merged branches?!", "# XXX I don't know how to do this \"nativly\" with GitPython.", "merged_branches", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "repo", ".", "git", ".", "branch", "(", "\"--merged\"", ")", ".", "splitlines", "(", ")", "if", "x", ".", "strip", "(", ")", "and", "not", "x", ".", "strip", "(", ")", ".", "startswith", "(", "\"*\"", ")", "]", "was_merged", "=", "branch_name", "in", "merged_branches", "certain", "=", "was_merged", "or", "force", "if", "not", "certain", ":", "# Need to ask the user.", "# XXX This is where we could get smart and compare this branch", "# with the master.", "certain", "=", "(", "input", "(", "\"Are you certain {} is actually merged? [Y/n] \"", ".", "format", "(", "branch_name", ")", ")", ".", "lower", "(", ")", ".", "strip", "(", ")", "!=", "\"n\"", ")", "if", "not", "certain", ":", "return", "1", "if", "was_merged", ":", "repo", ".", "git", ".", "branch", "(", "\"-d\"", ",", "branch_name", ")", "else", ":", "repo", ".", "git", ".", "branch", "(", "\"-D\"", ",", "branch_name", ")", "fork_remote", "=", "None", "state", "=", "read", "(", "config", ".", "configfile", ")", "for", "remote", "in", "repo", ".", "remotes", ":", "if", "remote", ".", "name", "==", "state", ".", "get", "(", "\"FORK_NAME\"", ")", ":", "fork_remote", "=", "remote", "break", "if", "fork_remote", ":", "fork_remote", ".", "push", "(", "\":\"", "+", "branch_name", ")", "info_out", "(", "\"Remote branch on fork deleted too.\"", ")" ]
Deletes a found branch locally and remotely.
[ "Deletes", "a", "found", "branch", "locally", "and", "remotely", "." ]
2aace5bdb4a9b1cb65bea717784edf54c63b7bad
https://github.com/peterbe/gg/blob/2aace5bdb4a9b1cb65bea717784edf54c63b7bad/gg/builtins/cleanup/gg_cleanup.py#L13-L84
train
geophysics-ubonn/crtomo_tools
src/td_correct_temperature.py
calc_correction
def calc_correction(temp, mag, add=False, T_std=10, m=0.021): """Function to add or subtract the temperature effect to given data. The function can be called in python scripts. For application via command line in a file system use the script td_correct_temperature.py. The data is taken and given in Ohmm. rho_std_i = (m * (T_i - 25°) + 1) / (m * (T_std - 25°) + 1) * rho_i rho_i = (m * (T_std - 25°) + 1) / (m * (T_i - 25°) + 1) * rho_std_i Hayley (2007) Parameters: temp: temperature values corresponding to the individual resistivity values mag: resistivity values to be corrected add: switch for adding instead of subtracting the effect T_std: standard temperature to or from which to correct (default=10°) m: coefficient (default=0.021) """ if mag.shape[1] == 3: if add: data_x = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 0] data_y = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 1] data_z = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 2] return np.column_stack((data_x, data_y, data_z)) else: data_x = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 0] data_y = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 1] data_z = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 2] return np.column_stack((data_x, data_y, data_z)) else: if add: data_i = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag return data_i else: data_std = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag return data_std
python
def calc_correction(temp, mag, add=False, T_std=10, m=0.021): """Function to add or subtract the temperature effect to given data. The function can be called in python scripts. For application via command line in a file system use the script td_correct_temperature.py. The data is taken and given in Ohmm. rho_std_i = (m * (T_i - 25°) + 1) / (m * (T_std - 25°) + 1) * rho_i rho_i = (m * (T_std - 25°) + 1) / (m * (T_i - 25°) + 1) * rho_std_i Hayley (2007) Parameters: temp: temperature values corresponding to the individual resistivity values mag: resistivity values to be corrected add: switch for adding instead of subtracting the effect T_std: standard temperature to or from which to correct (default=10°) m: coefficient (default=0.021) """ if mag.shape[1] == 3: if add: data_x = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 0] data_y = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 1] data_z = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag[:, 2] return np.column_stack((data_x, data_y, data_z)) else: data_x = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 0] data_y = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 1] data_z = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag[:, 2] return np.column_stack((data_x, data_y, data_z)) else: if add: data_i = (m * (T_std - 25) + 1) / (m * (temp - 25) + 1) * mag return data_i else: data_std = (m * (temp - 25) + 1) / (m * (T_std - 25) + 1) * mag return data_std
[ "def", "calc_correction", "(", "temp", ",", "mag", ",", "add", "=", "False", ",", "T_std", "=", "10", ",", "m", "=", "0.021", ")", ":", "if", "mag", ".", "shape", "[", "1", "]", "==", "3", ":", "if", "add", ":", "data_x", "=", "(", "m", "*", "(", "T_std", "-", "25", ")", "+", "1", ")", "/", "(", "m", "*", "(", "temp", "-", "25", ")", "+", "1", ")", "*", "mag", "[", ":", ",", "0", "]", "data_y", "=", "(", "m", "*", "(", "T_std", "-", "25", ")", "+", "1", ")", "/", "(", "m", "*", "(", "temp", "-", "25", ")", "+", "1", ")", "*", "mag", "[", ":", ",", "1", "]", "data_z", "=", "(", "m", "*", "(", "T_std", "-", "25", ")", "+", "1", ")", "/", "(", "m", "*", "(", "temp", "-", "25", ")", "+", "1", ")", "*", "mag", "[", ":", ",", "2", "]", "return", "np", ".", "column_stack", "(", "(", "data_x", ",", "data_y", ",", "data_z", ")", ")", "else", ":", "data_x", "=", "(", "m", "*", "(", "temp", "-", "25", ")", "+", "1", ")", "/", "(", "m", "*", "(", "T_std", "-", "25", ")", "+", "1", ")", "*", "mag", "[", ":", ",", "0", "]", "data_y", "=", "(", "m", "*", "(", "temp", "-", "25", ")", "+", "1", ")", "/", "(", "m", "*", "(", "T_std", "-", "25", ")", "+", "1", ")", "*", "mag", "[", ":", ",", "1", "]", "data_z", "=", "(", "m", "*", "(", "temp", "-", "25", ")", "+", "1", ")", "/", "(", "m", "*", "(", "T_std", "-", "25", ")", "+", "1", ")", "*", "mag", "[", ":", ",", "2", "]", "return", "np", ".", "column_stack", "(", "(", "data_x", ",", "data_y", ",", "data_z", ")", ")", "else", ":", "if", "add", ":", "data_i", "=", "(", "m", "*", "(", "T_std", "-", "25", ")", "+", "1", ")", "/", "(", "m", "*", "(", "temp", "-", "25", ")", "+", "1", ")", "*", "mag", "return", "data_i", "else", ":", "data_std", "=", "(", "m", "*", "(", "temp", "-", "25", ")", "+", "1", ")", "/", "(", "m", "*", "(", "T_std", "-", "25", ")", "+", "1", ")", "*", "mag", "return", "data_std" ]
Function to add or subtract the temperature effect to given data. The function can be called in python scripts. For application via command line in a file system use the script td_correct_temperature.py. The data is taken and given in Ohmm. rho_std_i = (m * (T_i - 25°) + 1) / (m * (T_std - 25°) + 1) * rho_i rho_i = (m * (T_std - 25°) + 1) / (m * (T_i - 25°) + 1) * rho_std_i Hayley (2007) Parameters: temp: temperature values corresponding to the individual resistivity values mag: resistivity values to be corrected add: switch for adding instead of subtracting the effect T_std: standard temperature to or from which to correct (default=10°) m: coefficient (default=0.021)
[ "Function", "to", "add", "or", "substract", "the", "temperature", "effect", "to", "given", "data", ".", "The", "function", "can", "be", "called", "in", "python", "scripts", ".", "For", "application", "via", "command", "line", "in", "a", "file", "system", "use", "the", "script", "td_correct_temperature", ".", "py", ".", "The", "data", "is", "taken", "and", "given", "in", "Ohmm", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/td_correct_temperature.py#L137-L173
train
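A worked number for the default (subtracting) branch, following the Hayley (2007) relation in the docstring:

import numpy as np

mag = np.array([[100.0]])  # one column, so the scalar branch runs
temp = np.array([20.0])    # measured at 20 °C
rho_std = calc_correction(temp, mag)
# (0.021*(20-25)+1) / (0.021*(10-25)+1) * 100 = 0.895/0.685 * 100 ≈ 130.7 Ohmm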
geophysics-ubonn/crtomo_tools
src/td_correct_temperature.py
save_mag_to_file
def save_mag_to_file(mag, filename, rhofile): """Save the values in rho- or mag-format. """ if rhofile: # bring data in shape null = np.zeros(len(mag)) if mag.shape[1] == 3: null = np.column_stack((null, null, null, null)) result = np.column_stack((mag, null)) # save datapoints with open(filename, 'w') as fid: fid.write('{0}\n'.format(mag.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(result), fmt='%f') else: # bring data in shape with open('inv/rho00.mag', 'r') as fid: coor = np.loadtxt(fid, skiprows=1, usecols=[0, 1]) # calculated back to log if mag.shape[1] == 3: logx = [math.log(d, 10) for d in mag[:, 0]] logy = [math.log(d, 10) for d in mag[:, 1]] logz = [math.log(d, 10) for d in mag[:, 2]] mag_log = np.column_stack((logx, logy, logz)) else: mag_log = [math.log(d, 10) for d in mag] content = np.column_stack((coor[:, 0], coor[:, 1], mag_log)) # save datapoints with open(filename, 'w') as fid: fid.write('{0}\n'.format(content.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(content), fmt='%f')
python
def save_mag_to_file(mag, filename, rhofile): """Save the values in rho- or mag-format. """ if rhofile: # bring data in shape null = np.zeros(len(mag)) if mag.shape[1] == 3: null = np.column_stack((null, null, null, null)) result = np.column_stack((mag, null)) # save datapoints with open(filename, 'w') as fid: fid.write('{0}\n'.format(mag.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(result), fmt='%f') else: # bring data in shape with open('inv/rho00.mag', 'r') as fid: coor = np.loadtxt(fid, skiprows=1, usecols=[0, 1]) # calculated back to log if mag.shape[1] == 3: logx = [math.log(d, 10) for d in mag[:, 0]] logy = [math.log(d, 10) for d in mag[:, 1]] logz = [math.log(d, 10) for d in mag[:, 2]] mag_log = np.column_stack((logx, logy, logz)) else: mag_log = [math.log(d, 10) for d in mag] content = np.column_stack((coor[:, 0], coor[:, 1], mag_log)) # save datapoints with open(filename, 'w') as fid: fid.write('{0}\n'.format(content.shape[0])) with open(filename, 'ab') as fid: np.savetxt(fid, np.array(content), fmt='%f')
[ "def", "save_mag_to_file", "(", "mag", ",", "filename", ",", "rhofile", ")", ":", "if", "rhofile", ":", "# bring data in shape", "null", "=", "np", ".", "zeros", "(", "len", "(", "mag", ")", ")", "if", "mag", ".", "shape", "[", "1", "]", "==", "3", ":", "null", "=", "np", ".", "column_stack", "(", "(", "null", ",", "null", ",", "null", ",", "null", ")", ")", "result", "=", "np", ".", "column_stack", "(", "(", "mag", ",", "null", ")", ")", "# save datapoints", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fid", ":", "fid", ".", "write", "(", "'{0}\\n'", ".", "format", "(", "mag", ".", "shape", "[", "0", "]", ")", ")", "with", "open", "(", "filename", ",", "'ab'", ")", "as", "fid", ":", "np", ".", "savetxt", "(", "fid", ",", "np", ".", "array", "(", "result", ")", ",", "fmt", "=", "'%f'", ")", "else", ":", "# bring data in shape", "with", "open", "(", "'inv/rho00.mag'", ",", "'r'", ")", "as", "fid", ":", "coor", "=", "np", ".", "loadtxt", "(", "fid", ",", "skiprows", "=", "1", ",", "usecols", "=", "[", "0", ",", "1", "]", ")", "# calculated back to log", "if", "mag", ".", "shape", "[", "1", "]", "==", "3", ":", "logx", "=", "[", "math", ".", "log", "(", "d", ",", "10", ")", "for", "d", "in", "mag", "[", ":", ",", "0", "]", "]", "logy", "=", "[", "math", ".", "log", "(", "d", ",", "10", ")", "for", "d", "in", "mag", "[", ":", ",", "1", "]", "]", "logz", "=", "[", "math", ".", "log", "(", "d", ",", "10", ")", "for", "d", "in", "mag", "[", ":", ",", "2", "]", "]", "mag_log", "=", "np", ".", "column_stack", "(", "(", "logx", ",", "logy", ",", "logz", ")", ")", "else", ":", "mag_log", "=", "[", "math", ".", "log", "(", "d", ",", "10", ")", "for", "d", "in", "mag", "]", "content", "=", "np", ".", "column_stack", "(", "(", "coor", "[", ":", ",", "0", "]", ",", "coor", "[", ":", ",", "1", "]", ",", "mag_log", ")", ")", "# save datapoints", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fid", ":", "fid", ".", "write", "(", "'{0}\\n'", ".", "format", "(", "content", ".", "shape", "[", "0", "]", ")", ")", "with", "open", "(", "filename", ",", "'ab'", ")", "as", "fid", ":", "np", ".", "savetxt", "(", "fid", ",", "np", ".", "array", "(", "content", ")", ",", "fmt", "=", "'%f'", ")" ]
Save the values in rho- or mag-format.
[ "Save", "the", "values", "in", "rho", "-", "or", "mag", "-", "format", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/td_correct_temperature.py#L176-L210
train
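A minimal usage sketch for save_mag_to_file above; the input file and array shape are illustrative assumptions, not taken from the record.

import numpy as np
# hypothetical input: one magnitude column per element; the function
# indexes mag.shape[1], so the rhofile branch expects a 2D array
mag = np.loadtxt('inv/rho00.mag', skiprows=1, usecols=[2]).reshape(-1, 1)
save_mag_to_file(mag, 'rho_corrected.dat', rhofile=True)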
geophysics-ubonn/crtomo_tools
src/td_correct_temperature.py
main
def main(): """Function to add or substract the temperature effect to data in a tomodir """ options = handle_options() # read in temperature and resistivity data tempdata = readin_temp(options.temp_file) magdata = readin_rho(options.filename, options.rhofile, aniso=options.aniso) # calculate corrected data mag_corr = calc_correction(temp=tempdata, mag=magdata, add=options.add, T_std=options.T_std, m=options.m,) # save data save_mag_to_file(mag_corr, options.output, options.rhofile)
python
def main(): """Function to add or substract the temperature effect to data in a tomodir """ options = handle_options() # read in temperature and resistivity data tempdata = readin_temp(options.temp_file) magdata = readin_rho(options.filename, options.rhofile, aniso=options.aniso) # calculate corrected data mag_corr = calc_correction(temp=tempdata, mag=magdata, add=options.add, T_std=options.T_std, m=options.m,) # save data save_mag_to_file(mag_corr, options.output, options.rhofile)
[ "def", "main", "(", ")", ":", "options", "=", "handle_options", "(", ")", "# read in temperature and resistivity data", "tempdata", "=", "readin_temp", "(", "options", ".", "temp_file", ")", "magdata", "=", "readin_rho", "(", "options", ".", "filename", ",", "options", ".", "rhofile", ",", "aniso", "=", "options", ".", "aniso", ")", "# calculate corrected data", "mag_corr", "=", "calc_correction", "(", "temp", "=", "tempdata", ",", "mag", "=", "magdata", ",", "add", "=", "options", ".", "add", ",", "T_std", "=", "options", ".", "T_std", ",", "m", "=", "options", ".", "m", ",", ")", "# save data", "save_mag_to_file", "(", "mag_corr", ",", "options", ".", "output", ",", "options", ".", "rhofile", ")" ]
Function to add or subtract the temperature effect to data in a tomodir
[ "Function", "to", "add", "or", "substract", "the", "temperature", "effect", "to", "data", "in", "a", "tomodir" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/td_correct_temperature.py#L213-L232
train
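calc_correction is not part of this record; the sketch below shows a common linear conductivity-temperature model using the same parameter names (m, T_std) as the options above. The formula is an assumption, not the project's verified implementation.

def correct_rho(rho, T, T_std=25.0, m=0.02, add=False):
    # sigma(T) = sigma(T_std) * (1 + m * (T - T_std)) implies
    # rho(T_std) = rho(T) * (1 + m * (T - T_std))
    factor = 1.0 + m * (T - T_std)
    # add=False removes the temperature effect (normalizes to T_std),
    # add=True puts it back
    return rho * factor if not add else rho / factor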
lalinsky/mbdata
mbdata/api/utils.py
singular
def singular(plural): """ Take a plural English word and turn it into singular Obviously, this doesn't work in general. It knows just enough words to generate XML tag names for list items. For example, if we have an element called 'tracks' in the response, it will be serialized as a list without named items in JSON, but we need names for items in XML, so those will be called 'track'. """ if plural.endswith('ies'): return plural[:-3] + 'y' if plural.endswith('s'): return plural[:-1] raise ValueError('unknown plural form %r' % (plural,))
python
def singular(plural): """ Take a plural English word and turn it into singular Obviously, this doesn't work in general. It knows just enough words to generate XML tag names for list items. For example, if we have an element called 'tracks' in the response, it will be serialized as a list without named items in JSON, but we need names for items in XML, so those will be called 'track'. """ if plural.endswith('ies'): return plural[:-3] + 'y' if plural.endswith('s'): return plural[:-1] raise ValueError('unknown plural form %r' % (plural,))
[ "def", "singular", "(", "plural", ")", ":", "if", "plural", ".", "endswith", "(", "'ies'", ")", ":", "return", "plural", "[", ":", "-", "3", "]", "+", "'y'", "if", "plural", ".", "endswith", "(", "'s'", ")", ":", "return", "plural", "[", ":", "-", "1", "]", "raise", "ValueError", "(", "'unknown plural form %r'", "%", "(", "plural", ",", ")", ")" ]
Take a plural English word and turn it into singular Obviously, this doesn't work in general. It knows just enough words to generate XML tag names for list items. For example, if we have an element called 'tracks' in the response, it will be serialized as a list without named items in JSON, but we need names for items in XML, so those will be called 'track'.
[ "Take", "a", "plural", "English", "word", "and", "turn", "it", "into", "singular" ]
1ec788834047ced8614ad9763e430afe1d1e65e7
https://github.com/lalinsky/mbdata/blob/1ec788834047ced8614ad9763e430afe1d1e65e7/mbdata/api/utils.py#L56-L70
train
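Quick doctest-style examples of the two suffix rules and the fallback:

singular('tracks')   # -> 'track'
singular('entries')  # -> 'entry'
singular('media')    # raises ValueError: unknown plural form 'media'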
geophysics-ubonn/crtomo_tools
src/sens_center_plot.py
sens_center.plot_single_configuration
def plot_single_configuration(self, config_nr, sens_file): """ plot sensitivity distribution with center of mass for a single configuration. The electrodes used are colored. Parameters ---------- config_nr: int number of configuration sens_file: string, file path filename to sensitvity file """ indices = elem.load_column_file_to_elements_advanced( sens_file, [2, 3], False, False ) elem.plt_opt.title = '' elem.plt_opt.reverse = True elem.plt_opt.cbmin = -1 elem.plt_opt.cbmax = 1 elem.plt_opt.cblabel = r'fill' elem.plt_opt.xlabel = 'x (m)' elem.plt_opt.ylabel = 'z (m)' fig = plt.figure(figsize=(5, 7)) ax = fig.add_subplot(111) ax, pm, cb = elem.plot_element_data_to_ax( indices[0], ax, scale='asinh', no_cb=False, ) ax.scatter( self.sens_centers[config_nr, 0], self.sens_centers[config_nr, 1], marker='*', s=50, color='w', edgecolors='w', ) self.color_electrodes(config_nr, ax) # Output sensf = sens_file.split('sens')[-1] sensf = sensf.split('.')[0] out = 'sens_center_' + sensf + '.png' fig.savefig(out, bbox_inches='tight', dpi=300) fig.clf() plt.close(fig)
python
def plot_single_configuration(self, config_nr, sens_file): """ plot sensitivity distribution with center of mass for a single configuration. The electrodes used are colored. Parameters ---------- config_nr: int number of configuration sens_file: string, file path filename to sensitvity file """ indices = elem.load_column_file_to_elements_advanced( sens_file, [2, 3], False, False ) elem.plt_opt.title = '' elem.plt_opt.reverse = True elem.plt_opt.cbmin = -1 elem.plt_opt.cbmax = 1 elem.plt_opt.cblabel = r'fill' elem.plt_opt.xlabel = 'x (m)' elem.plt_opt.ylabel = 'z (m)' fig = plt.figure(figsize=(5, 7)) ax = fig.add_subplot(111) ax, pm, cb = elem.plot_element_data_to_ax( indices[0], ax, scale='asinh', no_cb=False, ) ax.scatter( self.sens_centers[config_nr, 0], self.sens_centers[config_nr, 1], marker='*', s=50, color='w', edgecolors='w', ) self.color_electrodes(config_nr, ax) # Output sensf = sens_file.split('sens')[-1] sensf = sensf.split('.')[0] out = 'sens_center_' + sensf + '.png' fig.savefig(out, bbox_inches='tight', dpi=300) fig.clf() plt.close(fig)
[ "def", "plot_single_configuration", "(", "self", ",", "config_nr", ",", "sens_file", ")", ":", "indices", "=", "elem", ".", "load_column_file_to_elements_advanced", "(", "sens_file", ",", "[", "2", ",", "3", "]", ",", "False", ",", "False", ")", "elem", ".", "plt_opt", ".", "title", "=", "''", "elem", ".", "plt_opt", ".", "reverse", "=", "True", "elem", ".", "plt_opt", ".", "cbmin", "=", "-", "1", "elem", ".", "plt_opt", ".", "cbmax", "=", "1", "elem", ".", "plt_opt", ".", "cblabel", "=", "r'fill'", "elem", ".", "plt_opt", ".", "xlabel", "=", "'x (m)'", "elem", ".", "plt_opt", ".", "ylabel", "=", "'z (m)'", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "5", ",", "7", ")", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ",", "pm", ",", "cb", "=", "elem", ".", "plot_element_data_to_ax", "(", "indices", "[", "0", "]", ",", "ax", ",", "scale", "=", "'asinh'", ",", "no_cb", "=", "False", ",", ")", "ax", ".", "scatter", "(", "self", ".", "sens_centers", "[", "config_nr", ",", "0", "]", ",", "self", ".", "sens_centers", "[", "config_nr", ",", "1", "]", ",", "marker", "=", "'*'", ",", "s", "=", "50", ",", "color", "=", "'w'", ",", "edgecolors", "=", "'w'", ",", ")", "self", ".", "color_electrodes", "(", "config_nr", ",", "ax", ")", "# Output", "sensf", "=", "sens_file", ".", "split", "(", "'sens'", ")", "[", "-", "1", "]", "sensf", "=", "sensf", ".", "split", "(", "'.'", ")", "[", "0", "]", "out", "=", "'sens_center_'", "+", "sensf", "+", "'.png'", "fig", ".", "savefig", "(", "out", ",", "bbox_inches", "=", "'tight'", ",", "dpi", "=", "300", ")", "fig", ".", "clf", "(", ")", "plt", ".", "close", "(", "fig", ")" ]
plot sensitivity distribution with center of mass for a single configuration. The electrodes used are colored. Parameters ---------- config_nr: int number of configuration sens_file: string, file path filename to sensitivity file
[ "plot", "sensitivity", "distribution", "with", "center", "of", "mass", "for", "a", "single", "configuration", ".", "The", "electrodes", "used", "are", "colored", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/sens_center_plot.py#L168-L220
train
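The output filename is derived from the sensitivity filename; for a hypothetical CRMod file 'sens0013.dat' the record's string handling yields:

sens_file = 'sens0013.dat'
sensf = sens_file.split('sens')[-1].split('.')[0]  # '0013'
out = 'sens_center_' + sensf + '.png'              # 'sens_center_0013.png'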
geophysics-ubonn/crtomo_tools
src/sens_center_plot.py
sens_center.plot_sens_center
def plot_sens_center(self, frequency=2): """ plot sensitivity center distribution for all configurations in config.dat. The centers of mass are colored by the data given in volt_file. """ try: colors = np.loadtxt(self.volt_file, skiprows=1) except IOError: print('IOError opening {0}'.format(volt_file)) exit() # check for 1-dimensionality if(len(colors.shape) > 1): print('Artificial or Multi frequency data') colors = colors[:, frequency].flatten() colors = colors[~np.isnan(colors)] elem.load_elem_file(self.elem_file) elem.load_elec_file(self.elec_file) nr_elements = len(elem.element_type_list[0]) elem.element_data = np.zeros((nr_elements, 1)) * np.nan elem.plt_opt.title = ' ' elem.plt_opt.reverse = True elem.plt_opt.cbmin = -1 elem.plt_opt.cbmax = 1 elem.plt_opt.cblabel = self.cblabel elem.plt_opt.xlabel = 'x (m)' elem.plt_opt.ylabel = 'z (m)' fig = plt.figure(figsize=(5, 7)) ax = fig.add_subplot(111) ax, pm, cb = elem.plot_element_data_to_ax(0, ax, scale='linear', no_cb=True) ax.scatter(self.sens_centers[:, 0], self.sens_centers[:, 1], c=colors, s=100, edgecolors='none') cb_pos = mpl_get_cb_bound_next_to_plot(ax) ax1 = fig.add_axes(cb_pos, frame_on=True) cmap = mpl.cm.jet_r norm = mpl.colors.Normalize(vmin=np.nanmin(colors), vmax=np.nanmax(colors)) mpl.colorbar.ColorbarBase(ax1, cmap=cmap, norm=norm, orientation='vertical') fig.savefig(self.output_file, bbox_inches='tight', dpi=300)
python
def plot_sens_center(self, frequency=2): """ plot sensitivity center distribution for all configurations in config.dat. The centers of mass are colored by the data given in volt_file. """ try: colors = np.loadtxt(self.volt_file, skiprows=1) except IOError: print('IOError opening {0}'.format(volt_file)) exit() # check for 1-dimensionality if(len(colors.shape) > 1): print('Artificial or Multi frequency data') colors = colors[:, frequency].flatten() colors = colors[~np.isnan(colors)] elem.load_elem_file(self.elem_file) elem.load_elec_file(self.elec_file) nr_elements = len(elem.element_type_list[0]) elem.element_data = np.zeros((nr_elements, 1)) * np.nan elem.plt_opt.title = ' ' elem.plt_opt.reverse = True elem.plt_opt.cbmin = -1 elem.plt_opt.cbmax = 1 elem.plt_opt.cblabel = self.cblabel elem.plt_opt.xlabel = 'x (m)' elem.plt_opt.ylabel = 'z (m)' fig = plt.figure(figsize=(5, 7)) ax = fig.add_subplot(111) ax, pm, cb = elem.plot_element_data_to_ax(0, ax, scale='linear', no_cb=True) ax.scatter(self.sens_centers[:, 0], self.sens_centers[:, 1], c=colors, s=100, edgecolors='none') cb_pos = mpl_get_cb_bound_next_to_plot(ax) ax1 = fig.add_axes(cb_pos, frame_on=True) cmap = mpl.cm.jet_r norm = mpl.colors.Normalize(vmin=np.nanmin(colors), vmax=np.nanmax(colors)) mpl.colorbar.ColorbarBase(ax1, cmap=cmap, norm=norm, orientation='vertical') fig.savefig(self.output_file, bbox_inches='tight', dpi=300)
[ "def", "plot_sens_center", "(", "self", ",", "frequency", "=", "2", ")", ":", "try", ":", "colors", "=", "np", ".", "loadtxt", "(", "self", ".", "volt_file", ",", "skiprows", "=", "1", ")", "except", "IOError", ":", "print", "(", "'IOError opening {0}'", ".", "format", "(", "volt_file", ")", ")", "exit", "(", ")", "# check for 1-dimensionality", "if", "(", "len", "(", "colors", ".", "shape", ")", ">", "1", ")", ":", "print", "(", "'Artificial or Multi frequency data'", ")", "colors", "=", "colors", "[", ":", ",", "frequency", "]", ".", "flatten", "(", ")", "colors", "=", "colors", "[", "~", "np", ".", "isnan", "(", "colors", ")", "]", "elem", ".", "load_elem_file", "(", "self", ".", "elem_file", ")", "elem", ".", "load_elec_file", "(", "self", ".", "elec_file", ")", "nr_elements", "=", "len", "(", "elem", ".", "element_type_list", "[", "0", "]", ")", "elem", ".", "element_data", "=", "np", ".", "zeros", "(", "(", "nr_elements", ",", "1", ")", ")", "*", "np", ".", "nan", "elem", ".", "plt_opt", ".", "title", "=", "' '", "elem", ".", "plt_opt", ".", "reverse", "=", "True", "elem", ".", "plt_opt", ".", "cbmin", "=", "-", "1", "elem", ".", "plt_opt", ".", "cbmax", "=", "1", "elem", ".", "plt_opt", ".", "cblabel", "=", "self", ".", "cblabel", "elem", ".", "plt_opt", ".", "xlabel", "=", "'x (m)'", "elem", ".", "plt_opt", ".", "ylabel", "=", "'z (m)'", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "5", ",", "7", ")", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ",", "pm", ",", "cb", "=", "elem", ".", "plot_element_data_to_ax", "(", "0", ",", "ax", ",", "scale", "=", "'linear'", ",", "no_cb", "=", "True", ")", "ax", ".", "scatter", "(", "self", ".", "sens_centers", "[", ":", ",", "0", "]", ",", "self", ".", "sens_centers", "[", ":", ",", "1", "]", ",", "c", "=", "colors", ",", "s", "=", "100", ",", "edgecolors", "=", "'none'", ")", "cb_pos", "=", "mpl_get_cb_bound_next_to_plot", "(", "ax", ")", "ax1", "=", "fig", ".", "add_axes", "(", "cb_pos", ",", "frame_on", "=", "True", ")", "cmap", "=", "mpl", ".", "cm", ".", "jet_r", "norm", "=", "mpl", ".", "colors", ".", "Normalize", "(", "vmin", "=", "np", ".", "nanmin", "(", "colors", ")", ",", "vmax", "=", "np", ".", "nanmax", "(", "colors", ")", ")", "mpl", ".", "colorbar", ".", "ColorbarBase", "(", "ax1", ",", "cmap", "=", "cmap", ",", "norm", "=", "norm", ",", "orientation", "=", "'vertical'", ")", "fig", ".", "savefig", "(", "self", ".", "output_file", ",", "bbox_inches", "=", "'tight'", ",", "dpi", "=", "300", ")" ]
plot sensitivity center distribution for all configurations in config.dat. The centers of mass are colored by the data given in volt_file.
[ "plot", "sensitivity", "center", "distribution", "for", "all", "configurations", "in", "config", ".", "dat", ".", "The", "centers", "of", "mass", "are", "colored", "by", "the", "data", "given", "in", "volt_file", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/sens_center_plot.py#L222-L269
train
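Note that the except branch above formats a bare volt_file, which is undefined in that scope and would raise a NameError while handling the IOError; the instance attribute is the likely intent. A corrected sketch:

try:
    colors = np.loadtxt(self.volt_file, skiprows=1)
except IOError:
    print('IOError opening {0}'.format(self.volt_file))
    exit()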
geophysics-ubonn/crtomo_tools
src/sens_center_plot.py
sens_center.color_electrodes
def color_electrodes(self, config_nr, ax): """ Color the electrodes used in specific configuration. Voltage electrodes are yellow, Current electrodes are red ?! """ electrodes = np.loadtxt(options.config_file, skiprows=1) electrodes = self.configs[~np.isnan(self.configs).any(1)] electrodes = electrodes.astype(int) conf = [] for dim in range(0, electrodes.shape[1]): c = electrodes[config_nr, dim] # c = c.partition('0') a = np.round(c / 10000) - 1 b = np.mod(c, 10000) - 1 conf.append(a) conf.append(b) Ex, Ez = elem.get_electrodes() color = ['#ffed00', '#ffed00', '#ff0000', '#ff0000'] ax.scatter(Ex[conf], Ez[conf], c=color, marker='s', s=60, clip_on=False, edgecolors='k')
python
def color_electrodes(self, config_nr, ax): """ Color the electrodes used in specific configuration. Voltage electrodes are yellow, Current electrodes are red ?! """ electrodes = np.loadtxt(options.config_file, skiprows=1) electrodes = self.configs[~np.isnan(self.configs).any(1)] electrodes = electrodes.astype(int) conf = [] for dim in range(0, electrodes.shape[1]): c = electrodes[config_nr, dim] # c = c.partition('0') a = np.round(c / 10000) - 1 b = np.mod(c, 10000) - 1 conf.append(a) conf.append(b) Ex, Ez = elem.get_electrodes() color = ['#ffed00', '#ffed00', '#ff0000', '#ff0000'] ax.scatter(Ex[conf], Ez[conf], c=color, marker='s', s=60, clip_on=False, edgecolors='k')
[ "def", "color_electrodes", "(", "self", ",", "config_nr", ",", "ax", ")", ":", "electrodes", "=", "np", ".", "loadtxt", "(", "options", ".", "config_file", ",", "skiprows", "=", "1", ")", "electrodes", "=", "self", ".", "configs", "[", "~", "np", ".", "isnan", "(", "self", ".", "configs", ")", ".", "any", "(", "1", ")", "]", "electrodes", "=", "electrodes", ".", "astype", "(", "int", ")", "conf", "=", "[", "]", "for", "dim", "in", "range", "(", "0", ",", "electrodes", ".", "shape", "[", "1", "]", ")", ":", "c", "=", "electrodes", "[", "config_nr", ",", "dim", "]", "# c = c.partition('0')", "a", "=", "np", ".", "round", "(", "c", "/", "10000", ")", "-", "1", "b", "=", "np", ".", "mod", "(", "c", ",", "10000", ")", "-", "1", "conf", ".", "append", "(", "a", ")", "conf", ".", "append", "(", "b", ")", "Ex", ",", "Ez", "=", "elem", ".", "get_electrodes", "(", ")", "color", "=", "[", "'#ffed00'", ",", "'#ffed00'", ",", "'#ff0000'", ",", "'#ff0000'", "]", "ax", ".", "scatter", "(", "Ex", "[", "conf", "]", ",", "Ez", "[", "conf", "]", ",", "c", "=", "color", ",", "marker", "=", "'s'", ",", "s", "=", "60", ",", "clip_on", "=", "False", ",", "edgecolors", "=", "'k'", ")" ]
Color the electrodes used in a specific configuration; the current and voltage electrode pairs are drawn in different colors (yellow and red).
[ "Color", "the", "electrodes", "used", "in", "specific", "configuration", ".", "Voltage", "electrodes", "are", "yellow", "Current", "electrodes", "are", "red", "?!" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/sens_center_plot.py#L271-L291
train
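The config file packs two 1-based electrode numbers into one integer as a * 10000 + b; a decoding example (the value is hypothetical):

c = 10004
a = c // 10000 - 1  # 0 -> first electrode, as a 0-based index
b = c % 10000 - 1   # 3 -> fourth electrode
# The record colors the first decoded pair yellow and the second red;
# which pair carries current depends on the column order of the config
# file, which this record does not pin down.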
geophysics-ubonn/crtomo_tools
src/sens_center_plot.py
sens_center.compute_sens
def compute_sens(self, elem_file, elec_file, configs): """ Compute the sensitivities for the given input data. A CRMod instance is called to create the sensitivity files. """ CRMod_config = CRMod.config() # activate 2D mode and set sink nr if self.options.sink is not None: print('2D mode with sink {0}'.format(self.options.sink)) CRMod_config['2D'] = 0 CRMod_config['fictitious_sink'] = 'T' CRMod_config['sink_node'] = self.options.sink CRMod_config['write_sens'] = 'T' CRMod_instance = CRMod.CRMod(CRMod_config) CRMod_instance.elemfile = elem_file CRMod_instance.elecfile = elec_file CRMod_instance.configdata = configs resistivity = 100 # get number of elements fid = open(elem_file, 'r') fid.readline() elements = int(fid.readline().strip().split()[1]) fid.close() # create rho.dat file rhodata = '{0}\n'.format(elements) for i in range(0, elements): rhodata += '{0} 0\n'.format(resistivity) CRMod_instance.rhodata = rhodata CRMod_instance.run_in_tempdir() volt_file = CRMod_instance.volt_file sens_files = CRMod_instance.sens_files return sens_files, volt_file, CRMod_instance.temp_dir
python
def compute_sens(self, elem_file, elec_file, configs): """ Compute the sensitivities for the given input data. A CRMod instance is called to create the sensitivity files. """ CRMod_config = CRMod.config() # activate 2D mode and set sink nr if self.options.sink is not None: print('2D mode with sink {0}'.format(self.options.sink)) CRMod_config['2D'] = 0 CRMod_config['fictitious_sink'] = 'T' CRMod_config['sink_node'] = self.options.sink CRMod_config['write_sens'] = 'T' CRMod_instance = CRMod.CRMod(CRMod_config) CRMod_instance.elemfile = elem_file CRMod_instance.elecfile = elec_file CRMod_instance.configdata = configs resistivity = 100 # get number of elements fid = open(elem_file, 'r') fid.readline() elements = int(fid.readline().strip().split()[1]) fid.close() # create rho.dat file rhodata = '{0}\n'.format(elements) for i in range(0, elements): rhodata += '{0} 0\n'.format(resistivity) CRMod_instance.rhodata = rhodata CRMod_instance.run_in_tempdir() volt_file = CRMod_instance.volt_file sens_files = CRMod_instance.sens_files return sens_files, volt_file, CRMod_instance.temp_dir
[ "def", "compute_sens", "(", "self", ",", "elem_file", ",", "elec_file", ",", "configs", ")", ":", "CRMod_config", "=", "CRMod", ".", "config", "(", ")", "# activate 2D mode and set sink nr", "if", "self", ".", "options", ".", "sink", "is", "not", "None", ":", "print", "(", "'2D mode with sink {0}'", ".", "format", "(", "self", ".", "options", ".", "sink", ")", ")", "CRMod_config", "[", "'2D'", "]", "=", "0", "CRMod_config", "[", "'fictitious_sink'", "]", "=", "'T'", "CRMod_config", "[", "'sink_node'", "]", "=", "self", ".", "options", ".", "sink", "CRMod_config", "[", "'write_sens'", "]", "=", "'T'", "CRMod_instance", "=", "CRMod", ".", "CRMod", "(", "CRMod_config", ")", "CRMod_instance", ".", "elemfile", "=", "elem_file", "CRMod_instance", ".", "elecfile", "=", "elec_file", "CRMod_instance", ".", "configdata", "=", "configs", "resistivity", "=", "100", "# get number of elements", "fid", "=", "open", "(", "elem_file", ",", "'r'", ")", "fid", ".", "readline", "(", ")", "elements", "=", "int", "(", "fid", ".", "readline", "(", ")", ".", "strip", "(", ")", ".", "split", "(", ")", "[", "1", "]", ")", "fid", ".", "close", "(", ")", "# create rho.dat file", "rhodata", "=", "'{0}\\n'", ".", "format", "(", "elements", ")", "for", "i", "in", "range", "(", "0", ",", "elements", ")", ":", "rhodata", "+=", "'{0} 0\\n'", ".", "format", "(", "resistivity", ")", "CRMod_instance", ".", "rhodata", "=", "rhodata", "CRMod_instance", ".", "run_in_tempdir", "(", ")", "volt_file", "=", "CRMod_instance", ".", "volt_file", "sens_files", "=", "CRMod_instance", ".", "sens_files", "return", "sens_files", ",", "volt_file", ",", "CRMod_instance", ".", "temp_dir" ]
Compute the sensitivities for the given input data. A CRMod instance is called to create the sensitivity files.
[ "Compute", "the", "sensitivities", "for", "the", "given", "input", "data", ".", "A", "CRMod", "instance", "is", "called", "to", "create", "the", "sensitivity", "files", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/sens_center_plot.py#L293-L328
train
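The element-count read in compute_sens can use a context manager instead of a manual open/close pair; an equivalent sketch:

with open(elem_file, 'r') as fid:
    fid.readline()  # skip the first header line
    elements = int(fid.readline().strip().split()[1])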
geophysics-ubonn/crtomo_tools
src/sens_center_plot.py
sens_center.compute_center_of_mass
def compute_center_of_mass(self, filename): """ Center of mass is computed using the sensitivity data output from CRMod Data weights can be applied using command line options """ sens = np.loadtxt(filename, skiprows=1) X = sens[:, 0] Z = sens[:, 1] # C = (np.abs(sens[:,2]))# ./ np.max(np.abs(sens[:,2])) C = sens[:, 2] x_center = 0 z_center = 0 sens_sum = 0 for i in range(0, C.shape[0]): # unweighted if(self.weight == 0): weight = (C[i]) # abs if(self.weight == 1): weight = np.abs(C[i]) # log10 if(self.weight == 2): weight = np.log10(np.abs(C[i])) # sqrt if(self.weight == 3): weight = np.sqrt(np.abs(C[i])) x_center += (X[i] * weight) z_center += (Z[i] * weight) sens_sum += weight x_center /= sens_sum z_center /= sens_sum return (x_center, z_center)
python
def compute_center_of_mass(self, filename): """ Center of mass is computed using the sensitivity data output from CRMod Data weights can be applied using command line options """ sens = np.loadtxt(filename, skiprows=1) X = sens[:, 0] Z = sens[:, 1] # C = (np.abs(sens[:,2]))# ./ np.max(np.abs(sens[:,2])) C = sens[:, 2] x_center = 0 z_center = 0 sens_sum = 0 for i in range(0, C.shape[0]): # unweighted if(self.weight == 0): weight = (C[i]) # abs if(self.weight == 1): weight = np.abs(C[i]) # log10 if(self.weight == 2): weight = np.log10(np.abs(C[i])) # sqrt if(self.weight == 3): weight = np.sqrt(np.abs(C[i])) x_center += (X[i] * weight) z_center += (Z[i] * weight) sens_sum += weight x_center /= sens_sum z_center /= sens_sum return (x_center, z_center)
[ "def", "compute_center_of_mass", "(", "self", ",", "filename", ")", ":", "sens", "=", "np", ".", "loadtxt", "(", "filename", ",", "skiprows", "=", "1", ")", "X", "=", "sens", "[", ":", ",", "0", "]", "Z", "=", "sens", "[", ":", ",", "1", "]", "# C = (np.abs(sens[:,2]))# ./ np.max(np.abs(sens[:,2]))", "C", "=", "sens", "[", ":", ",", "2", "]", "x_center", "=", "0", "z_center", "=", "0", "sens_sum", "=", "0", "for", "i", "in", "range", "(", "0", ",", "C", ".", "shape", "[", "0", "]", ")", ":", "# unweighted", "if", "(", "self", ".", "weight", "==", "0", ")", ":", "weight", "=", "(", "C", "[", "i", "]", ")", "# abs", "if", "(", "self", ".", "weight", "==", "1", ")", ":", "weight", "=", "np", ".", "abs", "(", "C", "[", "i", "]", ")", "# log10", "if", "(", "self", ".", "weight", "==", "2", ")", ":", "weight", "=", "np", ".", "log10", "(", "np", ".", "abs", "(", "C", "[", "i", "]", ")", ")", "# sqrt", "if", "(", "self", ".", "weight", "==", "3", ")", ":", "weight", "=", "np", ".", "sqrt", "(", "np", ".", "abs", "(", "C", "[", "i", "]", ")", ")", "x_center", "+=", "(", "X", "[", "i", "]", "*", "weight", ")", "z_center", "+=", "(", "Z", "[", "i", "]", "*", "weight", ")", "sens_sum", "+=", "weight", "x_center", "/=", "sens_sum", "z_center", "/=", "sens_sum", "return", "(", "x_center", ",", "z_center", ")" ]
Center of mass is computed using the sensitivity data output from CRMod Data weights can be applied using command line options
[ "Center", "of", "mass", "is", "computed", "using", "the", "sensitivity", "data", "output", "from", "CRMod", "Data", "weights", "can", "be", "applied", "using", "command", "line", "options" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/sens_center_plot.py#L330-L366
train
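The per-element loop can be vectorized with numpy; a sketch of the same weighting codes (0 = raw, 1 = abs, 2 = log10, 3 = sqrt) as used above, for the body of compute_center_of_mass:

import numpy as np
weight_funcs = {
    0: lambda c: c,
    1: np.abs,
    2: lambda c: np.log10(np.abs(c)),
    3: lambda c: np.sqrt(np.abs(c)),
}
w = weight_funcs[self.weight](C)
x_center = np.sum(X * w) / np.sum(w)
z_center = np.sum(Z * w) / np.sum(w)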
geophysics-ubonn/crtomo_tools
src/td_init.py
handle_cmd_options
def handle_cmd_options(): ''' Get the options from the command line. ''' parser = OptionParser() parser.add_option("-s", "--silent", action="store_true", dest="silent", help="print any warnings", default=False) (options, args) = parser.parse_args() return options, args
python
def handle_cmd_options(): ''' Get the options from the command line. ''' parser = OptionParser() parser.add_option("-s", "--silent", action="store_true", dest="silent", help="print any warnings", default=False) (options, args) = parser.parse_args() return options, args
[ "def", "handle_cmd_options", "(", ")", ":", "parser", "=", "OptionParser", "(", ")", "parser", ".", "add_option", "(", "\"-s\"", ",", "\"--silent\"", ",", "action", "=", "\"store_true\"", ",", "dest", "=", "\"silent\"", ",", "help", "=", "\"print any warnings\"", ",", "default", "=", "False", ")", "(", "options", ",", "args", ")", "=", "parser", ".", "parse_args", "(", ")", "return", "options", ",", "args" ]
Get the options from the command line.
[ "Get", "the", "options", "from", "the", "command", "line", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/td_init.py#L18-L26
train
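optparse has been deprecated since Python 2.7; an argparse sketch of the same flag (note it returns a single namespace instead of the (options, args) pair, and the help text is reworded to match what --silent actually does):

import argparse

def handle_cmd_options():
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--silent', action='store_true',
                        default=False,
                        help='suppress warnings about missing files')
    return parser.parse_args()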
geophysics-ubonn/crtomo_tools
src/td_init.py
move
def move(fname, folder, options): """Move file to dir if existing """ if os.path.isfile(fname): shutil.move(fname, folder) else: if options.silent is False: print('{0} missing'.format(fname))
python
def move(fname, folder, options): """Move file to dir if existing """ if os.path.isfile(fname): shutil.move(fname, folder) else: if options.silent is False: print('{0} missing'.format(fname))
[ "def", "move", "(", "fname", ",", "folder", ",", "options", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "fname", ")", ":", "shutil", ".", "move", "(", "fname", ",", "folder", ")", "else", ":", "if", "options", ".", "silent", "is", "False", ":", "print", "(", "'{0} missing'", ".", "format", "(", "fname", ")", ")" ]
Move file to dir if existing
[ "Move", "file", "to", "dir", "if", "existing" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/td_init.py#L29-L36
train
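An illustrative call; the filename and target folder are placeholders:

move('mod/volt.dat', 'mod_backup', options)  # warns unless options.silent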
Infinidat/infi.traceback
src/infi/traceback/__init__.py
print_tb
def print_tb(tb, limit=None, file=None): """Print up to 'limit' stack trace entries from the traceback 'tb'. If 'limit' is omitted or None, all entries are printed. If 'file' is omitted or None, the output goes to sys.stderr; otherwise 'file' should be an open file or file-like object with a write() method. """ if file is None: file = sys.stderr if limit is None: if hasattr(sys, 'tracebacklimit'): limit = sys.tracebacklimit file.write('\n'.join(format_tb(tb, limit)) + '\n')
python
def print_tb(tb, limit=None, file=None): """Print up to 'limit' stack trace entries from the traceback 'tb'. If 'limit' is omitted or None, all entries are printed. If 'file' is omitted or None, the output goes to sys.stderr; otherwise 'file' should be an open file or file-like object with a write() method. """ if file is None: file = sys.stderr if limit is None: if hasattr(sys, 'tracebacklimit'): limit = sys.tracebacklimit file.write('\n'.join(format_tb(tb, limit)) + '\n')
[ "def", "print_tb", "(", "tb", ",", "limit", "=", "None", ",", "file", "=", "None", ")", ":", "if", "file", "is", "None", ":", "file", "=", "sys", ".", "stderr", "if", "limit", "is", "None", ":", "if", "hasattr", "(", "sys", ",", "'tracebacklimit'", ")", ":", "limit", "=", "sys", ".", "tracebacklimit", "file", ".", "write", "(", "'\\n'", ".", "join", "(", "format_tb", "(", "tb", ",", "limit", ")", ")", "+", "'\\n'", ")" ]
Print up to 'limit' stack trace entries from the traceback 'tb'. If 'limit' is omitted or None, all entries are printed. If 'file' is omitted or None, the output goes to sys.stderr; otherwise 'file' should be an open file or file-like object with a write() method.
[ "Print", "up", "to", "limit", "stack", "trace", "entries", "from", "the", "traceback", "tb", "." ]
ae25455ec451c136458d2781fea876964375bf8b
https://github.com/Infinidat/infi.traceback/blob/ae25455ec451c136458d2781fea876964375bf8b/src/infi/traceback/__init__.py#L85-L98
train
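A usage sketch: grab the active traceback via sys.exc_info() and print at most two stack entries:

import sys
try:
    1 / 0
except ZeroDivisionError:
    print_tb(sys.exc_info()[2], limit=2)  # goes to sys.stderr by default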
Infinidat/infi.traceback
src/infi/traceback/__init__.py
print_exception
def print_exception(etype, value, tb, limit=None, file=None, chain=True): """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. This differs from print_tb() in the following ways: (1) if traceback is not None, it prints a header "Traceback (most recent call last):"; (2) it prints the exception type and value after the stack trace; (3) if type is SyntaxError and value has the appropriate format, it prints the line where the syntax error occurred with a caret on the next line indicating the approximate position of the error. """ import traceback if file is None: file = sys.stderr if tb: file.write('Traceback (most recent call last):\n') print_tb(tb, limit, file) lines = traceback.format_exception_only(etype, value) for line in lines: file.write(line)
python
def print_exception(etype, value, tb, limit=None, file=None, chain=True): """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. This differs from print_tb() in the following ways: (1) if traceback is not None, it prints a header "Traceback (most recent call last):"; (2) it prints the exception type and value after the stack trace; (3) if type is SyntaxError and value has the appropriate format, it prints the line where the syntax error occurred with a caret on the next line indicating the approximate position of the error. """ import traceback if file is None: file = sys.stderr if tb: file.write('Traceback (most recent call last):\n') print_tb(tb, limit, file) lines = traceback.format_exception_only(etype, value) for line in lines: file.write(line)
[ "def", "print_exception", "(", "etype", ",", "value", ",", "tb", ",", "limit", "=", "None", ",", "file", "=", "None", ",", "chain", "=", "True", ")", ":", "import", "traceback", "if", "file", "is", "None", ":", "file", "=", "sys", ".", "stderr", "if", "tb", ":", "file", ".", "write", "(", "'Traceback (most recent call last):\\n'", ")", "print_tb", "(", "tb", ",", "limit", ",", "file", ")", "lines", "=", "traceback", ".", "format_exception_only", "(", "etype", ",", "value", ")", "for", "line", "in", "lines", ":", "file", ".", "write", "(", "line", ")" ]
Print exception up to 'limit' stack trace entries from 'tb' to 'file'. This differs from print_tb() in the following ways: (1) if traceback is not None, it prints a header "Traceback (most recent call last):"; (2) it prints the exception type and value after the stack trace; (3) if type is SyntaxError and value has the appropriate format, it prints the line where the syntax error occurred with a caret on the next line indicating the approximate position of the error.
[ "Print", "exception", "up", "to", "limit", "stack", "trace", "entries", "from", "tb", "to", "file", "." ]
ae25455ec451c136458d2781fea876964375bf8b
https://github.com/Infinidat/infi.traceback/blob/ae25455ec451c136458d2781fea876964375bf8b/src/infi/traceback/__init__.py#L168-L187
train
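The same pattern works for the full exception report:

import sys
try:
    int('not a number')
except ValueError:
    etype, value, tb = sys.exc_info()
    print_exception(etype, value, tb, limit=3)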
gofed/gofedlib
gofedlib/go/projectinfo.py
ProjectInfo.construct
def construct(self, data): """Construct info about a project from artefact :param data: golang-project-packages artefact :type data: json/dict """ occurrences = {} main_occurrences = {} # occurrences of devel packages for pkg in data["data"]["dependencies"]: package = pkg["package"] for item in pkg["dependencies"]: dep = item["name"] if package != ".": deps = map(lambda l: "%s/%s" % (package, l), item["location"]) else: deps = item["location"] if dep not in occurrences: occurrences[dep] = deps else: occurrences[dep] = occurrences[dep] + deps self.occurrences = occurrences # occurrences of main packages for main in data["data"]["main"]: filename = main["filename"] for dep in main["dependencies"]: if dep not in main_occurrences: main_occurrences[dep] = [filename] else: main_occurrences[dep].append(filename) self.main_occurrences = main_occurrences # test directories self.test_directories = sorted(map(lambda l: l["test"], data["data"]["tests"])) # provided devel packages self.provided_packages = sorted(data["data"]["packages"]) # imported paths in devel packages imported_packages = [] imported_native_packages = [] for path in occurrences: try: self.ipparser.parse(path) except ValueError: continue if self.ipparser.isNative(): imported_native_packages.append(path) else: imported_packages.append(path) self.imported_packages = sorted(imported_packages) self.imported_native_packages = sorted(imported_native_packages) # main packages self.main_packages = map(lambda l: l["filename"], data["data"]["main"])
python
def construct(self, data): """Construct info about a project from artefact :param data: golang-project-packages artefact :type data: json/dict """ occurrences = {} main_occurrences = {} # occurrences of devel packages for pkg in data["data"]["dependencies"]: package = pkg["package"] for item in pkg["dependencies"]: dep = item["name"] if package != ".": deps = map(lambda l: "%s/%s" % (package, l), item["location"]) else: deps = item["location"] if dep not in occurrences: occurrences[dep] = deps else: occurrences[dep] = occurrences[dep] + deps self.occurrences = occurrences # occurrences of main packages for main in data["data"]["main"]: filename = main["filename"] for dep in main["dependencies"]: if dep not in main_occurrences: main_occurrences[dep] = [filename] else: main_occurrences[dep].append(filename) self.main_occurrences = main_occurrences # test directories self.test_directories = sorted(map(lambda l: l["test"], data["data"]["tests"])) # provided devel packages self.provided_packages = sorted(data["data"]["packages"]) # imported paths in devel packages imported_packages = [] imported_native_packages = [] for path in occurrences: try: self.ipparser.parse(path) except ValueError: continue if self.ipparser.isNative(): imported_native_packages.append(path) else: imported_packages.append(path) self.imported_packages = sorted(imported_packages) self.imported_native_packages = sorted(imported_native_packages) # main packages self.main_packages = map(lambda l: l["filename"], data["data"]["main"])
[ "def", "construct", "(", "self", ",", "data", ")", ":", "occurrences", "=", "{", "}", "main_occurrences", "=", "{", "}", "# occurrences of devel packages", "for", "pkg", "in", "data", "[", "\"data\"", "]", "[", "\"dependencies\"", "]", ":", "package", "=", "pkg", "[", "\"package\"", "]", "for", "item", "in", "pkg", "[", "\"dependencies\"", "]", ":", "dep", "=", "item", "[", "\"name\"", "]", "if", "package", "!=", "\".\"", ":", "deps", "=", "map", "(", "lambda", "l", ":", "\"%s/%s\"", "%", "(", "package", ",", "l", ")", ",", "item", "[", "\"location\"", "]", ")", "else", ":", "deps", "=", "item", "[", "\"location\"", "]", "if", "dep", "not", "in", "occurrences", ":", "occurrences", "[", "dep", "]", "=", "deps", "else", ":", "occurrences", "[", "dep", "]", "=", "occurrences", "[", "dep", "]", "+", "deps", "self", ".", "occurrences", "=", "occurrences", "# occurrences of main packages", "for", "main", "in", "data", "[", "\"data\"", "]", "[", "\"main\"", "]", ":", "filename", "=", "main", "[", "\"filename\"", "]", "for", "dep", "in", "main", "[", "\"dependencies\"", "]", ":", "if", "dep", "not", "in", "main_occurrences", ":", "main_occurrences", "[", "dep", "]", "=", "[", "filename", "]", "else", ":", "main_occurrences", "[", "dep", "]", ".", "append", "(", "filename", ")", "self", ".", "main_occurrences", "=", "main_occurrences", "# test directories", "self", ".", "test_directories", "=", "sorted", "(", "map", "(", "lambda", "l", ":", "l", "[", "\"test\"", "]", ",", "data", "[", "\"data\"", "]", "[", "\"tests\"", "]", ")", ")", "# provided devel packages", "self", ".", "provided_packages", "=", "sorted", "(", "data", "[", "\"data\"", "]", "[", "\"packages\"", "]", ")", "# imported paths in devel packages", "imported_packages", "=", "[", "]", "imported_native_packages", "=", "[", "]", "for", "path", "in", "occurrences", ":", "try", ":", "self", ".", "ipparser", ".", "parse", "(", "path", ")", "except", "ValueError", ":", "continue", "if", "self", ".", "ipparser", ".", "isNative", "(", ")", ":", "imported_native_packages", ".", "append", "(", "path", ")", "else", ":", "imported_packages", ".", "append", "(", "path", ")", "self", ".", "imported_packages", "=", "sorted", "(", "imported_packages", ")", "self", ".", "imported_native_packages", "=", "sorted", "(", "imported_native_packages", ")", "# main packages", "self", ".", "main_packages", "=", "map", "(", "lambda", "l", ":", "l", "[", "\"filename\"", "]", ",", "data", "[", "\"data\"", "]", "[", "\"main\"", "]", ")" ]
Construct info about a project from artefact :param data: golang-project-packages artefact :type data: json/dict
[ "Construct", "info", "about", "a", "project", "from", "artefact" ]
0674c248fe3d8706f98f912996b65af469f96b10
https://github.com/gofed/gofedlib/blob/0674c248fe3d8706f98f912996b65af469f96b10/gofedlib/go/projectinfo.py#L43-L103
train
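Under Python 3, map() returns an iterator, so occurrences[dep] + deps above would raise a TypeError and the self.* fields would hold one-shot iterators; list comprehensions sidestep this. A sketch of the dependency branch only, not the project's own code:

if package != ".":
    deps = ['%s/%s' % (package, loc) for loc in item['location']]
else:
    deps = list(item['location'])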
NiklasRosenstein/py-bundler
bundler/modules.py
ModuleInfo.join_import_from
def join_import_from(self, import_spec): """ Joins a relative import like `from .foo import bar` with this module as its parent module. If the module is not a root module or package root, it will be joined with the package root. """ if not self.isroot and not self.ispkg: parent = self.name.rpartition('.')[0] else: parent = self.name return join_import_from(import_spec, parent)
python
def join_import_from(self, import_spec): """ Joins a relative import like `from .foo import bar` with this module as its parent module. If the module is not a root module or package root, it will be joined with the package root. """ if not self.isroot and not self.ispkg: parent = self.name.rpartition('.')[0] else: parent = self.name return join_import_from(import_spec, parent)
[ "def", "join_import_from", "(", "self", ",", "import_spec", ")", ":", "if", "not", "self", ".", "isroot", "and", "not", "self", ".", "ispkg", ":", "parent", "=", "self", ".", "name", ".", "rpartition", "(", "'.'", ")", "[", "0", "]", "else", ":", "parent", "=", "self", ".", "name", "return", "join_import_from", "(", "import_spec", ",", "parent", ")" ]
Joins a relative import like `from .foo import bar` with this module as its parent module. If the module is not a root module or package root, it will be joined with the package root.
[ "Joins", "a", "relative", "import", "like", "from", ".", "foo", "import", "bar", "with", "this", "module", "as", "its", "parent", "module", ".", "If", "the", "module", "is", "not", "a", "root", "module", "or", "package", "root", "it", "will", "be", "joined", "with", "the", "package", "root", "." ]
80dd6dc971667ba015f7f67481417c45cc757231
https://github.com/NiklasRosenstein/py-bundler/blob/80dd6dc971667ba015f7f67481417c45cc757231/bundler/modules.py#L80-L91
train
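join_import_from itself is not part of this record, so only the parent computation is illustrated:

# a non-root, non-package module uses its containing package as parent
name = 'pkg.sub.mod'
parent = name.rpartition('.')[0]  # 'pkg.sub'
# a root module or package keeps its own name as the parent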
geophysics-ubonn/crtomo_tools
lib/crtomo/status.py
is_tomodir
def is_tomodir(directory): """Check if the supplied directory is a tomodir Parameters ---------- directory: string Check if the supplied path is a valid tomodir Returns ------- is_tomodir: bool True if the supplied directory is a tomodir directory """ if os.path.isdir(directory): if(os.path.isdir(directory + "/exe") and os.path.isdir(directory + "/config") and os.path.isdir(directory + "/rho") and os.path.isdir(directory + "/inv") and os.path.isdir(directory + "/mod")): return True else: return False else: return False
python
def is_tomodir(directory): """Check if the supplied directory is a tomodir Parameters ---------- directory: string Check if the supplied path is a valid tomodir Returns ------- is_tomodir: bool True if the supplied directory is a tomodir directory """ if os.path.isdir(directory): if(os.path.isdir(directory + "/exe") and os.path.isdir(directory + "/config") and os.path.isdir(directory + "/rho") and os.path.isdir(directory + "/inv") and os.path.isdir(directory + "/mod")): return True else: return False else: return False
[ "def", "is_tomodir", "(", "directory", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "directory", ")", ":", "if", "(", "os", ".", "path", ".", "isdir", "(", "directory", "+", "\"/exe\"", ")", "and", "os", ".", "path", ".", "isdir", "(", "directory", "+", "\"/config\"", ")", "and", "os", ".", "path", ".", "isdir", "(", "directory", "+", "\"/rho\"", ")", "and", "os", ".", "path", ".", "isdir", "(", "directory", "+", "\"/inv\"", ")", "and", "os", ".", "path", ".", "isdir", "(", "directory", "+", "\"/mod\"", ")", ")", ":", "return", "True", "else", ":", "return", "False", "else", ":", "return", "False" ]
Check if the supplied directory is a tomodir Parameters ---------- directory: string Check if the supplied path is a valid tomodir Returns ------- is_tomodir: bool True if the supplied directory is a tomodir directory
[ "Check", "if", "the", "supplied", "directory", "is", "a", "tomodir" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/status.py#L9-L32
train
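An equivalent, more compact sketch of the same check using os.path.join and all():

import os

def is_tomodir(directory):
    subdirs = ('exe', 'config', 'rho', 'inv', 'mod')
    return os.path.isdir(directory) and all(
        os.path.isdir(os.path.join(directory, sub)) for sub in subdirs)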
geophysics-ubonn/crtomo_tools
lib/crtomo/status.py
td_is_finished
def td_is_finished(tomodir): """Return the state of modeling and inversion for a given tomodir. The result does not take into account sensitivities or potentials, as optionally generated by CRMod. Parameters ---------- tomodir: string Directory to check Returns ------- crmod_is_finished: bool True if a successful CRMod result is contained in the tomodir directory. crtomo_is_finished: bool True if a successful CRTomo inversion results is contained in the tomodir directory. """ if not is_tomodir(tomodir): raise Exception('Supplied directory is not a tomodir!') # crmod finished is determined by: # config.dat/rho.dat/crmod.cfg are present # volt.dat is present if(os.path.isfile(tomodir + os.sep + 'config/config.dat') and os.path.isfile(tomodir + os.sep + 'rho/rho.dat') and os.path.isfile(tomodir + os.sep + 'grid/elem.dat') and os.path.isfile(tomodir + os.sep + 'grid/elec.dat') and os.path.isfile(tomodir + os.sep + 'exe/crmod.cfg') and os.path.isfile(tomodir + os.sep + 'mod/volt.dat')): crmod_is_finished = True else: crmod_is_finished = False # crtomo is finished if # crtomo.cfg/volt.dat/elem.dat/elec.dat are present # inv/run.ctr contains the word "CPU" in the last line if(os.path.isfile(tomodir + os.sep + 'grid/elem.dat') and os.path.isfile(tomodir + os.sep + 'grid/elec.dat') and os.path.isfile(tomodir + os.sep + 'exe/crtomo.cfg') and os.path.isfile(tomodir + os.sep + 'inv/inv.ctr') and os.path.isfile(tomodir + os.sep + 'inv/run.ctr') and os.path.isfile(tomodir + os.sep + 'mod/volt.dat')): with open(tomodir + os.sep + 'inv/run.ctr', 'r') as fid: lines = fid.readlines() crtomo_is_finished = False # check the last 5 lines for line in lines[-5:]: test_line = line.strip() regex = re.compile('CPU') result = regex.match(test_line) if result is not None: crtomo_is_finished = True else: crtomo_is_finished = False return crmod_is_finished, crtomo_is_finished
python
def td_is_finished(tomodir): """Return the state of modeling and inversion for a given tomodir. The result does not take into account sensitivities or potentials, as optionally generated by CRMod. Parameters ---------- tomodir: string Directory to check Returns ------- crmod_is_finished: bool True if a successful CRMod result is contained in the tomodir directory. crtomo_is_finished: bool True if a successful CRTomo inversion results is contained in the tomodir directory. """ if not is_tomodir(tomodir): raise Exception('Supplied directory is not a tomodir!') # crmod finished is determined by: # config.dat/rho.dat/crmod.cfg are present # volt.dat is present if(os.path.isfile(tomodir + os.sep + 'config/config.dat') and os.path.isfile(tomodir + os.sep + 'rho/rho.dat') and os.path.isfile(tomodir + os.sep + 'grid/elem.dat') and os.path.isfile(tomodir + os.sep + 'grid/elec.dat') and os.path.isfile(tomodir + os.sep + 'exe/crmod.cfg') and os.path.isfile(tomodir + os.sep + 'mod/volt.dat')): crmod_is_finished = True else: crmod_is_finished = False # crtomo is finished if # crtomo.cfg/volt.dat/elem.dat/elec.dat are present # inv/run.ctr contains the word "CPU" in the last line if(os.path.isfile(tomodir + os.sep + 'grid/elem.dat') and os.path.isfile(tomodir + os.sep + 'grid/elec.dat') and os.path.isfile(tomodir + os.sep + 'exe/crtomo.cfg') and os.path.isfile(tomodir + os.sep + 'inv/inv.ctr') and os.path.isfile(tomodir + os.sep + 'inv/run.ctr') and os.path.isfile(tomodir + os.sep + 'mod/volt.dat')): with open(tomodir + os.sep + 'inv/run.ctr', 'r') as fid: lines = fid.readlines() crtomo_is_finished = False # check the last 5 lines for line in lines[-5:]: test_line = line.strip() regex = re.compile('CPU') result = regex.match(test_line) if result is not None: crtomo_is_finished = True else: crtomo_is_finished = False return crmod_is_finished, crtomo_is_finished
[ "def", "td_is_finished", "(", "tomodir", ")", ":", "if", "not", "is_tomodir", "(", "tomodir", ")", ":", "raise", "Exception", "(", "'Supplied directory is not a tomodir!'", ")", "# crmod finished is determined by:", "# config.dat/rho.dat/crmod.cfg are present", "# volt.dat is present", "if", "(", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'config/config.dat'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'rho/rho.dat'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'grid/elem.dat'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'grid/elec.dat'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'exe/crmod.cfg'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'mod/volt.dat'", ")", ")", ":", "crmod_is_finished", "=", "True", "else", ":", "crmod_is_finished", "=", "False", "# crtomo is finished if", "# crtomo.cfg/volt.dat/elem.dat/elec.dat are present", "# inv/run.ctr contains the word \"CPU\" in the last line", "if", "(", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'grid/elem.dat'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'grid/elec.dat'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'exe/crtomo.cfg'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'inv/inv.ctr'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'inv/run.ctr'", ")", "and", "os", ".", "path", ".", "isfile", "(", "tomodir", "+", "os", ".", "sep", "+", "'mod/volt.dat'", ")", ")", ":", "with", "open", "(", "tomodir", "+", "os", ".", "sep", "+", "'inv/run.ctr'", ",", "'r'", ")", "as", "fid", ":", "lines", "=", "fid", ".", "readlines", "(", ")", "crtomo_is_finished", "=", "False", "# check the last 5 lines", "for", "line", "in", "lines", "[", "-", "5", ":", "]", ":", "test_line", "=", "line", ".", "strip", "(", ")", "regex", "=", "re", ".", "compile", "(", "'CPU'", ")", "result", "=", "regex", ".", "match", "(", "test_line", ")", "if", "result", "is", "not", "None", ":", "crtomo_is_finished", "=", "True", "else", ":", "crtomo_is_finished", "=", "False", "return", "crmod_is_finished", ",", "crtomo_is_finished" ]
Return the state of modeling and inversion for a given tomodir. The result does not take into account sensitivities or potentials, as optionally generated by CRMod. Parameters ---------- tomodir: string Directory to check Returns ------- crmod_is_finished: bool True if a successful CRMod result is contained in the tomodir directory. crtomo_is_finished: bool True if a successful CRTomo inversion result is contained in the tomodir directory.
[ "Return", "the", "state", "of", "modeling", "and", "inversion", "for", "a", "given", "tomodir", ".", "The", "result", "does", "not", "take", "into", "account", "sensitivities", "or", "potentials", "as", "optionally", "generated", "by", "CRMod", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/status.py#L35-L91
train
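In the loop above the else branch resets the flag on every non-matching line, so effectively only the last of the five lines decides the result; if any of the last five lines should count, an any()-based sketch that keeps re.match's start-of-line anchoring is:

crtomo_is_finished = any(
    line.strip().startswith('CPU') for line in lines[-5:])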
geophysics-ubonn/crtomo_tools
lib/crtomo/status.py
is_sipdir
def is_sipdir(directory): """ Simple check if the supplied directory is a SIP directory. Parameters ---------- directory: string Check if the supplied path is a valid SIP directory Returns ------- is_sipdir: bool True if the supplied directory is a SIP directory """ is_sipdir = True if(not os.path.isfile(directory + os.sep + 'frequencies.dat')): is_sipdir = False if(not os.path.isdir(directory + os.sep + 'invmod')): is_sipdir = False return is_sipdir
python
def is_sipdir(directory): """ Simple check if the supplied directory is a SIP directory. Parameters ---------- directory: string Check if the supplied path is a valid SIP directory Returns ------- is_sipdir: bool True if the supplied directory is a SIP directory """ is_sipdir = True if(not os.path.isfile(directory + os.sep + 'frequencies.dat')): is_sipdir = False if(not os.path.isdir(directory + os.sep + 'invmod')): is_sipdir = False return is_sipdir
[ "def", "is_sipdir", "(", "directory", ")", ":", "is_sipdir", "=", "True", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "directory", "+", "os", ".", "sep", "+", "'frequencies.dat'", ")", ")", ":", "is_sipdir", "=", "False", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "directory", "+", "os", ".", "sep", "+", "'invmod'", ")", ")", ":", "is_sipdir", "=", "False", "return", "is_sipdir" ]
Simple check if the supplied directory is a SIP directory. Parameters ---------- directory: string Check if the supplied path is a valid SIP directory Returns ------- is_sipdir: bool True if the supplied directory is a SIP directory
[ "Simple", "check", "if", "the", "supplied", "directory", "is", "a", "SIP", "directory", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/status.py#L94-L116
train
geophysics-ubonn/crtomo_tools
lib/crtomo/status.py
sipdir_is_finished
def sipdir_is_finished(sipdir): """Return the state of modeling and inversion for a given SIP dir. The result does not take into account sensitivities or potentials, as optionally generated by CRMod. Parameters ---------- sipdir: string Directory to check Returns ------- crmod_is_finished: bool True if all tomodirs of this SIP directory contain finished modeling results. crtomo_is_finished: bool True if all tomodirs of this SIP directory contain finished inversion results. """ if not is_sipdir(sipdir): raise Exception('Directory is not a valid SIP directory!') subdirs_raw = sorted(glob.glob(sipdir + os.sep + 'invmod' + os.sep + '*')) subdirs = [x for x in subdirs_raw if os.path.isdir(x)] crmod_finished = True crtomo_finished = True for subdir in subdirs: subcrmod, subcrtomo = td_is_finished(subdir) if not subcrmod: crmod_finished = False if not subcrtomo: crtomo_finished = False return crmod_finished, crtomo_finished
python
def sipdir_is_finished(sipdir): """Return the state of modeling and inversion for a given SIP dir. The result does not take into account sensitivities or potentials, as optionally generated by CRMod. Parameters ---------- sipdir: string Directory to check Returns ------- crmod_is_finished: bool True if all tomodirs of this SIP directory contain finished modeling results. crtomo_is_finished: bool True if all tomodirs of this SIP directory contain finished inversion results. """ if not is_sipdir(sipdir): raise Exception('Directory is not a valid SIP directory!') subdirs_raw = sorted(glob.glob(sipdir + os.sep + 'invmod' + os.sep + '*')) subdirs = [x for x in subdirs_raw if os.path.isdir(x)] crmod_finished = True crtomo_finished = True for subdir in subdirs: subcrmod, subcrtomo = td_is_finished(subdir) if not subcrmod: crmod_finished = False if not subcrtomo: crtomo_finished = False return crmod_finished, crtomo_finished
[ "def", "sipdir_is_finished", "(", "sipdir", ")", ":", "if", "not", "is_sipdir", "(", "sipdir", ")", ":", "raise", "Exception", "(", "'Directory is not a valid SIP directory!'", ")", "subdirs_raw", "=", "sorted", "(", "glob", ".", "glob", "(", "sipdir", "+", "os", ".", "sep", "+", "'invmod'", "+", "os", ".", "sep", "+", "'*'", ")", ")", "subdirs", "=", "[", "x", "for", "x", "in", "subdirs_raw", "if", "os", ".", "path", ".", "isdir", "(", "x", ")", "]", "crmod_finished", "=", "True", "crtomo_finished", "=", "True", "for", "subdir", "in", "subdirs", ":", "subcrmod", ",", "subcrtomo", "=", "td_is_finished", "(", "subdir", ")", "if", "not", "subcrmod", ":", "crmod_finished", "=", "False", "if", "not", "subcrtomo", ":", "crtomo_finished", "=", "False", "return", "crmod_finished", ",", "crtomo_finished" ]
Return the state of modeling and inversion for a given SIP dir. The result does not take into account sensitivities or potentials, as optionally generated by CRMod. Parameters ---------- sipdir: string Directory to check Returns ------- crmod_is_finished: bool True if all tomodirs of this SIP directory contain finished modeling results. crtomo_is_finished: bool True if all tomodirs of this SIP directory contain finished inversion results.
[ "Return", "the", "state", "of", "modeling", "and", "inversion", "for", "a", "given", "SIP", "dir", ".", "The", "result", "does", "not", "take", "into", "account", "sensitivities", "or", "potentials", "as", "optionally", "generated", "by", "CRMod", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/status.py#L119-L154
train
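The per-tomodir aggregation can also be written with all(); a sketch:

states = [td_is_finished(subdir) for subdir in subdirs]
crmod_finished = all(crmod for crmod, _ in states)
crtomo_finished = all(crtomo for _, crtomo in states)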
redhat-openstack/python-tripleo-helper
tripleohelper/ovb_undercloud.py
OVBUndercloud.enable_neutron_hack
def enable_neutron_hack(self, os_username, os_password, os_project_id, os_auth_url): """Enable the neutron hack on the undercloud. This script will watch the undercloud and copy any relevant network configuration in the host OpenStack. This is required to avoid the firewall limitations (no-spoofing and DHCP restriction). """ self.yum_install(['python-neutronclient']) self.send_file(pkg_data_filename('static', 'ovb_fix_neutron_addr'), '/usr/local/bin/ovb_fix_neutron_addr', unix_mode=0o755) content = """ [Unit] Description=OVB neutron hack Service [Service] ExecStart=/usr/local/bin/ovb_fix_neutron_addr --os-user {os_username} --os-password {os_password} --os-project-id {os_project_id} --os-auth-url {os_auth_url} User=root StandardOutput=kmsg+console StandardError=inherit Restart=always [Install] WantedBy=multi-user.target """ unit = 'ovb_fix_neutron_addr.service' self.create_file( '/usr/lib/systemd/system/%s' % unit, content.format( os_username=os_username, os_password=protect_password(os_password), os_project_id=os_project_id, os_auth_url=os_auth_url)) self.run('systemctl enable %s' % unit) self.run('systemctl start %s' % unit)
python
def enable_neutron_hack(self, os_username, os_password, os_project_id, os_auth_url): """Enable the neutron hack on the undercloud. This script will watch the undercloud and copy any relevant network configuration in the host OpenStack. This is required to avoid the firewall limitations (no-spoofing and DHCP restriction). """ self.yum_install(['python-neutronclient']) self.send_file(pkg_data_filename('static', 'ovb_fix_neutron_addr'), '/usr/local/bin/ovb_fix_neutron_addr', unix_mode=0o755) content = """ [Unit] Description=OVB neutron hack Service [Service] ExecStart=/usr/local/bin/ovb_fix_neutron_addr --os-user {os_username} --os-password {os_password} --os-project-id {os_project_id} --os-auth-url {os_auth_url} User=root StandardOutput=kmsg+console StandardError=inherit Restart=always [Install] WantedBy=multi-user.target """ unit = 'ovb_fix_neutron_addr.service' self.create_file( '/usr/lib/systemd/system/%s' % unit, content.format( os_username=os_username, os_password=protect_password(os_password), os_project_id=os_project_id, os_auth_url=os_auth_url)) self.run('systemctl enable %s' % unit) self.run('systemctl start %s' % unit)
[ "def", "enable_neutron_hack", "(", "self", ",", "os_username", ",", "os_password", ",", "os_project_id", ",", "os_auth_url", ")", ":", "self", ".", "yum_install", "(", "[", "'python-neutronclient'", "]", ")", "self", ".", "send_file", "(", "pkg_data_filename", "(", "'static'", ",", "'ovb_fix_neutron_addr'", ")", ",", "'/usr/local/bin/ovb_fix_neutron_addr'", ",", "unix_mode", "=", "0o755", ")", "content", "=", "\"\"\"\n[Unit]\nDescription=OVB neutron hack Service\n[Service]\nExecStart=/usr/local/bin/ovb_fix_neutron_addr --os-user {os_username} --os-password {os_password} --os-project-id {os_project_id} --os-auth-url {os_auth_url}\nUser=root\nStandardOutput=kmsg+console\nStandardError=inherit\nRestart=always\n[Install]\nWantedBy=multi-user.target\n\"\"\"", "unit", "=", "'ovb_fix_neutron_addr.service'", "self", ".", "create_file", "(", "'/usr/lib/systemd/system/%s'", "%", "unit", ",", "content", ".", "format", "(", "os_username", "=", "os_username", ",", "os_password", "=", "protect_password", "(", "os_password", ")", ",", "os_project_id", "=", "os_project_id", ",", "os_auth_url", "=", "os_auth_url", ")", ")", "self", ".", "run", "(", "'systemctl enable %s'", "%", "unit", ")", "self", ".", "run", "(", "'systemctl start %s'", "%", "unit", ")" ]
Enable the neutron hack on the undercloud. This script will watch the undercloud and copy any relevant network configuration in the host OpenStack. This is required to avoid the firewall limitations (no-spoofing and DHCP restriction).
[ "Enable", "the", "neutron", "hack", "on", "the", "undercloud", "." ]
bfa165538335edb1088170c7a92f097167225c81
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/ovb_undercloud.py#L96-L126
train
redhat-openstack/python-tripleo-helper
tripleohelper/ovb_undercloud.py
OVBUndercloud.patch_ironic_ramdisk
def patch_ironic_ramdisk(self): """Clean the disk before flushing the new image. See: https://bugs.launchpad.net/ironic-lib/+bug/1550604 """ tmpdir = self.run('mktemp -d')[0].rstrip('\n') self.run('cd {tmpdir}; zcat /home/stack/ironic-python-agent.initramfs| cpio -id'.format(tmpdir=tmpdir)) self.send_file(pkg_data_filename('static', 'ironic-wipefs.patch'), '/tmp/ironic-wipefs.patch') self.run('cd {tmpdir}; patch -p0 < /tmp/ironic-wipefs.patch'.format(tmpdir=tmpdir)) self.run('cd {tmpdir}; find . | cpio --create --format=newc > /home/stack/ironic-python-agent.initramfs'.format(tmpdir=tmpdir))
python
def patch_ironic_ramdisk(self): """Clean the disk before flushing the new image. See: https://bugs.launchpad.net/ironic-lib/+bug/1550604 """ tmpdir = self.run('mktemp -d')[0].rstrip('\n') self.run('cd {tmpdir}; zcat /home/stack/ironic-python-agent.initramfs| cpio -id'.format(tmpdir=tmpdir)) self.send_file(pkg_data_filename('static', 'ironic-wipefs.patch'), '/tmp/ironic-wipefs.patch') self.run('cd {tmpdir}; patch -p0 < /tmp/ironic-wipefs.patch'.format(tmpdir=tmpdir)) self.run('cd {tmpdir}; find . | cpio --create --format=newc > /home/stack/ironic-python-agent.initramfs'.format(tmpdir=tmpdir))
[ "def", "patch_ironic_ramdisk", "(", "self", ")", ":", "tmpdir", "=", "self", ".", "run", "(", "'mktemp -d'", ")", "[", "0", "]", ".", "rstrip", "(", "'\\n'", ")", "self", ".", "run", "(", "'cd {tmpdir}; zcat /home/stack/ironic-python-agent.initramfs| cpio -id'", ".", "format", "(", "tmpdir", "=", "tmpdir", ")", ")", "self", ".", "send_file", "(", "pkg_data_filename", "(", "'static'", ",", "'ironic-wipefs.patch'", ")", ",", "'/tmp/ironic-wipefs.patch'", ")", "self", ".", "run", "(", "'cd {tmpdir}; patch -p0 < /tmp/ironic-wipefs.patch'", ".", "format", "(", "tmpdir", "=", "tmpdir", ")", ")", "self", ".", "run", "(", "'cd {tmpdir}; find . | cpio --create --format=newc > /home/stack/ironic-python-agent.initramfs'", ".", "format", "(", "tmpdir", "=", "tmpdir", ")", ")" ]
Clean the disk before flushing the new image. See: https://bugs.launchpad.net/ironic-lib/+bug/1550604
[ "Clean", "the", "disk", "before", "flushing", "the", "new", "image", "." ]
bfa165538335edb1088170c7a92f097167225c81
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/ovb_undercloud.py#L128-L137
train
elifiner/termenu
termenu/menu.py
show_menu
def show_menu(title, options, default=None, height=None, width=None, multiselect=False, precolored=False): """ Shows an interactive menu in the terminal. Arguments: options: list of menu options default: initial option to highlight height: maximum height of the menu width: maximum width of the menu multiselect: allow multiple items to be selected? precolored: allow strings with embedded ANSI commands Returns: * If multiselect is True, returns a list of selected options. * If multiselect is False, returns the selected option. * If an option is a 2-tuple, the first item will be displayed and the second item will be returned. * If menu is cancelled (Esc pressed), returns None. * Notes: * You can pass OptionGroup objects to `options` to create sub-headers in the menu. """ plugins = [FilterPlugin()] if any(isinstance(opt, OptionGroup) for opt in options): plugins.append(OptionGroupPlugin()) if title: plugins.append(TitlePlugin(title)) if precolored: plugins.append(PrecoloredPlugin()) menu = Termenu(options, default=default, height=height, width=width, multiselect=multiselect, plugins=plugins) return menu.show()
python
def show_menu(title, options, default=None, height=None, width=None, multiselect=False, precolored=False): """ Shows an interactive menu in the terminal. Arguments: options: list of menu options default: initial option to highlight height: maximum height of the menu width: maximum width of the menu multiselect: allow multiple items to be selected? precolored: allow strings with embedded ANSI commands Returns: * If multiselect is True, returns a list of selected options. * If multiselect is False, returns the selected option. * If an option is a 2-tuple, the first item will be displayed and the second item will be returned. * If menu is cancelled (Esc pressed), returns None. * Notes: * You can pass OptionGroup objects to `options` to create sub-headers in the menu. """ plugins = [FilterPlugin()] if any(isinstance(opt, OptionGroup) for opt in options): plugins.append(OptionGroupPlugin()) if title: plugins.append(TitlePlugin(title)) if precolored: plugins.append(PrecoloredPlugin()) menu = Termenu(options, default=default, height=height, width=width, multiselect=multiselect, plugins=plugins) return menu.show()
[ "def", "show_menu", "(", "title", ",", "options", ",", "default", "=", "None", ",", "height", "=", "None", ",", "width", "=", "None", ",", "multiselect", "=", "False", ",", "precolored", "=", "False", ")", ":", "plugins", "=", "[", "FilterPlugin", "(", ")", "]", "if", "any", "(", "isinstance", "(", "opt", ",", "OptionGroup", ")", "for", "opt", "in", "options", ")", ":", "plugins", ".", "append", "(", "OptionGroupPlugin", "(", ")", ")", "if", "title", ":", "plugins", ".", "append", "(", "TitlePlugin", "(", "title", ")", ")", "if", "precolored", ":", "plugins", ".", "append", "(", "PrecoloredPlugin", "(", ")", ")", "menu", "=", "Termenu", "(", "options", ",", "default", "=", "default", ",", "height", "=", "height", ",", "width", "=", "width", ",", "multiselect", "=", "multiselect", ",", "plugins", "=", "plugins", ")", "return", "menu", ".", "show", "(", ")" ]
Shows an interactive menu in the terminal. Arguments: options: list of menu options default: initial option to highlight height: maximum height of the menu width: maximum width of the menu multiselect: allow multiple items to be selected? precolored: allow strings with embedded ANSI commands Returns: * If multiselect is True, returns a list of selected options. * If multiselect is False, returns the selected option. * If an option is a 2-tuple, the first item will be displayed and the second item will be returned. * If menu is cancelled (Esc pressed), returns None. * Notes: * You can pass OptionGroup objects to `options` to create sub-headers in the menu.
[ "Shows", "an", "interactive", "menu", "in", "the", "terminal", "." ]
a7a57a1b07d8451003ee750704cdf0d904e9e272
https://github.com/elifiner/termenu/blob/a7a57a1b07d8451003ee750704cdf0d904e9e272/termenu/menu.py#L6-L39
train
elifiner/termenu
termenu/menu.py
pluggable
def pluggable(method): """ Mark a class method as extendable with plugins. """ def wrapped(self, *args, **kwargs): if hasattr(self, "_plugins"): # call the last plugin, it may call the previous via self.parent.method # creating a call chain return getattr(self._plugins[-1], method.__name__)(*args, **kwargs) else: return method(self, *args, **kwargs) wrapped.original = method return wrapped
python
def pluggable(method): """ Mark a class method as extendable with plugins. """ def wrapped(self, *args, **kwargs): if hasattr(self, "_plugins"): # call the last plugin, it may call the previous via self.parent.method # creating a call chain return getattr(self._plugins[-1], method.__name__)(*args, **kwargs) else: return method(self, *args, **kwargs) wrapped.original = method return wrapped
[ "def", "pluggable", "(", "method", ")", ":", "def", "wrapped", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "hasattr", "(", "self", ",", "\"_plugins\"", ")", ":", "# call the last plugin, it may call the previous via self.parent.method", "# creating a call call chain", "return", "getattr", "(", "self", ".", "_plugins", "[", "-", "1", "]", ",", "method", ".", "__name__", ")", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "method", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "wrapped", ".", "original", "=", "method", "return", "wrapped" ]
Mark a class method as extendable with plugins.
[ "Mark", "a", "class", "method", "as", "extendable", "with", "plugins", "." ]
a7a57a1b07d8451003ee750704cdf0d904e9e272
https://github.com/elifiner/termenu/blob/a7a57a1b07d8451003ee750704cdf0d904e9e272/termenu/menu.py#L46-L58
train
elifiner/termenu
termenu/menu.py
register_plugin
def register_plugin(host, plugin): """ Register a plugin with a host object. Some @pluggable methods in the host will have their behaviour altered by the plugin. """ class OriginalMethods(object): def __getattr__(self, name): return lambda *args, **kwargs: getattr(host, name).original(host, *args, **kwargs) if not hasattr(host, "_plugins"): host._plugins = [OriginalMethods()] plugin.parent = host._plugins[-1] plugin.host = host host._plugins.append(plugin)
python
def register_plugin(host, plugin): """ Register a plugin with a host object. Some @pluggable methods in the host will have their behaviour altered by the plugin. """ class OriginalMethods(object): def __getattr__(self, name): return lambda *args, **kwargs: getattr(host, name).original(host, *args, **kwargs) if not hasattr(host, "_plugins"): host._plugins = [OriginalMethods()] plugin.parent = host._plugins[-1] plugin.host = host host._plugins.append(plugin)
[ "def", "register_plugin", "(", "host", ",", "plugin", ")", ":", "class", "OriginalMethods", "(", "object", ")", ":", "def", "__getattr__", "(", "self", ",", "name", ")", ":", "return", "lambda", "*", "args", ",", "*", "*", "kwargs", ":", "getattr", "(", "host", ",", "name", ")", ".", "original", "(", "host", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "not", "hasattr", "(", "host", ",", "\"_plugins\"", ")", ":", "host", ".", "_plugins", "=", "[", "OriginalMethods", "(", ")", "]", "plugin", ".", "parent", "=", "host", ".", "_plugins", "[", "-", "1", "]", "plugin", ".", "host", "=", "host", "host", ".", "_plugins", ".", "append", "(", "plugin", ")" ]
Register a plugin with a host object. Some @pluggable methods in the host will have their behaviour altered by the plugin.
[ "Register", "a", "plugin", "with", "a", "host", "object", ".", "Some" ]
a7a57a1b07d8451003ee750704cdf0d904e9e272
https://github.com/elifiner/termenu/blob/a7a57a1b07d8451003ee750704cdf0d904e9e272/termenu/menu.py#L60-L72
train
NikolayDachev/jadm
lib/paramiko-1.14.1/paramiko/sftp_client.py
SFTPClient.chdir
def chdir(self, path=None): """ Change the "current directory" of this SFTP session. Since SFTP doesn't really have the concept of a current working directory, this is emulated by Paramiko. Once you use this method to set a working directory, all operations on this `.SFTPClient` object will be relative to that path. You can pass in ``None`` to stop using a current working directory. :param str path: new current working directory :raises IOError: if the requested path doesn't exist on the server .. versionadded:: 1.4 """ if path is None: self._cwd = None return if not stat.S_ISDIR(self.stat(path).st_mode): raise SFTPError(errno.ENOTDIR, "%s: %s" % (os.strerror(errno.ENOTDIR), path)) self._cwd = b(self.normalize(path))
python
def chdir(self, path=None): """ Change the "current directory" of this SFTP session. Since SFTP doesn't really have the concept of a current working directory, this is emulated by Paramiko. Once you use this method to set a working directory, all operations on this `.SFTPClient` object will be relative to that path. You can pass in ``None`` to stop using a current working directory. :param str path: new current working directory :raises IOError: if the requested path doesn't exist on the server .. versionadded:: 1.4 """ if path is None: self._cwd = None return if not stat.S_ISDIR(self.stat(path).st_mode): raise SFTPError(errno.ENOTDIR, "%s: %s" % (os.strerror(errno.ENOTDIR), path)) self._cwd = b(self.normalize(path))
[ "def", "chdir", "(", "self", ",", "path", "=", "None", ")", ":", "if", "path", "is", "None", ":", "self", ".", "_cwd", "=", "None", "return", "if", "not", "stat", ".", "S_ISDIR", "(", "self", ".", "stat", "(", "path", ")", ".", "st_mode", ")", ":", "raise", "SFTPError", "(", "errno", ".", "ENOTDIR", ",", "\"%s: %s\"", "%", "(", "os", ".", "strerror", "(", "errno", ".", "ENOTDIR", ")", ",", "path", ")", ")", "self", ".", "_cwd", "=", "b", "(", "self", ".", "normalize", "(", "path", ")", ")" ]
Change the "current directory" of this SFTP session. Since SFTP doesn't really have the concept of a current working directory, this is emulated by Paramiko. Once you use this method to set a working directory, all operations on this `.SFTPClient` object will be relative to that path. You can pass in ``None`` to stop using a current working directory. :param str path: new current working directory :raises IOError: if the requested path doesn't exist on the server .. versionadded:: 1.4
[ "Change", "the", "current", "directory", "of", "this", "SFTP", "session", ".", "Since", "SFTP", "doesn", "t", "really", "have", "the", "concept", "of", "a", "current", "working", "directory", "this", "is", "emulated", "by", "Paramiko", ".", "Once", "you", "use", "this", "method", "to", "set", "a", "working", "directory", "all", "operations", "on", "this", ".", "SFTPClient", "object", "will", "be", "relative", "to", "that", "path", ".", "You", "can", "pass", "in", "None", "to", "stop", "using", "a", "current", "working", "directory", "." ]
12bb550445edfcd87506f7cba7a6a35d413c5511
https://github.com/NikolayDachev/jadm/blob/12bb550445edfcd87506f7cba7a6a35d413c5511/lib/paramiko-1.14.1/paramiko/sftp_client.py#L480-L500
train
NikolayDachev/jadm
lib/paramiko-1.14.1/paramiko/message.py
Message.get_int
def get_int(self): """ Fetch an int from the stream. :return: a 32-bit unsigned `int`. """ byte = self.get_bytes(1) if byte == max_byte: return util.inflate_long(self.get_binary()) byte += self.get_bytes(3) return struct.unpack('>I', byte)[0]
python
def get_int(self): """ Fetch an int from the stream. :return: a 32-bit unsigned `int`. """ byte = self.get_bytes(1) if byte == max_byte: return util.inflate_long(self.get_binary()) byte += self.get_bytes(3) return struct.unpack('>I', byte)[0]
[ "def", "get_int", "(", "self", ")", ":", "byte", "=", "self", ".", "get_bytes", "(", "1", ")", "if", "byte", "==", "max_byte", ":", "return", "util", ".", "inflate_long", "(", "self", ".", "get_binary", "(", ")", ")", "byte", "+=", "self", ".", "get_bytes", "(", "3", ")", "return", "struct", ".", "unpack", "(", "'>I'", ",", "byte", ")", "[", "0", "]" ]
Fetch an int from the stream. :return: a 32-bit unsigned `int`.
[ "Fetch", "an", "int", "from", "the", "stream", "." ]
12bb550445edfcd87506f7cba7a6a35d413c5511
https://github.com/NikolayDachev/jadm/blob/12bb550445edfcd87506f7cba7a6a35d413c5511/lib/paramiko-1.14.1/paramiko/message.py#L132-L142
train
reorx/torext
torext/log.py
set_logger
def set_logger(name, level='INFO', fmt=None, datefmt=None, propagate=1, remove_handlers=False): """ This function will clear the previous handlers and set only one handler, which will only be StreamHandler for the logger. This function is designed to be able to be called multiple times in a context. Note that if a logger has no handlers, a handler will be added to it automatically when it is used. """ logger = logging.getLogger(name) logger.setLevel(getattr(logging, level)) logger.propagate = propagate if remove_handlers: logger.handlers = [] return handler = None for h in logger.handlers: if isinstance(h, logging.StreamHandler): # use existing instead of clean and create handler = h break if not handler: handler = logging.StreamHandler() logger.addHandler(handler) formatter_kwgs = {} for i in ('fmt', 'datefmt'): if locals()[i] is not None: formatter_kwgs[i] = locals()[i] handler.setFormatter(BaseFormatter(**formatter_kwgs))
python
def set_logger(name, level='INFO', fmt=None, datefmt=None, propagate=1, remove_handlers=False): """ This function will clear the previous handlers and set only one handler, which will only be StreamHandler for the logger. This function is designed to be able to be called multiple times in a context. Note that if a logger has no handlers, a handler will be added to it automatically when it is used. """ logger = logging.getLogger(name) logger.setLevel(getattr(logging, level)) logger.propagate = propagate if remove_handlers: logger.handlers = [] return handler = None for h in logger.handlers: if isinstance(h, logging.StreamHandler): # use existing instead of clean and create handler = h break if not handler: handler = logging.StreamHandler() logger.addHandler(handler) formatter_kwgs = {} for i in ('fmt', 'datefmt'): if locals()[i] is not None: formatter_kwgs[i] = locals()[i] handler.setFormatter(BaseFormatter(**formatter_kwgs))
[ "def", "set_logger", "(", "name", ",", "level", "=", "'INFO'", ",", "fmt", "=", "None", ",", "datefmt", "=", "None", ",", "propagate", "=", "1", ",", "remove_handlers", "=", "False", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "name", ")", "logger", ".", "setLevel", "(", "getattr", "(", "logging", ",", "level", ")", ")", "logger", ".", "propagate", "=", "propagate", "if", "remove_handlers", ":", "logger", ".", "handlers", "=", "[", "]", "return", "handler", "=", "None", "for", "h", "in", "logger", ".", "handlers", ":", "if", "isinstance", "(", "h", ",", "logging", ".", "StreamHandler", ")", ":", "# use existing instead of clean and create", "handler", "=", "h", "break", "if", "not", "handler", ":", "handler", "=", "logging", ".", "StreamHandler", "(", ")", "logger", ".", "addHandler", "(", "handler", ")", "formatter_kwgs", "=", "{", "}", "for", "i", "in", "(", "'fmt'", ",", "'datefmt'", ")", ":", "if", "locals", "(", ")", "[", "i", "]", "is", "not", "None", ":", "formatter_kwgs", "[", "i", "]", "=", "locals", "(", ")", "[", "i", "]", "handler", ".", "setFormatter", "(", "BaseFormatter", "(", "*", "*", "formatter_kwgs", ")", ")" ]
This function will clear the previous handlers and set only one handler, which will only be StreamHandler for the logger. This function is designed to be able to be called multiple times in a context. Note that if a logger has no handlers, a handler will be added to it automatically when it is used.
[ "This", "function", "will", "clear", "the", "previous", "handlers", "and", "set", "only", "one", "handler", "which", "will", "only", "be", "StreamHandler", "for", "the", "logger", "." ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/log.py#L152-L188
train
reorx/torext
torext/log.py
BaseFormatter.format
def format(self, record): """ return log in unicode """ self._format_record(record) record_dict = {} for k, v in record.__dict__.items(): if isinstance(k, str): k = decode_(k, 'utf8') if isinstance(v, str): v = decode_(v, 'utf8', 'replace') record_dict[k] = v if 'color' in self.fmt or 'end_color' in self.fmt: record_dict['color'], record_dict['end_color'] = _color(record.levelno) log = self.ufmt % record_dict if record.exc_text: if log[-1:] != '\n': log += '\n' log += decode_(record.exc_text, 'utf8', 'replace') log = log.replace('\n', '\n' + self.tab) return log
python
def format(self, record): """ return log in unicode """ self._format_record(record) record_dict = {} for k, v in record.__dict__.items(): if isinstance(k, str): k = decode_(k, 'utf8') if isinstance(v, str): v = decode_(v, 'utf8', 'replace') record_dict[k] = v if 'color' in self.fmt or 'end_color' in self.fmt: record_dict['color'], record_dict['end_color'] = _color(record.levelno) log = self.ufmt % record_dict if record.exc_text: if log[-1:] != '\n': log += '\n' log += decode_(record.exc_text, 'utf8', 'replace') log = log.replace('\n', '\n' + self.tab) return log
[ "def", "format", "(", "self", ",", "record", ")", ":", "self", ".", "_format_record", "(", "record", ")", "record_dict", "=", "{", "}", "for", "k", ",", "v", "in", "record", ".", "__dict__", ".", "items", "(", ")", ":", "if", "isinstance", "(", "k", ",", "str", ")", ":", "k", "=", "decode_", "(", "k", ",", "'utf8'", ")", "if", "isinstance", "(", "v", ",", "str", ")", ":", "v", "=", "decode_", "(", "v", ",", "'utf8'", ",", "'replace'", ")", "record_dict", "[", "k", "]", "=", "v", "if", "'color'", "in", "self", ".", "fmt", "or", "'end_color'", "in", "self", ".", "fmt", ":", "record_dict", "[", "'color'", "]", ",", "record_dict", "[", "'end_color'", "]", "=", "_color", "(", "record", ".", "levelno", ")", "log", "=", "self", ".", "ufmt", "%", "record_dict", "if", "record", ".", "exc_text", ":", "if", "log", "[", "-", "1", ":", "]", "!=", "'\\n'", ":", "log", "+=", "'\\n'", "log", "+=", "decode_", "(", "record", ".", "exc_text", ",", "'utf8'", ",", "'replace'", ")", "log", "=", "log", ".", "replace", "(", "'\\n'", ",", "'\\n'", "+", "self", ".", "tab", ")", "return", "log" ]
return log in unicode
[ "return", "log", "in", "unicode" ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/log.py#L108-L134
train
Riminder/python-riminder-api
riminder/profile.py
Profile.list
def list(self, source_ids=None, seniority="all", stage=None, date_start="1494539999", date_end=TIMESTAMP_NOW, filter_id=None, page=1, limit=30, sort_by='ranking', filter_reference=None, order_by=None): """ Retrieve all profiles that match the query param. Args: date_end: <string> REQUIRED (default to timestamp of now) profiles' last date of reception date_start: <string> REQUIRED (default to "1494539999") profiles' first date of reception filter_id: <string> limit: <int> (default to 30) number of fetched profiles/page page: <int> REQUIRED default to 1 number of the page associated to the pagination seniority: <string> default to "all" profiles' seniority ("all", "senior", "junior") sort_by: <string> source_ids: <array of strings> REQUIRED stage: <string> Returns Retrieve the profiles data as <dict> """ query_params = {} query_params["date_end"] = _validate_timestamp(date_end, "date_end") query_params["date_start"] = _validate_timestamp(date_start, "date_start") if filter_id: query_params["filter_id"] = _validate_filter_id(filter_id) if filter_reference: query_params["filter_reference"] = _validate_filter_reference(filter_reference) query_params["limit"] = _validate_limit(limit) query_params["page"] = _validate_page(page) query_params["seniority"] = _validate_seniority(seniority) query_params["sort_by"] = _validate_sort_by(sort_by) query_params["source_ids"] = json.dumps(_validate_source_ids(source_ids)) query_params["stage"] = _validate_stage(stage) query_params["order_by"] = order_by response = self.client.get("profiles", query_params) return response.json()
python
def list(self, source_ids=None, seniority="all", stage=None, date_start="1494539999", date_end=TIMESTAMP_NOW, filter_id=None, page=1, limit=30, sort_by='ranking', filter_reference=None, order_by=None): """ Retrieve all profiles that match the query param. Args: date_end: <string> REQUIRED (default to timestamp of now) profiles' last date of reception date_start: <string> REQUIRED (default to "1494539999") profiles' first date of reception filter_id: <string> limit: <int> (default to 30) number of fetched profiles/page page: <int> REQUIRED default to 1 number of the page associated to the pagination seniority: <string> default to "all" profiles' seniority ("all", "senior", "junior") sort_by: <string> source_ids: <array of strings> REQUIRED stage: <string> Returns Retrieve the profiles data as <dict> """ query_params = {} query_params["date_end"] = _validate_timestamp(date_end, "date_end") query_params["date_start"] = _validate_timestamp(date_start, "date_start") if filter_id: query_params["filter_id"] = _validate_filter_id(filter_id) if filter_reference: query_params["filter_reference"] = _validate_filter_reference(filter_reference) query_params["limit"] = _validate_limit(limit) query_params["page"] = _validate_page(page) query_params["seniority"] = _validate_seniority(seniority) query_params["sort_by"] = _validate_sort_by(sort_by) query_params["source_ids"] = json.dumps(_validate_source_ids(source_ids)) query_params["stage"] = _validate_stage(stage) query_params["order_by"] = order_by response = self.client.get("profiles", query_params) return response.json()
[ "def", "list", "(", "self", ",", "source_ids", "=", "None", ",", "seniority", "=", "\"all\"", ",", "stage", "=", "None", ",", "date_start", "=", "\"1494539999\"", ",", "date_end", "=", "TIMESTAMP_NOW", ",", "filter_id", "=", "None", ",", "page", "=", "1", ",", "limit", "=", "30", ",", "sort_by", "=", "'ranking'", ",", "filter_reference", "=", "None", ",", "order_by", "=", "None", ")", ":", "query_params", "=", "{", "}", "query_params", "[", "\"date_end\"", "]", "=", "_validate_timestamp", "(", "date_end", ",", "\"date_end\"", ")", "query_params", "[", "\"date_start\"", "]", "=", "_validate_timestamp", "(", "date_start", ",", "\"date_start\"", ")", "if", "filter_id", ":", "query_params", "[", "\"filter_id\"", "]", "=", "_validate_filter_id", "(", "filter_id", ")", "if", "filter_reference", ":", "query_params", "[", "\"filter_reference\"", "]", "=", "_validate_filter_reference", "(", "filter_reference", ")", "query_params", "[", "\"limit\"", "]", "=", "_validate_limit", "(", "limit", ")", "query_params", "[", "\"page\"", "]", "=", "_validate_page", "(", "page", ")", "query_params", "[", "\"seniority\"", "]", "=", "_validate_seniority", "(", "seniority", ")", "query_params", "[", "\"sort_by\"", "]", "=", "_validate_sort_by", "(", "sort_by", ")", "query_params", "[", "\"source_ids\"", "]", "=", "json", ".", "dumps", "(", "_validate_source_ids", "(", "source_ids", ")", ")", "query_params", "[", "\"stage\"", "]", "=", "_validate_stage", "(", "stage", ")", "query_params", "[", "\"order_by\"", "]", "=", "order_by", "response", "=", "self", ".", "client", ".", "get", "(", "\"profiles\"", ",", "query_params", ")", "return", "response", ".", "json", "(", ")" ]
Retrieve all profiles that match the query param. Args: date_end: <string> REQUIRED (default to timestamp of now) profiles' last date of reception date_start: <string> REQUIRED (default to "1494539999") profiles' first date of reception filter_id: <string> limit: <int> (default to 30) number of fetched profiles/page page: <int> REQUIRED default to 1 number of the page associated to the pagination seniority: <string> default to "all" profiles' seniority ("all", "senior", "junior") sort_by: <string> source_ids: <array of strings> REQUIRED stage: <string> Returns Retrieve the profiles data as <dict>
[ "Retreive", "all", "profiles", "that", "match", "the", "query", "param", "." ]
01279f0ece08cf3d1dd45f76de6d9edf7fafec90
https://github.com/Riminder/python-riminder-api/blob/01279f0ece08cf3d1dd45f76de6d9edf7fafec90/riminder/profile.py#L68-L110
train
Riminder/python-riminder-api
riminder/profile.py
Profile.add
def add(self, source_id=None, file_path=None, profile_reference="", timestamp_reception=None, training_metadata=[]): """ Add a profile resume to a source id. Args: source_id: <string> source id file_path: <string> local path to resume file profile_reference: <string> (default to "") reference to assign to the profile timestamp_reception: <string> original date of the application of the profile Returns Response that contains code 201 if successful Other status codes otherwise. """ data = {} data["source_id"] = _validate_source_id(source_id) data["profile_reference"] = _validate_profile_reference(profile_reference) data["timestamp_reception"] = _validate_timestamp(timestamp_reception, "timestamp_reception") data["training_metadata"] = _validate_training_metadata(training_metadata) files = _get_file_metadata(file_path, profile_reference) response = None with open(file_path, 'rb') as in_file: files = (files[0], in_file, files[2]) response = self.client.post("profile", data=data, files={"file": files}) return response.json()
python
def add(self, source_id=None, file_path=None, profile_reference="", timestamp_reception=None, training_metadata=[]): """ Add a profile resume to a source id. Args: source_id: <string> source id file_path: <string> local path to resume file profile_reference: <string> (default to "") reference to assign to the profile timestamp_reception: <string> original date of the application of the profile Returns Response that contains code 201 if successful Other status codes otherwise. """ data = {} data["source_id"] = _validate_source_id(source_id) data["profile_reference"] = _validate_profile_reference(profile_reference) data["timestamp_reception"] = _validate_timestamp(timestamp_reception, "timestamp_reception") data["training_metadata"] = _validate_training_metadata(training_metadata) files = _get_file_metadata(file_path, profile_reference) response = None with open(file_path, 'rb') as in_file: files = (files[0], in_file, files[2]) response = self.client.post("profile", data=data, files={"file": files}) return response.json()
[ "def", "add", "(", "self", ",", "source_id", "=", "None", ",", "file_path", "=", "None", ",", "profile_reference", "=", "\"\"", ",", "timestamp_reception", "=", "None", ",", "training_metadata", "=", "[", "]", ")", ":", "data", "=", "{", "}", "data", "[", "\"source_id\"", "]", "=", "_validate_source_id", "(", "source_id", ")", "data", "[", "\"profile_reference\"", "]", "=", "_validate_profile_reference", "(", "profile_reference", ")", "data", "[", "\"timestamp_reception\"", "]", "=", "_validate_timestamp", "(", "timestamp_reception", ",", "\"timestamp_reception\"", ")", "data", "[", "\"training_metadata\"", "]", "=", "_validate_training_metadata", "(", "training_metadata", ")", "files", "=", "_get_file_metadata", "(", "file_path", ",", "profile_reference", ")", "response", "=", "None", "with", "open", "(", "file_path", ",", "'rb'", ")", "as", "in_file", ":", "files", "=", "(", "files", "[", "0", "]", ",", "in_file", ",", "files", "[", "2", "]", ")", "response", "=", "self", ".", "client", ".", "post", "(", "\"profile\"", ",", "data", "=", "data", ",", "files", "=", "{", "\"file\"", ":", "files", "}", ")", "return", "response", ".", "json", "(", ")" ]
Add a profile resume to a source id. Args: source_id: <string> source id file_path: <string> local path to resume file profile_reference: <string> (default to "") reference to assign to the profile timestamp_reception: <string> original date of the application of the profile Returns Response that contains code 201 if successful Other status codes otherwise.
[ "Add", "a", "profile", "resume", "to", "a", "sourced", "id", "." ]
01279f0ece08cf3d1dd45f76de6d9edf7fafec90
https://github.com/Riminder/python-riminder-api/blob/01279f0ece08cf3d1dd45f76de6d9edf7fafec90/riminder/profile.py#L112-L142
train
Riminder/python-riminder-api
riminder/profile.py
Profile.addList
def addList(self, source_id, dir_path, is_recurcive=False, timestamp_reception=None, training_metadata=[]): """Add all profiles from a given directory.""" if not path.isdir(dir_path): raise ValueError(dir_path + ' is not a directory') files_to_send = _get_files_from_dir(dir_path, is_recurcive) succeed_upload = {} failed_upload = {} for file_path in files_to_send: try: resp = self.add(source_id=source_id, file_path=file_path, profile_reference="", timestamp_reception=timestamp_reception, training_metadata=training_metadata) if resp['code'] != 200 and resp['code'] != 201: failed_upload[file_path] = ValueError('Invalid response: ' + str(resp)) else: succeed_upload[file_path] = resp except BaseException as e: failed_upload[file_path] = e result = { 'success': succeed_upload, 'fail': failed_upload } return result
python
def addList(self, source_id, dir_path, is_recurcive=False, timestamp_reception=None, training_metadata=[]): """Add all profiles from a given directory.""" if not path.isdir(dir_path): raise ValueError(dir_path + ' is not a directory') files_to_send = _get_files_from_dir(dir_path, is_recurcive) succeed_upload = {} failed_upload = {} for file_path in files_to_send: try: resp = self.add(source_id=source_id, file_path=file_path, profile_reference="", timestamp_reception=timestamp_reception, training_metadata=training_metadata) if resp['code'] != 200 and resp['code'] != 201: failed_upload[file_path] = ValueError('Invalid response: ' + str(resp)) else: succeed_upload[file_path] = resp except BaseException as e: failed_upload[file_path] = e result = { 'success': succeed_upload, 'fail': failed_upload } return result
[ "def", "addList", "(", "self", ",", "source_id", ",", "dir_path", ",", "is_recurcive", "=", "False", ",", "timestamp_reception", "=", "None", ",", "training_metadata", "=", "[", "]", ")", ":", "if", "not", "path", ".", "isdir", "(", "dir_path", ")", ":", "raise", "ValueError", "(", "dir_path", "+", "' is not a directory'", ")", "files_to_send", "=", "_get_files_from_dir", "(", "dir_path", ",", "is_recurcive", ")", "succeed_upload", "=", "{", "}", "failed_upload", "=", "{", "}", "for", "file_path", "in", "files_to_send", ":", "try", ":", "resp", "=", "self", ".", "add", "(", "source_id", "=", "source_id", ",", "file_path", "=", "file_path", ",", "profile_reference", "=", "\"\"", ",", "timestamp_reception", "=", "timestamp_reception", ",", "training_metadata", "=", "training_metadata", ")", "if", "resp", "[", "'code'", "]", "!=", "200", "and", "resp", "[", "'code'", "]", "!=", "201", ":", "failed_upload", "[", "file_path", "]", "=", "ValueError", "(", "'Invalid response: '", "+", "str", "(", "resp", ")", ")", "else", ":", "succeed_upload", "[", "file_path", "]", "=", "resp", "except", "BaseException", "as", "e", ":", "failed_upload", "[", "file_path", "]", "=", "e", "result", "=", "{", "'success'", ":", "succeed_upload", ",", "'fail'", ":", "failed_upload", "}", "return", "result" ]
Add all profiles from a given directory.
[ "Add", "all", "profile", "from", "a", "given", "directory", "." ]
01279f0ece08cf3d1dd45f76de6d9edf7fafec90
https://github.com/Riminder/python-riminder-api/blob/01279f0ece08cf3d1dd45f76de6d9edf7fafec90/riminder/profile.py#L144-L166
train
Riminder/python-riminder-api
riminder/profile.py
Profile.get
def get(self, source_id=None, profile_id=None, profile_reference=None): """ Retrieve the profile information associated with profile id. Args: source_id: <string> source id profile_id: <string> profile id Returns profile information """ query_params = {} query_params["source_id"] = _validate_source_id(source_id) if profile_id: query_params["profile_id"] = _validate_profile_id(profile_id) if profile_reference: query_params["profile_reference"] = _validate_profile_reference(profile_reference) response = self.client.get('profile', query_params) return response.json()
python
def get(self, source_id=None, profile_id=None, profile_reference=None): """ Retrieve the profile information associated with profile id. Args: source_id: <string> source id profile_id: <string> profile id Returns profile information """ query_params = {} query_params["source_id"] = _validate_source_id(source_id) if profile_id: query_params["profile_id"] = _validate_profile_id(profile_id) if profile_reference: query_params["profile_reference"] = _validate_profile_reference(profile_reference) response = self.client.get('profile', query_params) return response.json()
[ "def", "get", "(", "self", ",", "source_id", "=", "None", ",", "profile_id", "=", "None", ",", "profile_reference", "=", "None", ")", ":", "query_params", "=", "{", "}", "query_params", "[", "\"source_id\"", "]", "=", "_validate_source_id", "(", "source_id", ")", "if", "profile_id", ":", "query_params", "[", "\"profile_id\"", "]", "=", "_validate_profile_id", "(", "profile_id", ")", "if", "profile_reference", ":", "query_params", "[", "\"profile_reference\"", "]", "=", "_validate_profile_reference", "(", "profile_reference", ")", "response", "=", "self", ".", "client", ".", "get", "(", "'profile'", ",", "query_params", ")", "return", "response", ".", "json", "(", ")" ]
Retrieve the profile information associated with profile id. Args: source_id: <string> source id profile_id: <string> profile id Returns profile information
[ "Retrieve", "the", "profile", "information", "associated", "with", "profile", "id", "." ]
01279f0ece08cf3d1dd45f76de6d9edf7fafec90
https://github.com/Riminder/python-riminder-api/blob/01279f0ece08cf3d1dd45f76de6d9edf7fafec90/riminder/profile.py#L168-L189
train
Riminder/python-riminder-api
riminder/profile.py
ProfileStage.set
def set(self, source_id=None, profile_id=None, filter_id=None, stage=None, profile_reference=None, filter_reference=None): """ Edit the profile stage given a filter. Args: profile_id: <string> profile id body params: source_id: <string> source id associated to the profile filter_id: <string> filter id stage: <string> profiles' stage associated to the filter ( null for all, NEW, YES, LATER or NO). Returns Response that contains code 201 if successful Other status codes otherwise. """ data = {} data["source_id"] = _validate_source_id(source_id) if profile_id: data["profile_id"] = _validate_profile_id(profile_id) if filter_id: data["filter_id"] = _validate_filter_id(filter_id) if profile_reference: data["profile_reference"] = _validate_profile_reference(profile_reference) if filter_reference: data["filter_reference"] = _validate_filter_reference(filter_reference) data["stage"] = _validate_stage(stage) response = self.client.patch('profile/stage', data=data) return response.json()
python
def set(self, source_id=None, profile_id=None, filter_id=None, stage=None, profile_reference=None, filter_reference=None): """ Edit the profile stage given a filter. Args: profile_id: <string> profile id body params: source_id: <string> source id associated to the profile filter_id: <string> filter id stage: <string> profiles' stage associated to the filter ( null for all, NEW, YES, LATER or NO). Returns Response that contains code 201 if successful Other status codes otherwise. """ data = {} data["source_id"] = _validate_source_id(source_id) if profile_id: data["profile_id"] = _validate_profile_id(profile_id) if filter_id: data["filter_id"] = _validate_filter_id(filter_id) if profile_reference: data["profile_reference"] = _validate_profile_reference(profile_reference) if filter_reference: data["filter_reference"] = _validate_filter_reference(filter_reference) data["stage"] = _validate_stage(stage) response = self.client.patch('profile/stage', data=data) return response.json()
[ "def", "set", "(", "self", ",", "source_id", "=", "None", ",", "profile_id", "=", "None", ",", "filter_id", "=", "None", ",", "stage", "=", "None", ",", "profile_reference", "=", "None", ",", "filter_reference", "=", "None", ")", ":", "data", "=", "{", "}", "data", "[", "\"source_id\"", "]", "=", "_validate_source_id", "(", "source_id", ")", "if", "profile_id", ":", "data", "[", "\"profile_id\"", "]", "=", "_validate_profile_id", "(", "profile_id", ")", "if", "filter_id", ":", "data", "[", "\"filter_id\"", "]", "=", "_validate_filter_id", "(", "filter_id", ")", "if", "profile_reference", ":", "data", "[", "\"profile_reference\"", "]", "=", "_validate_profile_reference", "(", "profile_reference", ")", "if", "filter_reference", ":", "data", "[", "\"filter_reference\"", "]", "=", "_validate_filter_reference", "(", "filter_reference", ")", "data", "[", "\"stage\"", "]", "=", "_validate_stage", "(", "stage", ")", "response", "=", "self", ".", "client", ".", "patch", "(", "'profile/stage'", ",", "data", "=", "data", ")", "return", "response", ".", "json", "(", ")" ]
Edit the profile stage given a filter. Args: profile_id: <string> profile id body params: source_id: <string> source id associated to the profile filter_id: <string> filter id stage: <string> profiles' stage associated to the filter ( null for all, NEW, YES, LATER or NO). Returns Response that contains code 201 if successful Other status codes otherwise.
[ "Edit", "the", "profile", "stage", "given", "a", "filter", "." ]
01279f0ece08cf3d1dd45f76de6d9edf7fafec90
https://github.com/Riminder/python-riminder-api/blob/01279f0ece08cf3d1dd45f76de6d9edf7fafec90/riminder/profile.py#L292-L326
train
Riminder/python-riminder-api
riminder/profile.py
ProfileRevealing.get
def get(self, source_id=None, profile_id=None, profile_reference=None, filter_id=None, filter_reference=None): """ Retrieve the interpretability information. Args: source_id: <string> source id profile_id: <string> profile id filter_id: <string> filter id Returns interpretability information """ query_params = {} query_params["source_id"] = _validate_source_id(source_id) if profile_id: query_params["profile_id"] = _validate_profile_id(profile_id) if profile_reference: query_params["profile_reference"] = _validate_profile_reference(profile_reference) if filter_id: query_params["filter_id"] = _validate_filter_id(filter_id) if filter_reference: query_params["filter_reference"] = _validate_filter_reference(filter_reference) response = self.client.get('profile/revealing', query_params) return response
python
def get(self, source_id=None, profile_id=None, profile_reference=None, filter_id=None, filter_reference=None): """ Retrieve the interpretability information. Args: source_id: <string> source id profile_id: <string> profile id filter_id: <string> filter id Returns interpretability information """ query_params = {} query_params["source_id"] = _validate_source_id(source_id) if profile_id: query_params["profile_id"] = _validate_profile_id(profile_id) if profile_reference: query_params["profile_reference"] = _validate_profile_reference(profile_reference) if filter_id: query_params["filter_id"] = _validate_filter_id(filter_id) if filter_reference: query_params["filter_reference"] = _validate_filter_reference(filter_reference) response = self.client.get('profile/revealing', query_params) return response
[ "def", "get", "(", "self", ",", "source_id", "=", "None", ",", "profile_id", "=", "None", ",", "profile_reference", "=", "None", ",", "filter_id", "=", "None", ",", "filter_reference", "=", "None", ")", ":", "query_params", "=", "{", "}", "query_params", "[", "\"source_id\"", "]", "=", "_validate_source_id", "(", "source_id", ")", "if", "profile_id", ":", "query_params", "[", "\"profile_id\"", "]", "=", "_validate_profile_id", "(", "profile_id", ")", "if", "profile_reference", ":", "query_params", "[", "\"profile_reference\"", "]", "=", "_validate_profile_reference", "(", "profile_reference", ")", "if", "filter_id", ":", "query_params", "[", "\"filter_id\"", "]", "=", "_validate_filter_id", "(", "filter_id", ")", "if", "filter_reference", ":", "query_params", "[", "\"filter_reference\"", "]", "=", "_validate_filter_reference", "(", "filter_reference", ")", "response", "=", "self", ".", "client", ".", "get", "(", "'profile/revealing'", ",", "query_params", ")", "return", "response" ]
Retrieve the interpretability information. Args: source_id: <string> source id profile_id: <string> profile id filter_id: <string> filter id Returns interpretability information
[ "Retrieve", "the", "interpretability", "information", "." ]
01279f0ece08cf3d1dd45f76de6d9edf7fafec90
https://github.com/Riminder/python-riminder-api/blob/01279f0ece08cf3d1dd45f76de6d9edf7fafec90/riminder/profile.py#L380-L407
train
Riminder/python-riminder-api
riminder/profile.py
ProfileJson.check
def check(self, profile_data, training_metadata=[]): """Use the api to check whether the profile_data is valid.""" data = { "profile_json": _validate_dict(profile_data, "profile_data"), "training_metadata": _validate_training_metadata(training_metadata), } response = self.client.post("profile/json/check", data=data) return response.json()
python
def check(self, profile_data, training_metadata=[]): """Use the api to check whether the profile_data is valid.""" data = { "profile_json": _validate_dict(profile_data, "profile_data"), "training_metadata": _validate_training_metadata(training_metadata), } response = self.client.post("profile/json/check", data=data) return response.json()
[ "def", "check", "(", "self", ",", "profile_data", ",", "training_metadata", "=", "[", "]", ")", ":", "data", "=", "{", "\"profile_json\"", ":", "_validate_dict", "(", "profile_data", ",", "\"profile_data\"", ")", ",", "\"training_metadata\"", ":", "_validate_training_metadata", "(", "training_metadata", ")", ",", "}", "response", "=", "self", ".", "client", ".", "post", "(", "\"profile/json/check\"", ",", "data", "=", "data", ")", "return", "response", ".", "json", "(", ")" ]
Use the api to check whether the profile_data is valid.
[ "Use", "the", "api", "to", "check", "weither", "the", "profile_data", "are", "valid", "." ]
01279f0ece08cf3d1dd45f76de6d9edf7fafec90
https://github.com/Riminder/python-riminder-api/blob/01279f0ece08cf3d1dd45f76de6d9edf7fafec90/riminder/profile.py#L417-L424
train
Riminder/python-riminder-api
riminder/profile.py
ProfileJson.add
def add(self, source_id, profile_data, training_metadata=[], profile_reference=None, timestamp_reception=None): """Use the api to add a new profile using profile_data.""" data = { "source_id": _validate_source_id(source_id), "profile_json": _validate_dict(profile_data, "profile_data"), "training_metadata": _validate_training_metadata(training_metadata), "profile_reference": profile_reference } # some enrichment for profile_json if timestamp_reception is not None: data['timestamp_reception'] = _validate_timestamp(timestamp_reception, 'timestamp_reception') response = self.client.post("profile/json", data=data) return response.json()
python
def add(self, source_id, profile_data, training_metadata=[], profile_reference=None, timestamp_reception=None): """Use the api to add a new profile using profile_data.""" data = { "source_id": _validate_source_id(source_id), "profile_json": _validate_dict(profile_data, "profile_data"), "training_metadata": _validate_training_metadata(training_metadata), "profile_reference": profile_reference } # some enrichment for profile_json if timestamp_reception is not None: data['timestamp_reception'] = _validate_timestamp(timestamp_reception, 'timestamp_reception') response = self.client.post("profile/json", data=data) return response.json()
[ "def", "add", "(", "self", ",", "source_id", ",", "profile_data", ",", "training_metadata", "=", "[", "]", ",", "profile_reference", "=", "None", ",", "timestamp_reception", "=", "None", ")", ":", "data", "=", "{", "\"source_id\"", ":", "_validate_source_id", "(", "source_id", ")", ",", "\"profile_json\"", ":", "_validate_dict", "(", "profile_data", ",", "\"profile_data\"", ")", ",", "\"training_metadata\"", ":", "_validate_training_metadata", "(", "training_metadata", ")", ",", "\"profile_reference\"", ":", "profile_reference", "}", "# some enrichement for profile_json", "if", "timestamp_reception", "is", "not", "None", ":", "data", "[", "'timestamp_reception'", "]", "=", "_validate_timestamp", "(", "timestamp_reception", ",", "'timestamp_reception'", ")", "response", "=", "self", ".", "client", ".", "post", "(", "\"profile/json\"", ",", "data", "=", "data", ")", "return", "response", ".", "json", "(", ")" ]
Use the api to add a new profile using profile_data.
[ "Use", "the", "api", "to", "add", "a", "new", "profile", "using", "profile_data", "." ]
01279f0ece08cf3d1dd45f76de6d9edf7fafec90
https://github.com/Riminder/python-riminder-api/blob/01279f0ece08cf3d1dd45f76de6d9edf7fafec90/riminder/profile.py#L426-L440
train
kwlzn/blast
blast/scanner.py
DirScanner.md5sum
def md5sum(self, f): ''' md5sums a file, returning the hex digest Parameters: - f filename string ''' m = hashlib.md5() fh = open(f, 'rb') while 1: chunk = fh.read(BUF_SIZE) if not chunk: break m.update(chunk) fh.close() return m.hexdigest()
python
def md5sum(self, f): ''' md5sums a file, returning the hex digest Parameters: - f filename string ''' m = hashlib.md5() fh = open(f, 'rb') while 1: chunk = fh.read(BUF_SIZE) if not chunk: break m.update(chunk) fh.close() return m.hexdigest()
[ "def", "md5sum", "(", "self", ",", "f", ")", ":", "m", "=", "hashlib", ".", "md5", "(", ")", "fh", "=", "open", "(", "f", ",", "'r'", ")", "while", "1", ":", "chunk", "=", "fh", ".", "read", "(", "BUF_SIZE", ")", "if", "not", "chunk", ":", "break", "m", ".", "update", "(", "chunk", ")", "fh", ".", "close", "(", ")", "return", "m", ".", "hexdigest", "(", ")" ]
md5sums a file, returning the hex digest Parameters: - f filename string
[ "md5sums", "a", "file", "returning", "the", "hex", "digest" ]
ae18a19182a6884c453bf9b2a3c6386bd3b2655a
https://github.com/kwlzn/blast/blob/ae18a19182a6884c453bf9b2a3c6386bd3b2655a/blast/scanner.py#L16-L29
train
kwlzn/blast
blast/scanner.py
DirScanner.iterdupes
def iterdupes(self, compare=None, filt=None): ''' streaming item iterator with low overhead duplicate file detection Parameters: - compare compare function between files (defaults to md5sum) ''' if not compare: compare = self.md5sum seen_siz = {} ## store size -> first seen filename seen_sum = {} ## store chksum -> first seen filename size_func = lambda x: os.stat(x).st_size for (fsize, f) in self.iteritems(want_dirs=False, func=size_func, filt=filt): if fsize not in seen_siz: ## state 1: no previous size collisions seen_siz[fsize] = f continue else: if seen_siz[fsize]: ## state 2: defined key => str (initial, unscanned path) chksum = compare(seen_siz[fsize]) if chksum in seen_sum: yield (chksum, seen_siz[fsize]) else: seen_sum[chksum] = seen_siz[fsize] seen_siz[fsize] = None ## state 3: defined key => None (already scanned path, no-op) chksum = compare(f) if chksum in seen_sum: ## if it's a dupe, check if the first one was ever yielded then yield if seen_sum[chksum]: yield (chksum, seen_sum[chksum]) seen_sum[chksum] = None yield (chksum, f) else: ## if not, set the initial filename seen_sum[chksum] = f
python
def iterdupes(self, compare=None, filt=None): ''' streaming item iterator with low overhead duplicate file detection Parameters: - compare compare function between files (defaults to md5sum) ''' if not compare: compare = self.md5sum seen_siz = {} ## store size -> first seen filename seen_sum = {} ## store chksum -> first seen filename size_func = lambda x: os.stat(x).st_size for (fsize, f) in self.iteritems(want_dirs=False, func=size_func, filt=filt): if fsize not in seen_siz: ## state 1: no previous size collisions seen_siz[fsize] = f continue else: if seen_siz[fsize]: ## state 2: defined key => str (initial, unscanned path) chksum = compare(seen_siz[fsize]) if chksum in seen_sum: yield (chksum, seen_siz[fsize]) else: seen_sum[chksum] = seen_siz[fsize] seen_siz[fsize] = None ## state 3: defined key => None (already scanned path, no-op) chksum = compare(f) if chksum in seen_sum: ## if it's a dupe, check if the first one was ever yielded then yield if seen_sum[chksum]: yield (chksum, seen_sum[chksum]) seen_sum[chksum] = None yield (chksum, f) else: ## if not, set the initial filename seen_sum[chksum] = f
[ "def", "iterdupes", "(", "self", ",", "compare", "=", "None", ",", "filt", "=", "None", ")", ":", "if", "not", "compare", ":", "compare", "=", "self", ".", "md5sum", "seen_siz", "=", "{", "}", "## store size -> first seen filename", "seen_sum", "=", "{", "}", "## store chksum -> first seen filename", "size_func", "=", "lambda", "x", ":", "os", ".", "stat", "(", "x", ")", ".", "st_size", "for", "(", "fsize", ",", "f", ")", "in", "self", ".", "iteritems", "(", "want_dirs", "=", "False", ",", "func", "=", "size_func", ",", "filt", "=", "filt", ")", ":", "if", "fsize", "not", "in", "seen_siz", ":", "## state 1: no previous size collisions", "seen_siz", "[", "fsize", "]", "=", "f", "continue", "else", ":", "if", "seen_siz", "[", "fsize", "]", ":", "## state 2: defined key => str (initial, unscanned path)", "chksum", "=", "compare", "(", "seen_siz", "[", "fsize", "]", ")", "if", "chksum", "in", "seen_sum", ":", "yield", "(", "chksum", ",", "seen_siz", "[", "fsize", "]", ")", "else", ":", "seen_sum", "[", "chksum", "]", "=", "seen_siz", "[", "fsize", "]", "seen_siz", "[", "fsize", "]", "=", "None", "## state 3: defined key => None (already scanned path, no-op)", "chksum", "=", "compare", "(", "f", ")", "if", "chksum", "in", "seen_sum", ":", "## if it's a dupe, check if the first one was ever yielded then yield", "if", "seen_sum", "[", "chksum", "]", ":", "yield", "(", "chksum", ",", "seen_sum", "[", "chksum", "]", ")", "seen_sum", "[", "chksum", "]", "=", "None", "yield", "(", "chksum", ",", "f", ")", "else", ":", "## if not, set the initial filename", "seen_sum", "[", "chksum", "]", "=", "f" ]
streaming item iterator with low overhead duplicate file detection Parameters: - compare compare function between files (defaults to md5sum)
[ "streaming", "item", "iterator", "with", "low", "overhead", "duplicate", "file", "detection" ]
ae18a19182a6884c453bf9b2a3c6386bd3b2655a
https://github.com/kwlzn/blast/blob/ae18a19182a6884c453bf9b2a3c6386bd3b2655a/blast/scanner.py#L71-L102
train
gebn/wood
wood/integrations/s3.py
objects_to_root
def objects_to_root(objects: List) -> Root: """ Convert a list of s3 ObjectSummaries into a directory tree. :param objects: The list of objects, e.g. the result of calling `.objects.all()` on a bucket. :return: The tree structure, contained within a root node. """ def _to_tree(objs: Iterable) -> Dict: """ Build a tree structure from a flat list of objects. :param objs: The raw iterable of S3 `ObjectSummary`s, as returned by a bucket listing. :return: The listing as a nested dictionary where keys are directory and file names. The values of directories will in turn be a dict. The values of keys representing files will be the `ObjectSummary` instance. """ path_tree = {} for obj in objs: is_dir = obj.key.endswith('/') chunks = [chunk for chunk in obj.key.split('/') if chunk] chunk_count = len(chunks) tmp = path_tree for i, chunk in enumerate(chunks): is_last_chunk = i == chunk_count - 1 if is_last_chunk and not is_dir: tmp[chunk] = obj else: # must be a directory if chunk not in tmp: # it doesn't exist - create it tmp[chunk] = {} tmp = tmp[chunk] return path_tree def _to_entity(key: str, value: Union[Dict, Any]) -> Entity: """ Turn a nested dictionary representing an S3 bucket into the correct `Entity` object. :param key: The name of the entity. :param value: If the entity is a directory, the nested dict representing its contents. Otherwise, the `ObjectSummary` instance representing the file. :return: The entity representing the entity name and value pair. """ if isinstance(value, dict): return Directory( key, {key_: _to_entity(key_, value_) for key_, value_ in value.items()}) return File(pathlib.PurePath(value.key).name, value.size, value.e_tag.strip('"')) tree = _to_tree(objects) return Root({pathlib.PurePath(key).name: _to_entity(key, value) for key, value in tree.items()})
python
def objects_to_root(objects: List) -> Root: """ Convert a list of s3 ObjectSummaries into a directory tree. :param objects: The list of objects, e.g. the result of calling `.objects.all()` on a bucket. :return: The tree structure, contained within a root node. """ def _to_tree(objs: Iterable) -> Dict: """ Build a tree structure from a flat list of objects. :param objs: The raw iterable of S3 `ObjectSummary`s, as returned by a bucket listing. :return: The listing as a nested dictionary where keys are directory and file names. The values of directories will in turn be a dict. The values of keys representing files will be the `ObjectSummary` instance. """ path_tree = {} for obj in objs: is_dir = obj.key.endswith('/') chunks = [chunk for chunk in obj.key.split('/') if chunk] chunk_count = len(chunks) tmp = path_tree for i, chunk in enumerate(chunks): is_last_chunk = i == chunk_count - 1 if is_last_chunk and not is_dir: tmp[chunk] = obj else: # must be a directory if chunk not in tmp: # it doesn't exist - create it tmp[chunk] = {} tmp = tmp[chunk] return path_tree def _to_entity(key: str, value: Union[Dict, Any]) -> Entity: """ Turn a nested dictionary representing an S3 bucket into the correct `Entity` object. :param key: The name of the entity. :param value: If the entity is a directory, the nested dict representing its contents. Otherwise, the `ObjectSummary` instance representing the file. :return: The entity representing the entity name and value pair. """ if isinstance(value, dict): return Directory( key, {key_: _to_entity(key_, value_) for key_, value_ in value.items()}) return File(pathlib.PurePath(value.key).name, value.size, value.e_tag.strip('"')) tree = _to_tree(objects) return Root({pathlib.PurePath(key).name: _to_entity(key, value) for key, value in tree.items()})
[ "def", "objects_to_root", "(", "objects", ":", "List", ")", "->", "Root", ":", "def", "_to_tree", "(", "objs", ":", "Iterable", ")", "->", "Dict", ":", "\"\"\"\n Build a tree structure from a flat list of objects.\n\n :param objs: The raw iterable of S3 `ObjectSummary`s, as returned by a\n bucket listing.\n :return: The listing as a nested dictionary where keys are directory\n and file names. The values of directories will in turn be a\n dict. The values of keys representing files will be the\n `ObjectSummary` instance.\n \"\"\"", "path_tree", "=", "{", "}", "for", "obj", "in", "objs", ":", "is_dir", "=", "obj", ".", "key", ".", "endswith", "(", "'/'", ")", "chunks", "=", "[", "chunk", "for", "chunk", "in", "obj", ".", "key", ".", "split", "(", "'/'", ")", "if", "chunk", "]", "chunk_count", "=", "len", "(", "chunks", ")", "tmp", "=", "path_tree", "for", "i", ",", "chunk", "in", "enumerate", "(", "chunks", ")", ":", "is_last_chunk", "=", "i", "==", "chunk_count", "-", "1", "if", "is_last_chunk", "and", "not", "is_dir", ":", "tmp", "[", "chunk", "]", "=", "obj", "else", ":", "# must be a directory", "if", "chunk", "not", "in", "tmp", ":", "# it doesn't exist - create it", "tmp", "[", "chunk", "]", "=", "{", "}", "tmp", "=", "tmp", "[", "chunk", "]", "return", "path_tree", "def", "_to_entity", "(", "key", ":", "str", ",", "value", ":", "Union", "[", "Dict", ",", "Any", "]", ")", "->", "Entity", ":", "\"\"\"\n Turn a nested dictionary representing an S3 bucket into the correct\n `Entity` object.\n\n :param key: The name of the entity.\n :param value: If the entity is a directory, the nested dict\n representing its contents. Otherwise, the `ObjectSummary`\n instance representing the file.\n :return: The entity representing the entity name and value pair.\n \"\"\"", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "return", "Directory", "(", "key", ",", "{", "key_", ":", "_to_entity", "(", "key_", ",", "value_", ")", "for", "key_", ",", "value_", "in", "value", ".", "items", "(", ")", "}", ")", "return", "File", "(", "pathlib", ".", "PurePath", "(", "value", ".", "key", ")", ".", "name", ",", "value", ".", "size", ",", "value", ".", "e_tag", ".", "strip", "(", "'\"'", ")", ")", "tree", "=", "_to_tree", "(", "objects", ")", "return", "Root", "(", "{", "pathlib", ".", "PurePath", "(", "key", ")", ".", "name", ":", "_to_entity", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "tree", ".", "items", "(", ")", "}", ")" ]
Convert a list of s3 ObjectSummaries into a directory tree. :param objects: The list of objects, e.g. the result of calling `.objects.all()` on a bucket. :return: The tree structure, contained within a root node.
[ "Convert", "a", "list", "of", "s3", "ObjectSummaries", "into", "a", "directory", "tree", "." ]
efc71879890dbd2f2d7a0b1a65ed22a0843139dd
https://github.com/gebn/wood/blob/efc71879890dbd2f2d7a0b1a65ed22a0843139dd/wood/integrations/s3.py#L16-L77
train
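A hedged usage sketch for objects_to_root; the bucket name is a placeholder and boto3 credentials are assumed to be configured:

import boto3
from wood.integrations.s3 import objects_to_root

bucket = boto3.resource('s3').Bucket('example-bucket')  # placeholder name
root = objects_to_root(list(bucket.objects.all()))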
gebn/wood
wood/integrations/s3.py
Syncer._delete
def _delete(self, paths: Iterable[str]) -> None: """ Delete a collection of paths from S3. :param paths: The paths to delete. The prefix will be prepended to each one. :raises ClientError: If any request fails. """ for chunk in util.chunk(paths, self._MAX_DELETES_PER_REQUEST): keys = list([self._prefix + key for key in chunk]) logger.info('Deleting %d objects (%s)', len(keys), ', '.join(keys)) response = self._bucket.delete_objects(Delete={ 'Objects': [{'Key': key} for key in keys], 'Quiet': True }) logger.debug('Delete objects response: %s', response)
python
def _delete(self, paths: Iterable[str]) -> None: """ Delete a collection of paths from S3. :param paths: The paths to delete. The prefix will be prepended to each one. :raises ClientError: If any request fails. """ for chunk in util.chunk(paths, self._MAX_DELETES_PER_REQUEST): keys = list([self._prefix + key for key in chunk]) logger.info('Deleting %d objects (%s)', len(keys), ', '.join(keys)) response = self._bucket.delete_objects(Delete={ 'Objects': [{'Key': key} for key in keys], 'Quiet': True }) logger.debug('Delete objects response: %s', response)
[ "def", "_delete", "(", "self", ",", "paths", ":", "Iterable", "[", "str", "]", ")", "->", "None", ":", "for", "chunk", "in", "util", ".", "chunk", "(", "paths", ",", "self", ".", "_MAX_DELETES_PER_REQUEST", ")", ":", "keys", "=", "list", "(", "[", "self", ".", "_prefix", "+", "key", "for", "key", "in", "chunk", "]", ")", "logger", ".", "info", "(", "'Deleting %d objects (%s)'", ",", "len", "(", "keys", ")", ",", "', '", ".", "join", "(", "keys", ")", ")", "response", "=", "self", ".", "_bucket", ".", "delete_objects", "(", "Delete", "=", "{", "'Objects'", ":", "[", "{", "'Key'", ":", "key", "}", "for", "key", "in", "keys", "]", ",", "'Quiet'", ":", "True", "}", ")", "logger", ".", "debug", "(", "'Delete objects response: %s'", ",", "response", ")" ]
Delete a collection of paths from S3. :param paths: The paths to delete. The prefix will be prepended to each one. :raises ClientError: If any request fails.
[ "Delete", "a", "collection", "of", "paths", "from", "S3", "." ]
efc71879890dbd2f2d7a0b1a65ed22a0843139dd
https://github.com/gebn/wood/blob/efc71879890dbd2f2d7a0b1a65ed22a0843139dd/wood/integrations/s3.py#L101-L116
train
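util.chunk is internal to wood; a minimal stand-in with the assumed fixed-size-batch semantics. The batching matters because S3's DeleteObjects API accepts at most 1000 keys per request, which is presumably what _MAX_DELETES_PER_REQUEST encodes:

import itertools

def chunk(iterable, size):
    # yield successive lists of at most `size` items from `iterable`
    iterator = iter(iterable)
    while True:
        batch = list(itertools.islice(iterator, size))
        if not batch:
            return
        yield batch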
gebn/wood
wood/integrations/s3.py
Syncer._upload
def _upload(self, items: Iterable[Tuple[str, str]]) -> None: """ Upload a collection of paths to S3. :param items: An iterable of pairs containing the local path of the file to upload, and the remote path to upload it to. The prefix will be prepended to each remote path. """ for src, key in items: logger.info(f'Uploading {src} to {key}') mimetype, _ = mimetypes.guess_type(src) if mimetype is None: logger.warning(f'Could not guess MIME type for {src}') mimetype = 'application/octet-stream' logger.debug(f'Deduced MIME type: {mimetype}') self._bucket.upload_file(src, key, ExtraArgs={ 'ContentType': mimetype })
python
def _upload(self, items: Iterable[Tuple[str, str]]) -> None: """ Upload a collection of paths to S3. :param items: An iterable of pairs containing the local path of the file to upload, and the remote path to upload it to. The prefix will be prepended to each remote path. """ for src, key in items: logger.info(f'Uploading {src} to {key}') mimetype, _ = mimetypes.guess_type(src) if mimetype is None: logger.warning(f'Could not guess MIME type for {src}') mimetype = 'application/octet-stream' logger.debug(f'Deduced MIME type: {mimetype}') self._bucket.upload_file(src, key, ExtraArgs={ 'ContentType': mimetype })
[ "def", "_upload", "(", "self", ",", "items", ":", "Iterable", "[", "Tuple", "[", "str", ",", "str", "]", "]", ")", "->", "None", ":", "for", "src", ",", "key", "in", "items", ":", "logger", ".", "info", "(", "f'Uploading {src} to {key}'", ")", "mimetype", ",", "_", "=", "mimetypes", ".", "guess_type", "(", "src", ")", "if", "mimetype", "is", "None", ":", "logger", ".", "warning", "(", "f'Could not guess MIME type for {src}'", ")", "mimetype", "=", "'application/octet-stream'", "logger", ".", "debug", "(", "f'Deduced MIME type: {mimetype}'", ")", "self", ".", "_bucket", ".", "upload_file", "(", "src", ",", "key", ",", "ExtraArgs", "=", "{", "'ContentType'", ":", "mimetype", "}", ")" ]
Upload a collection of paths to S3. :param items: An iterable of pairs containing the local path of the file to upload, and the remote path to upload it to. The prefix will be prepended to each remote path.
[ "Upload", "a", "collection", "of", "paths", "to", "S3", "." ]
efc71879890dbd2f2d7a0b1a65ed22a0843139dd
https://github.com/gebn/wood/blob/efc71879890dbd2f2d7a0b1a65ed22a0843139dd/wood/integrations/s3.py#L118-L136
train
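The MIME-type fallback used above, in isolation (the file names are illustrative):

import mimetypes

for src in ('index.html', 'archive.foo'):
    mimetype, _ = mimetypes.guess_type(src)
    print(src, '->', mimetype or 'application/octet-stream')
# index.html -> text/html
# archive.foo -> application/octet-stream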
geophysics-ubonn/crtomo_tools
src/grid_rotate.py
rotmat
def rotmat(alpha): """Rotate around z-axis """ R = np.array(((np.cos(alpha), -np.sin(alpha)), (np.sin(alpha), np.cos(alpha)))) return R
python
def rotmat(alpha): """Rotate around z-axis """ R = np.array(((np.cos(alpha), -np.sin(alpha)), (np.sin(alpha), np.cos(alpha)))) return R
[ "def", "rotmat", "(", "alpha", ")", ":", "R", "=", "np", ".", "array", "(", "(", "(", "np", ".", "cos", "(", "alpha", ")", ",", "-", "np", ".", "sin", "(", "alpha", ")", ")", ",", "(", "np", ".", "sin", "(", "alpha", ")", ",", "np", ".", "cos", "(", "alpha", ")", ")", ")", ")", "return", "R" ]
Rotate around z-axis
[ "Rotate", "around", "z", "-", "axis" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/grid_rotate.py#L37-L43
train
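A quick sanity check of the rotation matrix, redefined inline so the snippet is self-contained; a 90-degree rotation maps the unit x-vector onto the unit y-vector:

import numpy as np

def rotmat(alpha):
    # same 2x2 rotation matrix as in the record above
    return np.array(((np.cos(alpha), -np.sin(alpha)),
                     (np.sin(alpha), np.cos(alpha))))

print(rotmat(np.pi / 2) @ np.array([1.0, 0.0]))  # approximately [0. 1.]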
edx/edx-celeryutils
celery_utils/logged_task.py
LoggedTask.apply_async
def apply_async(self, args=None, kwargs=None, **options): # pylint: disable=arguments-differ """ Emit a log statement when the task is submitted. """ result = super(LoggedTask, self).apply_async(args=args, kwargs=kwargs, **options) log.info('Task {}[{}] submitted with arguments {}, {}'.format( self.name, result.id, args, kwargs )) return result
python
def apply_async(self, args=None, kwargs=None, **options): # pylint: disable=arguments-differ """ Emit a log statement when the task is submitted. """ result = super(LoggedTask, self).apply_async(args=args, kwargs=kwargs, **options) log.info('Task {}[{}] submitted with arguments {}, {}'.format( self.name, result.id, args, kwargs )) return result
[ "def", "apply_async", "(", "self", ",", "args", "=", "None", ",", "kwargs", "=", "None", ",", "*", "*", "options", ")", ":", "# pylint: disable=arguments-differ", "result", "=", "super", "(", "LoggedTask", ",", "self", ")", ".", "apply_async", "(", "args", "=", "args", ",", "kwargs", "=", "kwargs", ",", "*", "*", "options", ")", "log", ".", "info", "(", "'Task {}[{}] submitted with arguments {}, {}'", ".", "format", "(", "self", ".", "name", ",", "result", ".", "id", ",", "args", ",", "kwargs", ")", ")", "return", "result" ]
Emit a log statement when the task is submitted.
[ "Emit", "a", "log", "statement", "when", "the", "task", "is", "submitted", "." ]
d8745f5f0929ad154fad779a19fbefe7f51e9498
https://github.com/edx/edx-celeryutils/blob/d8745f5f0929ad154fad779a19fbefe7f51e9498/celery_utils/logged_task.py#L22-L33
train
edx/edx-celeryutils
celery_utils/logged_task.py
LoggedTask.on_retry
def on_retry(self, exc, task_id, args, kwargs, einfo): """ Capture the exception that caused the task to be retried, if any. """ super(LoggedTask, self).on_retry(exc, task_id, args, kwargs, einfo) log.warning('[{}] retried due to {}'.format(task_id, getattr(einfo, 'traceback', None)))
python
def on_retry(self, exc, task_id, args, kwargs, einfo): """ Capture the exception that caused the task to be retried, if any. """ super(LoggedTask, self).on_retry(exc, task_id, args, kwargs, einfo) log.warning('[{}] retried due to {}'.format(task_id, getattr(einfo, 'traceback', None)))
[ "def", "on_retry", "(", "self", ",", "exc", ",", "task_id", ",", "args", ",", "kwargs", ",", "einfo", ")", ":", "super", "(", "LoggedTask", ",", "self", ")", ".", "on_retry", "(", "exc", ",", "task_id", ",", "args", ",", "kwargs", ",", "einfo", ")", "log", ".", "warning", "(", "'[{}] retried due to {}'", ".", "format", "(", "task_id", ",", "getattr", "(", "einfo", ",", "'traceback'", ",", "None", ")", ")", ")" ]
Capture the exception that caused the task to be retried, if any.
[ "Capture", "the", "exception", "that", "caused", "the", "task", "to", "be", "retried", "if", "any", "." ]
d8745f5f0929ad154fad779a19fbefe7f51e9498
https://github.com/edx/edx-celeryutils/blob/d8745f5f0929ad154fad779a19fbefe7f51e9498/celery_utils/logged_task.py#L35-L40
train
edx/edx-celeryutils
celery_utils/logged_task.py
LoggedTask.on_failure
def on_failure(self, exc, task_id, args, kwargs, einfo): """ Capture the exception that caused the task to fail, if any. """ log.error('[{}] failed due to {}'.format(task_id, getattr(einfo, 'traceback', None))) super(LoggedTask, self).on_failure(exc, task_id, args, kwargs, einfo)
python
def on_failure(self, exc, task_id, args, kwargs, einfo): """ Capture the exception that caused the task to fail, if any. """ log.error('[{}] failed due to {}'.format(task_id, getattr(einfo, 'traceback', None))) super(LoggedTask, self).on_failure(exc, task_id, args, kwargs, einfo)
[ "def", "on_failure", "(", "self", ",", "exc", ",", "task_id", ",", "args", ",", "kwargs", ",", "einfo", ")", ":", "log", ".", "error", "(", "'[{}] failed due to {}'", ".", "format", "(", "task_id", ",", "getattr", "(", "einfo", ",", "'traceback'", ",", "None", ")", ")", ")", "super", "(", "LoggedTask", ",", "self", ")", ".", "on_failure", "(", "exc", ",", "task_id", ",", "args", ",", "kwargs", ",", "einfo", ")" ]
Capture the exception that caused the task to fail, if any.
[ "Capture", "the", "exception", "that", "caused", "the", "task", "to", "fail", "if", "any", "." ]
d8745f5f0929ad154fad779a19fbefe7f51e9498
https://github.com/edx/edx-celeryutils/blob/d8745f5f0929ad154fad779a19fbefe7f51e9498/celery_utils/logged_task.py#L42-L47
train
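A hedged sketch of wiring the three hooks above into a Celery app; the app name and in-memory broker are placeholders, not part of edx-celeryutils:

from celery import Celery
from celery_utils.logged_task import LoggedTask

app = Celery('demo', broker='memory://')  # placeholder app and broker

@app.task(base=LoggedTask)
def add(x, y):
    return x + y

# add.apply_async(args=(2, 3)) logs the submission up front;
# retries and failures are then logged by on_retry/on_failure.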
hotzenklotz/pybeerxml
pybeerxml/parser.py
Parser.nodes_to_object
def nodes_to_object(self, node, object): "Map all child nodes to one object's attributes" for n in list(node): self.node_to_object(n, object)
python
def nodes_to_object(self, node, object): "Map all child nodes to one object's attributes" for n in list(node): self.node_to_object(n, object)
[ "def", "nodes_to_object", "(", "self", ",", "node", ",", "object", ")", ":", "for", "n", "in", "list", "(", "node", ")", ":", "self", ".", "node_to_object", "(", "n", ",", "object", ")" ]
Map all child nodes to one object's attributes
[ "Map", "all", "child", "nodes", "to", "one", "object", "s", "attributes" ]
e9cf8d6090b1e01e5bbb101e255792b134affbe0
https://github.com/hotzenklotz/pybeerxml/blob/e9cf8d6090b1e01e5bbb101e255792b134affbe0/pybeerxml/parser.py#L15-L19
train
hotzenklotz/pybeerxml
pybeerxml/parser.py
Parser.node_to_object
def node_to_object(self, node, object): "Map a single node to one object's attributes" attribute = self.to_lower(node.tag) # Yield is a protected keyword in Python, so let's rename it attribute = "_yield" if attribute == "yield" else attribute try: valueString = node.text or "" value = float(valueString) except ValueError: value = node.text try: setattr(object, attribute, value) except AttributeError: sys.stderr.write("Attribute <%s> not supported." % attribute)
python
def node_to_object(self, node, object): "Map a single node to one object's attributes" attribute = self.to_lower(node.tag) # Yield is a protected keyword in Python, so let's rename it attribute = "_yield" if attribute == "yield" else attribute try: valueString = node.text or "" value = float(valueString) except ValueError: value = node.text try: setattr(object, attribute, value) except AttributeError: sys.stderr.write("Attribute <%s> not supported." % attribute)
[ "def", "node_to_object", "(", "self", ",", "node", ",", "object", ")", ":", "attribute", "=", "self", ".", "to_lower", "(", "node", ".", "tag", ")", "# Yield is a protected keyword in Python, so let's rename it", "attribute", "=", "\"_yield\"", "if", "attribute", "==", "\"yield\"", "else", "attribute", "try", ":", "valueString", "=", "node", ".", "text", "or", "\"\"", "value", "=", "float", "(", "valueString", ")", "except", "ValueError", ":", "value", "=", "node", ".", "text", "try", ":", "setattr", "(", "object", ",", "attribute", ",", "value", ")", "except", "AttributeError", "(", ")", ":", "sys", ".", "stderr", ".", "write", "(", "\"Attribute <%s> not supported.\"", "%", "attribute", ")" ]
Map a single node to one object's attributes
[ "Map", "a", "single", "node", "to", "one", "object", "s", "attributes" ]
e9cf8d6090b1e01e5bbb101e255792b134affbe0
https://github.com/hotzenklotz/pybeerxml/blob/e9cf8d6090b1e01e5bbb101e255792b134affbe0/pybeerxml/parser.py#L21-L38
train
hotzenklotz/pybeerxml
pybeerxml/parser.py
Parser.parse
def parse(self, xml_file): "Get a list of parsed recipes from BeerXML input" recipes = [] with open(xml_file, "rt") as f: tree = ElementTree.parse(f) for recipeNode in tree.iter(): if self.to_lower(recipeNode.tag) != "recipe": continue recipe = Recipe() recipes.append(recipe) for recipeProperty in list(recipeNode): tag_name = self.to_lower(recipeProperty.tag) if tag_name == "fermentables": for fermentable_node in list(recipeProperty): fermentable = Fermentable() self.nodes_to_object(fermentable_node, fermentable) recipe.fermentables.append(fermentable) elif tag_name == "yeasts": for yeast_node in list(recipeProperty): yeast = Yeast() self.nodes_to_object(yeast_node, yeast) recipe.yeasts.append(yeast) elif tag_name == "hops": for hop_node in list(recipeProperty): hop = Hop() self.nodes_to_object(hop_node, hop) recipe.hops.append(hop) elif tag_name == "miscs": for misc_node in list(recipeProperty): misc = Misc() self.nodes_to_object(misc_node, misc) recipe.miscs.append(misc) elif tag_name == "style": style = Style() recipe.style = style self.nodes_to_object(recipeProperty, style) elif tag_name == "mash": mash = Mash() recipe.mash = mash for mash_node in list(recipeProperty): if self.to_lower(mash_node.tag) == "mash_steps": for mash_step_node in list(mash_node): mash_step = MashStep() self.nodes_to_object(mash_step_node, mash_step) mash.steps.append(mash_step) else: self.nodes_to_object(mash_node, mash) else: self.node_to_object(recipeProperty, recipe) return recipes
python
def parse(self, xml_file): "Get a list of parsed recipes from BeerXML input" recipes = [] with open(xml_file, "rt") as f: tree = ElementTree.parse(f) for recipeNode in tree.iter(): if self.to_lower(recipeNode.tag) != "recipe": continue recipe = Recipe() recipes.append(recipe) for recipeProperty in list(recipeNode): tag_name = self.to_lower(recipeProperty.tag) if tag_name == "fermentables": for fermentable_node in list(recipeProperty): fermentable = Fermentable() self.nodes_to_object(fermentable_node, fermentable) recipe.fermentables.append(fermentable) elif tag_name == "yeasts": for yeast_node in list(recipeProperty): yeast = Yeast() self.nodes_to_object(yeast_node, yeast) recipe.yeasts.append(yeast) elif tag_name == "hops": for hop_node in list(recipeProperty): hop = Hop() self.nodes_to_object(hop_node, hop) recipe.hops.append(hop) elif tag_name == "miscs": for misc_node in list(recipeProperty): misc = Misc() self.nodes_to_object(misc_node, misc) recipe.miscs.append(misc) elif tag_name == "style": style = Style() recipe.style = style self.nodes_to_object(recipeProperty, style) elif tag_name == "mash": mash = Mash() recipe.mash = mash for mash_node in list(recipeProperty): if self.to_lower(mash_node.tag) == "mash_steps": for mash_step_node in list(mash_node): mash_step = MashStep() self.nodes_to_object(mash_step_node, mash_step) mash.steps.append(mash_step) else: self.nodes_to_object(mash_node, mash) else: self.node_to_object(recipeProperty, recipe) return recipes
[ "def", "parse", "(", "self", ",", "xml_file", ")", ":", "recipes", "=", "[", "]", "with", "open", "(", "xml_file", ",", "\"rt\"", ")", "as", "f", ":", "tree", "=", "ElementTree", ".", "parse", "(", "f", ")", "for", "recipeNode", "in", "tree", ".", "iter", "(", ")", ":", "if", "self", ".", "to_lower", "(", "recipeNode", ".", "tag", ")", "!=", "\"recipe\"", ":", "continue", "recipe", "=", "Recipe", "(", ")", "recipes", ".", "append", "(", "recipe", ")", "for", "recipeProperty", "in", "list", "(", "recipeNode", ")", ":", "tag_name", "=", "self", ".", "to_lower", "(", "recipeProperty", ".", "tag", ")", "if", "tag_name", "==", "\"fermentables\"", ":", "for", "fermentable_node", "in", "list", "(", "recipeProperty", ")", ":", "fermentable", "=", "Fermentable", "(", ")", "self", ".", "nodes_to_object", "(", "fermentable_node", ",", "fermentable", ")", "recipe", ".", "fermentables", ".", "append", "(", "fermentable", ")", "elif", "tag_name", "==", "\"yeasts\"", ":", "for", "yeast_node", "in", "list", "(", "recipeProperty", ")", ":", "yeast", "=", "Yeast", "(", ")", "self", ".", "nodes_to_object", "(", "yeast_node", ",", "yeast", ")", "recipe", ".", "yeasts", ".", "append", "(", "yeast", ")", "elif", "tag_name", "==", "\"hops\"", ":", "for", "hop_node", "in", "list", "(", "recipeProperty", ")", ":", "hop", "=", "Hop", "(", ")", "self", ".", "nodes_to_object", "(", "hop_node", ",", "hop", ")", "recipe", ".", "hops", ".", "append", "(", "hop", ")", "elif", "tag_name", "==", "\"miscs\"", ":", "for", "misc_node", "in", "list", "(", "recipeProperty", ")", ":", "misc", "=", "Misc", "(", ")", "self", ".", "nodes_to_object", "(", "misc_node", ",", "misc", ")", "recipe", ".", "miscs", ".", "append", "(", "misc", ")", "elif", "tag_name", "==", "\"style\"", ":", "style", "=", "Style", "(", ")", "recipe", ".", "style", "=", "style", "self", ".", "nodes_to_object", "(", "recipeProperty", ",", "style", ")", "elif", "tag_name", "==", "\"mash\"", ":", "for", "mash_node", "in", "list", "(", "recipeProperty", ")", ":", "mash", "=", "Mash", "(", ")", "recipe", ".", "mash", "=", "mash", "if", "self", ".", "to_lower", "(", "mash_node", ".", "tag", ")", "==", "\"mash_steps\"", ":", "for", "mash_step_node", "in", "list", "(", "mash_node", ")", ":", "mash_step", "=", "MashStep", "(", ")", "self", ".", "nodes_to_object", "(", "mash_step_node", ",", "mash_step", ")", "mash", ".", "steps", ".", "append", "(", "mash_step", ")", "else", ":", "self", ".", "nodes_to_object", "(", "mash_node", ",", "mash", ")", "else", ":", "self", ".", "node_to_object", "(", "recipeProperty", ",", "recipe", ")", "return", "recipes" ]
Get a list of parsed recipes from BeerXML input
[ "Get", "a", "list", "of", "parsed", "recipes", "from", "BeerXML", "input" ]
e9cf8d6090b1e01e5bbb101e255792b134affbe0
https://github.com/hotzenklotz/pybeerxml/blob/e9cf8d6090b1e01e5bbb101e255792b134affbe0/pybeerxml/parser.py#L40-L104
train
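Hypothetical usage of the parser; the file name is a placeholder, and printing recipe.name assumes the BeerXML <NAME> node was mapped onto the Recipe by node_to_object:

from pybeerxml.parser import Parser

for recipe in Parser().parse('recipes.xml'):  # placeholder path
    print(recipe.name)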
hotzenklotz/pybeerxml
pybeerxml/parser.py
Parser.to_lower
def to_lower(self, string): "Helper function to transform strings to lower case" value = None try: value = string.lower() except AttributeError: value = "" finally: return value
python
def to_lower(self, string): "Helper function to transform strings to lower case" value = None try: value = string.lower() except AttributeError: value = "" finally: return value
[ "def", "to_lower", "(", "self", ",", "string", ")", ":", "value", "=", "None", "try", ":", "value", "=", "string", ".", "lower", "(", ")", "except", "AttributeError", ":", "value", "=", "\"\"", "finally", ":", "return", "value" ]
Helper function to transform strings to lower case
[ "Helper", "function", "to", "transform", "strings", "to", "lower", "case" ]
e9cf8d6090b1e01e5bbb101e255792b134affbe0
https://github.com/hotzenklotz/pybeerxml/blob/e9cf8d6090b1e01e5bbb101e255792b134affbe0/pybeerxml/parser.py#L106-L114
train
maxzheng/localconfig
localconfig/manager.py
LocalConfig._to_dot_key
def _to_dot_key(cls, section, key=None): """ Return the section and key in dot notation format. """ if key: return (NON_ALPHA_NUM.sub('_', section.lower()), NON_ALPHA_NUM.sub('_', key.lower())) else: return NON_ALPHA_NUM.sub('_', section.lower())
python
def _to_dot_key(cls, section, key=None): """ Return the section and key in dot notation format. """ if key: return (NON_ALPHA_NUM.sub('_', section.lower()), NON_ALPHA_NUM.sub('_', key.lower())) else: return NON_ALPHA_NUM.sub('_', section.lower())
[ "def", "_to_dot_key", "(", "cls", ",", "section", ",", "key", "=", "None", ")", ":", "if", "key", ":", "return", "(", "NON_ALPHA_NUM", ".", "sub", "(", "'_'", ",", "section", ".", "lower", "(", ")", ")", ",", "NON_ALPHA_NUM", ".", "sub", "(", "'_'", ",", "key", ".", "lower", "(", ")", ")", ")", "else", ":", "return", "NON_ALPHA_NUM", ".", "sub", "(", "'_'", ",", "section", ".", "lower", "(", ")", ")" ]
Return the section and key in dot notation format.
[ "Return", "the", "section", "and", "key", "in", "dot", "notation", "format", "." ]
636087f2489295d9dae2693dda8a86e4daa4ff9d
https://github.com/maxzheng/localconfig/blob/636087f2489295d9dae2693dda8a86e4daa4ff9d/localconfig/manager.py#L102-L107
train
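NON_ALPHA_NUM is defined elsewhere in the module; assuming it matches runs of non-alphanumeric characters, the dot-key normalization reduces to:

import re

NON_ALPHA_NUM = re.compile(r'[^A-Za-z0-9]+')  # assumed definition

print(NON_ALPHA_NUM.sub('_', 'Web Server.Port'.lower()))  # web_server_port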
maxzheng/localconfig
localconfig/manager.py
LocalConfig.save
def save(self, target_file=None, as_template=False): """ Save the config :param str target_file: File to save to. Defaults to `self._last_source` if set :param bool as_template: Save the config with all keys and sections commented out for user to modify :raise AttributeError: if target file is not provided and `self._last_source` is not set """ self._read_sources() if not target_file: if not self._last_source: raise AttributeError('Target file is required when last source is not set during instantiation') target_file = self._last_source output = str(self) if as_template: output_tmpl = [] for line in output.split('\n'): if line and not line.startswith('#'): line = '# %s' % line output_tmpl.append(line) output = '\n'.join(output_tmpl) with open(target_file, 'w') as fp: fp.write(output)
python
def save(self, target_file=None, as_template=False): """ Save the config :param str target_file: File to save to. Defaults to `self._last_source` if set :param bool as_template: Save the config with all keys and sections commented out for user to modify :raise AttributeError: if target file is not provided and `self._last_source` is not set """ self._read_sources() if not target_file: if not self._last_source: raise AttributeError('Target file is required when last source is not set during instantiation') target_file = self._last_source output = str(self) if as_template: output_tmpl = [] for line in output.split('\n'): if line and not line.startswith('#'): line = '# %s' % line output_tmpl.append(line) output = '\n'.join(output_tmpl) with open(target_file, 'w') as fp: fp.write(output)
[ "def", "save", "(", "self", ",", "target_file", "=", "None", ",", "as_template", "=", "False", ")", ":", "self", ".", "_read_sources", "(", ")", "if", "not", "target_file", ":", "if", "not", "self", ".", "_last_source", ":", "raise", "AttributeError", "(", "'Target file is required when last source is not set during instantiation'", ")", "target_file", "=", "self", ".", "_last_source", "output", "=", "str", "(", "self", ")", "if", "as_template", ":", "output_tmpl", "=", "[", "]", "for", "line", "in", "output", ".", "split", "(", "'\\n'", ")", ":", "if", "line", "and", "not", "line", ".", "startswith", "(", "'#'", ")", ":", "line", "=", "'# %s'", "%", "line", "output_tmpl", ".", "append", "(", "line", ")", "output", "=", "'\\n'", ".", "join", "(", "output_tmpl", ")", "with", "open", "(", "target_file", ",", "'w'", ")", "as", "fp", ":", "fp", ".", "write", "(", "output", ")" ]
Save the config :param str target_file: File to save to. Defaults to `self._last_source` if set :param bool as_template: Save the config with all keys and sections commented out for user to modify :raise AttributeError: if target file is not provided and `self._last_source` is not set
[ "Save", "the", "config" ]
636087f2489295d9dae2693dda8a86e4daa4ff9d
https://github.com/maxzheng/localconfig/blob/636087f2489295d9dae2693dda8a86e4daa4ff9d/localconfig/manager.py#L203-L229
train
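The as_template transform from save, in isolation: non-empty lines that are not already comments get commented out, while blank lines and existing comments pass through unchanged:

config_text = "[db]\nhost = localhost\n\n# a comment\nport = 5432"
template = '\n'.join(
    '# %s' % line if line and not line.startswith('#') else line
    for line in config_text.split('\n'))
print(template)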
maxzheng/localconfig
localconfig/manager.py
LocalConfig._parse_extra
def _parse_extra(self, fp): """ Parse and store the config comments and create maps for dot notation lookup """ comment = '' section = '' fp.seek(0) for line in fp: line = line.rstrip() if not line: if comment: comment += '\n' continue if line.startswith('#'): # Comment comment += line + '\n' continue if line.startswith('['): # Section section = line.strip('[]') self._add_dot_key(section) if comment: self._comments[section] = comment.rstrip() elif CONFIG_KEY_RE.match(line): # Config key = line.split('=', 1)[0].strip() self._add_dot_key(section, key) if comment: self._comments[(section, key)] = comment.rstrip() comment = '' if comment: self._comments[self.LAST_COMMENT_KEY] = comment
python
def _parse_extra(self, fp): """ Parse and store the config comments and create maps for dot notation lookup """ comment = '' section = '' fp.seek(0) for line in fp: line = line.rstrip() if not line: if comment: comment += '\n' continue if line.startswith('#'): # Comment comment += line + '\n' continue if line.startswith('['): # Section section = line.strip('[]') self._add_dot_key(section) if comment: self._comments[section] = comment.rstrip() elif CONFIG_KEY_RE.match(line): # Config key = line.split('=', 1)[0].strip() self._add_dot_key(section, key) if comment: self._comments[(section, key)] = comment.rstrip() comment = '' if comment: self._comments[self.LAST_COMMENT_KEY] = comment
[ "def", "_parse_extra", "(", "self", ",", "fp", ")", ":", "comment", "=", "''", "section", "=", "''", "fp", ".", "seek", "(", "0", ")", "for", "line", "in", "fp", ":", "line", "=", "line", ".", "rstrip", "(", ")", "if", "not", "line", ":", "if", "comment", ":", "comment", "+=", "'\\n'", "continue", "if", "line", ".", "startswith", "(", "'#'", ")", ":", "# Comment", "comment", "+=", "line", "+", "'\\n'", "continue", "if", "line", ".", "startswith", "(", "'['", ")", ":", "# Section", "section", "=", "line", ".", "strip", "(", "'[]'", ")", "self", ".", "_add_dot_key", "(", "section", ")", "if", "comment", ":", "self", ".", "_comments", "[", "section", "]", "=", "comment", ".", "rstrip", "(", ")", "elif", "CONFIG_KEY_RE", ".", "match", "(", "line", ")", ":", "# Config", "key", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "[", "0", "]", ".", "strip", "(", ")", "self", ".", "_add_dot_key", "(", "section", ",", "key", ")", "if", "comment", ":", "self", ".", "_comments", "[", "(", "section", ",", "key", ")", "]", "=", "comment", ".", "rstrip", "(", ")", "comment", "=", "''", "if", "comment", ":", "self", ".", "_comments", "[", "self", ".", "LAST_COMMENT_KEY", "]", "=", "comment" ]
Parse and store the config comments and create maps for dot notation lookup
[ "Parse", "and", "store", "the", "config", "comments", "and", "create", "maps", "for", "dot", "notion", "lookup" ]
636087f2489295d9dae2693dda8a86e4daa4ff9d
https://github.com/maxzheng/localconfig/blob/636087f2489295d9dae2693dda8a86e4daa4ff9d/localconfig/manager.py#L231-L265
train
maxzheng/localconfig
localconfig/manager.py
LocalConfig._typed_value
def _typed_value(self, value): """ Transform string value to an actual data type of the same value. """ if value not in self._value_cache: new_value = value if is_int(value): new_value = int(value) elif is_float(value): new_value = float(value) elif is_bool(value): new_value = to_bool(value) elif is_none(value): new_value = None self._value_cache[value] = new_value return self._value_cache[value]
python
def _typed_value(self, value): """ Transform string value to an actual data type of the same value. """ if value not in self._value_cache: new_value = value if is_int(value): new_value = int(value) elif is_float(value): new_value = float(value) elif is_bool(value): new_value = to_bool(value) elif is_none(value): new_value = None self._value_cache[value] = new_value return self._value_cache[value]
[ "def", "_typed_value", "(", "self", ",", "value", ")", ":", "if", "value", "not", "in", "self", ".", "_value_cache", ":", "new_value", "=", "value", "if", "is_int", "(", "value", ")", ":", "new_value", "=", "int", "(", "value", ")", "elif", "is_float", "(", "value", ")", ":", "new_value", "=", "float", "(", "value", ")", "elif", "is_bool", "(", "value", ")", ":", "new_value", "=", "to_bool", "(", "value", ")", "elif", "is_none", "(", "value", ")", ":", "new_value", "=", "None", "self", ".", "_value_cache", "[", "value", "]", "=", "new_value", "return", "self", ".", "_value_cache", "[", "value", "]" ]
Transform string value to an actual data type of the same value.
[ "Transform", "string", "value", "to", "an", "actual", "data", "type", "of", "the", "same", "value", "." ]
636087f2489295d9dae2693dda8a86e4daa4ff9d
https://github.com/maxzheng/localconfig/blob/636087f2489295d9dae2693dda8a86e4daa4ff9d/localconfig/manager.py#L330-L345
train
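is_int, is_float, is_bool, to_bool and is_none are localconfig helpers; a rough stand-in with their assumed semantics, minus the value cache:

def typed_value(value):
    # assumed coercion order: int, float, bool-like words, none-like words
    if value.lstrip('-').isdigit():
        return int(value)
    try:
        return float(value)
    except ValueError:
        pass
    lowered = value.lower()
    if lowered in ('true', 'false', 'yes', 'no', 'on', 'off'):
        return lowered in ('true', 'yes', 'on')
    if lowered in ('none', 'null'):
        return None
    return value

print([typed_value(v) for v in ('42', '3.14', 'yes', 'none', 'text')])
# [42, 3.14, True, None, 'text']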
maxzheng/localconfig
localconfig/manager.py
LocalConfig.add_section
def add_section(self, section, comment=None): """ Add a section :param str section: Section to add :raise DuplicateSectionError: if section already exists. """ self._read_sources() if self._to_dot_key(section) in self._dot_keys: raise DuplicateSectionError(section) self._parser.add_section(section) self._add_dot_key(section) if comment: self._set_comment(section, comment)
python
def add_section(self, section, comment=None): """ Add a section :param str section: Section to add :raise DuplicateSectionError: if section already exists. """ self._read_sources() if self._to_dot_key(section) in self._dot_keys: raise DuplicateSectionError(section) self._parser.add_section(section) self._add_dot_key(section) if comment: self._set_comment(section, comment)
[ "def", "add_section", "(", "self", ",", "section", ",", "comment", "=", "None", ")", ":", "self", ".", "_read_sources", "(", ")", "if", "self", ".", "_to_dot_key", "(", "section", ")", "in", "self", ".", "_dot_keys", ":", "raise", "DuplicateSectionError", "(", "section", ")", "self", ".", "_parser", ".", "add_section", "(", "section", ")", "self", ".", "_add_dot_key", "(", "section", ")", "if", "comment", ":", "self", ".", "_set_comment", "(", "section", ",", "comment", ")" ]
Add a section :param str section: Section to add :raise DuplicateSectionError: if section already exists.
[ "Add", "a", "section" ]
636087f2489295d9dae2693dda8a86e4daa4ff9d
https://github.com/maxzheng/localconfig/blob/636087f2489295d9dae2693dda8a86e4daa4ff9d/localconfig/manager.py#L381-L396
train
maxzheng/localconfig
localconfig/manager.py
LocalConfig._set_comment
def _set_comment(self, section, comment, key=None): """ Set a comment for section or key :param str section: Section to add comment to :param str comment: Comment to add :param str key: Key to add comment to """ if '\n' in comment: comment = '\n# '.join(comment.split('\n')) comment = '# ' + comment if key: self._comments[(section, key)] = comment else: self._comments[section] = comment
python
def _set_comment(self, section, comment, key=None): """ Set a comment for section or key :param str section: Section to add comment to :param str comment: Comment to add :param str key: Key to add comment to """ if '\n' in comment: comment = '\n# '.join(comment.split('\n')) comment = '# ' + comment if key: self._comments[(section, key)] = comment else: self._comments[section] = comment
[ "def", "_set_comment", "(", "self", ",", "section", ",", "comment", ",", "key", "=", "None", ")", ":", "if", "'\\n'", "in", "comment", ":", "comment", "=", "'\\n# '", ".", "join", "(", "comment", ".", "split", "(", "'\\n'", ")", ")", "comment", "=", "'# '", "+", "comment", "if", "key", ":", "self", ".", "_comments", "[", "(", "section", ",", "key", ")", "]", "=", "comment", "else", ":", "self", ".", "_comments", "[", "section", "]", "=", "comment" ]
Set a comment for section or key :param str section: Section to add comment to :param str comment: Comment to add :param str key: Key to add comment to
[ "Set", "a", "comment", "for", "section", "or", "key" ]
636087f2489295d9dae2693dda8a86e4daa4ff9d
https://github.com/maxzheng/localconfig/blob/636087f2489295d9dae2693dda8a86e4daa4ff9d/localconfig/manager.py#L398-L414
train
thiagopbueno/tf-rddlsim
tfrddlsim/policy/random_policy.py
RandomPolicy._sample_actions
def _sample_actions(self, state: Sequence[tf.Tensor]) -> Tuple[Sequence[tf.Tensor], tf.Tensor, tf.Tensor]: '''Returns sampled action fluents and tensors related to the sampling. Args: state (Sequence[tf.Tensor]): A list of state fluents. Returns: Tuple[Sequence[tf.Tensor], tf.Tensor, tf.Tensor]: A tuple with action fluents, an integer tensor for the number of samples, and a boolean tensor for checking all action preconditions. ''' default = self.compiler.compile_default_action(self.batch_size) bound_constraints = self.compiler.compile_action_bound_constraints(state) action = self._sample_action(bound_constraints, default) n, action, checking = self._check_preconditions(state, action, bound_constraints, default) return action, n, checking
python
def _sample_actions(self, state: Sequence[tf.Tensor]) -> Tuple[Sequence[tf.Tensor], tf.Tensor, tf.Tensor]: '''Returns sampled action fluents and tensors related to the sampling. Args: state (Sequence[tf.Tensor]): A list of state fluents. Returns: Tuple[Sequence[tf.Tensor], tf.Tensor, tf.Tensor]: A tuple with action fluents, an integer tensor for the number of samples, and a boolean tensor for checking all action preconditions. ''' default = self.compiler.compile_default_action(self.batch_size) bound_constraints = self.compiler.compile_action_bound_constraints(state) action = self._sample_action(bound_constraints, default) n, action, checking = self._check_preconditions(state, action, bound_constraints, default) return action, n, checking
[ "def", "_sample_actions", "(", "self", ",", "state", ":", "Sequence", "[", "tf", ".", "Tensor", "]", ")", "->", "Tuple", "[", "Sequence", "[", "tf", ".", "Tensor", "]", ",", "tf", ".", "Tensor", ",", "tf", ".", "Tensor", "]", ":", "default", "=", "self", ".", "compiler", ".", "compile_default_action", "(", "self", ".", "batch_size", ")", "bound_constraints", "=", "self", ".", "compiler", ".", "compile_action_bound_constraints", "(", "state", ")", "action", "=", "self", ".", "_sample_action", "(", "bound_constraints", ",", "default", ")", "n", ",", "action", ",", "checking", "=", "self", ".", "_check_preconditions", "(", "state", ",", "action", ",", "bound_constraints", ",", "default", ")", "return", "action", ",", "n", ",", "checking" ]
Returns sampled action fluents and tensors related to the sampling. Args: state (Sequence[tf.Tensor]): A list of state fluents. Returns: Tuple[Sequence[tf.Tensor], tf.Tensor, tf.Tensor]: A tuple with action fluents, an integer tensor for the number of samples, and a boolean tensor for checking all action preconditions.
[ "Returns", "sampled", "action", "fluents", "and", "tensors", "related", "to", "the", "sampling", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/policy/random_policy.py#L70-L86
train
thiagopbueno/tf-rddlsim
tfrddlsim/policy/random_policy.py
RandomPolicy._check_preconditions
def _check_preconditions(self, state: Sequence[tf.Tensor], action: Sequence[tf.Tensor], bound_constraints: Dict[str, Constraints], default: Sequence[tf.Tensor]) -> Tuple[tf.Tensor, Sequence[tf.Tensor], tf.Tensor]: '''Samples action fluents until all preconditions are satisfied. Checks action preconditions for the sampled `action` and current `state`, and iff all preconditions are satisfied it returns the sampled action fluents. Args: state (Sequence[tf.Tensor]): A list of state fluents. action (Sequence[tf.Tensor]): A list of action fluents. bound_constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default (Sequence[tf.Tensor]): The default action fluents. Returns: Tuple[tf.Tensor, Sequence[tf.Tensor], tf.Tensor]: A tuple with an integer tensor corresponding to the number of samples, action fluents and a boolean tensor for checking all action preconditions. ''' def condition(i, a, checking): not_checking = tf.reduce_any(tf.logical_not(checking)) return not_checking def body(i, a, checking): new_action = [] new_sampled_action = self._sample_action(bound_constraints, default) new_preconds_checking = self.compiler.compile_action_preconditions_checking(state, new_sampled_action) for action_fluent, new_sampled_action_fluent in zip(a, new_sampled_action): new_action_fluent = tf.where(checking, action_fluent, new_sampled_action_fluent) new_action.append(new_action_fluent) new_action = tuple(new_action) new_checking = tf.logical_or(checking, new_preconds_checking) return (i + 1, new_action, new_checking) i0 = tf.constant(0) preconds_checking = self.compiler.compile_action_preconditions_checking(state, action) return tf.while_loop(condition, body, loop_vars=[i0, action, preconds_checking])
python
def _check_preconditions(self, state: Sequence[tf.Tensor], action: Sequence[tf.Tensor], bound_constraints: Dict[str, Constraints], default: Sequence[tf.Tensor]) -> Tuple[tf.Tensor, Sequence[tf.Tensor], tf.Tensor]: '''Samples action fluents until all preconditions are satisfied. Checks action preconditions for the sampled `action` and current `state`, and iff all preconditions are satisfied it returns the sampled action fluents. Args: state (Sequence[tf.Tensor]): A list of state fluents. action (Sequence[tf.Tensor]): A list of action fluents. bound_constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default (Sequence[tf.Tensor]): The default action fluents. Returns: Tuple[tf.Tensor, Sequence[tf.Tensor], tf.Tensor]: A tuple with an integer tensor corresponding to the number of samples, action fluents and a boolean tensor for checking all action preconditions. ''' def condition(i, a, checking): not_checking = tf.reduce_any(tf.logical_not(checking)) return not_checking def body(i, a, checking): new_action = [] new_sampled_action = self._sample_action(bound_constraints, default) new_preconds_checking = self.compiler.compile_action_preconditions_checking(state, new_sampled_action) for action_fluent, new_sampled_action_fluent in zip(a, new_sampled_action): new_action_fluent = tf.where(checking, action_fluent, new_sampled_action_fluent) new_action.append(new_action_fluent) new_action = tuple(new_action) new_checking = tf.logical_or(checking, new_preconds_checking) return (i + 1, new_action, new_checking) i0 = tf.constant(0) preconds_checking = self.compiler.compile_action_preconditions_checking(state, action) return tf.while_loop(condition, body, loop_vars=[i0, action, preconds_checking])
[ "def", "_check_preconditions", "(", "self", ",", "state", ":", "Sequence", "[", "tf", ".", "Tensor", "]", ",", "action", ":", "Sequence", "[", "tf", ".", "Tensor", "]", ",", "bound_constraints", ":", "Dict", "[", "str", ",", "Constraints", "]", ",", "default", ":", "Sequence", "[", "tf", ".", "Tensor", "]", ")", "->", "Tuple", "[", "tf", ".", "Tensor", ",", "Sequence", "[", "tf", ".", "Tensor", "]", ",", "tf", ".", "Tensor", "]", ":", "def", "condition", "(", "i", ",", "a", ",", "checking", ")", ":", "not_checking", "=", "tf", ".", "reduce_any", "(", "tf", ".", "logical_not", "(", "checking", ")", ")", "return", "not_checking", "def", "body", "(", "i", ",", "a", ",", "checking", ")", ":", "new_action", "=", "[", "]", "new_sampled_action", "=", "self", ".", "_sample_action", "(", "bound_constraints", ",", "default", ")", "new_preconds_checking", "=", "self", ".", "compiler", ".", "compile_action_preconditions_checking", "(", "state", ",", "new_sampled_action", ")", "for", "action_fluent", ",", "new_sampled_action_fluent", "in", "zip", "(", "a", ",", "new_sampled_action", ")", ":", "new_action_fluent", "=", "tf", ".", "where", "(", "checking", ",", "action_fluent", ",", "new_sampled_action_fluent", ")", "new_action", ".", "append", "(", "new_action_fluent", ")", "new_action", "=", "tuple", "(", "new_action", ")", "new_checking", "=", "tf", ".", "logical_or", "(", "checking", ",", "new_preconds_checking", ")", "return", "(", "i", "+", "1", ",", "new_action", ",", "new_checking", ")", "i0", "=", "tf", ".", "constant", "(", "0", ")", "preconds_checking", "=", "self", ".", "compiler", ".", "compile_action_preconditions_checking", "(", "state", ",", "action", ")", "return", "tf", ".", "while_loop", "(", "condition", ",", "body", ",", "loop_vars", "=", "[", "i0", ",", "action", ",", "preconds_checking", "]", ")" ]
Samples action fluents until all preconditions are satisfied. Checks action preconditions for the sampled `action` and current `state`, and iff all preconditions are satisfied it returns the sampled action fluents. Args: state (Sequence[tf.Tensor]): A list of state fluents. action (Sequence[tf.Tensor]): A list of action fluents. bound_constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default (Sequence[tf.Tensor]): The default action fluents. Returns: Tuple[tf.Tensor, Sequence[tf.Tensor], tf.Tensor]: A tuple with an integer tensor corresponding to the number of samples, action fluents and a boolean tensor for checking all action preconditions.
[ "Samples", "action", "fluents", "until", "all", "preconditions", "are", "satisfied", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/policy/random_policy.py#L88-L127
train
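The tf.while_loop above performs per-batch-element rejection sampling; restated in plain Python, treating the batch as a list of scalars and using sample/preconditions_ok as stand-ins for the compiled ops:

import random

def resample_until_valid(sample, preconditions_ok, action):
    count = 0
    ok = preconditions_ok(action)
    while not all(ok):
        fresh = sample()
        # keep elements that already passed, resample the rest
        action = [a if good else f for a, f, good in zip(action, fresh, ok)]
        ok = [o or p for o, p in zip(ok, preconditions_ok(action))]
        count += 1
    return count, action, ok

sample = lambda: [random.random() for _ in range(4)]
valid = lambda batch: [x > 0.5 for x in batch]  # toy precondition
print(resample_until_valid(sample, valid, sample()))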
thiagopbueno/tf-rddlsim
tfrddlsim/policy/random_policy.py
RandomPolicy._sample_action
def _sample_action(self, constraints: Dict[str, Constraints], default: Sequence[tf.Tensor], prob: float = 0.3) -> Sequence[tf.Tensor]: '''Samples action fluents respecting the given bound `constraints`. With probability `prob` it chooses the action fluent default value, with probability 1-`prob` it samples the fluent w.r.t. its bounds. Args: constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default (Sequence[tf.Tensor]): The default action fluents. prob (float): A probability measure. Returns: Sequence[tf.Tensor]: A tuple of action fluents. ''' ordering = self.compiler.rddl.domain.action_fluent_ordering dtypes = map(rddl2tf.utils.range_type_to_dtype, self.compiler.rddl.action_range_type) size = self.compiler.rddl.action_size action = [] for name, dtype, size, default_value in zip(ordering, dtypes, size, default): action_fluent = self._sample_action_fluent(name, dtype, size, constraints, default_value, prob) action.append(action_fluent) return tuple(action)
python
def _sample_action(self, constraints: Dict[str, Constraints], default: Sequence[tf.Tensor], prob: float = 0.3) -> Sequence[tf.Tensor]: '''Samples action fluents respecting the given bound `constraints`. With probability `prob` it chooses the action fluent default value, with probability 1-`prob` it samples the fluent w.r.t. its bounds. Args: constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default (Sequence[tf.Tensor]): The default action fluents. prob (float): A probability measure. Returns: Sequence[tf.Tensor]: A tuple of action fluents. ''' ordering = self.compiler.rddl.domain.action_fluent_ordering dtypes = map(rddl2tf.utils.range_type_to_dtype, self.compiler.rddl.action_range_type) size = self.compiler.rddl.action_size action = [] for name, dtype, size, default_value in zip(ordering, dtypes, size, default): action_fluent = self._sample_action_fluent(name, dtype, size, constraints, default_value, prob) action.append(action_fluent) return tuple(action)
[ "def", "_sample_action", "(", "self", ",", "constraints", ":", "Dict", "[", "str", ",", "Constraints", "]", ",", "default", ":", "Sequence", "[", "tf", ".", "Tensor", "]", ",", "prob", ":", "float", "=", "0.3", ")", "->", "Sequence", "[", "tf", ".", "Tensor", "]", ":", "ordering", "=", "self", ".", "compiler", ".", "rddl", ".", "domain", ".", "action_fluent_ordering", "dtypes", "=", "map", "(", "rddl2tf", ".", "utils", ".", "range_type_to_dtype", ",", "self", ".", "compiler", ".", "rddl", ".", "action_range_type", ")", "size", "=", "self", ".", "compiler", ".", "rddl", ".", "action_size", "action", "=", "[", "]", "for", "name", ",", "dtype", ",", "size", ",", "default_value", "in", "zip", "(", "ordering", ",", "dtypes", ",", "size", ",", "default", ")", ":", "action_fluent", "=", "self", ".", "_sample_action_fluent", "(", "name", ",", "dtype", ",", "size", ",", "constraints", ",", "default_value", ",", "prob", ")", "action", ".", "append", "(", "action_fluent", ")", "return", "tuple", "(", "action", ")" ]
Samples action fluents respecting the given bound `constraints`. With probability `prob` it chooses the action fluent default value, with probability 1-`prob` it samples the fluent w.r.t. its bounds. Args: constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default (Sequence[tf.Tensor]): The default action fluents. prob (float): A probability measure. Returns: Sequence[tf.Tensor]: A tuple of action fluents.
[ "Samples", "action", "fluents", "respecting", "the", "given", "bound", "constraints", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/policy/random_policy.py#L129-L155
train
thiagopbueno/tf-rddlsim
tfrddlsim/policy/random_policy.py
RandomPolicy._sample_action_fluent
def _sample_action_fluent(self, name: str, dtype: tf.DType, size: Sequence[int], constraints: Dict[str, Constraints], default_value: tf.Tensor, prob: float) -> tf.Tensor: '''Samples the action fluent with given `name`, `dtype`, and `size`. With probability `prob` it chooses the action fluent `default_value`, with probability 1-`prob` it samples the fluent w.r.t. its `constraints`. Args: name (str): The name of the action fluent. dtype (tf.DType): The data type of the action fluent. size (Sequence[int]): The size and shape of the action fluent. constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default_value (tf.Tensor): The default value for the action fluent. prob (float): A probability measure. Returns: tf.Tensor: A tensor for sampling the action fluent. ''' shape = [self.batch_size] + list(size) if dtype == tf.float32: bounds = constraints.get(name) if bounds is None: low, high = -self.MAX_REAL_VALUE, self.MAX_REAL_VALUE dist = tf.distributions.Uniform(low=low, high=high) sampled_fluent = dist.sample(shape) else: low, high = bounds batch = (low is not None and low.batch) or (high is not None and high.batch) low = tf.cast(low.tensor, tf.float32) if low is not None else -self.MAX_REAL_VALUE high = tf.cast(high.tensor, tf.float32) if high is not None else self.MAX_REAL_VALUE dist = tf.distributions.Uniform(low=low, high=high) if batch: sampled_fluent = dist.sample() elif isinstance(low, tf.Tensor) or isinstance(high, tf.Tensor): if (low+high).shape.as_list() == list(size): sampled_fluent = dist.sample([self.batch_size]) else: raise ValueError('bounds are not compatible with action fluent.') else: sampled_fluent = dist.sample(shape) elif dtype == tf.int32: logits = [1.0] * self.MAX_INT_VALUE dist = tf.distributions.Categorical(logits=logits, dtype=tf.int32) sampled_fluent = dist.sample(shape) elif dtype == tf.bool: probs = 0.5 dist = tf.distributions.Bernoulli(probs=probs, dtype=tf.bool) sampled_fluent = dist.sample(shape) select_default = tf.distributions.Bernoulli(probs=prob, dtype=tf.bool).sample(self.batch_size) action_fluent = tf.where(select_default, default_value, sampled_fluent) return action_fluent
python
def _sample_action_fluent(self, name: str, dtype: tf.DType, size: Sequence[int], constraints: Dict[str, Constraints], default_value: tf.Tensor, prob: float) -> tf.Tensor: '''Samples the action fluent with given `name`, `dtype`, and `size`. With probability `prob` it chooses the action fluent `default_value`, with probability 1-`prob` it samples the fluent w.r.t. its `constraints`. Args: name (str): The name of the action fluent. dtype (tf.DType): The data type of the action fluent. size (Sequence[int]): The size and shape of the action fluent. constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default_value (tf.Tensor): The default value for the action fluent. prob (float): A probability measure. Returns: tf.Tensor: A tensor for sampling the action fluent. ''' shape = [self.batch_size] + list(size) if dtype == tf.float32: bounds = constraints.get(name) if bounds is None: low, high = -self.MAX_REAL_VALUE, self.MAX_REAL_VALUE dist = tf.distributions.Uniform(low=low, high=high) sampled_fluent = dist.sample(shape) else: low, high = bounds batch = (low is not None and low.batch) or (high is not None and high.batch) low = tf.cast(low.tensor, tf.float32) if low is not None else -self.MAX_REAL_VALUE high = tf.cast(high.tensor, tf.float32) if high is not None else self.MAX_REAL_VALUE dist = tf.distributions.Uniform(low=low, high=high) if batch: sampled_fluent = dist.sample() elif isinstance(low, tf.Tensor) or isinstance(high, tf.Tensor): if (low+high).shape.as_list() == list(size): sampled_fluent = dist.sample([self.batch_size]) else: raise ValueError('bounds are not compatible with action fluent.') else: sampled_fluent = dist.sample(shape) elif dtype == tf.int32: logits = [1.0] * self.MAX_INT_VALUE dist = tf.distributions.Categorical(logits=logits, dtype=tf.int32) sampled_fluent = dist.sample(shape) elif dtype == tf.bool: probs = 0.5 dist = tf.distributions.Bernoulli(probs=probs, dtype=tf.bool) sampled_fluent = dist.sample(shape) select_default = tf.distributions.Bernoulli(probs=prob, dtype=tf.bool).sample(self.batch_size) action_fluent = tf.where(select_default, default_value, sampled_fluent) return action_fluent
[ "def", "_sample_action_fluent", "(", "self", ",", "name", ":", "str", ",", "dtype", ":", "tf", ".", "DType", ",", "size", ":", "Sequence", "[", "int", "]", ",", "constraints", ":", "Dict", "[", "str", ",", "Constraints", "]", ",", "default_value", ":", "tf", ".", "Tensor", ",", "prob", ":", "float", ")", "->", "tf", ".", "Tensor", ":", "shape", "=", "[", "self", ".", "batch_size", "]", "+", "list", "(", "size", ")", "if", "dtype", "==", "tf", ".", "float32", ":", "bounds", "=", "constraints", ".", "get", "(", "name", ")", "if", "bounds", "is", "None", ":", "low", ",", "high", "=", "-", "self", ".", "MAX_REAL_VALUE", ",", "self", ".", "MAX_REAL_VALUE", "dist", "=", "tf", ".", "distributions", ".", "Uniform", "(", "low", "=", "low", ",", "high", "=", "high", ")", "sampled_fluent", "=", "dist", ".", "sample", "(", "shape", ")", "else", ":", "low", ",", "high", "=", "bounds", "batch", "=", "(", "low", "is", "not", "None", "and", "low", ".", "batch", ")", "or", "(", "high", "is", "not", "None", "and", "high", ".", "batch", ")", "low", "=", "tf", ".", "cast", "(", "low", ".", "tensor", ",", "tf", ".", "float32", ")", "if", "low", "is", "not", "None", "else", "-", "self", ".", "MAX_REAL_VALUE", "high", "=", "tf", ".", "cast", "(", "high", ".", "tensor", ",", "tf", ".", "float32", ")", "if", "high", "is", "not", "None", "else", "self", ".", "MAX_REAL_VALUE", "dist", "=", "tf", ".", "distributions", ".", "Uniform", "(", "low", "=", "low", ",", "high", "=", "high", ")", "if", "batch", ":", "sampled_fluent", "=", "dist", ".", "sample", "(", ")", "elif", "isinstance", "(", "low", ",", "tf", ".", "Tensor", ")", "or", "isinstance", "(", "high", ",", "tf", ".", "Tensor", ")", ":", "if", "(", "low", "+", "high", ")", ".", "shape", ".", "as_list", "(", ")", "==", "list", "(", "size", ")", ":", "sampled_fluent", "=", "dist", ".", "sample", "(", "[", "self", ".", "batch_size", "]", ")", "else", ":", "raise", "ValueError", "(", "'bounds are not compatible with action fluent.'", ")", "else", ":", "sampled_fluent", "=", "dist", ".", "sample", "(", "shape", ")", "elif", "dtype", "==", "tf", ".", "int32", ":", "logits", "=", "[", "1.0", "]", "*", "self", ".", "MAX_INT_VALUE", "dist", "=", "tf", ".", "distributions", ".", "Categorical", "(", "logits", "=", "logits", ",", "dtype", "=", "tf", ".", "int32", ")", "sampled_fluent", "=", "dist", ".", "sample", "(", "shape", ")", "elif", "dtype", "==", "tf", ".", "bool", ":", "probs", "=", "0.5", "dist", "=", "tf", ".", "distributions", ".", "Bernoulli", "(", "probs", "=", "probs", ",", "dtype", "=", "tf", ".", "bool", ")", "sampled_fluent", "=", "dist", ".", "sample", "(", "shape", ")", "select_default", "=", "tf", ".", "distributions", ".", "Bernoulli", "(", "prob", ",", "dtype", "=", "tf", ".", "bool", ")", ".", "sample", "(", "self", ".", "batch_size", ")", "action_fluent", "=", "tf", ".", "where", "(", "select_default", ",", "default_value", ",", "sampled_fluent", ")", "return", "action_fluent" ]
Samples the action fluent with given `name`, `dtype`, and `size`. With probability `prob` it chooses the action fluent `default_value`, with probability 1-`prob` it samples the fluent w.r.t. its `constraints`. Args: name (str): The name of the action fluent. dtype (tf.DType): The data type of the action fluent. size (Sequence[int]): The size and shape of the action fluent. constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent. default_value (tf.Tensor): The default value for the action fluent. prob (float): A probability measure. Returns: tf.Tensor: A tensor for sampling the action fluent.
[ "Samples", "the", "action", "fluent", "with", "given", "name", "dtype", "and", "size", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/policy/random_policy.py#L157-L215
train
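A hedged usage sketch of the sample-or-default pattern implemented in the record above, written against TF 1.x (`tf.distributions` was removed in TF 2.x). All concrete values (`batch_size`, bounds, shapes) are placeholders, not part of tf-rddlsim's API. One caveat worth flagging: in TF 1.x the first positional argument of `tf.distributions.Bernoulli` is `logits`, so the call `Bernoulli(prob, dtype=tf.bool)` in the record appears to interpret `prob` as a logit rather than a probability; the sketch below passes `probs=` by keyword.

import tensorflow as tf  # TF 1.x assumed; tf.distributions is gone in TF 2.x

batch_size = 32
size = [3]                       # per-fluent shape (placeholder)
shape = [batch_size] + size
low, high = -8.0, 8.0            # stand-ins for the fluent's bounds
prob = 0.3                       # chance of keeping the default action

default_value = tf.zeros(shape)  # stand-in for the fluent's default value

# Candidate actions drawn uniformly within the bounds.
sampled = tf.distributions.Uniform(low=low, high=high).sample(shape)

# One keep/replace decision per batch entry. `probs=` is passed by keyword
# because the first positional argument of Bernoulli is `logits`.
select_default = tf.distributions.Bernoulli(
    probs=prob, dtype=tf.bool).sample(batch_size)

# With a rank-1 condition, tf.where selects whole rows (batch entries).
action = tf.where(select_default, default_value, sampled)

with tf.Session() as sess:
    print(sess.run(action).shape)   # (32, 3)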
Nic30/hwtGraph
hwtGraph/elk/fromHwt/convertor.py
UnitToLNode
def UnitToLNode(u: Unit, node: Optional[LNode]=None, toL: Optional[dict]=None, optimizations=[]) -> LNode: """ Build LNode instance from Unit instance :attention: unit has to be synthesized """ if toL is None: toL = {} if node is None: root = LNode(name=u._name, originObj=u, node2lnode=toL) else: root = node stmPorts = {} # {RtlSignal: NetCtx} netCtx = NetCtxs(root) # create subunits for su in u._units: n = root.addNode(name=su._name, originObj=su) UnitToLNode(su, n, toL, optimizations) # create subunits from statements for stm in u._ctx.statements: n = addStmAsLNode(root, stm, stmPorts, netCtx) # create ports for this unit for intf in u._interfaces: addPort(root, intf) # render content of statements for stm in u._ctx.statements: n = toL.get(stm, None) if n is not None: if isinstance(n, VirtualLNode): # statement is not in wrap and does not need any port context p = None else: # statement is in wrap and needs a port context # to resolve port connections to wrap p = stmPorts[n] r = StatementRenderer(n, toL, p, netCtx) r.renderContent() # connect nets inside this unit for s in u._ctx.signals: if not s.hidden: net, _ = netCtx.getDefault(s) for e in s.endpoints: if isinstance(e, PortItem): net.addEndpoint(toL[e]) for d in s.drivers: if isinstance(d, PortItem): net.addDriver(toL[d]) netCtx.applyConnections(root) for opt in optimizations: opt(root) isRootOfWholeGraph = root.parent is None if not isRootOfWholeGraph: for intf in u._interfaces: # connect my external port to port on my container on parent # also override toL to use this new port ext_p = toL[originObjOfPort(intf)].parentNode nodePort = addPortToLNode(root, intf) # connect this node which represents port to port of this node if intf._direction == INTF_DIRECTION.SLAVE: src = nodePort dst = ext_p.addPort("", PortType.INPUT, PortSide.WEST) else: src = ext_p.addPort("", PortType.OUTPUT, PortSide.EAST) dst = nodePort root.addEdge(src, dst, name=repr(intf), originObj=intf) return root
python
def UnitToLNode(u: Unit, node: Optional[LNode]=None, toL: Optional[dict]=None, optimizations=[]) -> LNode: """ Build LNode instance from Unit instance :attention: unit has to be synthesized """ if toL is None: toL = {} if node is None: root = LNode(name=u._name, originObj=u, node2lnode=toL) else: root = node stmPorts = {} # {RtlSignal: NetCtx} netCtx = NetCtxs(root) # create subunits for su in u._units: n = root.addNode(name=su._name, originObj=su) UnitToLNode(su, n, toL, optimizations) # create subunits from statements for stm in u._ctx.statements: n = addStmAsLNode(root, stm, stmPorts, netCtx) # create ports for this unit for intf in u._interfaces: addPort(root, intf) # render content of statements for stm in u._ctx.statements: n = toL.get(stm, None) if n is not None: if isinstance(n, VirtualLNode): # statement is not in wrap and does not need any port context p = None else: # statement is in wrap and needs a port context # to resolve port connections to wrap p = stmPorts[n] r = StatementRenderer(n, toL, p, netCtx) r.renderContent() # connect nets inside this unit for s in u._ctx.signals: if not s.hidden: net, _ = netCtx.getDefault(s) for e in s.endpoints: if isinstance(e, PortItem): net.addEndpoint(toL[e]) for d in s.drivers: if isinstance(d, PortItem): net.addDriver(toL[d]) netCtx.applyConnections(root) for opt in optimizations: opt(root) isRootOfWholeGraph = root.parent is None if not isRootOfWholeGraph: for intf in u._interfaces: # connect my external port to port on my container on parent # also override toL to use this new port ext_p = toL[originObjOfPort(intf)].parentNode nodePort = addPortToLNode(root, intf) # connect this node which represents port to port of this node if intf._direction == INTF_DIRECTION.SLAVE: src = nodePort dst = ext_p.addPort("", PortType.INPUT, PortSide.WEST) else: src = ext_p.addPort("", PortType.OUTPUT, PortSide.EAST) dst = nodePort root.addEdge(src, dst, name=repr(intf), originObj=intf) return root
[ "def", "UnitToLNode", "(", "u", ":", "Unit", ",", "node", ":", "Optional", "[", "LNode", "]", "=", "None", ",", "toL", ":", "Optional", "[", "dict", "]", "=", "None", ",", "optimizations", "=", "[", "]", ")", "->", "LNode", ":", "if", "toL", "is", "None", ":", "toL", "=", "{", "}", "if", "node", "is", "None", ":", "root", "=", "LNode", "(", "name", "=", "u", ".", "_name", ",", "originObj", "=", "u", ",", "node2lnode", "=", "toL", ")", "else", ":", "root", "=", "node", "stmPorts", "=", "{", "}", "# {RtlSignal: NetCtx}", "netCtx", "=", "NetCtxs", "(", "root", ")", "# create subunits", "for", "su", "in", "u", ".", "_units", ":", "n", "=", "root", ".", "addNode", "(", "name", "=", "su", ".", "_name", ",", "originObj", "=", "su", ")", "UnitToLNode", "(", "su", ",", "n", ",", "toL", ",", "optimizations", ")", "# create subunits from statements", "for", "stm", "in", "u", ".", "_ctx", ".", "statements", ":", "n", "=", "addStmAsLNode", "(", "root", ",", "stm", ",", "stmPorts", ",", "netCtx", ")", "# create ports for this unit", "for", "intf", "in", "u", ".", "_interfaces", ":", "addPort", "(", "root", ",", "intf", ")", "# render content of statements", "for", "stm", "in", "u", ".", "_ctx", ".", "statements", ":", "n", "=", "toL", ".", "get", "(", "stm", ",", "None", ")", "if", "n", "is", "not", "None", ":", "if", "isinstance", "(", "n", ",", "VirtualLNode", ")", ":", "# statement is not in wrap and does not need any port context", "p", "=", "None", "else", ":", "# statement is in wrap and needs a port context", "# to resolve port connections to wrap", "p", "=", "stmPorts", "[", "n", "]", "r", "=", "StatementRenderer", "(", "n", ",", "toL", ",", "p", ",", "netCtx", ")", "r", ".", "renderContent", "(", ")", "# connect nets inside this unit", "for", "s", "in", "u", ".", "_ctx", ".", "signals", ":", "if", "not", "s", ".", "hidden", ":", "net", ",", "_", "=", "netCtx", ".", "getDefault", "(", "s", ")", "for", "e", "in", "s", ".", "endpoints", ":", "if", "isinstance", "(", "e", ",", "PortItem", ")", ":", "net", ".", "addEndpoint", "(", "toL", "[", "e", "]", ")", "for", "d", "in", "s", ".", "drivers", ":", "if", "isinstance", "(", "d", ",", "PortItem", ")", ":", "net", ".", "addDriver", "(", "toL", "[", "d", "]", ")", "netCtx", ".", "applyConnections", "(", "root", ")", "for", "opt", "in", "optimizations", ":", "opt", "(", "root", ")", "isRootOfWholeGraph", "=", "root", ".", "parent", "is", "None", "if", "not", "isRootOfWholeGraph", ":", "for", "intf", "in", "u", ".", "_interfaces", ":", "# connect my external port to port on my container on parent", "# also override toL to use this new port", "ext_p", "=", "toL", "[", "originObjOfPort", "(", "intf", ")", "]", ".", "parentNode", "nodePort", "=", "addPortToLNode", "(", "root", ",", "intf", ")", "# connect this node which represents port to port of this node", "if", "intf", ".", "_direction", "==", "INTF_DIRECTION", ".", "SLAVE", ":", "src", "=", "nodePort", "dst", "=", "ext_p", ".", "addPort", "(", "\"\"", ",", "PortType", ".", "INPUT", ",", "PortSide", ".", "WEST", ")", "else", ":", "src", "=", "ext_p", ".", "addPort", "(", "\"\"", ",", "PortType", ".", "OUTPUT", ",", "PortSide", ".", "EAST", ")", "dst", "=", "nodePort", "root", ".", "addEdge", "(", "src", ",", "dst", ",", "name", "=", "repr", "(", "intf", ")", ",", "originObj", "=", "intf", ")", "return", "root" ]
Build LNode instance from Unit instance :attention: unit has to be synthesized
[ "Build", "LNode", "instance", "from", "Unit", "instance" ]
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/fromHwt/convertor.py#L20-L102
train
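A hedged driver sketch for `UnitToLNode`. The `synthesised` helper and the `SimpleUnit` example are assumptions drawn from typical hwt/hwtLib usage and may not match every release; the convertor itself only requires any already-synthesized `Unit`. Note also that `optimizations=[]` is a mutable default argument shared across calls in Python, so passing an explicit list is the safer convention.

# Hypothetical usage; the two hwt/hwtLib imports are assumptions, not
# verified against a specific release.
from hwt.synthesizer.utils import synthesised   # assumed helper name
from hwtLib.examples.simple import SimpleUnit   # any synthesizable Unit works

from hwtGraph.elk.fromHwt.convertor import UnitToLNode

u = SimpleUnit()
synthesised(u)   # UnitToLNode's docstring requires a synthesized unit

# Pass an explicit (possibly empty) list instead of relying on the
# mutable [] default shared between calls.
root = UnitToLNode(u, optimizations=[])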
redhat-openstack/python-tripleo-helper
tripleohelper/host0.py
Host0.configure
def configure(self, rhsm=None, repositories=None): """This method will configure the host0 and run the hypervisor.""" if rhsm is not None: self.rhsm_register(rhsm) if repositories is not None: self.enable_repositories(repositories) self.create_stack_user() self.deploy_hypervisor()
python
def configure(self, rhsm=None, repositories=None): """This method will configure the host0 and run the hypervisor.""" if rhsm is not None: self.rhsm_register(rhsm) if repositories is not None: self.enable_repositories(repositories) self.create_stack_user() self.deploy_hypervisor()
[ "def", "configure", "(", "self", ",", "rhsm", "=", "None", ",", "repositories", "=", "None", ")", ":", "if", "rhsm", "is", "not", "None", ":", "self", ".", "rhsm_register", "(", "rhsm", ")", "if", "repositories", "is", "not", "None", ":", "self", ".", "enable_repositories", "(", "repositories", ")", "self", ".", "create_stack_user", "(", ")", "self", ".", "deploy_hypervisor", "(", ")" ]
This method will configure the host0 and run the hypervisor.
[ "This", "method", "will", "configure", "the", "host0", "and", "run", "the", "hypervisor", "." ]
bfa165538335edb1088170c7a92f097167225c81
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/host0.py#L35-L42
train
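A hedged end-to-end sketch for `Host0.configure()`. The constructor arguments are inferred from how `Undercloud` is instantiated later in this module (`hostname`, `user`, `key_filename`) and are an assumption; the shapes of `rhsm` and `repositories` follow the way `build_undercloud_on_libvirt` consumes them below.

# Hypothetical driver; the Host0 constructor signature is assumed.
from tripleohelper.host0 import Host0

host0 = Host0(hostname='192.0.2.10', user='root',
              key_filename='/home/user/.ssh/id_rsa')

rhsm = {'login': 'jdoe@example.com', 'password': 'secret', 'pool_id': ''}
repositories = [{'type': 'rhsm_channel', 'name': 'rhel-7-server-rpms'}]

# Registers the host with RHSM, enables the repositories, creates the
# stack user, then chains into deploy_hypervisor() (next record).
host0.configure(rhsm=rhsm, repositories=repositories)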
redhat-openstack/python-tripleo-helper
tripleohelper/host0.py
Host0.deploy_hypervisor
def deploy_hypervisor(self): """Install the libvirtd and instack-undercloud packages. """ self.yum_install(['libvirt-daemon-driver-nwfilter', 'libvirt-client', 'libvirt-daemon-config-network', 'libvirt-daemon-driver-nodedev', 'libvirt-daemon-kvm', 'libvirt-python', 'libvirt-daemon-config-nwfilter', 'libvirt-glib', 'libvirt-daemon', 'libvirt-daemon-driver-storage', 'libvirt', 'libvirt-daemon-driver-network', 'libvirt-devel', 'libvirt-gobject', 'libvirt-daemon-driver-secret', 'libvirt-daemon-driver-qemu', 'libvirt-daemon-driver-interface', 'libguestfs-tools', 'virt-install', 'genisoimage', 'openstack-tripleo', 'instack-undercloud']) self.run('sed -i "s,#auth_unix_rw,auth_unix_rw," /etc/libvirt/libvirtd.conf') self.run('systemctl start libvirtd') self.run('systemctl status libvirtd') self.install_base_packages() self.clean_system() self.yum_update()
python
def deploy_hypervisor(self): """Install the libvirtd and instack-undercloud packages. """ self.yum_install(['libvirt-daemon-driver-nwfilter', 'libvirt-client', 'libvirt-daemon-config-network', 'libvirt-daemon-driver-nodedev', 'libvirt-daemon-kvm', 'libvirt-python', 'libvirt-daemon-config-nwfilter', 'libvirt-glib', 'libvirt-daemon', 'libvirt-daemon-driver-storage', 'libvirt', 'libvirt-daemon-driver-network', 'libvirt-devel', 'libvirt-gobject', 'libvirt-daemon-driver-secret', 'libvirt-daemon-driver-qemu', 'libvirt-daemon-driver-interface', 'libguestfs-tools', 'virt-install', 'genisoimage', 'openstack-tripleo', 'instack-undercloud']) self.run('sed -i "s,#auth_unix_rw,auth_unix_rw," /etc/libvirt/libvirtd.conf') self.run('systemctl start libvirtd') self.run('systemctl status libvirtd') self.install_base_packages() self.clean_system() self.yum_update()
[ "def", "deploy_hypervisor", "(", "self", ")", ":", "self", ".", "yum_install", "(", "[", "'libvirt-daemon-driver-nwfilter'", ",", "'libvirt-client'", ",", "'libvirt-daemon-config-network'", ",", "'libvirt-daemon-driver-nodedev'", ",", "'libvirt-daemon-kvm'", ",", "'libvirt-python'", ",", "'libvirt-daemon-config-nwfilter'", ",", "'libvirt-glib'", ",", "'libvirt-daemon'", ",", "'libvirt-daemon-driver-storage'", ",", "'libvirt'", ",", "'libvirt-daemon-driver-network'", ",", "'libvirt-devel'", ",", "'libvirt-gobject'", ",", "'libvirt-daemon-driver-secret'", ",", "'libvirt-daemon-driver-qemu'", ",", "'libvirt-daemon-driver-interface'", ",", "'libguestfs-tools'", ",", "'virt-install'", ",", "'genisoimage'", ",", "'openstack-tripleo'", ",", "'instack-undercloud'", "]", ")", "self", ".", "run", "(", "'sed -i \"s,#auth_unix_rw,auth_unix_rw,\" /etc/libvirt/libvirtd.conf'", ")", "self", ".", "run", "(", "'systemctl start libvirtd'", ")", "self", ".", "run", "(", "'systemctl status libvirtd'", ")", "self", ".", "install_base_packages", "(", ")", "self", ".", "clean_system", "(", ")", "self", ".", "yum_update", "(", ")" ]
Install the libvirtd and instack-undercloud packages.
[ "Install", "the", "libvirtd", "and", "instack", "-", "undercloud", "packages", "." ]
bfa165538335edb1088170c7a92f097167225c81
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/host0.py#L44-L54
train
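The record above is mostly a literal package list, but the libvirtd step deserves a note: the sed call uncomments `auth_unix_rw` in /etc/libvirt/libvirtd.conf before starting the daemon. A hedged post-deploy sanity check, relying only on the `run()` helper the methods in this class already use:

# After configure() (previous sketch) or a direct deploy_hypervisor() call:
host0.run('virsh version')   # fails if libvirtd is not answering
host0.run('grep ^auth_unix_rw /etc/libvirt/libvirtd.conf')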
redhat-openstack/python-tripleo-helper
tripleohelper/host0.py
Host0.build_undercloud_on_libvirt
def build_undercloud_on_libvirt(self, image_path, rhsm=None, repositories=[]): """Build the Undercloud by using instack-virt-setup script.""" self.run('sysctl net.ipv4.ip_forward=1') self.fetch_image(path=image_path, dest='/home/stack/guest_image.qcow2', user='stack') # NOTE(Gonéri): this is a hack for our OpenStack, the MTU of its outgoing route # is 1400 and libvirt do not provide a mechanism to adjust the guests MTU. self.run("LIBGUESTFS_BACKEND=direct virt-customize -a /home/stack/guest_image.qcow2 --run-command 'echo MTU=\"1400\" >> /etc/sysconfig/network-scripts/ifcfg-eth0'") env = Environment() env.loader = FileSystemLoader(pkg_data_filename('template')) template = env.get_template('virt-setup-env.j2') self.run('mkdir -p /home/stack/DIB', user='stack') self.run('cp -v /etc/yum.repos.d/*.repo /home/stack/DIB', user='stack') # NOTE(Gonéri): Hack to be sure DIB won't complain because of missing gpg files # self.run('sed -i "s,gpgcheck=1,gpgcheck=0," /home/stack/DIB/*.repo', user='stack') dib_yum_repo_conf = self.run('find /home/stack/DIB -type f', user='stack')[0].split() virt_setup_template = { 'dib_yum_repo_conf': dib_yum_repo_conf, 'node': { 'count': 2, 'mem': 6144, 'cpu': 2 }, 'undercloud_node_mem': 8192, 'guest_image_name': '/home/stack/guest_image.qcow2' } if rhsm is not None: virt_setup_template['rhsm'] = { 'login': rhsm.get('login'), 'password': rhsm.get('password', os.environ.get('RHN_PW')), 'pool_id': rhsm.get('pool_id', ''), 'repositories': [i['name'] for i in repositories if i['type'] == 'rhsm_channel'] } virt_setup_env = template.render(virt_setup_template) self.create_file('virt-setup-env', virt_setup_env, user='stack') self.run('virsh destroy instack', ignore_error=True) self.run('virsh undefine instack --remove-all-storage', ignore_error=True) self.run('source virt-setup-env; instack-virt-setup', user='stack') undercloud_ip = self.run( '/sbin/ip n | grep $(tripleo get-vm-mac instack) | awk \'{print $1;}\'', user='stack')[0] assert undercloud_ip, 'undercloud should have an IP' undercloud = Undercloud(hostname=undercloud_ip, via_ip=self.hostname, user='root', key_filename=self._key_filename) return undercloud
python
def build_undercloud_on_libvirt(self, image_path, rhsm=None, repositories=[]): """Build the Undercloud by using instack-virt-setup script.""" self.run('sysctl net.ipv4.ip_forward=1') self.fetch_image(path=image_path, dest='/home/stack/guest_image.qcow2', user='stack') # NOTE(Gonéri): this is a hack for our OpenStack, the MTU of its outgoing route # is 1400 and libvirt do not provide a mechanism to adjust the guests MTU. self.run("LIBGUESTFS_BACKEND=direct virt-customize -a /home/stack/guest_image.qcow2 --run-command 'echo MTU=\"1400\" >> /etc/sysconfig/network-scripts/ifcfg-eth0'") env = Environment() env.loader = FileSystemLoader(pkg_data_filename('template')) template = env.get_template('virt-setup-env.j2') self.run('mkdir -p /home/stack/DIB', user='stack') self.run('cp -v /etc/yum.repos.d/*.repo /home/stack/DIB', user='stack') # NOTE(Gonéri): Hack to be sure DIB won't complain because of missing gpg files # self.run('sed -i "s,gpgcheck=1,gpgcheck=0," /home/stack/DIB/*.repo', user='stack') dib_yum_repo_conf = self.run('find /home/stack/DIB -type f', user='stack')[0].split() virt_setup_template = { 'dib_yum_repo_conf': dib_yum_repo_conf, 'node': { 'count': 2, 'mem': 6144, 'cpu': 2 }, 'undercloud_node_mem': 8192, 'guest_image_name': '/home/stack/guest_image.qcow2' } if rhsm is not None: virt_setup_template['rhsm'] = { 'login': rhsm.get('login'), 'password': rhsm.get('password', os.environ.get('RHN_PW')), 'pool_id': rhsm.get('pool_id', ''), 'repositories': [i['name'] for i in repositories if i['type'] == 'rhsm_channel'] } virt_setup_env = template.render(virt_setup_template) self.create_file('virt-setup-env', virt_setup_env, user='stack') self.run('virsh destroy instack', ignore_error=True) self.run('virsh undefine instack --remove-all-storage', ignore_error=True) self.run('source virt-setup-env; instack-virt-setup', user='stack') undercloud_ip = self.run( '/sbin/ip n | grep $(tripleo get-vm-mac instack) | awk \'{print $1;}\'', user='stack')[0] assert undercloud_ip, 'undercloud should have an IP' undercloud = Undercloud(hostname=undercloud_ip, via_ip=self.hostname, user='root', key_filename=self._key_filename) return undercloud
[ "def", "build_undercloud_on_libvirt", "(", "self", ",", "image_path", ",", "rhsm", "=", "None", ",", "repositories", "=", "[", "]", ")", ":", "self", ".", "run", "(", "'sysctl net.ipv4.ip_forward=1'", ")", "self", ".", "fetch_image", "(", "path", "=", "image_path", ",", "dest", "=", "'/home/stack/guest_image.qcow2'", ",", "user", "=", "'stack'", ")", "# NOTE(Gonéri): this is a hack for our OpenStack, the MTU of its outgoing route", "# is 1400 and libvirt do not provide a mechanism to adjust the guests MTU.", "self", ".", "run", "(", "\"LIBGUESTFS_BACKEND=direct virt-customize -a /home/stack/guest_image.qcow2 --run-command 'echo MTU=\\\"1400\\\" >> /etc/sysconfig/network-scripts/ifcfg-eth0'\"", ")", "env", "=", "Environment", "(", ")", "env", ".", "loader", "=", "FileSystemLoader", "(", "pkg_data_filename", "(", "'template'", ")", ")", "template", "=", "env", ".", "get_template", "(", "'virt-setup-env.j2'", ")", "self", ".", "run", "(", "'mkdir -p /home/stack/DIB'", ",", "user", "=", "'stack'", ")", "self", ".", "run", "(", "'cp -v /etc/yum.repos.d/*.repo /home/stack/DIB'", ",", "user", "=", "'stack'", ")", "# NOTE(Gonéri): Hack to be sure DIB won't complain because of missing gpg files", "# self.run('sed -i \"s,gpgcheck=1,gpgcheck=0,\" /home/stack/DIB/*.repo', user='stack')", "dib_yum_repo_conf", "=", "self", ".", "run", "(", "'find /home/stack/DIB -type f'", ",", "user", "=", "'stack'", ")", "[", "0", "]", ".", "split", "(", ")", "virt_setup_template", "=", "{", "'dib_yum_repo_conf'", ":", "dib_yum_repo_conf", ",", "'node'", ":", "{", "'count'", ":", "2", ",", "'mem'", ":", "6144", ",", "'cpu'", ":", "2", "}", ",", "'undercloud_node_mem'", ":", "8192", ",", "'guest_image_name'", ":", "'/home/stack/guest_image.qcow2'", "}", "if", "rhsm", "is", "not", "None", ":", "virt_setup_template", "[", "'rhsm'", "]", "=", "{", "'login'", ":", "rhsm", ".", "get", "(", "'login'", ")", ",", "'password'", ":", "rhsm", ".", "get", "(", "'password'", ",", "os", ".", "environ", ".", "get", "(", "'RHN_PW'", ")", ")", ",", "'pool_id'", ":", "rhsm", ".", "get", "(", "'pool_id'", ",", "''", ")", ",", "'repositories'", ":", "[", "i", "[", "'name'", "]", "for", "i", "in", "repositories", "if", "i", "[", "'type'", "]", "==", "'rhsm_channel'", "]", "}", "virt_setup_env", "=", "template", ".", "render", "(", "virt_setup_template", ")", "self", ".", "create_file", "(", "'virt-setup-env'", ",", "virt_setup_env", ",", "user", "=", "'stack'", ")", "self", ".", "run", "(", "'virsh destroy instack'", ",", "ignore_error", "=", "True", ")", "self", ".", "run", "(", "'virsh undefine instack --remove-all-storage'", ",", "ignore_error", "=", "True", ")", "self", ".", "run", "(", "'source virt-setup-env; instack-virt-setup'", ",", "user", "=", "'stack'", ")", "undercloud_ip", "=", "self", ".", "run", "(", "'/sbin/ip n | grep $(tripleo get-vm-mac instack) | awk \\'{print $1;}\\''", ",", "user", "=", "'stack'", ")", "[", "0", "]", "assert", "undercloud_ip", ",", "'undercloud should have an IP'", "undercloud", "=", "Undercloud", "(", "hostname", "=", "undercloud_ip", ",", "via_ip", "=", "self", ".", "hostname", ",", "user", "=", "'root'", ",", "key_filename", "=", "self", ".", "_key_filename", ")", "return", "undercloud" ]
Build the Undercloud by using instack-virt-setup script.
[ "Build", "the", "Undercloud", "by", "using", "instack", "-", "virt", "-", "setup", "script", "." ]
bfa165538335edb1088170c7a92f097167225c81
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/host0.py#L56-L105
train
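A hedged continuation of the previous sketch for `build_undercloud_on_libvirt()`. The image URL and channel name are placeholders; only the call shape and the returned `Undercloud` object come from the record above. As with `UnitToLNode`, `repositories=[]` is a mutable default, so an explicit list is passed.

# Placeholder values throughout; continues the Host0 sketch above.
undercloud = host0.build_undercloud_on_libvirt(
    image_path='http://example.com/images/guest_image.qcow2',
    rhsm={'login': 'jdoe@example.com', 'password': 'secret', 'pool_id': ''},
    repositories=[{'type': 'rhsm_channel',
                   'name': 'rhel-7-server-openstack-rpms'}],  # placeholder
)
# The result is an Undercloud reachable through host0 (via_ip), reusing the
# same SSH key, exactly as wired up in the record's final lines.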