Dataset schema (column name: type, value-length range):

    repository_name: string, 7 to 55
    func_path_in_repository: string, 4 to 223
    func_name: string, 1 to 134
    whole_func_string: string, 75 to 104k
    language: string, 1 distinct value
    func_code_string: string, 75 to 104k
    func_code_tokens: sequence, 19 to 28.4k
    func_documentation_string: string, 1 to 46.9k
    func_documentation_tokens: sequence, 1 to 1.97k
    split_name: string, 1 distinct value
    func_code_url: string, 87 to 315
sashs/filebytes
filebytes/pe.py
PE._getSectionForDataDirectoryEntry
def _getSectionForDataDirectoryEntry(self, data_directory_entry, sections):
    """Returns the section which contains the data of DataDirectory"""
    for section in sections:
        if section.header.VirtualAddress <= data_directory_entry.VirtualAddress < section.header.VirtualAddress + section.header.SizeOfRawData:
            return section
python
Returns the section which contains the data of DataDirectory
[ "Returns", "the", "section", "which", "contains", "the", "data", "of", "DataDirectory" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/pe.py#L541-L547
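The lookup above is a plain interval test: an RVA belongs to a section when it falls inside [VirtualAddress, VirtualAddress + SizeOfRawData). A minimal self-contained sketch of the same check, using a hypothetical SectionHeader tuple in place of filebytes' section objects:

    from collections import namedtuple

    # Hypothetical stand-in for filebytes' section header objects.
    SectionHeader = namedtuple('SectionHeader', 'VirtualAddress SizeOfRawData')

    def section_for_rva(rva, headers):
        """Return the first header whose RVA range contains rva, else None."""
        for h in headers:
            if h.VirtualAddress <= rva < h.VirtualAddress + h.SizeOfRawData:
                return h

    headers = [SectionHeader(0x1000, 0x800), SectionHeader(0x2000, 0x400)]
    assert section_for_rva(0x1234, headers) == headers[0]
    assert section_for_rva(0x3000, headers) is None

A stricter containment test might use the section's VirtualSize rather than SizeOfRawData; the library's choice is kept here.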
sashs/filebytes
filebytes/pe.py
PE._parseDataDirectory
def _parseDataDirectory(self, data, sections, imageNtHeaders):
    """Parses the entries of the DataDirectory and returns a list of the content"""
    data_directory_data_list = [None for i in range(15)]

    # parse DataDirectory[Export]
    export_data_directory = imageNtHeaders.header.OptionalHeader.DataDirectory[ImageDirectoryEntry.EXPORT]
    export_section = self._getSectionForDataDirectoryEntry(export_data_directory, sections)
    export_data_directory_data = self._parseDataDirectoryExport(data, export_data_directory, export_section)
    data_directory_data_list[ImageDirectoryEntry.EXPORT] = export_data_directory_data

    # parse DataDirectory[Import]
    import_data_directory = imageNtHeaders.header.OptionalHeader.DataDirectory[ImageDirectoryEntry.IMPORT]
    import_section = self._getSectionForDataDirectoryEntry(import_data_directory, sections)
    import_data_directory_data = self._parseDataDirectoryImport(import_data_directory, import_section)
    data_directory_data_list[ImageDirectoryEntry.IMPORT] = import_data_directory_data

    # parse DataDirectory[LOAD_CONFIG]
    loadconfig_data_directory = imageNtHeaders.header.OptionalHeader.DataDirectory[ImageDirectoryEntry.LOAD_CONFIG]
    loadconfig_section = self._getSectionForDataDirectoryEntry(loadconfig_data_directory, sections)
    loadconfig_data = self._parseLoadConfig(loadconfig_data_directory, loadconfig_section)
    data_directory_data_list[ImageDirectoryEntry.LOAD_CONFIG] = loadconfig_data

    return data_directory_data_list
python
Parses the entries of the DataDirectory and returns a list of the content
[ "Parses", "the", "entries", "of", "the", "DataDirectory", "and", "returns", "a", "list", "of", "the", "content" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/pe.py#L549-L571
sashs/filebytes
filebytes/pe.py
PE._parseDataDirectoryExport
def _parseDataDirectoryExport(self, data, dataDirectoryEntry, exportSection):
    """Parses the ExportDataDirectory and returns an instance of ExportDirectoryData"""
    if not exportSection:
        return
    functions = []
    export_directory = IMAGE_EXPORT_DIRECTORY.from_buffer(exportSection.raw, to_offset(dataDirectoryEntry.VirtualAddress, exportSection))
    offset = to_offset(export_directory.Name, exportSection)
    checkOffset(offset, exportSection)
    name = get_str(exportSection.raw, offset)
    offsetOfNames = to_offset(export_directory.AddressOfNames, exportSection)
    offsetOfAddress = to_offset(export_directory.AddressOfFunctions, exportSection)
    offsetOfNameOrdinals = to_offset(export_directory.AddressOfNameOrdinals, exportSection)
    for i in range(export_directory.NumberOfNames):
        name_address = c_uint.from_buffer(exportSection.raw, offsetOfNames).value
        name_offset = to_offset(name_address, exportSection)
        checkOffset(name_offset, exportSection)
        func_name = get_str(exportSection.raw, name_offset)
        ordinal = c_ushort.from_buffer(exportSection.raw, offsetOfNameOrdinals).value
        func_addr = c_uint.from_buffer(exportSection.raw, offsetOfAddress).value
        offsetOfNames += 4
        offsetOfAddress += 4
        offsetOfNameOrdinals += 2
        functions.append(FunctionData(name=func_name, rva=func_addr, ordinal=ordinal))
    return ExportDirectoryData(header=export_directory, name=name, functions=functions)
python
Parses the ExportDataDirectory and returns an instance of ExportDirectoryData
[ "Parses", "the", "EmportDataDirectory", "and", "returns", "an", "instance", "of", "ExportDirectoryData" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/pe.py#L573-L601
sashs/filebytes
filebytes/pe.py
PE._parseDataDirectoryImport
def _parseDataDirectoryImport(self, dataDirectoryEntry, importSection):
    """Parses the ImportDataDirectory and returns a list of ImportDescriptorData"""
    if not importSection:
        return
    raw_bytes = (c_ubyte * dataDirectoryEntry.Size).from_buffer(importSection.raw, to_offset(dataDirectoryEntry.VirtualAddress, importSection))
    offset = 0
    import_descriptors = []
    while True:
        import_descriptor = IMAGE_IMPORT_DESCRIPTOR.from_buffer(raw_bytes, offset)
        if import_descriptor.OriginalFirstThunk == 0:
            break
        else:
            nameOffset = to_offset(import_descriptor.Name, importSection)
            checkOffset(nameOffset, importSection)
            dllName = get_str(importSection.raw, nameOffset)
            import_name_table = self.__parseThunks(import_descriptor.OriginalFirstThunk, importSection)
            import_address_table = self.__parseThunks(import_descriptor.FirstThunk, importSection)
            import_descriptors.append(ImportDescriptorData(header=import_descriptor, dllName=dllName, importNameTable=import_name_table, importAddressTable=import_address_table))
        offset += sizeof(IMAGE_IMPORT_DESCRIPTOR)
    return import_descriptors
python
Parses the ImportDataDirectory and returns a list of ImportDescriptorData
[ "Parses", "the", "ImportDataDirectory", "and", "returns", "a", "list", "of", "ImportDescriptorData" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/pe.py#L603-L629
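The descriptor loop is the usual pattern for walking a zero-terminated array of fixed-size structs with ctypes. A standalone sketch of that pattern (the Entry struct and buffer are made up; a real import table ends with an all-zero IMAGE_IMPORT_DESCRIPTOR):

    from ctypes import Structure, c_ubyte, c_uint, sizeof

    class Entry(Structure):
        _fields_ = [('value', c_uint)]

    # Values 1, 2, 3 followed by a zero sentinel (assuming a little-endian host).
    buf = (c_ubyte * 16)(1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0)

    entries = []
    offset = 0
    while True:
        e = Entry.from_buffer(buf, offset)
        if e.value == 0:           # zero entry marks the end of the array
            break
        entries.append(e.value)
        offset += sizeof(Entry)

    assert entries == [1, 2, 3]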
sashs/filebytes
filebytes/pe.py
PE.__parseThunks
def __parseThunks(self, thunkRVA, importSection):
    """Parses the thunks and returns a list"""
    offset = to_offset(thunkRVA, importSection)
    table_offset = 0
    thunks = []
    while True:
        thunk = IMAGE_THUNK_DATA.from_buffer(importSection.raw, offset)
        offset += sizeof(IMAGE_THUNK_DATA)
        if thunk.Ordinal == 0:
            break
        thunkData = ThunkData(header=thunk, rva=table_offset + thunkRVA, ordinal=None, importByName=None)
        if 0 < to_offset(thunk.AddressOfData, importSection) < len(self._bytes):
            self.__parseThunkData(thunkData, importSection)
        thunks.append(thunkData)
        table_offset += 4
    return thunks
python
Parses the thunks and returns a list
[ "Parses", "the", "thunks", "and", "returns", "a", "list" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/pe.py#L672-L687
sashs/filebytes
filebytes/pe.py
PE.__parseThunkData
def __parseThunkData(self, thunk, importSection):
    """Parses the data of a thunk and sets the data"""
    offset = to_offset(thunk.header.AddressOfData, importSection)
    if (0xf0000000 & thunk.header.AddressOfData) == 0x80000000:
        thunk.ordinal = thunk.header.AddressOfData & 0x0fffffff
    else:
        ibn = IMAGE_IMPORT_BY_NAME.from_buffer(importSection.raw, offset)
        checkOffset(offset + 2, importSection)
        name = get_str(importSection.raw, offset + 2)
        thunk.importByName = ImportByNameData(header=ibn, hint=ibn.Hint, name=name)
python
Parses the data of a thunk and sets the data
[ "Parses", "the", "data", "of", "a", "thunk", "and", "sets", "the", "data" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/pe.py#L689-L699
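The mask test above distinguishes the two forms a thunk can take: if the high bit of the value is set, the import is by ordinal; otherwise the value is an RVA to an IMAGE_IMPORT_BY_NAME structure (two bytes of hint, then the name). A small sketch of that decision; note the PE specification defines the flag as 0x80000000 (IMAGE_ORDINAL_FLAG32) and the ordinal as the low 16 bits, while the library above masks with 0x0fffffff:

    IMAGE_ORDINAL_FLAG32 = 0x80000000

    def classify_thunk(address_of_data):
        if address_of_data & IMAGE_ORDINAL_FLAG32:
            # import by ordinal: the low 16 bits carry the ordinal
            return ('ordinal', address_of_data & 0xffff)
        # import by name: the value is an RVA to hint + name
        return ('name_rva', address_of_data)

    assert classify_thunk(0x80000010) == ('ordinal', 0x10)
    assert classify_thunk(0x00003000) == ('name_rva', 0x3000)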
sashs/filebytes
filebytes/ctypes_helper.py
get_ptr
def get_ptr(data, offset=None, ptr_type=ctypes.c_void_p):
    """Returns a pointer to the data (cast to ptr_type, by default a void pointer)"""
    ptr = ctypes.cast(ctypes.pointer(data), ctypes.c_void_p)
    if offset:
        ptr = ctypes.c_void_p(ptr.value + offset)
    if ptr_type != ctypes.c_void_p:
        ptr = ctypes.cast(ptr, ptr_type)
    return ptr
python
Returns a pointer to the data (cast to ptr_type, by default a void pointer)
[ "Returns", "a", "void", "pointer", "to", "the", "data" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/ctypes_helper.py#L33-L43
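A usage sketch showing how the offset and ptr_type parameters combine, with get_ptr as defined above (or from filebytes.ctypes_helper). The asserted value assumes a little-endian host:

    import ctypes

    buf = (ctypes.c_ubyte * 8)(1, 2, 3, 4, 5, 6, 7, 8)

    # Pointer to byte 4 of the buffer, reinterpreted as a 32-bit unsigned int.
    p = get_ptr(buf, offset=4, ptr_type=ctypes.POINTER(ctypes.c_uint))
    assert p.contents.value == 0x08070605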
sashs/filebytes
filebytes/ctypes_helper.py
to_ubyte_array
def to_ubyte_array(barray):
    """Returns a c_ubyte array filled with the data of the given bytearray or bytes"""
    bs = (ctypes.c_ubyte * len(barray))()
    pack_into('%ds' % len(barray), bs, 0, barray)
    return bs
python
Returns a c_ubyte array filled with the data of the given bytearray or bytes
[ "Returns", "a", "c_ubyte_array", "filled", "with", "the", "given", "data", "of", "a", "bytearray", "or", "bytes" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/ctypes_helper.py#L48-L53
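A quick round-trip example; ctypes arrays support the buffer protocol, so bytes() converts back:

    arr = to_ubyte_array(b'\x01\x02\x03')
    assert len(arr) == 3
    assert bytes(arr) == b'\x01\x02\x03'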
sashs/filebytes
filebytes/binary.py
Binary._readFile
def _readFile(self, fileName):
    """Returns the bytes of the file."""
    with open(fileName, 'rb') as binFile:
        b = binFile.read()
        return to_ubyte_array(b)
python
Returns the bytes of the file.
[ "Returns", "the", "bytes", "of", "the", "file", "." ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/binary.py#L71-L77
sashs/filebytes
filebytes/elf.py
ELF._getSuitableClasses
def _getSuitableClasses(self, data):
    """Returns the class which holds the suitable classes for the loaded file"""
    classes = None
    if data[EI.CLASS] == ELFCLASS.BITS_32:
        if data[EI.DATA] == ELFDATA.LSB:
            classes = LSB_32
        elif data[EI.DATA] == ELFDATA.MSB:
            classes = MSB_32
    elif data[EI.CLASS] == ELFCLASS.BITS_64:
        if data[EI.DATA] == ELFDATA.LSB:
            classes = LSB_64
        elif data[EI.DATA] == ELFDATA.MSB:
            classes = MSB_64
    return classes
python
Returns the class which holds the suitable classes for the loaded file
[ "Returns", "the", "class", "which", "holds", "the", "suitable", "classes", "for", "the", "loaded", "file" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/elf.py#L881-L896
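The dispatch keys come from the ELF identification block: e_ident[4] is EI_CLASS (1 = 32-bit, 2 = 64-bit) and e_ident[5] is EI_DATA (1 = little-endian, 2 = big-endian). A standalone sketch that reads them straight from a file (offsets per the ELF specification; the returned labels mirror the class names above):

    def elf_flavour(path):
        with open(path, 'rb') as f:
            ident = f.read(6)
        if ident[:4] != b'\x7fELF':
            raise ValueError('not an ELF file')
        ei_class, ei_data = ident[4], ident[5]
        return {(1, 1): 'LSB_32', (1, 2): 'MSB_32',
                (2, 1): 'LSB_64', (2, 2): 'MSB_64'}.get((ei_class, ei_data))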
sashs/filebytes
filebytes/elf.py
ELF._parseElfHeader
def _parseElfHeader(self, data):
    """Returns the elf header"""
    ehdr = self.__classes.EHDR.from_buffer(data)
    return EhdrData(header=ehdr)
python
Returns the elf header
[ "Returns", "the", "elf", "header" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/elf.py#L898-L901
sashs/filebytes
filebytes/elf.py
ELF._parseSegments
def _parseSegments(self, data, elfHeader):
    """Return a list of segments"""
    offset = elfHeader.header.e_phoff
    segments = []
    for i in range(elfHeader.header.e_phnum):
        phdr = self.__classes.PHDR.from_buffer(data, offset)
        segment_bytes = (c_ubyte * phdr.p_filesz).from_buffer(data, phdr.p_offset)
        phdrData = PhdrData(header=phdr, raw=segment_bytes, bytes=bytearray(segment_bytes),
                            type=PT[phdr.p_type], vaddr=phdr.p_vaddr, offset=phdr.p_offset)
        segments.append(phdrData)
        offset += elfHeader.header.e_phentsize
    return segments
python
Return a list of segments
[ "Return", "a", "list", "of", "segments" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/elf.py#L903-L916
sashs/filebytes
filebytes/elf.py
ELF._parseSections
def _parseSections(self, data, elfHeader):
    """Returns a list of sections"""
    offset = elfHeader.header.e_shoff
    shdrs = []
    for i in range(elfHeader.header.e_shnum):
        shdr = self.__classes.SHDR.from_buffer(data, offset)
        section_bytes = None
        ba_section_bytes = None
        if shdr.sh_type != SHT.NOBITS:
            section_bytes = (c_ubyte * shdr.sh_size).from_buffer(data, shdr.sh_offset)
            ba_section_bytes = bytearray(section_bytes)
        shdrs.append(ShdrData(name=None, header=shdr, raw=section_bytes, bytes=ba_section_bytes))
        offset += elfHeader.header.e_shentsize

    if elfHeader.header.e_shstrndx != SHN.UNDEF:
        strtab = shdrs[elfHeader.header.e_shstrndx]
        strtab_offset = strtab.header.sh_offset
        for section in shdrs:
            section.name = get_str(strtab.raw, section.header.sh_name)

    return shdrs
python
Returns a list of sections
[ "Returns", "a", "list", "of", "sections" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/elf.py#L918-L939
sashs/filebytes
filebytes/elf.py
ELF._parseSymbols
def _parseSymbols(self, sections):
    """Sets a list of symbols in each DYNSYM and SYMTAB section"""
    for section in sections:
        strtab = sections[section.header.sh_link]
        if section.header.sh_type in (int(SHT.DYNSYM), int(SHT.SYMTAB)):
            section.symbols = self.__parseSymbolEntriesForSection(section, strtab)
python
Sets a list of symbols in each DYNSYM and SYMTAB section
[ "Sets", "a", "list", "of", "symbols", "in", "each", "DYNSYM", "and", "SYMTAB", "section" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/elf.py#L941-L946
sashs/filebytes
filebytes/elf.py
ELF._parseRelocations
def _parseRelocations(self, sections):
    """Parses the relocations and adds them to the section"""
    for section in sections:
        if section.header.sh_link != SHN.UNDEF and section.header.sh_type in (SHT.REL, SHT.RELA):
            symbols = sections[section.header.sh_link].symbols
            relocations = self.__parseRelocationEntries(section, symbols)
            section.relocations = relocations
python
Parses the relocations and adds them to the section
[ "Parses", "the", "relocations", "and", "add", "those", "to", "the", "section" ]
train
https://github.com/sashs/filebytes/blob/41ee009832aba19603f33d1fd3483b84d6684ebf/filebytes/elf.py#L965-L971
pyqg/pyqg
pyqg/model.py
run_with_snapshots
def run_with_snapshots(self, tsnapstart=0., tsnapint=432000.):
    """Run the model forward, yielding to user code at specified intervals.

    Parameters
    ----------
    tsnapstart : number
        The model time at which to begin yielding.
    tsnapint : number
        The model-time interval at which to yield.
    """
    tsnapints = np.ceil(tsnapint / self.dt)

    while self.t < self.tmax:
        self._step_forward()
        if self.t >= tsnapstart and (self.tc % tsnapints) == 0:
            yield self.t
    return
python
Run the model forward, yielding to user code at specified intervals.

Parameters
----------
tsnapstart : number
    The model time at which to begin yielding.
tsnapint : number
    The model-time interval at which to yield.
[ "Run", "the", "model", "forward", "yielding", "to", "user", "code", "at", "specified", "intervals", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/model.py#L210-L228
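Typical use iterates over the generator, inspecting or saving model state each time it yields; the body runs roughly every tsnapint model seconds (approximately, since the interval is rounded up to whole timesteps). A minimal sketch with illustrative parameter values:

    import pyqg

    m = pyqg.QGModel(tmax=10 * 86400., dt=8000.)
    for t in m.run_with_snapshots(tsnapstart=0., tsnapint=86400.):
        print('model time (s):', t)   # e.g. save m.q or compute diagnostics here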
pyqg/pyqg
pyqg/model.py
vertical_modes
def vertical_modes(self):
    """Calculate standard vertical modes. Simply the eigenvectors
    of the stretching matrix S"""
    evals, evecs = np.linalg.eig(-self.S)
    asort = evals.argsort()

    # deformation wavenumbers and radii
    self.kdi2 = evals[asort]
    self.radii = np.zeros_like(self.kdi2)
    self.radii[0] = self.g * self.H / np.abs(self.f)   # barotropic def. radius
    self.radii[1:] = 1. / np.sqrt(self.kdi2[1:])

    # eigenstructure
    self.pmodes = evecs[:, asort]

    # normalize to have unit L2-norm
    Ai = (self.H / (self.Hi[:, np.newaxis] * (self.pmodes**2)).sum(axis=0))**0.5
    self.pmodes = Ai[np.newaxis, :] * self.pmodes
python
Calculate standard vertical modes. Simply the eigenvectors of the stretching matrix S
[ "Calculate", "standard", "vertical", "modes", ".", "Simply", "the", "eigenvectors", "of", "the", "stretching", "matrix", "S" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/model.py#L236-L255
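The pattern here, eigendecompose, sort by eigenvalue, then rescale each eigenvector column, is easy to check in isolation. A sketch with a stand-in stretching matrix and made-up layer thicknesses, verifying the thickness-weighted normalization sum_i(Hi * mode_i**2) = H used above:

    import numpy as np

    S = np.array([[2.0, -1.0], [-1.0, 2.0]])   # stand-in for -self.S
    Hi = np.array([500.0, 1500.0])             # hypothetical layer thicknesses
    H = Hi.sum()

    evals, evecs = np.linalg.eig(S)
    asort = evals.argsort()
    evals, evecs = evals[asort], evecs[:, asort]

    # rescale each mode column so that sum_i Hi * mode_i**2 == H
    Ai = (H / (Hi[:, np.newaxis] * evecs**2).sum(axis=0)) ** 0.5
    pmodes = Ai[np.newaxis, :] * evecs
    assert np.allclose((Hi[:, np.newaxis] * pmodes**2).sum(axis=0), H)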
pyqg/pyqg
pyqg/model.py
modal_projection
def modal_projection(self, p, forward=True):
    """Projects a field p onto the modal amplitudes pn using
    the basis [pmodes]. The inverse transform calculates p from pn"""
    if forward:
        pt = np.linalg.solve(self.pmodes[np.newaxis, np.newaxis], p.T).T
    else:
        pt = np.einsum("ik,k...->i...", self.pmodes, p)
    return pt
python
Projects a field p onto the modal amplitudes pn using the basis [pmodes]. The inverse transform calculates p from pn
[ "Performs", "a", "field", "p", "into", "modal", "amplitudes", "pn", "using", "the", "basis", "[", "pmodes", "]", ".", "The", "inverse", "transform", "calculates", "p", "from", "pn" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/model.py#L257-L267
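Both directions are a change of basis against the columns of pmodes; pyqg solves a linear system for the forward direction, while the sketch below uses an explicit inverse for clarity. A self-contained round trip on random stand-in data (not pyqg fields):

    import numpy as np

    rng = np.random.default_rng(0)
    pmodes = rng.standard_normal((3, 3))    # columns play the role of modes
    p = rng.standard_normal((3, 8, 8))      # field with a leading layer axis

    pn = np.einsum('ik,k...->i...', np.linalg.inv(pmodes), p)   # forward
    p_back = np.einsum('ik,k...->i...', pmodes, pn)             # inverse
    assert np.allclose(p, p_back)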
pyqg/pyqg
pyqg/sqg_model.py
SQGModel._initialize_background
def _initialize_background(self):
    """Set up background state (zonal flow and PV gradients)."""
    # background vel.
    if len(np.shape(self.U)) == 0:
        self.U = self.U * np.ones(self.ny)
    print(np.shape(self.U))
    self.set_U(self.U)

    # the meridional PV gradients in each layer
    self.Qy = (self.beta + np.gradient(np.gradient(self.U, self.dy), self.dy))[np.newaxis, ...]

    # complex version, multiplied by k; pre-computed to speed up computations
    self.ikQy = np.expand_dims(self.Qy, axis=2) * 1j * self.k
    self.ilQx = 0.
python
Set up background state (zonal flow and PV gradients).
[ "Set", "up", "background", "state", "(", "zonal", "flow", "and", "PV", "gradients", ")", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/sqg_model.py#L51-L67
pyqg/pyqg
pyqg/sqg_model.py
SQGModel._initialize_inversion_matrix
def _initialize_inversion_matrix(self):
    """the inversion"""
    # The sqg model is diagonal. The inversion is simply qh = -kappa**2 ph
    self.a = np.asarray(self.Nb * np.sqrt(self.wv2i))[np.newaxis, np.newaxis, :, :]
python
the inversion
[ "the", "inversion" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/sqg_model.py#L69-L72
pyqg/pyqg
pyqg/sqg_model.py
SQGModel.set_U
def set_U(self, U):
    """Set background zonal flow"""
    self.Ubg = np.asarray(U)[np.newaxis, ...]
python
Set background zonal flow
[ "Set", "background", "zonal", "flow" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/sqg_model.py#L77-L79
pyqg/pyqg
pyqg/particles.py
LagrangianParticleArray2D.step_forward_with_function
def step_forward_with_function(self, uv0fun, uv1fun, dt):
    """Advance particles using a function to determine u and v.

    Parameters
    ----------
    uv0fun : function
        Called like ``uv0fun(x,y)``. Should return the velocity field
        u, v at time t.
    uv1fun : function
        Called like ``uv1fun(x,y)``. Should return the velocity field
        u, v at time t + dt.
    dt : number
        Timestep."""
    dx, dy = self._rk4_integrate(self.x, self.y, uv0fun, uv1fun, dt)
    self.x = self._wrap_x(self.x + dx)
    self.y = self._wrap_y(self.y + dy)
python
Advance particles using a function to determine u and v.

Parameters
----------
uv0fun : function
    Called like ``uv0fun(x,y)``. Should return the velocity field u, v at time t.
uv1fun : function
    Called like ``uv1fun(x,y)``. Should return the velocity field u, v at time t + dt.
dt : number
    Timestep.
[ "Advance", "particles", "using", "a", "function", "to", "determine", "u", "and", "v", ".", "Parameters", "----------", "uv0fun", ":", "function", "Called", "like", "uv0fun", "(", "x", "y", ")", ".", "Should", "return", "the", "velocity", "field", "u", "v", "at", "time", "t", ".", "uv1fun", "(", "x", "y", ")", ":", "function", "Called", "like", "uv1fun", "(", "x", "y", ")", ".", "Should", "return", "the", "velocity", "field", "u", "v", "at", "time", "t", "+", "dt", ".", "dt", ":", "number", "Timestep", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/particles.py#L65-L81
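A usage sketch with a steady, analytically defined velocity field, so the same function serves both time levels. The constructor keywords (periodic_in_x, xmin, and friends) are assumptions about the class signature; check the class definition before copying:

    import numpy as np
    from pyqg.particles import LagrangianParticleArray2D

    # Hypothetical setup: 100 particles on a doubly periodic [0, 2*pi) square.
    x0 = np.random.rand(100) * 2 * np.pi
    y0 = np.random.rand(100) * 2 * np.pi
    lpa = LagrangianParticleArray2D(x0, y0, periodic_in_x=True, periodic_in_y=True,
                                    xmin=0., xmax=2*np.pi, ymin=0., ymax=2*np.pi)

    uvfun = lambda x, y: (np.cos(y), np.sin(x))   # steady u(x, y), v(x, y)
    for _ in range(100):
        lpa.step_forward_with_function(uvfun, uvfun, dt=0.01)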
pyqg/pyqg
pyqg/particles.py
LagrangianParticleArray2D._rk4_integrate
def _rk4_integrate(self, x, y, uv0fun, uv1fun, dt):
    """Integrates positions x, y using velocity functions
    uv0fun, uv1fun. Returns dx and dy, the displacements."""
    u0, v0 = uv0fun(x, y)
    k1u = dt * u0
    k1v = dt * v0
    x11 = self._wrap_x(x + 0.5 * k1u)
    y11 = self._wrap_y(y + 0.5 * k1v)
    u11, v11 = uv1fun(x11, y11)
    k2u = dt * u11
    k2v = dt * v11
    x12 = self._wrap_x(x + 0.5 * k2u)
    y12 = self._wrap_y(y + 0.5 * k2v)
    u12, v12 = uv1fun(x12, y12)
    k3u = dt * u12
    k3v = dt * v12
    x13 = self._wrap_x(x + k3u)
    y13 = self._wrap_y(y + k3v)
    u13, v13 = uv1fun(x13, y13)
    k4u = dt * u13
    k4v = dt * v13
    # update
    dx = (k1u + 2 * k2u + 2 * k3u + k4u) / 6.
    dy = (k1v + 2 * k2v + 2 * k3v + k4v) / 6.
    return dx, dy
python
Integrates positions x, y using velocity functions uv0fun, uv1fun. Returns dx and dy, the displacements.
[ "Integrates", "positions", "x", "y", "using", "velocity", "functions", "uv0fun", "uv1fun", ".", "Returns", "dx", "and", "dy", "the", "displacements", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/particles.py#L83-L108
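The k1..k4 combination is the classical fourth-order Runge-Kutta rule; the only twist above is that the three later stages reuse the t + dt velocity field in place of an interpolated mid-step field. Stripped to its core, one RK4 step for dx/dt = f(t, x) reads:

    import math

    def rk4_step(f, t, x, dt):
        """One classical RK4 step for dx/dt = f(t, x)."""
        k1 = dt * f(t, x)
        k2 = dt * f(t + 0.5*dt, x + 0.5*k1)
        k3 = dt * f(t + 0.5*dt, x + 0.5*k2)
        k4 = dt * f(t + dt, x + k3)
        return x + (k1 + 2*k2 + 2*k3 + k4) / 6.

    # dx/dt = x integrated from x(0) = 1 over [0, 1]; the exact answer is e.
    x, dt = 1.0, 0.01
    for i in range(100):
        x = rk4_step(lambda t, x: x, i * dt, x, dt)
    assert abs(x - math.e) < 1e-8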
pyqg/pyqg
pyqg/particles.py
LagrangianParticleArray2D._distance
def _distance(self, x0, y0, x1, y1):
    """Utility function to compute distance between points."""
    dx = x1 - x0
    dy = y1 - y0
    # roll displacements across the borders
    if self.pix:
        dx[dx > self.Lx/2] -= self.Lx
        dx[dx < -self.Lx/2] += self.Lx
    if self.piy:
        dy[dy > self.Ly/2] -= self.Ly
        dy[dy < -self.Ly/2] += self.Ly
    return dx, dy
python
Utility function to compute distance between points.
[ "Utitlity", "function", "to", "compute", "distance", "between", "points", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/particles.py#L124-L135
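This is the minimum-image convention: on a periodic axis of length L, displacements are folded into [-L/2, L/2) so the shorter way around is always reported. The same fold as a scalar one-liner:

    def wrap_displacement(d, L):
        """Fold a displacement on a periodic axis of length L into [-L/2, L/2)."""
        return (d + L/2) % L - L/2

    assert wrap_displacement(9., 10.) == -1.   # shorter to cross the border
    assert wrap_displacement(-6., 10.) == 4.
    assert wrap_displacement(2., 10.) == 2.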
pyqg/pyqg
pyqg/particles.py
GriddedLagrangianParticleArray2D.interpolate_gridded_scalar
def interpolate_gridded_scalar(self, x, y, c, order=1, pad=1, offset=0):
    """Interpolate gridded scalar C to points x,y.

    Parameters
    ----------
    x, y : array-like
        Points at which to interpolate
    c : array-like
        The scalar, assumed to be defined on the grid.
    order : int
        Order of interpolation
    pad : int
        Number of pad cells added
    offset : int
        ???

    Returns
    -------
    ci : array-like
        The interpolated scalar
    """
    ## no longer necessary because we accept pre-padded arrays
    # assert c.shape == (self.Ny, self.Nx), 'Shape of c needs to be (Ny,Nx)'

    # first pad the array to deal with the boundaries
    # (map_coordinates can't seem to deal with this by itself)
    # pad twice so cubic interpolation can be used
    if pad > 0:
        cp = self._pad_field(c, pad=pad)
    else:
        cp = c
    # now the shape is Nx+2, Nx+2
    i = (x - self.xmin)/self.Lx*self.Nx + pad + offset - 0.5
    j = (y - self.ymin)/self.Ly*self.Ny + pad + offset - 0.5
    # for some reason this still does not work with high precision near the boundaries
    return scipy.ndimage.map_coordinates(cp, [j, i],
                                         mode='constant', order=order, cval=np.nan)
python
Interpolate gridded scalar C to points x,y.

Parameters
----------
x, y : array-like
    Points at which to interpolate
c : array-like
    The scalar, assumed to be defined on the grid.
order : int
    Order of interpolation
pad : int
    Number of pad cells added
offset : int
    ???

Returns
-------
ci : array-like
    The interpolated scalar
[ "Interpolate", "gridded", "scalar", "C", "to", "points", "x", "y", ".", "Parameters", "----------", "x", "y", ":", "array", "-", "like", "Points", "at", "which", "to", "interpolate", "c", ":", "array", "-", "like", "The", "scalar", "assumed", "to", "be", "defined", "on", "the", "grid", ".", "order", ":", "int", "Order", "of", "interpolation", "pad", ":", "int", "Number", "of", "pad", "cells", "added", "offset", ":", "int", "???", "Returns", "-------", "ci", ":", "array", "-", "like", "The", "interpolated", "scalar" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/particles.py#L180-L218
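The method's own job is only the bookkeeping that turns physical (x, y) into fractional array indices (j, i); scipy.ndimage.map_coordinates does the actual interpolation. A minimal standalone call on a field where bilinear interpolation is exact:

    import numpy as np
    import scipy.ndimage

    c = np.arange(16.).reshape(4, 4)     # c[j, i] = 4*j + i, linear in j and i

    # order=1 is bilinear; coordinates are given as [rows, cols] = [j, i].
    vals = scipy.ndimage.map_coordinates(c, [[1.5, 2.0], [0.5, 3.0]], order=1)
    assert np.allclose(vals, [6.5, 11.0])   # 4*1.5 + 0.5 and 4*2 + 3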
pyqg/pyqg
pyqg/particles.py
GriddedLagrangianParticleArray2D.step_forward_with_gridded_uv
def step_forward_with_gridded_uv(self, U0, V0, U1, V1, dt, order=1):
    """Advance particles using a gridded velocity field. Because of the
    Runge-Kutta timestepping, we need two velocity fields at different
    times.

    Parameters
    ----------
    U0, V0 : array-like
        Gridded velocity fields at time t - dt.
    U1, V1 : array-like
        Gridded velocity fields at time t.
    dt : number
        Timestep.
    order : int
        Order of interpolation.
    """
    # create interpolation functions which return u and v
    # pre-pad arrays so it only has to be done once
    # for linear interpolation (default order=1), only one pad is necessary
    pad = order
    [U0p, V0p, U1p, V1p] = [self._pad_field(c, pad=pad)
                            for c in [U0, V0, U1, V1]]
    # pad u and v as necessary
    uv0fun = (lambda x, y:
              (self.interpolate_gridded_scalar(x, y, U0p, pad=0, order=order, offset=pad),
               self.interpolate_gridded_scalar(x, y, V0p, pad=0, order=order, offset=pad)))
    uv1fun = (lambda x, y:
              (self.interpolate_gridded_scalar(x, y, U1p, pad=0, order=order, offset=pad),
               self.interpolate_gridded_scalar(x, y, V1p, pad=0, order=order, offset=pad)))
    self.step_forward_with_function(uv0fun, uv1fun, dt)
python
def step_forward_with_gridded_uv(self, U0, V0, U1, V1, dt, order=1): """Advance particles using a gridded velocity field. Because of the Runge-Kutta timestepping, we need two velocity fields at different times. Parameters ---------- U0, V0 : array-like Gridded velocity fields at time t - dt. U1, V1 : array-like Gridded velocity fields at time t. dt : number Timestep. order : int Order of interpolation. """ # create interpolation functions which return u and v # pre-pad arrays so it only has to be done once # for linear interpolation (default order=1), only one pad is necessary pad = order [U0p, V0p, U1p, V1p] = [self._pad_field(c, pad=pad) for c in [U0, V0, U1, V1]] # pad u and v as necessary uv0fun = (lambda x, y : (self.interpolate_gridded_scalar(x, y, U0p, pad=0, order=order, offset=pad), self.interpolate_gridded_scalar(x, y, V0p, pad=0, order=order, offset=pad))) uv1fun = (lambda x, y : (self.interpolate_gridded_scalar(x, y, U1p, pad=0, order=order, offset=pad), self.interpolate_gridded_scalar(x, y, V1p, pad=0, order=order, offset=pad))) self.step_forward_with_function(uv0fun, uv1fun, dt)
[ "def", "step_forward_with_gridded_uv", "(", "self", ",", "U0", ",", "V0", ",", "U1", ",", "V1", ",", "dt", ",", "order", "=", "1", ")", ":", "# create interpolation functions which return u and v", "# pre-pad arrays so it only has to be done once", "# for linear interpolation (default order=1), only one pad is necessary", "pad", "=", "order", "[", "U0p", ",", "V0p", ",", "U1p", ",", "V1p", "]", "=", "[", "self", ".", "_pad_field", "(", "c", ",", "pad", "=", "pad", ")", "for", "c", "in", "[", "U0", ",", "V0", ",", "U1", ",", "V1", "]", "]", "# pad u and v as necessary", "uv0fun", "=", "(", "lambda", "x", ",", "y", ":", "(", "self", ".", "interpolate_gridded_scalar", "(", "x", ",", "y", ",", "U0p", ",", "pad", "=", "0", ",", "order", "=", "order", ",", "offset", "=", "pad", ")", ",", "self", ".", "interpolate_gridded_scalar", "(", "x", ",", "y", ",", "V0p", ",", "pad", "=", "0", ",", "order", "=", "order", ",", "offset", "=", "pad", ")", ")", ")", "uv1fun", "=", "(", "lambda", "x", ",", "y", ":", "(", "self", ".", "interpolate_gridded_scalar", "(", "x", ",", "y", ",", "U1p", ",", "pad", "=", "0", ",", "order", "=", "order", ",", "offset", "=", "pad", ")", ",", "self", ".", "interpolate_gridded_scalar", "(", "x", ",", "y", ",", "V1p", ",", "pad", "=", "0", ",", "order", "=", "order", ",", "offset", "=", "pad", ")", ")", ")", "self", ".", "step_forward_with_function", "(", "uv0fun", ",", "uv1fun", ",", "dt", ")" ]
Advance particles using a gridded velocity field. Because of the Runge-Kutta timestepping, we need two velocity fields at different times. Parameters ---------- U0, V0 : array-like Gridded velocity fields at time t - dt. U1, V1 : array-like Gridded velocity fields at time t. dt : number Timestep. order : int Order of interpolation.
[ "Advance", "particles", "using", "a", "gridded", "velocity", "field", ".", "Because", "of", "the", "Runga", "-", "Kutta", "timestepping", "we", "need", "two", "velocity", "fields", "at", "different", "times", ".", "Parameters", "----------", "U0", "V0", ":", "array", "-", "like", "Gridded", "velocity", "fields", "at", "time", "t", "-", "dt", ".", "U1", "V1", ":", "array", "-", "like", "Gridded", "velocity", "fields", "at", "time", "t", ".", "dt", ":", "number", "Timestep", ".", "order", ":", "int", "Order", "of", "interpolation", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/particles.py#L223-L258
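step_forward_with_function is not included in this excerpt; a plausible sketch of the two-field predictor-corrector (Heun) step the two closures suggest, with uv0fun/uv1fun standing in for the interpolating lambdas built above, is:

def heun_step(x, y, uv0fun, uv1fun, dt):
    # predictor: velocity at time t - dt, evaluated at the current position
    u0, v0 = uv0fun(x, y)
    xp, yp = x + dt * u0, y + dt * v0
    # corrector: velocity at time t, evaluated at the predicted position
    u1, v1 = uv1fun(xp, yp)
    return x + 0.5 * dt * (u0 + u1), y + 0.5 * dt * (v0 + v1)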
pyqg/pyqg
pyqg/diagnostic_tools.py
spec_var
def spec_var(model, ph): """Compute variance of ``p`` from Fourier coefficients ``ph``. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph : complex array The field on which to compute the variance Returns ------- var_dens : float The variance of `ph` """ var_dens = 2. * np.abs(ph)**2 / model.M**2 # only half of coefs [0] and [nx/2+1] due to symmetry in real fft2 var_dens[...,0] /= 2 var_dens[...,-1] /= 2 return var_dens.sum(axis=(-1,-2))
python
def spec_var(model, ph): """Compute variance of ``p`` from Fourier coefficients ``ph``. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph : complex array The field on which to compute the variance Returns ------- var_dens : float The variance of `ph` """ var_dens = 2. * np.abs(ph)**2 / model.M**2 # only half of coefs [0] and [nx/2+1] due to symmetry in real fft2 var_dens[...,0] /= 2 var_dens[...,-1] /= 2 return var_dens.sum(axis=(-1,-2))
[ "def", "spec_var", "(", "model", ",", "ph", ")", ":", "var_dens", "=", "2.", "*", "np", ".", "abs", "(", "ph", ")", "**", "2", "/", "model", ".", "M", "**", "2", "# only half of coefs [0] and [nx/2+1] due to symmetry in real fft2", "var_dens", "[", "...", ",", "0", "]", "/=", "2", "var_dens", "[", "...", ",", "-", "1", "]", "/=", "2", "return", "var_dens", ".", "sum", "(", "axis", "=", "(", "-", "1", ",", "-", "2", ")", ")" ]
Compute variance of ``p`` from Fourier coefficients ``ph``. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph : complex array The field on which to compute the variance Returns ------- var_dens : float The variance of `ph`
[ "Compute", "variance", "of", "p", "from", "Fourier", "coefficients", "ph", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/diagnostic_tools.py#L7-L27
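As a standalone sanity check (a sketch outside pyqg, assuming model.M is the total grid-point count nx*ny and ph comes from np.fft.rfft2), the doubled spectral density with halved first and last columns recovers the physical-space variance:

import numpy as np

nx = ny = 32
M = nx * ny
rng = np.random.default_rng(0)
p = rng.standard_normal((ny, nx))
p -= p.mean()                      # zero-mean field, so variance = mean square

ph = np.fft.rfft2(p)
var_dens = 2. * np.abs(ph)**2 / M**2
var_dens[..., 0] /= 2              # the k=0 column appears once in the full spectrum
var_dens[..., -1] /= 2             # so does the Nyquist column (nx even)
print(var_dens.sum(), p.var())     # the two values agree to round-off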
pyqg/pyqg
pyqg/diagnostic_tools.py
spec_sum
def spec_sum(ph2): """Compute total spectral sum of the real spectral quantity ``ph^2``. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph2 : real array The field on which to compute the sum Returns ------- var_dens : float The sum of `ph2` """ ph2 = 2.*ph2 ph2[...,0] = ph2[...,0]/2. ph2[...,-1] = ph2[...,-1]/2. return ph2.sum(axis=(-1,-2))
python
def spec_sum(ph2): """Compute total spectral sum of the real spectral quantity ``ph^2``. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph2 : real array The field on which to compute the sum Returns ------- var_dens : float The sum of `ph2` """ ph2 = 2.*ph2 ph2[...,0] = ph2[...,0]/2. ph2[...,-1] = ph2[...,-1]/2. return ph2.sum(axis=(-1,-2))
[ "def", "spec_sum", "(", "ph2", ")", ":", "ph2", "=", "2.", "*", "ph2", "ph2", "[", "...", ",", "0", "]", "=", "ph2", "[", "...", ",", "0", "]", "/", "2.", "ph2", "[", "...", ",", "-", "1", "]", "=", "ph2", "[", "...", ",", "-", "1", "]", "/", "2.", "return", "ph2", ".", "sum", "(", "axis", "=", "(", "-", "1", ",", "-", "2", ")", ")" ]
Compute total spectral sum of the real spectral quantity ``ph^2``. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph2 : real array The field on which to compute the sum Returns ------- var_dens : float The sum of `ph2`
[ "Compute", "total", "spectral", "sum", "of", "the", "real", "spectral", "quantity", "ph^2", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/diagnostic_tools.py#L30-L50
pyqg/pyqg
pyqg/diagnostic_tools.py
calc_ispec
def calc_ispec(model, ph): """Compute isotropic spectrum `phr` of `ph` from 2D spectrum. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph : complex array The field on which to compute the variance Returns ------- kr : array isotropic wavenumber phr : array isotropic spectrum """ if model.kk.max()>model.ll.max(): kmax = model.ll.max() else: kmax = model.kk.max() # create radial wavenumber dkr = np.sqrt(model.dk**2 + model.dl**2) kr = np.arange(dkr/2.,kmax+dkr,dkr) phr = np.zeros(kr.size) for i in range(kr.size): fkr = (model.wv>=kr[i]-dkr/2) & (model.wv<=kr[i]+dkr/2) dth = pi / (fkr.sum()-1) phr[i] = ph[fkr].sum() * kr[i] * dth return kr, phr
python
def calc_ispec(model, ph): """Compute isotropic spectrum `phr` of `ph` from 2D spectrum. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph : complex array The field on which to compute the variance Returns ------- kr : array isotropic wavenumber phr : array isotropic spectrum """ if model.kk.max()>model.ll.max(): kmax = model.ll.max() else: kmax = model.kk.max() # create radial wavenumber dkr = np.sqrt(model.dk**2 + model.dl**2) kr = np.arange(dkr/2.,kmax+dkr,dkr) phr = np.zeros(kr.size) for i in range(kr.size): fkr = (model.wv>=kr[i]-dkr/2) & (model.wv<=kr[i]+dkr/2) dth = pi / (fkr.sum()-1) phr[i] = ph[fkr].sum() * kr[i] * dth return kr, phr
[ "def", "calc_ispec", "(", "model", ",", "ph", ")", ":", "if", "model", ".", "kk", ".", "max", "(", ")", ">", "model", ".", "ll", ".", "max", "(", ")", ":", "kmax", "=", "model", ".", "ll", ".", "max", "(", ")", "else", ":", "kmax", "=", "model", ".", "kk", ".", "max", "(", ")", "# create radial wavenumber", "dkr", "=", "np", ".", "sqrt", "(", "model", ".", "dk", "**", "2", "+", "model", ".", "dl", "**", "2", ")", "kr", "=", "np", ".", "arange", "(", "dkr", "/", "2.", ",", "kmax", "+", "dkr", ",", "dkr", ")", "phr", "=", "np", ".", "zeros", "(", "kr", ".", "size", ")", "for", "i", "in", "range", "(", "kr", ".", "size", ")", ":", "fkr", "=", "(", "model", ".", "wv", ">=", "kr", "[", "i", "]", "-", "dkr", "/", "2", ")", "&", "(", "model", ".", "wv", "<=", "kr", "[", "i", "]", "+", "dkr", "/", "2", ")", "dth", "=", "pi", "/", "(", "fkr", ".", "sum", "(", ")", "-", "1", ")", "phr", "[", "i", "]", "=", "ph", "[", "fkr", "]", ".", "sum", "(", ")", "*", "kr", "[", "i", "]", "*", "dth", "return", "kr", ",", "phr" ]
Compute isotropic spectrum `phr` of `ph` from 2D spectrum. Parameters ---------- model : pyqg.Model instance The model object from which `ph` originates ph : complex array The field on which to compute the variance Returns ------- kr : array isotropic wavenumber phr : array isotropic spectrum
[ "Compute", "isotropic", "spectrum", "phr", "of", "ph", "from", "2D", "spectrum", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/diagnostic_tools.py#L53-L86
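A self-contained sketch of the same ring-binning on a toy spectrum (kk, ll and wv are hypothetical stand-ins for the model attributes, with dk = dl = 1); like the original, it assumes every ring contains more than one spectral point:

import numpy as np

nk = 16
kk = np.fft.rfftfreq(2 * nk) * 2 * nk            # zonal wavenumbers 0..nk
ll = np.fft.fftfreq(2 * nk) * 2 * nk             # meridional wavenumbers
wv = np.sqrt(kk[np.newaxis, :]**2 + ll[:, np.newaxis]**2)

ph = np.exp(-wv)                                 # toy 2-D spectral density
dkr = np.sqrt(2.)                                # sqrt(dk**2 + dl**2)
kr = np.arange(dkr / 2., kk.max() + dkr, dkr)    # ring centers
phr = np.zeros(kr.size)
for i in range(kr.size):
    fkr = (wv >= kr[i] - dkr / 2) & (wv <= kr[i] + dkr / 2)
    phr[i] = ph[fkr].sum() * kr[i] * np.pi / (fkr.sum() - 1)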
pyqg/pyqg
pyqg/layered_model.py
LayeredModel._initialize_stretching_matrix
def _initialize_stretching_matrix(self): """ Set up the stretching matrix """ self.S = np.zeros((self.nz, self.nz)) if (self.nz==2) and (self.rd) and (self.delta): self.del1 = self.delta/(self.delta+1.) self.del2 = (self.delta+1.)**-1 self.Us = self.Ubg[0]-self.Ubg[1] self.F1 = self.rd**-2 / (1.+self.delta) self.F2 = self.delta*self.F1 self.S[0,0], self.S[0,1] = -self.F1, self.F1 self.S[1,0], self.S[1,1] = self.F2, -self.F2 else: for i in range(self.nz): if i == 0: self.S[i,i] = -self.f2/self.Hi[i]/self.gpi[i] self.S[i,i+1] = self.f2/self.Hi[i]/self.gpi[i] elif i == self.nz-1: self.S[i,i] = -self.f2/self.Hi[i]/self.gpi[i-1] self.S[i,i-1] = self.f2/self.Hi[i]/self.gpi[i-1] else: self.S[i,i-1] = self.f2/self.Hi[i]/self.gpi[i-1] self.S[i,i] = -(self.f2/self.Hi[i]/self.gpi[i] + self.f2/self.Hi[i]/self.gpi[i-1]) self.S[i,i+1] = self.f2/self.Hi[i]/self.gpi[i]
python
def _initialize_stretching_matrix(self): """ Set up the stretching matrix """ self.S = np.zeros((self.nz, self.nz)) if (self.nz==2) and (self.rd) and (self.delta): self.del1 = self.delta/(self.delta+1.) self.del2 = (self.delta+1.)**-1 self.Us = self.Ubg[0]-self.Ubg[1] self.F1 = self.rd**-2 / (1.+self.delta) self.F2 = self.delta*self.F1 self.S[0,0], self.S[0,1] = -self.F1, self.F1 self.S[1,0], self.S[1,1] = self.F2, -self.F2 else: for i in range(self.nz): if i == 0: self.S[i,i] = -self.f2/self.Hi[i]/self.gpi[i] self.S[i,i+1] = self.f2/self.Hi[i]/self.gpi[i] elif i == self.nz-1: self.S[i,i] = -self.f2/self.Hi[i]/self.gpi[i-1] self.S[i,i-1] = self.f2/self.Hi[i]/self.gpi[i-1] else: self.S[i,i-1] = self.f2/self.Hi[i]/self.gpi[i-1] self.S[i,i] = -(self.f2/self.Hi[i]/self.gpi[i] + self.f2/self.Hi[i]/self.gpi[i-1]) self.S[i,i+1] = self.f2/self.Hi[i]/self.gpi[i]
[ "def", "_initialize_stretching_matrix", "(", "self", ")", ":", "self", ".", "S", "=", "np", ".", "zeros", "(", "(", "self", ".", "nz", ",", "self", ".", "nz", ")", ")", "if", "(", "self", ".", "nz", "==", "2", ")", "and", "(", "self", ".", "rd", ")", "and", "(", "self", ".", "delta", ")", ":", "self", ".", "del1", "=", "self", ".", "delta", "/", "(", "self", ".", "delta", "+", "1.", ")", "self", ".", "del2", "=", "(", "self", ".", "delta", "+", "1.", ")", "**", "-", "1", "self", ".", "Us", "=", "self", ".", "Ubg", "[", "0", "]", "-", "self", ".", "Ubg", "[", "1", "]", "self", ".", "F1", "=", "self", ".", "rd", "**", "-", "2", "/", "(", "1.", "+", "self", ".", "delta", ")", "self", ".", "F2", "=", "self", ".", "delta", "*", "self", ".", "F1", "self", ".", "S", "[", "0", ",", "0", "]", ",", "self", ".", "S", "[", "0", ",", "1", "]", "=", "-", "self", ".", "F1", ",", "self", ".", "F1", "self", ".", "S", "[", "1", ",", "0", "]", ",", "self", ".", "S", "[", "1", ",", "1", "]", "=", "self", ".", "F2", ",", "-", "self", ".", "F2", "else", ":", "for", "i", "in", "range", "(", "self", ".", "nz", ")", ":", "if", "i", "==", "0", ":", "self", ".", "S", "[", "i", ",", "i", "]", "=", "-", "self", ".", "f2", "/", "self", ".", "Hi", "[", "i", "]", "/", "self", ".", "gpi", "[", "i", "]", "self", ".", "S", "[", "i", ",", "i", "+", "1", "]", "=", "self", ".", "f2", "/", "self", ".", "Hi", "[", "i", "]", "/", "self", ".", "gpi", "[", "i", "]", "elif", "i", "==", "self", ".", "nz", "-", "1", ":", "self", ".", "S", "[", "i", ",", "i", "]", "=", "-", "self", ".", "f2", "/", "self", ".", "Hi", "[", "i", "]", "/", "self", ".", "gpi", "[", "i", "-", "1", "]", "self", ".", "S", "[", "i", ",", "i", "-", "1", "]", "=", "self", ".", "f2", "/", "self", ".", "Hi", "[", "i", "]", "/", "self", ".", "gpi", "[", "i", "-", "1", "]", "else", ":", "self", ".", "S", "[", "i", ",", "i", "-", "1", "]", "=", "self", ".", "f2", "/", "self", ".", "Hi", "[", "i", "]", "/", "self", ".", "gpi", "[", "i", "-", "1", "]", "self", ".", "S", "[", "i", ",", "i", "]", "=", "-", "(", "self", ".", "f2", "/", "self", ".", "Hi", "[", "i", "]", "/", "self", ".", "gpi", "[", "i", "]", "+", "self", ".", "f2", "/", "self", ".", "Hi", "[", "i", "]", "/", "self", ".", "gpi", "[", "i", "-", "1", "]", ")", "self", ".", "S", "[", "i", ",", "i", "+", "1", "]", "=", "self", ".", "f2", "/", "self", ".", "Hi", "[", "i", "]", "/", "self", ".", "gpi", "[", "i", "]" ]
Set up the stretching matrix
[ "Set", "up", "the", "stretching", "matrix" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/layered_model.py#L130-L162
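A quick check of the general (nz > 2) branch with hypothetical values for f2, Hi and gpi: each row of S sums to zero, so the barotropic (depth-independent) mode lies in its null space:

import numpy as np

f2 = 1e-8                            # hypothetical f0**2
Hi = np.array([500., 1000., 2500.])  # hypothetical layer thicknesses
gpi = np.array([0.01, 0.005])        # hypothetical reduced gravities
nz = 3

S = np.zeros((nz, nz))
for i in range(nz):
    if i == 0:
        S[i, i], S[i, i+1] = -f2/Hi[i]/gpi[i], f2/Hi[i]/gpi[i]
    elif i == nz - 1:
        S[i, i], S[i, i-1] = -f2/Hi[i]/gpi[i-1], f2/Hi[i]/gpi[i-1]
    else:
        S[i, i-1] = f2/Hi[i]/gpi[i-1]
        S[i, i] = -(f2/Hi[i]/gpi[i] + f2/Hi[i]/gpi[i-1])
        S[i, i+1] = f2/Hi[i]/gpi[i]

print(S.sum(axis=1))                 # all (numerically) zero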
pyqg/pyqg
pyqg/layered_model.py
LayeredModel._initialize_background
def _initialize_background(self): """Set up background state (zonal flow and PV gradients).""" self.H = self.Hi.sum() if np.asarray(self.U).ndim == 2: self.Ubg = self.U * np.ones((self.ny)) else: self.Ubg = np.expand_dims(self.U,axis=1) * np.ones((self.ny)) if not (self.nz==2): self.gpi = self.g*(self.rhoi[1:]-self.rhoi[:-1])/self.rhoi[:-1] self.f2gpi = (self.f2/self.gpi)[:,np.newaxis,np.newaxis] assert self.gpi.size == self.nz-1, "Invalid size of gpi" assert np.all(self.gpi>0.), "Buoyancy jump has negative sign!" assert self.Hi.size == self.nz, self.logger.error('size of Hi does not' + 'match number of vertical levels nz') assert self.rhoi.size == self.nz, self.logger.error('size of rhoi does not' + 'match number of vertical levels nz') assert self.Ubg.size == self.nz * self.ny, self.logger.error('size of Ubg does not' + 'match number of vertical levels nz') assert self.Vbg.size == self.nz, self.logger.error('size of Vbg does not' + 'match number of vertical levels nz') else: self.f2gpi = np.array(self.rd**-2 * (self.Hi[0]*self.Hi[1])/self.H)[np.newaxis] self._initialize_stretching_matrix() # the meridional PV gradients in each layer self.Qy = self.beta - np.dot(self.S, self.Ubg) + np.gradient(np.gradient(self.Ubg, self.dy, axis=1), self.dy, axis=1) self.Qx = np.dot(self.S,self.Vbg) # complex versions, multiplied by k, speeds up computations to precompute self.ikQy = self.Qy[:,:,np.newaxis]*1j*self.k self.ilQx = self.Qx[:,np.newaxis,np.newaxis]*1j*self.l
python
def _initialize_background(self): """Set up background state (zonal flow and PV gradients).""" self.H = self.Hi.sum() if np.asarray(self.U).ndim == 2: self.Ubg = self.U * np.ones((self.ny)) else: self.Ubg = np.expand_dims(self.U,axis=1) * np.ones((self.ny)) if not (self.nz==2): self.gpi = self.g*(self.rhoi[1:]-self.rhoi[:-1])/self.rhoi[:-1] self.f2gpi = (self.f2/self.gpi)[:,np.newaxis,np.newaxis] assert self.gpi.size == self.nz-1, "Invalid size of gpi" assert np.all(self.gpi>0.), "Buoyancy jump has negative sign!" assert self.Hi.size == self.nz, self.logger.error('size of Hi does not' + 'match number of vertical levels nz') assert self.rhoi.size == self.nz, self.logger.error('size of rhoi does not' + 'match number of vertical levels nz') assert self.Ubg.size == self.nz * self.ny, self.logger.error('size of Ubg does not' + 'match number of vertical levels nz') assert self.Vbg.size == self.nz, self.logger.error('size of Vbg does not' + 'match number of vertical levels nz') else: self.f2gpi = np.array(self.rd**-2 * (self.Hi[0]*self.Hi[1])/self.H)[np.newaxis] self._initialize_stretching_matrix() # the meridional PV gradients in each layer self.Qy = self.beta - np.dot(self.S, self.Ubg) + np.gradient(np.gradient(self.Ubg, self.dy, axis=1), self.dy, axis=1) self.Qx = np.dot(self.S,self.Vbg) # complex versions, multiplied by k, speeds up computations to precompute self.ikQy = self.Qy[:,:,np.newaxis]*1j*self.k self.ilQx = self.Qx[:,np.newaxis,np.newaxis]*1j*self.l
[ "def", "_initialize_background", "(", "self", ")", ":", "self", ".", "H", "=", "self", ".", "Hi", ".", "sum", "(", ")", "if", "np", ".", "asarray", "(", "self", ".", "U", ")", ".", "ndim", "==", "2", ":", "self", ".", "Ubg", "=", "self", ".", "U", "*", "np", ".", "ones", "(", "(", "self", ".", "ny", ")", ")", "else", ":", "self", ".", "Ubg", "=", "np", ".", "expand_dims", "(", "self", ".", "U", ",", "axis", "=", "1", ")", "*", "np", ".", "ones", "(", "(", "self", ".", "ny", ")", ")", "if", "not", "(", "self", ".", "nz", "==", "2", ")", ":", "self", ".", "gpi", "=", "self", ".", "g", "*", "(", "self", ".", "rhoi", "[", "1", ":", "]", "-", "self", ".", "rhoi", "[", ":", "-", "1", "]", ")", "/", "self", ".", "rhoi", "[", ":", "-", "1", "]", "self", ".", "f2gpi", "=", "(", "self", ".", "f2", "/", "self", ".", "gpi", ")", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "assert", "self", ".", "gpi", ".", "size", "==", "self", ".", "nz", "-", "1", ",", "\"Invalid size of gpi\"", "assert", "np", ".", "all", "(", "self", ".", "gpi", ">", "0.", ")", ",", "\"Buoyancy jump has negative sign!\"", "assert", "self", ".", "Hi", ".", "size", "==", "self", ".", "nz", ",", "self", ".", "logger", ".", "error", "(", "'size of Hi does not'", "+", "'match number of vertical levels nz'", ")", "assert", "self", ".", "rhoi", ".", "size", "==", "self", ".", "nz", ",", "self", ".", "logger", ".", "error", "(", "'size of rhoi does not'", "+", "'match number of vertical levels nz'", ")", "assert", "self", ".", "Ubg", ".", "size", "==", "self", ".", "nz", "*", "self", ".", "ny", ",", "self", ".", "logger", ".", "error", "(", "'size of Ubg does not'", "+", "'match number of vertical levels nz'", ")", "assert", "self", ".", "Vbg", ".", "size", "==", "self", ".", "nz", ",", "self", ".", "logger", ".", "error", "(", "'size of Vbg does not'", "+", "'match number of vertical levels nz'", ")", "else", ":", "self", ".", "f2gpi", "=", "np", ".", "array", "(", "self", ".", "rd", "**", "-", "2", "*", "(", "self", ".", "Hi", "[", "0", "]", "*", "self", ".", "Hi", "[", "1", "]", ")", "/", "self", ".", "H", ")", "[", "np", ".", "newaxis", "]", "self", ".", "_initialize_stretching_matrix", "(", ")", "# the meridional PV gradients in each layer", "self", ".", "Qy", "=", "self", ".", "beta", "-", "np", ".", "dot", "(", "self", ".", "S", ",", "self", ".", "Ubg", ")", "+", "np", ".", "gradient", "(", "np", ".", "gradient", "(", "self", ".", "Ubg", ",", "self", ".", "dy", ",", "axis", "=", "1", ")", ",", "self", ".", "dy", ",", "axis", "=", "1", ")", "self", ".", "Qx", "=", "np", ".", "dot", "(", "self", ".", "S", ",", "self", ".", "Vbg", ")", "# complex versions, multiplied by k, speeds up computations to precompute", "self", ".", "ikQy", "=", "self", ".", "Qy", "[", ":", ",", ":", ",", "np", ".", "newaxis", "]", "*", "1j", "*", "self", ".", "k", "self", ".", "ilQx", "=", "self", ".", "Qx", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "1j", "*", "self", ".", "l" ]
Set up background state (zonal flow and PV gradients).
[ "Set", "up", "background", "state", "(", "zonal", "flow", "and", "PV", "gradients", ")", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/layered_model.py#L164-L208
pyqg/pyqg
pyqg/layered_model.py
LayeredModel._calc_eddy_time
def _calc_eddy_time(self): """ estimate the eddy turn-over time in days """ ens = 0. for j in range(self.nz): ens = .5*self.Hi[j] * self.spec_var(self.wv2*self.ph[j]) return 2.*pi*np.sqrt( self.H / ens.sum() ) / 86400
python
def _calc_eddy_time(self): """ estimate the eddy turn-over time in days """ ens = 0. for j in range(self.nz): ens = .5*self.Hi[j] * self.spec_var(self.wv2*self.ph[j]) return 2.*pi*np.sqrt( self.H / ens.sum() ) / 86400
[ "def", "_calc_eddy_time", "(", "self", ")", ":", "ens", "=", "0.", "for", "j", "in", "range", "(", "self", ".", "nz", ")", ":", "ens", "=", ".5", "*", "self", ".", "Hi", "[", "j", "]", "*", "self", ".", "spec_var", "(", "self", ".", "wv2", "*", "self", ".", "ph", "[", "j", "]", ")", "return", "2.", "*", "pi", "*", "np", ".", "sqrt", "(", "self", ".", "H", "/", "ens", ".", "sum", "(", ")", ")", "/", "86400" ]
estimate the eddy turn-over time in days
[ "estimate", "the", "eddy", "turn", "-", "over", "time", "in", "days" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/layered_model.py#L255-L261
pyqg/pyqg
pyqg/layered_model.py
LayeredModel._initialize_model_diagnostics
def _initialize_model_diagnostics(self): """ Extra diagnostics for layered model """ self.add_diagnostic('entspec', description='barotropic enstrophy spectrum', function= (lambda self: np.abs((self.Hi[:,np.newaxis,np.newaxis]*self.qh).sum(axis=0))**2/self.H) ) self.add_diagnostic('KEspec_modal', description='modal KE spectra', function= (lambda self: self.wv2*(np.abs(self.phn)**2)/self.M**2 )) self.add_diagnostic('PEspec_modal', description='modal PE spectra', function= (lambda self: self.kdi2[1:,np.newaxis,np.newaxis]*(np.abs(self.phn[1:,:,:])**2)/self.M**2 )) self.add_diagnostic('APEspec', description='available potential energy spectrum', function= (lambda self: (self.f2gpi* np.abs(self.ph[:-1]-self.ph[1:])**2).sum(axis=0)/self.H)) self.add_diagnostic('KEflux', description='spectral divergence of flux of kinetic energy', function =(lambda self: (self.Hi[:,np.newaxis,np.newaxis]* (self.ph.conj()*self.Jpxi).real).sum(axis=0)/self.H)) self.add_diagnostic('APEflux', description='spectral divergence of flux of available potential energy', function =(lambda self: (self.Hi[:,np.newaxis,np.newaxis]* (self.ph.conj()*self.JSp).real).sum(axis=0)/self.H)) self.add_diagnostic('APEgenspec', description='the spectrum of the rate of generation of available potential energy', function =(lambda self: (self.Hi[:,np.newaxis,np.newaxis]* (self.Ubg[:,np.newaxis,np.newaxis]*self.k + self.Vbg[:,np.newaxis,np.newaxis]*self.l)* (1j*self.ph.conj()*self.Sph).real).sum(axis=0)/self.H)) self.add_diagnostic('ENSflux', description='barotropic enstrophy flux', function = (lambda self: (-self.Hi[:,np.newaxis,np.newaxis]* (self.qh.conj()*self.Jq).real).sum(axis=0)/self.H)) self.add_diagnostic('ENSgenspec', description='the spectrum of the rate of generation of barotropic enstrophy', function = (lambda self: -(self.Hi[:,np.newaxis,np.newaxis]*((self.ikQy - self.ilQx)*(self.Sph.conj()*self.ph)).real).sum(axis=0)/self.H))
python
def _initialize_model_diagnostics(self): """ Extra diagnostics for layered model """ self.add_diagnostic('entspec', description='barotropic enstrophy spectrum', function= (lambda self: np.abs((self.Hi[:,np.newaxis,np.newaxis]*self.qh).sum(axis=0))**2/self.H) ) self.add_diagnostic('KEspec_modal', description='modal KE spectra', function= (lambda self: self.wv2*(np.abs(self.phn)**2)/self.M**2 )) self.add_diagnostic('PEspec_modal', description='modal PE spectra', function= (lambda self: self.kdi2[1:,np.newaxis,np.newaxis]*(np.abs(self.phn[1:,:,:])**2)/self.M**2 )) self.add_diagnostic('APEspec', description='available potential energy spectrum', function= (lambda self: (self.f2gpi* np.abs(self.ph[:-1]-self.ph[1:])**2).sum(axis=0)/self.H)) self.add_diagnostic('KEflux', description='spectral divergence of flux of kinetic energy', function =(lambda self: (self.Hi[:,np.newaxis,np.newaxis]* (self.ph.conj()*self.Jpxi).real).sum(axis=0)/self.H)) self.add_diagnostic('APEflux', description='spectral divergence of flux of available potential energy', function =(lambda self: (self.Hi[:,np.newaxis,np.newaxis]* (self.ph.conj()*self.JSp).real).sum(axis=0)/self.H)) self.add_diagnostic('APEgenspec', description='the spectrum of the rate of generation of available potential energy', function =(lambda self: (self.Hi[:,np.newaxis,np.newaxis]* (self.Ubg[:,np.newaxis,np.newaxis]*self.k + self.Vbg[:,np.newaxis,np.newaxis]*self.l)* (1j*self.ph.conj()*self.Sph).real).sum(axis=0)/self.H)) self.add_diagnostic('ENSflux', description='barotropic enstrophy flux', function = (lambda self: (-self.Hi[:,np.newaxis,np.newaxis]* (self.qh.conj()*self.Jq).real).sum(axis=0)/self.H)) self.add_diagnostic('ENSgenspec', description='the spectrum of the rate of generation of barotropic enstrophy', function = (lambda self: -(self.Hi[:,np.newaxis,np.newaxis]*((self.ikQy - self.ilQx)*(self.Sph.conj()*self.ph)).real).sum(axis=0)/self.H))
[ "def", "_initialize_model_diagnostics", "(", "self", ")", ":", "self", ".", "add_diagnostic", "(", "'entspec'", ",", "description", "=", "'barotropic enstrophy spectrum'", ",", "function", "=", "(", "lambda", "self", ":", "np", ".", "abs", "(", "(", "self", ".", "Hi", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "self", ".", "qh", ")", ".", "sum", "(", "axis", "=", "0", ")", ")", "**", "2", "/", "self", ".", "H", ")", ")", "self", ".", "add_diagnostic", "(", "'KEspec_modal'", ",", "description", "=", "'modal KE spectra'", ",", "function", "=", "(", "lambda", "self", ":", "self", ".", "wv2", "*", "(", "np", ".", "abs", "(", "self", ".", "phn", ")", "**", "2", ")", "/", "self", ".", "M", "**", "2", ")", ")", "self", ".", "add_diagnostic", "(", "'PEspec_modal'", ",", "description", "=", "'modal PE spectra'", ",", "function", "=", "(", "lambda", "self", ":", "self", ".", "kdi2", "[", "1", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "(", "np", ".", "abs", "(", "self", ".", "phn", "[", "1", ":", ",", ":", ",", ":", "]", ")", "**", "2", ")", "/", "self", ".", "M", "**", "2", ")", ")", "self", ".", "add_diagnostic", "(", "'APEspec'", ",", "description", "=", "'available potential energy spectrum'", ",", "function", "=", "(", "lambda", "self", ":", "(", "self", ".", "f2gpi", "*", "np", ".", "abs", "(", "self", ".", "ph", "[", ":", "-", "1", "]", "-", "self", ".", "ph", "[", "1", ":", "]", ")", "**", "2", ")", ".", "sum", "(", "axis", "=", "0", ")", "/", "self", ".", "H", ")", ")", "self", ".", "add_diagnostic", "(", "'KEflux'", ",", "description", "=", "'spectral divergence of flux of kinetic energy'", ",", "function", "=", "(", "lambda", "self", ":", "(", "self", ".", "Hi", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "(", "self", ".", "ph", ".", "conj", "(", ")", "*", "self", ".", "Jpxi", ")", ".", "real", ")", ".", "sum", "(", "axis", "=", "0", ")", "/", "self", ".", "H", ")", ")", "self", ".", "add_diagnostic", "(", "'APEflux'", ",", "description", "=", "'spectral divergence of flux of available potential energy'", ",", "function", "=", "(", "lambda", "self", ":", "(", "self", ".", "Hi", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "(", "self", ".", "ph", ".", "conj", "(", ")", "*", "self", ".", "JSp", ")", ".", "real", ")", ".", "sum", "(", "axis", "=", "0", ")", "/", "self", ".", "H", ")", ")", "self", ".", "add_diagnostic", "(", "'APEgenspec'", ",", "description", "=", "'the spectrum of the rate of generation of available potential energy'", ",", "function", "=", "(", "lambda", "self", ":", "(", "self", ".", "Hi", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "(", "self", ".", "Ubg", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "self", ".", "k", "+", "self", ".", "Vbg", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "self", ".", "l", ")", "*", "(", "1j", "*", "self", ".", "ph", ".", "conj", "(", ")", "*", "self", ".", "Sph", ")", ".", "real", ")", ".", "sum", "(", "axis", "=", "0", ")", "/", "self", ".", "H", ")", ")", "self", ".", "add_diagnostic", "(", "'ENSflux'", ",", "description", "=", "'barotropic enstrophy flux'", ",", "function", "=", "(", "lambda", "self", ":", "(", "-", "self", ".", "Hi", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "(", "self", ".", "qh", ".", "conj", "(", ")", "*", "self", ".", "Jq", ")", ".", "real", ")", ".", "sum", "(", "axis", "=", 
"0", ")", "/", "self", ".", "H", ")", ")", "self", ".", "add_diagnostic", "(", "'ENSgenspec'", ",", "description", "=", "'the spectrum of the rate of generation of barotropic enstrophy'", ",", "function", "=", "(", "lambda", "self", ":", "-", "(", "self", ".", "Hi", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "*", "(", "(", "self", ".", "ikQy", "-", "self", ".", "ilQx", ")", "*", "(", "self", ".", "Sph", ".", "conj", "(", ")", "*", "self", ".", "ph", ")", ")", ".", "real", ")", ".", "sum", "(", "axis", "=", "0", ")", "/", "self", ".", "H", ")", ")" ]
Extra diagnostics for layered model
[ "Extra", "diagnostics", "for", "layered", "model" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/layered_model.py#L277-L327
pyqg/pyqg
pyqg/qg_model.py
QGModel._initialize_background
def _initialize_background(self): """Set up background state (zonal flow and PV gradients).""" # Background zonal flow (m/s): self.H = self.Hi.sum() self.set_U1U2(self.U1, self.U2) self.U = self.U1 - self.U2 # the F parameters self.F1 = self.rd**-2 / (1.+self.delta) self.F2 = self.delta*self.F1 # the meridional PV gradients in each layer # need to calculate actual PV gradient self.Qy1 = self.beta + self.F1*(self.U1 - self.U2) + np.gradient(np.gradient(self.U1,self.dy),self.dy) self.Qy2 = self.beta - self.F2*(self.U1 - self.U2) + np.gradient(np.gradient(self.U2,self.dy),self.dy) self.Qy = np.array([self.Qy1, self.Qy2]) # complex versions, multiplied by k, speeds up computations to precompute self.ikQy1 = self.Qy1[:,np.newaxis] * 1j * self.k self.ikQy2 = self.Qy2[:,np.newaxis] * 1j * self.k # vector version self.ikQy = np.vstack([self.ikQy1[np.newaxis,...], self.ikQy2[np.newaxis,...]]) self.ilQx = 0. # layer spacing self.del1 = self.delta/(self.delta+1.) self.del2 = (self.delta+1.)**-1
python
def _initialize_background(self): """Set up background state (zonal flow and PV gradients).""" # Background zonal flow (m/s): self.H = self.Hi.sum() self.set_U1U2(self.U1, self.U2) self.U = self.U1 - self.U2 # the F parameters self.F1 = self.rd**-2 / (1.+self.delta) self.F2 = self.delta*self.F1 # the meridional PV gradients in each layer # need to calculate actual PV gradient self.Qy1 = self.beta + self.F1*(self.U1 - self.U2) + np.gradient(np.gradient(self.U1,self.dy),self.dy) self.Qy2 = self.beta - self.F2*(self.U1 - self.U2) + np.gradient(np.gradient(self.U2,self.dy),self.dy) self.Qy = np.array([self.Qy1, self.Qy2]) # complex versions, multiplied by k, speeds up computations to precompute self.ikQy1 = self.Qy1[:,np.newaxis] * 1j * self.k self.ikQy2 = self.Qy2[:,np.newaxis] * 1j * self.k # vector version self.ikQy = np.vstack([self.ikQy1[np.newaxis,...], self.ikQy2[np.newaxis,...]]) self.ilQx = 0. # layer spacing self.del1 = self.delta/(self.delta+1.) self.del2 = (self.delta+1.)**-1
[ "def", "_initialize_background", "(", "self", ")", ":", "# Background zonal flow (m/s):", "self", ".", "H", "=", "self", ".", "Hi", ".", "sum", "(", ")", "self", ".", "set_U1U2", "(", "self", ".", "U1", ",", "self", ".", "U2", ")", "self", ".", "U", "=", "self", ".", "U1", "-", "self", ".", "U2", "# the F parameters", "self", ".", "F1", "=", "self", ".", "rd", "**", "-", "2", "/", "(", "1.", "+", "self", ".", "delta", ")", "self", ".", "F2", "=", "self", ".", "delta", "*", "self", ".", "F1", "# the meridional PV gradients in each layer", "# need to calculate actual PV gradient", "self", ".", "Qy1", "=", "self", ".", "beta", "+", "self", ".", "F1", "*", "(", "self", ".", "U1", "-", "self", ".", "U2", ")", "+", "np", ".", "gradient", "(", "np", ".", "gradient", "(", "self", ".", "U1", ",", "self", ".", "dy", ")", ",", "self", ".", "dy", ")", "self", ".", "Qy2", "=", "self", ".", "beta", "-", "self", ".", "F2", "*", "(", "self", ".", "U1", "-", "self", ".", "U2", ")", "+", "np", ".", "gradient", "(", "np", ".", "gradient", "(", "self", ".", "U2", ",", "self", ".", "dy", ")", ",", "self", ".", "dy", ")", "self", ".", "Qy", "=", "np", ".", "array", "(", "[", "self", ".", "Qy1", ",", "self", ".", "Qy2", "]", ")", "# complex versions, multiplied by k, speeds up computations to precompute", "self", ".", "ikQy1", "=", "self", ".", "Qy1", "[", ":", ",", "np", ".", "newaxis", "]", "*", "1j", "*", "self", ".", "k", "self", ".", "ikQy2", "=", "self", ".", "Qy2", "[", ":", ",", "np", ".", "newaxis", "]", "*", "1j", "*", "self", ".", "k", "# vector version", "self", ".", "ikQy", "=", "np", ".", "vstack", "(", "[", "self", ".", "ikQy1", "[", "np", ".", "newaxis", ",", "...", "]", ",", "self", ".", "ikQy2", "[", "np", ".", "newaxis", ",", "...", "]", "]", ")", "self", ".", "ilQx", "=", "0.", "# layer spacing", "self", ".", "del1", "=", "self", ".", "delta", "/", "(", "self", ".", "delta", "+", "1.", ")", "self", ".", "del2", "=", "(", "self", ".", "delta", "+", "1.", ")", "**", "-", "1" ]
Set up background state (zonal flow and PV gradients).
[ "Set", "up", "background", "state", "(", "zonal", "flow", "and", "PV", "gradients", ")", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/qg_model.py#L114-L142
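Read off the code above, the two-layer meridional PV gradients being assembled are (with the repeated np.gradient call playing the role of the discrete second derivative):

Q_{y,1} = \beta + F_1\,(U_1 - U_2) + \partial_y^2 U_1, \qquad
Q_{y,2} = \beta - F_2\,(U_1 - U_2) + \partial_y^2 U_2, \qquad
F_1 = \frac{1}{r_d^2\,(1 + \delta)}, \quad F_2 = \delta F_1.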
pyqg/pyqg
pyqg/qg_model.py
QGModel.set_q1q2
def set_q1q2(self, q1, q2, check=False): """Set upper and lower layer PV anomalies. Parameters ---------- q1 : array-like Upper layer PV anomaly in spatial coordinates. q2 : array-like Lower layer PV anomaly in spatial coordinates. """ self.set_q(np.vstack([q1[np.newaxis,:,:], q2[np.newaxis,:,:]])) #self.q[0] = q1 #self.q[1] = q2 # initialize spectral PV #self.qh = self.fft2(self.q) # check that it works if check: np.testing.assert_allclose(self.q1, q1) np.testing.assert_allclose(self.q1, self.ifft2(self.qh1))
python
def set_q1q2(self, q1, q2, check=False): """Set upper and lower layer PV anomalies. Parameters ---------- q1 : array-like Upper layer PV anomaly in spatial coordinates. q2 : array-like Lower layer PV anomaly in spatial coordinates. """ self.set_q(np.vstack([q1[np.newaxis,:,:], q2[np.newaxis,:,:]])) #self.q[0] = q1 #self.q[1] = q2 # initialize spectral PV #self.qh = self.fft2(self.q) # check that it works if check: np.testing.assert_allclose(self.q1, q1) np.testing.assert_allclose(self.q1, self.ifft2(self.qh1))
[ "def", "set_q1q2", "(", "self", ",", "q1", ",", "q2", ",", "check", "=", "False", ")", ":", "self", ".", "set_q", "(", "np", ".", "vstack", "(", "[", "q1", "[", "np", ".", "newaxis", ",", ":", ",", ":", "]", ",", "q2", "[", "np", ".", "newaxis", ",", ":", ",", ":", "]", "]", ")", ")", "#self.q[0] = q1", "#self.q[1] = q2", "# initialize spectral PV", "#self.qh = self.fft2(self.q)", "# check that it works", "if", "check", ":", "np", ".", "testing", ".", "assert_allclose", "(", "self", ".", "q1", ",", "q1", ")", "np", ".", "testing", ".", "assert_allclose", "(", "self", ".", "q1", ",", "self", ".", "ifft2", "(", "self", ".", "qh1", ")", ")" ]
Set upper and lower layer PV anomalies. Parameters ---------- q1 : array-like Upper layer PV anomaly in spatial coordinates. q2 : array-like Lower layer PV anomaly in spatial coordinates.
[ "Set", "upper", "and", "lower", "layer", "PV", "anomalies", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/qg_model.py#L170-L191
pyqg/pyqg
pyqg/qg_model.py
QGModel.set_U1U2
def set_U1U2(self, U1, U2): """Set background zonal flow. Parameters ---------- U1 : number Upper layer flow. Units: m/s U2 : number Lower layer flow. Units: m/s """ if len(np.shape(U1)) == 0: U1 = U1 * np.ones((self.ny)) if len(np.shape(U2)) == 0: U2 = U2 * np.ones((self.ny)) #self.Ubg = np.array([U1,U2])[:,np.newaxis,np.newaxis] self.U1 = U1 self.U2 = U2 self.Ubg = np.array([U1,U2])
python
def set_U1U2(self, U1, U2): """Set background zonal flow. Parameters ---------- U1 : number Upper layer flow. Units: m/s U2 : number Lower layer flow. Units: m/s """ if len(np.shape(U1)) == 0: U1 = U1 * np.ones((self.ny)) if len(np.shape(U2)) == 0: U2 = U2 * np.ones((self.ny)) #self.Ubg = np.array([U1,U2])[:,np.newaxis,np.newaxis] self.U1 = U1 self.U2 = U2 self.Ubg = np.array([U1,U2])
[ "def", "set_U1U2", "(", "self", ",", "U1", ",", "U2", ")", ":", "if", "len", "(", "np", ".", "shape", "(", "U1", ")", ")", "==", "0", ":", "U1", "=", "U1", "*", "np", ".", "ones", "(", "(", "self", ".", "ny", ")", ")", "if", "len", "(", "np", ".", "shape", "(", "U2", ")", ")", "==", "0", ":", "U2", "=", "U2", "*", "np", ".", "ones", "(", "(", "self", ".", "ny", ")", ")", "#self.Ubg = np.array([U1,U2])[:,np.newaxis,np.newaxis]", "self", ".", "U1", "=", "U1", "self", ".", "U2", "=", "U2", "self", ".", "Ubg", "=", "np", ".", "array", "(", "[", "U1", ",", "U2", "]", ")" ]
Set background zonal flow. Parameters ---------- U1 : number Upper layer flow. Units: m/s U2 : number Lower layer flow. Units: m/s
[ "Set", "background", "zonal", "flow", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/qg_model.py#L193-L211
pyqg/pyqg
pyqg/qg_model.py
QGModel._calc_eddy_time
def _calc_eddy_time(self): """ estimate the eddy turn-over time in days """ ens = .5*self.Hi[0] * self.spec_var(self.wv2*self.ph1) + \ .5*self.Hi[1] * self.spec_var(self.wv2*self.ph2) return 2.*pi*np.sqrt( self.H / ens ) / 86400
python
def _calc_eddy_time(self): """ estimate the eddy turn-over time in days """ ens = .5*self.Hi[0] * self.spec_var(self.wv2*self.ph1) + \ .5*self.Hi[1] * self.spec_var(self.wv2*self.ph2) return 2.*pi*np.sqrt( self.H / ens ) / 86400
[ "def", "_calc_eddy_time", "(", "self", ")", ":", "ens", "=", ".5", "*", "self", ".", "Hi", "[", "0", "]", "*", "self", ".", "spec_var", "(", "self", ".", "wv2", "*", "self", ".", "ph1", ")", "+", ".5", "*", "self", ".", "Hi", "[", "1", "]", "*", "self", ".", "spec_var", "(", "self", ".", "wv2", "*", "self", ".", "ph2", ")", "return", "2.", "*", "pi", "*", "np", ".", "sqrt", "(", "self", ".", "H", "/", "ens", ")", "/", "86400" ]
estimate the eddy turn-over time in days
[ "estimate", "the", "eddy", "turn", "-", "over", "time", "in", "days" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/qg_model.py#L228-L234
pyqg/pyqg
pyqg/qg_model.py
QGModel._initialize_model_diagnostics
def _initialize_model_diagnostics(self): """Extra diagnostics for two-layer model""" self.add_diagnostic('entspec', description='barotropic enstrophy spectrum', function= (lambda self: np.abs(self.del1*self.qh[0] + self.del2*self.qh[1])**2.) ) self.add_diagnostic('APEflux', description='spectral flux of available potential energy', function= (lambda self: self.rd**-2 * self.del1*self.del2 * np.real((self.ph[0]-self.ph[1])*np.conj(self.Jptpc)) ) ) self.add_diagnostic('KEflux', description='spectral flux of kinetic energy', function= (lambda self: np.real(self.del1*self.ph[0]*np.conj(self.Jpxi[0])) + np.real(self.del2*self.ph[1]*np.conj(self.Jpxi[1])) ) ) self.add_diagnostic('APEgenspec', description='spectrum of APE generation', function= (lambda self: self.U[:,np.newaxis] * self.rd**-2 * self.del1 * self.del2 * np.real(1j*self.k*(self.del1*self.ph[0] + self.del2*self.ph[1]) * np.conj(self.ph[0] - self.ph[1])) ) ) self.add_diagnostic('APEgen', description='total APE generation', function= (lambda self: self.U * self.rd**-2 * self.del1 * self.del2 * np.real((1j*self.k* (self.del1*self.ph[0] + self.del2*self.ph[1]) * np.conj(self.ph[0] - self.ph[1])).sum() +(1j*self.k[:,1:-2]* (self.del1*self.ph[0,:,1:-2] + self.del2*self.ph[1,:,1:-2]) * np.conj(self.ph[0,:,1:-2] - self.ph[1,:,1:-2])).sum()) / (self.M**2) ) )
python
def _initialize_model_diagnostics(self): """Extra diagnostics for two-layer model""" self.add_diagnostic('entspec', description='barotropic enstrophy spectrum', function= (lambda self: np.abs(self.del1*self.qh[0] + self.del2*self.qh[1])**2.) ) self.add_diagnostic('APEflux', description='spectral flux of available potential energy', function= (lambda self: self.rd**-2 * self.del1*self.del2 * np.real((self.ph[0]-self.ph[1])*np.conj(self.Jptpc)) ) ) self.add_diagnostic('KEflux', description='spectral flux of kinetic energy', function= (lambda self: np.real(self.del1*self.ph[0]*np.conj(self.Jpxi[0])) + np.real(self.del2*self.ph[1]*np.conj(self.Jpxi[1])) ) ) self.add_diagnostic('APEgenspec', description='spectrum of APE generation', function= (lambda self: self.U[:,np.newaxis] * self.rd**-2 * self.del1 * self.del2 * np.real(1j*self.k*(self.del1*self.ph[0] + self.del2*self.ph[1]) * np.conj(self.ph[0] - self.ph[1])) ) ) self.add_diagnostic('APEgen', description='total APE generation', function= (lambda self: self.U * self.rd**-2 * self.del1 * self.del2 * np.real((1j*self.k* (self.del1*self.ph[0] + self.del2*self.ph[1]) * np.conj(self.ph[0] - self.ph[1])).sum() +(1j*self.k[:,1:-2]* (self.del1*self.ph[0,:,1:-2] + self.del2*self.ph[1,:,1:-2]) * np.conj(self.ph[0,:,1:-2] - self.ph[1,:,1:-2])).sum()) / (self.M**2) ) )
[ "def", "_initialize_model_diagnostics", "(", "self", ")", ":", "self", ".", "add_diagnostic", "(", "'entspec'", ",", "description", "=", "'barotropic enstrophy spectrum'", ",", "function", "=", "(", "lambda", "self", ":", "np", ".", "abs", "(", "self", ".", "del1", "*", "self", ".", "qh", "[", "0", "]", "+", "self", ".", "del2", "*", "self", ".", "qh", "[", "1", "]", ")", "**", "2.", ")", ")", "self", ".", "add_diagnostic", "(", "'APEflux'", ",", "description", "=", "'spectral flux of available potential energy'", ",", "function", "=", "(", "lambda", "self", ":", "self", ".", "rd", "**", "-", "2", "*", "self", ".", "del1", "*", "self", ".", "del2", "*", "np", ".", "real", "(", "(", "self", ".", "ph", "[", "0", "]", "-", "self", ".", "ph", "[", "1", "]", ")", "*", "np", ".", "conj", "(", "self", ".", "Jptpc", ")", ")", ")", ")", "self", ".", "add_diagnostic", "(", "'KEflux'", ",", "description", "=", "'spectral flux of kinetic energy'", ",", "function", "=", "(", "lambda", "self", ":", "np", ".", "real", "(", "self", ".", "del1", "*", "self", ".", "ph", "[", "0", "]", "*", "np", ".", "conj", "(", "self", ".", "Jpxi", "[", "0", "]", ")", ")", "+", "np", ".", "real", "(", "self", ".", "del2", "*", "self", ".", "ph", "[", "1", "]", "*", "np", ".", "conj", "(", "self", ".", "Jpxi", "[", "1", "]", ")", ")", ")", ")", "self", ".", "add_diagnostic", "(", "'APEgenspec'", ",", "description", "=", "'spectrum of APE generation'", ",", "function", "=", "(", "lambda", "self", ":", "self", ".", "U", "[", ":", ",", "np", ".", "newaxis", "]", "*", "self", ".", "rd", "**", "-", "2", "*", "self", ".", "del1", "*", "self", ".", "del2", "*", "np", ".", "real", "(", "1j", "*", "self", ".", "k", "*", "(", "self", ".", "del1", "*", "self", ".", "ph", "[", "0", "]", "+", "self", ".", "del2", "*", "self", ".", "ph", "[", "1", "]", ")", "*", "np", ".", "conj", "(", "self", ".", "ph", "[", "0", "]", "-", "self", ".", "ph", "[", "1", "]", ")", ")", ")", ")", "self", ".", "add_diagnostic", "(", "'APEgen'", ",", "description", "=", "'total APE generation'", ",", "function", "=", "(", "lambda", "self", ":", "self", ".", "U", "*", "self", ".", "rd", "**", "-", "2", "*", "self", ".", "del1", "*", "self", ".", "del2", "*", "np", ".", "real", "(", "(", "1j", "*", "self", ".", "k", "*", "(", "self", ".", "del1", "*", "self", ".", "ph", "[", "0", "]", "+", "self", ".", "del2", "*", "self", ".", "ph", "[", "1", "]", ")", "*", "np", ".", "conj", "(", "self", ".", "ph", "[", "0", "]", "-", "self", ".", "ph", "[", "1", "]", ")", ")", ".", "sum", "(", ")", "+", "(", "1j", "*", "self", ".", "k", "[", ":", ",", "1", ":", "-", "2", "]", "*", "(", "self", ".", "del1", "*", "self", ".", "ph", "[", "0", ",", ":", ",", "1", ":", "-", "2", "]", "+", "self", ".", "del2", "*", "self", ".", "ph", "[", "1", ",", ":", ",", "1", ":", "-", "2", "]", ")", "*", "np", ".", "conj", "(", "self", ".", "ph", "[", "0", ",", ":", ",", "1", ":", "-", "2", "]", "-", "self", ".", "ph", "[", "1", ",", ":", ",", "1", ":", "-", "2", "]", ")", ")", ".", "sum", "(", ")", ")", "/", "(", "self", ".", "M", "**", "2", ")", ")", ")" ]
Extra diagnostics for two-layer model
[ "Extra", "diagnostics", "for", "two", "-", "layer", "model" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/qg_model.py#L246-L286
pyqg/pyqg
pyqg/bt_model.py
BTModel._initialize_inversion_matrix
def _initialize_inversion_matrix(self): """ the inversion """ # The bt model is diagonal. The inversion is simply qh = -kappa**2 ph self.a = -(self.wv2i+self.kd2)[np.newaxis, np.newaxis, :, :]
python
def _initialize_inversion_matrix(self): """ the inversion """ # The bt model is diagonal. The inversion is simply qh = -kappa**2 ph self.a = -(self.wv2i+self.kd2)[np.newaxis, np.newaxis, :, :]
[ "def", "_initialize_inversion_matrix", "(", "self", ")", ":", "# The bt model is diagonal. The inversion is simply qh = -kappa**2 ph", "self", ".", "a", "=", "-", "(", "self", ".", "wv2i", "+", "self", ".", "kd2", ")", "[", "np", ".", "newaxis", ",", "np", ".", "newaxis", ",", ":", ",", ":", "]" ]
the inversion
[ "the", "inversion" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/bt_model.py#L75-L78
pyqg/pyqg
pyqg/bt_model.py
BTModel._calc_eddy_time
def _calc_eddy_time(self): """ estimate the eddy turn-over time in days """ ens = .5*self.H * self.spec_var(self.wv2*self.ph) return 2.*pi*np.sqrt( self.H / ens ) / year
python
def _calc_eddy_time(self): """ estimate the eddy turn-over time in days """ ens = .5*self.H * self.spec_var(self.wv2*self.ph) return 2.*pi*np.sqrt( self.H / ens ) / year
[ "def", "_calc_eddy_time", "(", "self", ")", ":", "ens", "=", ".5", "*", "self", ".", "H", "*", "self", ".", "spec_var", "(", "self", ".", "wv2", "*", "self", ".", "ph", ")", "return", "2.", "*", "pi", "*", "np", ".", "sqrt", "(", "self", ".", "H", "/", "ens", ")", "/", "year" ]
estimate the eddy turn-over time in days
[ "estimate", "the", "eddy", "turn", "-", "over", "time", "in", "days" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/bt_model.py#L123-L126
pyqg/pyqg
pyqg/point_vortex.py
PointVortexArray2D.calc_uv
def calc_uv(self, x, y, prev=False): """Calculate velocity at x and y points due to vortex velocity field. Assumes x and y are vortex positions and are ordered the same as x0 and y0. The ordering is used to neglect the vortex self interaction.""" assert len(x) == self.N assert len(y) == self.N u = np.zeros(self.N, self.x.dtype) v = np.zeros(self.N, self.y.dtype) for n in xrange(self.N): # don't include self interaction if prev: x0 = self.xprev[np.r_[:n,n+1:self.N]] y0 = self.yprev[np.r_[:n,n+1:self.N]] else: x0 = self.x[np.r_[:n,n+1:self.N]] y0 = self.y[np.r_[:n,n+1:self.N]] s0 = self.s[np.r_[:n,n+1:self.N]] u0, v0 = self.uv_at_xy(x[n], y[n], x0, y0, s0) u[n] = u0.sum() v[n] = v0.sum() return u, v
python
def calc_uv(self, x, y, prev=False): """Calculate velocity at x and y points due to vortex velocity field. Assumes x and y are vortex positions and are ordered the same as x0 and y0. The ordering is used to neglect the vortex self interaction.""" assert len(x) == self.N assert len(y) == self.N u = np.zeros(self.N, self.x.dtype) v = np.zeros(self.N, self.y.dtype) for n in xrange(self.N): # don't include self interaction if prev: x0 = self.xprev[np.r_[:n,n+1:self.N]] y0 = self.yprev[np.r_[:n,n+1:self.N]] else: x0 = self.x[np.r_[:n,n+1:self.N]] y0 = self.y[np.r_[:n,n+1:self.N]] s0 = self.s[np.r_[:n,n+1:self.N]] u0, v0 = self.uv_at_xy(x[n], y[n], x0, y0, s0) u[n] = u0.sum() v[n] = v0.sum() return u, v
[ "def", "calc_uv", "(", "self", ",", "x", ",", "y", ",", "prev", "=", "False", ")", ":", "assert", "len", "(", "x", ")", "==", "self", ".", "N", "assert", "len", "(", "y", ")", "==", "self", ".", "N", "u", "=", "np", ".", "zeros", "(", "self", ".", "N", ",", "self", ".", "x", ".", "dtype", ")", "v", "=", "np", ".", "zeros", "(", "self", ".", "N", ",", "self", ".", "y", ".", "dtype", ")", "for", "n", "in", "xrange", "(", "self", ".", "N", ")", ":", "# don't include self interaction", "if", "prev", ":", "x0", "=", "self", ".", "xprev", "[", "np", ".", "r_", "[", ":", "n", ",", "n", "+", "1", ":", "self", ".", "N", "]", "]", "y0", "=", "self", ".", "yprev", "[", "np", ".", "r_", "[", ":", "n", ",", "n", "+", "1", ":", "self", ".", "N", "]", "]", "else", ":", "x0", "=", "self", ".", "x", "[", "np", ".", "r_", "[", ":", "n", ",", "n", "+", "1", ":", "self", ".", "N", "]", "]", "y0", "=", "self", ".", "y", "[", "np", ".", "r_", "[", ":", "n", ",", "n", "+", "1", ":", "self", ".", "N", "]", "]", "s0", "=", "self", ".", "s", "[", "np", ".", "r_", "[", ":", "n", ",", "n", "+", "1", ":", "self", ".", "N", "]", "]", "u0", ",", "v0", "=", "self", ".", "uv_at_xy", "(", "x", "[", "n", "]", ",", "y", "[", "n", "]", ",", "x0", ",", "y0", ",", "s0", ")", "u", "[", "n", "]", "=", "u0", ".", "sum", "(", ")", "v", "[", "n", "]", "=", "v0", ".", "sum", "(", ")", "return", "u", ",", "v" ]
Calculate velocity at x and y points due to vortex velocity field. Assumes x and y are vortex positions and are ordered the same as x0 and y0. The ordering is used to neglect to vortex self interaction.
[ "Calculate", "velocity", "at", "x", "and", "y", "points", "due", "to", "vortex", "velocity", "field", ".", "Assumes", "x", "and", "y", "are", "vortex", "positions", "and", "are", "ordered", "the", "same", "as", "x0", "and", "y0", ".", "The", "ordering", "is", "used", "to", "neglect", "to", "vortex", "self", "interaction", "." ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/point_vortex.py#L32-L52
pyqg/pyqg
pyqg/point_vortex.py
PointVortexArray2D.uv_at_xy
def uv_at_xy(self, x, y, x0, y0, s0): """Returns two arrays of u, v""" dx, dy = self.distance(x0, y0, x, y) #print 'dx, dy:', dx, dy rr2 = (dx**2 + dy**2)**-1 u = - s0 * dy * r_twopi * rr2 v = s0 * dx * r_twopi * rr2 #print 'u, v', u, v return u, v
python
def uv_at_xy(self, x, y, x0, y0, s0): """Returns two arrays of u, v""" dx, dy = self.distance(x0, y0, x, y) #print 'dx, dy:', dx, dy rr2 = (dx**2 + dy**2)**-1 u = - s0 * dy * r_twopi * rr2 v = s0 * dx * r_twopi * rr2 #print 'u, v', u, v return u, v
[ "def", "uv_at_xy", "(", "self", ",", "x", ",", "y", ",", "x0", ",", "y0", ",", "s0", ")", ":", "dx", ",", "dy", "=", "self", ".", "distance", "(", "x0", ",", "y0", ",", "x", ",", "y", ")", "#print 'dx, dy:', dx, dy", "rr2", "=", "(", "dx", "**", "2", "+", "dy", "**", "2", ")", "**", "-", "1", "u", "=", "-", "s0", "*", "dy", "*", "r_twopi", "*", "rr2", "v", "=", "s0", "*", "dx", "*", "r_twopi", "*", "rr2", "#print 'u, v', u, v", "return", "u", ",", "v" ]
Returns two arrays of u, v
[ "Returns", "two", "arrays", "of", "u", "v" ]
train
https://github.com/pyqg/pyqg/blob/4f41584a12bcbf8657785b8cb310fa5065ecabd1/pyqg/point_vortex.py#L54-L62
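uv_at_xy evaluates the standard 2-D point-vortex velocity field; assuming r_twopi is 1/(2*pi) and s_0 is the vortex strength (circulation), the code computes

u = -\frac{s_0\,\Delta y}{2\pi\,r^2}, \qquad v = \frac{s_0\,\Delta x}{2\pi\,r^2}, \qquad r^2 = \Delta x^2 + \Delta y^2.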
brentp/interlap
interlap.py
reduce
def reduce(args): """ >>> reduce([(2, 4), (4, 9)]) [(2, 4), (4, 9)] >>> reduce([(2, 6), (4, 10)]) [(2, 10)] """ if len(args) < 2: return args args.sort() ret = [args[0]] for next_i, (s, e) in enumerate(args, start=1): if next_i == len(args): ret[-1] = ret[-1][0], max(ret[-1][1], e) break ns, ne = args[next_i] if e > ns or ret[-1][1] > ns: ret[-1] = ret[-1][0], max(e, ne, ret[-1][1]) else: ret.append((ns, ne)) return ret
python
def reduce(args): """ >>> reduce([(2, 4), (4, 9)]) [(2, 4), (4, 9)] >>> reduce([(2, 6), (4, 10)]) [(2, 10)] """ if len(args) < 2: return args args.sort() ret = [args[0]] for next_i, (s, e) in enumerate(args, start=1): if next_i == len(args): ret[-1] = ret[-1][0], max(ret[-1][1], e) break ns, ne = args[next_i] if e > ns or ret[-1][1] > ns: ret[-1] = ret[-1][0], max(e, ne, ret[-1][1]) else: ret.append((ns, ne)) return ret
[ "def", "reduce", "(", "args", ")", ":", "if", "len", "(", "args", ")", "<", "2", ":", "return", "args", "args", ".", "sort", "(", ")", "ret", "=", "[", "args", "[", "0", "]", "]", "for", "next_i", ",", "(", "s", ",", "e", ")", "in", "enumerate", "(", "args", ",", "start", "=", "1", ")", ":", "if", "next_i", "==", "len", "(", "args", ")", ":", "ret", "[", "-", "1", "]", "=", "ret", "[", "-", "1", "]", "[", "0", "]", ",", "max", "(", "ret", "[", "-", "1", "]", "[", "1", "]", ",", "e", ")", "break", "ns", ",", "ne", "=", "args", "[", "next_i", "]", "if", "e", ">", "ns", "or", "ret", "[", "-", "1", "]", "[", "1", "]", ">", "ns", ":", "ret", "[", "-", "1", "]", "=", "ret", "[", "-", "1", "]", "[", "0", "]", ",", "max", "(", "e", ",", "ne", ",", "ret", "[", "-", "1", "]", "[", "1", "]", ")", "else", ":", "ret", ".", "append", "(", "(", "ns", ",", "ne", ")", ")", "return", "ret" ]
>>> reduce([(2, 4), (4, 9)]) [(2, 4), (4, 9)] >>> reduce([(2, 6), (4, 10)]) [(2, 10)]
[ ">>>", "reduce", "(", "[", "(", "2", "4", ")", "(", "4", "9", ")", "]", ")", "[", "(", "2", "4", ")", "(", "4", "9", ")", "]" ]
train
https://github.com/brentp/interlap/blob/3c4a5923c97a5d9a11571e0c9ea5bb7ea4e784ee/interlap.py#L224-L245
brentp/interlap
interlap.py
InterLap.add
def add(self, ranges): r"""Add a single (or many) [start, end, \*] item to the tree.""" if len(ranges) and isinstance(ranges[0], int_types): ranges = [ranges] iset = self._iset self._maxlen = max(self._maxlen, max(r[1] - r[0] + 1 for r in ranges)) if len(ranges) > 30 or len(iset) < len(ranges): iset.extend(ranges) iset.sort() else: for o in ranges: iset.insert(binsearch_left_start(iset, o[0], 0, len(iset)), o)
python
def add(self, ranges): r"""Add a single (or many) [start, end, \*] item to the tree.""" if len(ranges) and isinstance(ranges[0], int_types): ranges = [ranges] iset = self._iset self._maxlen = max(self._maxlen, max(r[1] - r[0] + 1 for r in ranges)) if len(ranges) > 30 or len(iset) < len(ranges): iset.extend(ranges) iset.sort() else: for o in ranges: iset.insert(binsearch_left_start(iset, o[0], 0, len(iset)), o)
[ "def", "add", "(", "self", ",", "ranges", ")", ":", "if", "len", "(", "ranges", ")", "and", "isinstance", "(", "ranges", "[", "0", "]", ",", "int_types", ")", ":", "ranges", "=", "[", "ranges", "]", "iset", "=", "self", ".", "_iset", "self", ".", "_maxlen", "=", "max", "(", "self", ".", "_maxlen", ",", "max", "(", "r", "[", "1", "]", "-", "r", "[", "0", "]", "+", "1", "for", "r", "in", "ranges", ")", ")", "if", "len", "(", "ranges", ")", ">", "30", "or", "len", "(", "iset", ")", "<", "len", "(", "ranges", ")", ":", "iset", ".", "extend", "(", "ranges", ")", "iset", ".", "sort", "(", ")", "else", ":", "for", "o", "in", "ranges", ":", "iset", ".", "insert", "(", "binsearch_left_start", "(", "iset", ",", "o", "[", "0", "]", ",", "0", ",", "len", "(", "iset", ")", ")", ",", "o", ")" ]
r"""Add a single (or many) [start, end, \*] item to the tree.
[ "r", "Add", "a", "single", "(", "or", "many", ")", "[", "start", "end", "\\", "*", "]", "item", "to", "the", "tree", "." ]
train
https://github.com/brentp/interlap/blob/3c4a5923c97a5d9a11571e0c9ea5bb7ea4e784ee/interlap.py#L133-L145
brentp/interlap
interlap.py
InterLap.find
def find(self, other): """Return an iterable of elements that overlap other in the tree.""" iset = self._iset l = binsearch_left_start(iset, other[0] - self._maxlen, 0, len(iset)) r = binsearch_right_end(iset, other[1], 0, len(iset)) iopts = iset[l:r] iiter = (s for s in iopts if s[0] <= other[1] and s[1] >= other[0]) for o in iiter: yield o
python
def find(self, other): """Return an iterable of elements that overlap other in the tree.""" iset = self._iset l = binsearch_left_start(iset, other[0] - self._maxlen, 0, len(iset)) r = binsearch_right_end(iset, other[1], 0, len(iset)) iopts = iset[l:r] iiter = (s for s in iopts if s[0] <= other[1] and s[1] >= other[0]) for o in iiter: yield o
[ "def", "find", "(", "self", ",", "other", ")", ":", "iset", "=", "self", ".", "_iset", "l", "=", "binsearch_left_start", "(", "iset", ",", "other", "[", "0", "]", "-", "self", ".", "_maxlen", ",", "0", ",", "len", "(", "iset", ")", ")", "r", "=", "binsearch_right_end", "(", "iset", ",", "other", "[", "1", "]", ",", "0", ",", "len", "(", "iset", ")", ")", "iopts", "=", "iset", "[", "l", ":", "r", "]", "iiter", "=", "(", "s", "for", "s", "in", "iopts", "if", "s", "[", "0", "]", "<=", "other", "[", "1", "]", "and", "s", "[", "1", "]", ">=", "other", "[", "0", "]", ")", "for", "o", "in", "iiter", ":", "yield", "o" ]
Return an iterable of elements that overlap other in the tree.
[ "Return", "an", "iterable", "of", "elements", "that", "overlap", "other", "in", "the", "tree", "." ]
train
https://github.com/brentp/interlap/blob/3c4a5923c97a5d9a11571e0c9ea5bb7ea4e784ee/interlap.py#L153-L160
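Continuing the sketch above: `find` narrows the candidates with two binary searches (the left bound widened by the cached `_maxlen`), then filters for genuine overlap with inclusive endpoints.

```python
from interlap import InterLap

inter = InterLap()
inter.add([(10, 20), (15, 30), (40, 50)])
print(list(inter.find((18, 42))))  # [(10, 20), (15, 30), (40, 50)]
print(list(inter.find((31, 39))))  # [] -- nothing overlaps the gap
```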
gumblex/zhconv
zhconv/zhconv.py
loaddict
def loaddict(filename=DICTIONARY): """ Load the dictionary from a specific JSON file. """ global zhcdicts if zhcdicts: return if filename == _DEFAULT_DICT: zhcdicts = json.loads(get_module_res(filename).read().decode('utf-8')) else: with open(filename, 'rb') as f: zhcdicts = json.loads(f.read().decode('utf-8')) zhcdicts['SIMPONLY'] = frozenset(zhcdicts['SIMPONLY']) zhcdicts['TRADONLY'] = frozenset(zhcdicts['TRADONLY'])
python
def loaddict(filename=DICTIONARY): """ Load the dictionary from a specific JSON file. """ global zhcdicts if zhcdicts: return if filename == _DEFAULT_DICT: zhcdicts = json.loads(get_module_res(filename).read().decode('utf-8')) else: with open(filename, 'rb') as f: zhcdicts = json.loads(f.read().decode('utf-8')) zhcdicts['SIMPONLY'] = frozenset(zhcdicts['SIMPONLY']) zhcdicts['TRADONLY'] = frozenset(zhcdicts['TRADONLY'])
[ "def", "loaddict", "(", "filename", "=", "DICTIONARY", ")", ":", "global", "zhcdicts", "if", "zhcdicts", ":", "return", "if", "filename", "==", "_DEFAULT_DICT", ":", "zhcdicts", "=", "json", ".", "loads", "(", "get_module_res", "(", "filename", ")", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", "else", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "zhcdicts", "=", "json", ".", "loads", "(", "f", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", "zhcdicts", "[", "'SIMPONLY'", "]", "=", "frozenset", "(", "zhcdicts", "[", "'SIMPONLY'", "]", ")", "zhcdicts", "[", "'TRADONLY'", "]", "=", "frozenset", "(", "zhcdicts", "[", "'TRADONLY'", "]", ")" ]
Load the dictionary from a specific JSON file.
[ "Load", "the", "dictionary", "from", "a", "specific", "JSON", "file", "." ]
train
https://github.com/gumblex/zhconv/blob/925c0f9494f3439bc05526e7e89bb5f0ab3d185e/zhconv/zhconv.py#L68-L81
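Because of the `if zhcdicts: return` guard, `loaddict` is effectively run-once: a custom dictionary path only takes effect on the very first call. A minimal sketch (the JSON file name below is hypothetical):

```python
from zhconv.zhconv import loaddict

loaddict()                   # first call parses and caches the bundled dictionary
loaddict('my_zhcdict.json')  # no-op: the module-level cache is already filled
```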
gumblex/zhconv
zhconv/zhconv.py
getdict
def getdict(locale): """ Generate or get conversion dict cache for a certain locale. Dictionaries are loaded on demand. """ global zhcdicts, dict_zhcn, dict_zhsg, dict_zhtw, dict_zhhk, pfsdict if zhcdicts is None: loaddict(DICTIONARY) if locale == 'zh-cn': if dict_zhcn: got = dict_zhcn else: dict_zhcn = zhcdicts['zh2Hans'].copy() dict_zhcn.update(zhcdicts['zh2CN']) got = dict_zhcn elif locale == 'zh-tw': if dict_zhtw: got = dict_zhtw else: dict_zhtw = zhcdicts['zh2Hant'].copy() dict_zhtw.update(zhcdicts['zh2TW']) got = dict_zhtw elif locale == 'zh-hk' or locale == 'zh-mo': if dict_zhhk: got = dict_zhhk else: dict_zhhk = zhcdicts['zh2Hant'].copy() dict_zhhk.update(zhcdicts['zh2HK']) got = dict_zhhk elif locale == 'zh-sg' or locale == 'zh-my': if dict_zhsg: got = dict_zhsg else: dict_zhsg = zhcdicts['zh2Hans'].copy() dict_zhsg.update(zhcdicts['zh2SG']) got = dict_zhsg elif locale == 'zh-hans': got = zhcdicts['zh2Hans'] elif locale == 'zh-hant': got = zhcdicts['zh2Hant'] else: got = {} if locale not in pfsdict: pfsdict[locale] = getpfset(got) return got
python
def getdict(locale): """ Generate or get conversion dict cache for a certain locale. Dictionaries are loaded on demand. """ global zhcdicts, dict_zhcn, dict_zhsg, dict_zhtw, dict_zhhk, pfsdict if zhcdicts is None: loaddict(DICTIONARY) if locale == 'zh-cn': if dict_zhcn: got = dict_zhcn else: dict_zhcn = zhcdicts['zh2Hans'].copy() dict_zhcn.update(zhcdicts['zh2CN']) got = dict_zhcn elif locale == 'zh-tw': if dict_zhtw: got = dict_zhtw else: dict_zhtw = zhcdicts['zh2Hant'].copy() dict_zhtw.update(zhcdicts['zh2TW']) got = dict_zhtw elif locale == 'zh-hk' or locale == 'zh-mo': if dict_zhhk: got = dict_zhhk else: dict_zhhk = zhcdicts['zh2Hant'].copy() dict_zhhk.update(zhcdicts['zh2HK']) got = dict_zhhk elif locale == 'zh-sg' or locale == 'zh-my': if dict_zhsg: got = dict_zhsg else: dict_zhsg = zhcdicts['zh2Hans'].copy() dict_zhsg.update(zhcdicts['zh2SG']) got = dict_zhsg elif locale == 'zh-hans': got = zhcdicts['zh2Hans'] elif locale == 'zh-hant': got = zhcdicts['zh2Hant'] else: got = {} if locale not in pfsdict: pfsdict[locale] = getpfset(got) return got
[ "def", "getdict", "(", "locale", ")", ":", "global", "zhcdicts", ",", "dict_zhcn", ",", "dict_zhsg", ",", "dict_zhtw", ",", "dict_zhhk", ",", "pfsdict", "if", "zhcdicts", "is", "None", ":", "loaddict", "(", "DICTIONARY", ")", "if", "locale", "==", "'zh-cn'", ":", "if", "dict_zhcn", ":", "got", "=", "dict_zhcn", "else", ":", "dict_zhcn", "=", "zhcdicts", "[", "'zh2Hans'", "]", ".", "copy", "(", ")", "dict_zhcn", ".", "update", "(", "zhcdicts", "[", "'zh2CN'", "]", ")", "got", "=", "dict_zhcn", "elif", "locale", "==", "'zh-tw'", ":", "if", "dict_zhtw", ":", "got", "=", "dict_zhtw", "else", ":", "dict_zhtw", "=", "zhcdicts", "[", "'zh2Hant'", "]", ".", "copy", "(", ")", "dict_zhtw", ".", "update", "(", "zhcdicts", "[", "'zh2TW'", "]", ")", "got", "=", "dict_zhtw", "elif", "locale", "==", "'zh-hk'", "or", "locale", "==", "'zh-mo'", ":", "if", "dict_zhhk", ":", "got", "=", "dict_zhhk", "else", ":", "dict_zhhk", "=", "zhcdicts", "[", "'zh2Hant'", "]", ".", "copy", "(", ")", "dict_zhhk", ".", "update", "(", "zhcdicts", "[", "'zh2HK'", "]", ")", "got", "=", "dict_zhhk", "elif", "locale", "==", "'zh-sg'", "or", "locale", "==", "'zh-my'", ":", "if", "dict_zhsg", ":", "got", "=", "dict_zhsg", "else", ":", "dict_zhsg", "=", "zhcdicts", "[", "'zh2Hans'", "]", ".", "copy", "(", ")", "dict_zhsg", ".", "update", "(", "zhcdicts", "[", "'zh2SG'", "]", ")", "got", "=", "dict_zhsg", "elif", "locale", "==", "'zh-hans'", ":", "got", "=", "zhcdicts", "[", "'zh2Hans'", "]", "elif", "locale", "==", "'zh-hant'", ":", "got", "=", "zhcdicts", "[", "'zh2Hant'", "]", "else", ":", "got", "=", "{", "}", "if", "locale", "not", "in", "pfsdict", ":", "pfsdict", "[", "locale", "]", "=", "getpfset", "(", "got", ")", "return", "got" ]
Generate or get conversion dict cache for a certain locale. Dictionaries are loaded on demand.
[ "Generate", "or", "get", "conversion", "dict", "cache", "for", "a", "certain", "locale", ".", "Dictionaries", "are", "loaded", "on", "demand", "." ]
train
https://github.com/gumblex/zhconv/blob/925c0f9494f3439bc05526e7e89bb5f0ab3d185e/zhconv/zhconv.py#L83-L127
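A hedged illustration of the lazy per-locale caching above: the first lookup builds and stores the merged dict, later lookups return the same object, and unknown locales fall through to an empty mapping.

```python
from zhconv.zhconv import getdict

d1 = getdict('zh-cn')  # builds zh2Hans merged with zh2CN and caches it
d2 = getdict('zh-cn')  # served from the dict_zhcn cache
assert d1 is d2
assert getdict('xx') == {}  # unrecognized locale
```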
gumblex/zhconv
zhconv/zhconv.py
issimp
def issimp(s, full=False): """ Detect whether text is Simplified Chinese or Traditional Chinese. Returns True for Simplified; False for Traditional; None for unknown. If full=False, it returns once the first simplified- or traditional-only character is encountered, so it's for quick and rough identification; else, it compares the counts and returns the most likely one. Use `is` (True/False/None) to check the result. `s` must be unicode (Python 2) or str (Python 3), or you'll get None. """ if zhcdicts is None: loaddict(DICTIONARY) simp, trad = 0, 0 if full: for ch in s: if ch in zhcdicts['SIMPONLY']: simp += 1 elif ch in zhcdicts['TRADONLY']: trad += 1 if simp > trad: return True elif simp < trad: return False else: return None else: for ch in s: if ch in zhcdicts['SIMPONLY']: return True elif ch in zhcdicts['TRADONLY']: return False return None
python
def issimp(s, full=False): """ Detect whether text is Simplified Chinese or Traditional Chinese. Returns True for Simplified; False for Traditional; None for unknown. If full=False, it returns once the first simplified- or traditional-only character is encountered, so it's for quick and rough identification; else, it compares the counts and returns the most likely one. Use `is` (True/False/None) to check the result. `s` must be unicode (Python 2) or str (Python 3), or you'll get None. """ if zhcdicts is None: loaddict(DICTIONARY) simp, trad = 0, 0 if full: for ch in s: if ch in zhcdicts['SIMPONLY']: simp += 1 elif ch in zhcdicts['TRADONLY']: trad += 1 if simp > trad: return True elif simp < trad: return False else: return None else: for ch in s: if ch in zhcdicts['SIMPONLY']: return True elif ch in zhcdicts['TRADONLY']: return False return None
[ "def", "issimp", "(", "s", ",", "full", "=", "False", ")", ":", "if", "zhcdicts", "is", "None", ":", "loaddict", "(", "DICTIONARY", ")", "simp", ",", "trad", "=", "0", ",", "0", "if", "full", ":", "for", "ch", "in", "s", ":", "if", "ch", "in", "zhcdicts", "[", "'SIMPONLY'", "]", ":", "simp", "+=", "1", "elif", "ch", "in", "zhcdicts", "[", "'TRADONLY'", "]", ":", "trad", "+=", "1", "if", "simp", ">", "trad", ":", "return", "True", "elif", "simp", "<", "trad", ":", "return", "False", "else", ":", "return", "None", "else", ":", "for", "ch", "in", "s", ":", "if", "ch", "in", "zhcdicts", "[", "'SIMPONLY'", "]", ":", "return", "True", "elif", "ch", "in", "zhcdicts", "[", "'TRADONLY'", "]", ":", "return", "False", "return", "None" ]
Detect whether text is Simplified Chinese or Traditional Chinese. Returns True for Simplified; False for Traditional; None for unknown. If full=False, it returns once the first simplified- or traditional-only character is encountered, so it's for quick and rough identification; else, it compares the counts and returns the most likely one. Use `is` (True/False/None) to check the result. `s` must be unicode (Python 2) or str (Python 3), or you'll get None.
[ "Detect", "whether", "text", "is", "Simplified", "Chinese", "or", "Traditional", "Chinese", ".", "Returns", "True", "for", "Simplified", ";", "False", "for", "Traditional", ";", "None", "for", "unknown", ".", "If", "full", "=", "False", "it", "returns", "once", "the", "first", "simplified", "-", "or", "traditional", "-", "only", "character", "is", "encountered", "so", "it", "s", "for", "quick", "and", "rough", "identification", ";", "else", "it", "compares", "the", "counts", "and", "returns", "the", "most", "likely", "one", ".", "Use", "is", "(", "True", "/", "False", "/", "None", ")", "to", "check", "the", "result", "." ]
train
https://github.com/gumblex/zhconv/blob/925c0f9494f3439bc05526e7e89bb5f0ab3d185e/zhconv/zhconv.py#L136-L168
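A sketch of both modes; the expected results assume the usual bundled character sets (广 is simplified-only, 廣 traditional-only, 州 and 中文 are shared).

```python
from zhconv.zhconv import issimp

print(issimp('广州'))   # True  -- stops at the first simplified-only char
print(issimp('廣州'))   # False -- stops at the first traditional-only char
print(issimp('中文'))   # None  -- shared characters only
print(issimp('广廣廣', full=True))  # False -- majority vote over the whole string
```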
gumblex/zhconv
zhconv/zhconv.py
convtable2dict
def convtable2dict(convtable, locale, update=None): """ Convert a list of conversion dicts to a dict for a certain locale. >>> sorted(convtable2dict([{'zh-hk': '列斯', 'zh-hans': '利兹', 'zh': '利兹', 'zh-tw': '里茲'}, {':uni': '巨集', 'zh-cn': '宏'}], 'zh-cn').items()) [('列斯', '利兹'), ('利兹', '利兹'), ('巨集', '宏'), ('里茲', '利兹')] """ rdict = update.copy() if update else {} for r in convtable: if ':uni' in r: if locale in r: rdict[r[':uni']] = r[locale] elif locale[:-1] == 'zh-han': if locale in r: for word in r.values(): rdict[word] = r[locale] else: v = fallback(locale, r) for word in r.values(): rdict[word] = v return rdict
python
def convtable2dict(convtable, locale, update=None): """ Convert a list of conversion dicts to a dict for a certain locale. >>> sorted(convtable2dict([{'zh-hk': '列斯', 'zh-hans': '利兹', 'zh': '利兹', 'zh-tw': '里茲'}, {':uni': '巨集', 'zh-cn': '宏'}], 'zh-cn').items()) [('列斯', '利兹'), ('利兹', '利兹'), ('巨集', '宏'), ('里茲', '利兹')] """ rdict = update.copy() if update else {} for r in convtable: if ':uni' in r: if locale in r: rdict[r[':uni']] = r[locale] elif locale[:-1] == 'zh-han': if locale in r: for word in r.values(): rdict[word] = r[locale] else: v = fallback(locale, r) for word in r.values(): rdict[word] = v return rdict
[ "def", "convtable2dict", "(", "convtable", ",", "locale", ",", "update", "=", "None", ")", ":", "rdict", "=", "update", ".", "copy", "(", ")", "if", "update", "else", "{", "}", "for", "r", "in", "convtable", ":", "if", "':uni'", "in", "r", ":", "if", "locale", "in", "r", ":", "rdict", "[", "r", "[", "':uni'", "]", "]", "=", "r", "[", "locale", "]", "elif", "locale", "[", ":", "-", "1", "]", "==", "'zh-han'", ":", "if", "locale", "in", "r", ":", "for", "word", "in", "r", ".", "values", "(", ")", ":", "rdict", "[", "word", "]", "=", "r", "[", "locale", "]", "else", ":", "v", "=", "fallback", "(", "locale", ",", "r", ")", "for", "word", "in", "r", ".", "values", "(", ")", ":", "rdict", "[", "word", "]", "=", "v", "return", "rdict" ]
Convert a list of conversion dicts to a dict for a certain locale. >>> sorted(convtable2dict([{'zh-hk': '列斯', 'zh-hans': '利兹', 'zh': '利兹', 'zh-tw': '里茲'}, {':uni': '巨集', 'zh-cn': '宏'}], 'zh-cn').items()) [('列斯', '利兹'), ('利兹', '利兹'), ('巨集', '宏'), ('里茲', '利兹')]
[ "Convert", "a", "list", "of", "conversion", "dicts", "to", "a", "dict", "for", "a", "certain", "locale", "." ]
train
https://github.com/gumblex/zhconv/blob/925c0f9494f3439bc05526e7e89bb5f0ab3d185e/zhconv/zhconv.py#L176-L196
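Complementing the doctest above, a small sketch of the `update` parameter: it seeds the result through a copy, so the caller's dict is never mutated.

```python
from zhconv.zhconv import convtable2dict

seed = {'裡': '里'}
rdict = convtable2dict([{':uni': '巨集', 'zh-cn': '宏'}], 'zh-cn', update=seed)
assert rdict == {'裡': '里', '巨集': '宏'}
assert seed == {'裡': '里'}  # update.copy() protects the original
```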
gumblex/zhconv
zhconv/zhconv.py
tokenize
def tokenize(s, locale, update=None): """ Tokenize `s` according to corresponding locale dictionary. Don't use this for serious text processing. """ zhdict = getdict(locale) pfset = pfsdict[locale] if update: zhdict = zhdict.copy() zhdict.update(update) newset = set() for word in update: for ch in range(len(word)): newset.add(word[:ch+1]) pfset = pfset | newset ch = [] N = len(s) pos = 0 while pos < N: i = pos frag = s[pos] maxword = None maxpos = 0 while i < N and frag in pfset: if frag in zhdict: maxword = frag maxpos = i i += 1 frag = s[pos:i+1] if maxword is None: maxword = s[pos] pos += 1 else: pos = maxpos + 1 ch.append(maxword) return ch
python
def tokenize(s, locale, update=None): """ Tokenize `s` according to corresponding locale dictionary. Don't use this for serious text processing. """ zhdict = getdict(locale) pfset = pfsdict[locale] if update: zhdict = zhdict.copy() zhdict.update(update) newset = set() for word in update: for ch in range(len(word)): newset.add(word[:ch+1]) pfset = pfset | newset ch = [] N = len(s) pos = 0 while pos < N: i = pos frag = s[pos] maxword = None maxpos = 0 while i < N and frag in pfset: if frag in zhdict: maxword = frag maxpos = i i += 1 frag = s[pos:i+1] if maxword is None: maxword = s[pos] pos += 1 else: pos = maxpos + 1 ch.append(maxword) return ch
[ "def", "tokenize", "(", "s", ",", "locale", ",", "update", "=", "None", ")", ":", "zhdict", "=", "getdict", "(", "locale", ")", "pfset", "=", "pfsdict", "[", "locale", "]", "if", "update", ":", "zhdict", "=", "zhdict", ".", "copy", "(", ")", "zhdict", ".", "update", "(", "update", ")", "newset", "=", "set", "(", ")", "for", "word", "in", "update", ":", "for", "ch", "in", "range", "(", "len", "(", "word", ")", ")", ":", "newset", ".", "add", "(", "word", "[", ":", "ch", "+", "1", "]", ")", "pfset", "=", "pfset", "|", "newset", "ch", "=", "[", "]", "N", "=", "len", "(", "s", ")", "pos", "=", "0", "while", "pos", "<", "N", ":", "i", "=", "pos", "frag", "=", "s", "[", "pos", "]", "maxword", "=", "None", "maxpos", "=", "0", "while", "i", "<", "N", "and", "frag", "in", "pfset", ":", "if", "frag", "in", "zhdict", ":", "maxword", "=", "frag", "maxpos", "=", "i", "i", "+=", "1", "frag", "=", "s", "[", "pos", ":", "i", "+", "1", "]", "if", "maxword", "is", "None", ":", "maxword", "=", "s", "[", "pos", "]", "pos", "+=", "1", "else", ":", "pos", "=", "maxpos", "+", "1", "ch", ".", "append", "(", "maxword", ")", "return", "ch" ]
Tokenize `s` according to corresponding locale dictionary. Don't use this for serious text processing.
[ "Tokenize", "s", "according", "to", "corresponding", "locale", "dictionary", ".", "Don", "t", "use", "this", "for", "serious", "text", "processing", "." ]
train
https://github.com/gumblex/zhconv/blob/925c0f9494f3439bc05526e7e89bb5f0ab3d185e/zhconv/zhconv.py#L198-L233
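A hedged sketch of the greedy longest-prefix segmentation; note how an `update` mapping extends both the dictionary and the prefix set on the fly.

```python
from zhconv.zhconv import tokenize

print(tokenize('鼠标垫', 'zh-tw', update={'鼠标垫': '滑鼠墊'}))  # ['鼠标垫']
print(tokenize('abc', 'zh-tw'))  # ['a', 'b', 'c'] -- unknown chars pass one by one
```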
gumblex/zhconv
zhconv/zhconv.py
convert_for_mw
def convert_for_mw(s, locale, update=None): """ Recognizes MediaWiki's human conversion format. Use locale='zh' for no conversion. Reference: (all tests passed) https://zh.wikipedia.org/wiki/Help:高级字词转换语法 https://www.mediawiki.org/wiki/Writing_systems/Syntax >>> print(convert_for_mw('在现代,机械计算-{}-机的应用已经完全被电子计算-{}-机所取代', 'zh-hk')) 在現代,機械計算機的應用已經完全被電子計算機所取代 >>> print(convert_for_mw('-{zh-hant:資訊工程;zh-hans:计算机工程学;}-是电子工程的一个分支,主要研究计算机软硬件和二者间的彼此联系。', 'zh-tw')) 資訊工程是電子工程的一個分支,主要研究計算機軟硬體和二者間的彼此聯繫。 >>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-hant')) 張國榮曾在英國里茲大學學習。 >>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-sg')) 张国荣曾在英国利兹大学学习。 >>> convert_for_mw('-{zh-hant:;\\nzh-cn:}-', 'zh-tw') == '' True >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-tw')) 毫米(毫公分),符號mm,是長度單位和降雨量單位,港澳和大陸稱為毫米(台灣亦有使用,但較常使用名稱為毫公分)。 >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-cn')) 毫米(毫公分),符号mm,是长度单位和降雨量单位,台湾作公釐或公厘。 >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘', 'zh-hk')) # unbalanced test 毫米(毫公分),符號mm,是長度單位和降雨量單位,台灣作公釐或公厘 >>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-tw')) 報頭的「參攷消息」四字摘自魯迅筆跡,「攷」是「考」的異體字,讀音kǎo,與「考」字相同。 >>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-cn')) 报头的“參攷消息”四字摘自鲁迅笔迹,“參”是“参”的繁体字,读音cān,与简体的“参”字相同;“攷”是“考”的异体字,读音kǎo,与“考”字相同。 >>> print(convert_for_mw('{{Col-break}}--&gt;', 'zh-hant')) {{Col-break}}--&gt; """ ch = [] rules = [] ruledict = update.copy() if update else {} nested = 0 block = '' for frag in RE_langconv.split(s): if frag == '-{': nested += 1 block += frag elif frag == '}-': if not nested: # bogus }- ch.append(frag) continue block += frag nested -= 1 if nested: continue newrules = [] delim = RE_splitflag.split(block[2:-2].strip(' \t\n\r\f\v;')) if len(delim) == 1: flag = None mapping = RE_splitmap.split(delim[0]) else: flag = RE_splitmap.split(delim[0].strip(' \t\n\r\f\v;')) mapping = RE_splitmap.split(delim[1]) rule = {} for m in mapping: uni = RE_splituni.split(m) if len(uni) == 1: pair = RE_splitpair.split(uni[0]) else: if rule: newrules.append(rule) rule = {':uni': uni[0]} else: rule[':uni'] = uni[0] pair = RE_splitpair.split(uni[1]) if len(pair) == 1: rule['zh'] = convert_for_mw(pair[0], 'zh', ruledict) else: rule[pair[0]] = convert_for_mw(pair[1], pair[0], ruledict) newrules.append(rule) if not flag: ch.append(fallback(locale, newrules[0])) elif any(ch in flag for ch in 'ATRD-HN'): for f in flag: # A: add rule for convert code (all text convert) # H: Insert a conversion rule without output if f in ('A', 'H'): for r in newrules: if not r in rules: rules.append(r) if f == 'A': if ':uni' in r: if locale in r: ch.append(r[locale]) else: ch.append(convert(r[':uni'], locale)) else: ch.append(fallback(locale, newrules[0])) # -: remove convert elif f == '-': for r in newrules: try: rules.remove(r) except ValueError: pass # D: convert description (useless) #elif f == 'D': #ch.append('; '.join(': '.join(x) for x in newrules[0].items())) # T: title convert (useless) # R: raw content (implied above) # N: current variant name (useless) #elif f == 'N': #ch.append(locale) ruledict = convtable2dict(rules, locale, update) else: fblimit = frozenset(flag) & frozenset(Locales[locale]) limitedruledict = update.copy() if update else {} for r in rules: if ':uni' in r: if locale in r: limitedruledict[r[':uni']] = r[locale] else: v = None for l in Locales[locale]: if l in r and l in fblimit: v = r[l] break for word in r.values(): limitedruledict[word] = v if v else convert(word, locale) ch.append(convert(delim[1], locale, limitedruledict)) block = '' elif nested: block += frag else: ch.append(convert(frag, locale, ruledict)) if nested: # unbalanced ch.append(convert_for_mw(block + '}-'*nested, locale, ruledict)) return ''.join(ch)
python
def convert_for_mw(s, locale, update=None): """ Recognizes MediaWiki's human conversion format. Use locale='zh' for no conversion. Reference: (all tests passed) https://zh.wikipedia.org/wiki/Help:高级字词转换语法 https://www.mediawiki.org/wiki/Writing_systems/Syntax >>> print(convert_for_mw('在现代,机械计算-{}-机的应用已经完全被电子计算-{}-机所取代', 'zh-hk')) 在現代,機械計算機的應用已經完全被電子計算機所取代 >>> print(convert_for_mw('-{zh-hant:資訊工程;zh-hans:计算机工程学;}-是电子工程的一个分支,主要研究计算机软硬件和二者间的彼此联系。', 'zh-tw')) 資訊工程是電子工程的一個分支,主要研究計算機軟硬體和二者間的彼此聯繫。 >>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-hant')) 張國榮曾在英國里茲大學學習。 >>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-sg')) 张国荣曾在英国利兹大学学习。 >>> convert_for_mw('-{zh-hant:;\\nzh-cn:}-', 'zh-tw') == '' True >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-tw')) 毫米(毫公分),符號mm,是長度單位和降雨量單位,港澳和大陸稱為毫米(台灣亦有使用,但較常使用名稱為毫公分)。 >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-cn')) 毫米(毫公分),符号mm,是长度单位和降雨量单位,台湾作公釐或公厘。 >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘', 'zh-hk')) # unbalanced test 毫米(毫公分),符號mm,是長度單位和降雨量單位,台灣作公釐或公厘 >>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-tw')) 報頭的「參攷消息」四字摘自魯迅筆跡,「攷」是「考」的異體字,讀音kǎo,與「考」字相同。 >>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-cn')) 报头的“參攷消息”四字摘自鲁迅笔迹,“參”是“参”的繁体字,读音cān,与简体的“参”字相同;“攷”是“考”的异体字,读音kǎo,与“考”字相同。 >>> print(convert_for_mw('{{Col-break}}--&gt;', 'zh-hant')) {{Col-break}}--&gt; """ ch = [] rules = [] ruledict = update.copy() if update else {} nested = 0 block = '' for frag in RE_langconv.split(s): if frag == '-{': nested += 1 block += frag elif frag == '}-': if not nested: # bogus }- ch.append(frag) continue block += frag nested -= 1 if nested: continue newrules = [] delim = RE_splitflag.split(block[2:-2].strip(' \t\n\r\f\v;')) if len(delim) == 1: flag = None mapping = RE_splitmap.split(delim[0]) else: flag = RE_splitmap.split(delim[0].strip(' \t\n\r\f\v;')) mapping = RE_splitmap.split(delim[1]) rule = {} for m in mapping: uni = RE_splituni.split(m) if len(uni) == 1: pair = RE_splitpair.split(uni[0]) else: if rule: newrules.append(rule) rule = {':uni': uni[0]} else: rule[':uni'] = uni[0] pair = RE_splitpair.split(uni[1]) if len(pair) == 1: rule['zh'] = convert_for_mw(pair[0], 'zh', ruledict) else: rule[pair[0]] = convert_for_mw(pair[1], pair[0], ruledict) newrules.append(rule) if not flag: ch.append(fallback(locale, newrules[0])) elif any(ch in flag for ch in 'ATRD-HN'): for f in flag: # A: add rule for convert code (all text convert) # H: Insert a conversion rule without output if f in ('A', 'H'): for r in newrules: if not r in rules: rules.append(r) if f == 'A': if ':uni' in r: if locale in r: ch.append(r[locale]) else: ch.append(convert(r[':uni'], locale)) else: ch.append(fallback(locale, newrules[0])) # -: remove convert elif f == '-': for r in newrules: try: rules.remove(r) except ValueError: pass # D: convert description (useless) #elif f == 'D': #ch.append('; '.join(': '.join(x) for x in newrules[0].items())) # T: title convert (useless) # R: raw content (implied above) # N: current variant name (useless) #elif f == 'N': #ch.append(locale) ruledict = convtable2dict(rules, locale, update) else: fblimit = frozenset(flag) & frozenset(Locales[locale]) limitedruledict = update.copy() if update else {} for r in rules: if ':uni' in r: if locale in r: limitedruledict[r[':uni']] = r[locale] else: v = None for l in Locales[locale]: if l in r and l in fblimit: v = r[l] break for word in r.values(): limitedruledict[word] = v if v else convert(word, locale) ch.append(convert(delim[1], locale, limitedruledict)) block = '' elif nested: block += frag else: ch.append(convert(frag, locale, ruledict)) if nested: # unbalanced ch.append(convert_for_mw(block + '}-'*nested, locale, ruledict)) return ''.join(ch)
[ "def", "convert_for_mw", "(", "s", ",", "locale", ",", "update", "=", "None", ")", ":", "ch", "=", "[", "]", "rules", "=", "[", "]", "ruledict", "=", "update", ".", "copy", "(", ")", "if", "update", "else", "{", "}", "nested", "=", "0", "block", "=", "''", "for", "frag", "in", "RE_langconv", ".", "split", "(", "s", ")", ":", "if", "frag", "==", "'-{'", ":", "nested", "+=", "1", "block", "+=", "frag", "elif", "frag", "==", "'}-'", ":", "if", "not", "nested", ":", "# bogus }-", "ch", ".", "append", "(", "frag", ")", "continue", "block", "+=", "frag", "nested", "-=", "1", "if", "nested", ":", "continue", "newrules", "=", "[", "]", "delim", "=", "RE_splitflag", ".", "split", "(", "block", "[", "2", ":", "-", "2", "]", ".", "strip", "(", "' \\t\\n\\r\\f\\v;'", ")", ")", "if", "len", "(", "delim", ")", "==", "1", ":", "flag", "=", "None", "mapping", "=", "RE_splitmap", ".", "split", "(", "delim", "[", "0", "]", ")", "else", ":", "flag", "=", "RE_splitmap", ".", "split", "(", "delim", "[", "0", "]", ".", "strip", "(", "' \\t\\n\\r\\f\\v;'", ")", ")", "mapping", "=", "RE_splitmap", ".", "split", "(", "delim", "[", "1", "]", ")", "rule", "=", "{", "}", "for", "m", "in", "mapping", ":", "uni", "=", "RE_splituni", ".", "split", "(", "m", ")", "if", "len", "(", "uni", ")", "==", "1", ":", "pair", "=", "RE_splitpair", ".", "split", "(", "uni", "[", "0", "]", ")", "else", ":", "if", "rule", ":", "newrules", ".", "append", "(", "rule", ")", "rule", "=", "{", "':uni'", ":", "uni", "[", "0", "]", "}", "else", ":", "rule", "[", "':uni'", "]", "=", "uni", "[", "0", "]", "pair", "=", "RE_splitpair", ".", "split", "(", "uni", "[", "1", "]", ")", "if", "len", "(", "pair", ")", "==", "1", ":", "rule", "[", "'zh'", "]", "=", "convert_for_mw", "(", "pair", "[", "0", "]", ",", "'zh'", ",", "ruledict", ")", "else", ":", "rule", "[", "pair", "[", "0", "]", "]", "=", "convert_for_mw", "(", "pair", "[", "1", "]", ",", "pair", "[", "0", "]", ",", "ruledict", ")", "newrules", ".", "append", "(", "rule", ")", "if", "not", "flag", ":", "ch", ".", "append", "(", "fallback", "(", "locale", ",", "newrules", "[", "0", "]", ")", ")", "elif", "any", "(", "ch", "in", "flag", "for", "ch", "in", "'ATRD-HN'", ")", ":", "for", "f", "in", "flag", ":", "# A: add rule for convert code (all text convert)", "# H: Insert a conversion rule without output", "if", "f", "in", "(", "'A'", ",", "'H'", ")", ":", "for", "r", "in", "newrules", ":", "if", "not", "r", "in", "rules", ":", "rules", ".", "append", "(", "r", ")", "if", "f", "==", "'A'", ":", "if", "':uni'", "in", "r", ":", "if", "locale", "in", "r", ":", "ch", ".", "append", "(", "r", "[", "locale", "]", ")", "else", ":", "ch", ".", "append", "(", "convert", "(", "r", "[", "':uni'", "]", ",", "locale", ")", ")", "else", ":", "ch", ".", "append", "(", "fallback", "(", "locale", ",", "newrules", "[", "0", "]", ")", ")", "# -: remove convert", "elif", "f", "==", "'-'", ":", "for", "r", "in", "newrules", ":", "try", ":", "rules", ".", "remove", "(", "r", ")", "except", "ValueError", ":", "pass", "# D: convert description (useless)", "#elif f == 'D':", "#ch.append('; '.join(': '.join(x) for x in newrules[0].items()))", "# T: title convert (useless)", "# R: raw content (implied above)", "# N: current variant name (useless)", "#elif f == 'N':", "#ch.append(locale)", "ruledict", "=", "convtable2dict", "(", "rules", ",", "locale", ",", "update", ")", "else", ":", "fblimit", "=", "frozenset", "(", "flag", ")", "&", "frozenset", "(", "Locales", "[", "locale", "]", ")", "limitedruledict", "=", "update", ".", "copy", "(", ")", "if", "update", "else", "{", "}", "for", "r", "in", "rules", ":", "if", "':uni'", "in", "r", ":", "if", "locale", "in", "r", ":", "limitedruledict", "[", "r", "[", "':uni'", "]", "]", "=", "r", "[", "locale", "]", "else", ":", "v", "=", "None", "for", "l", "in", "Locales", "[", "locale", "]", ":", "if", "l", "in", "r", "and", "l", "in", "fblimit", ":", "v", "=", "r", "[", "l", "]", "break", "for", "word", "in", "r", ".", "values", "(", ")", ":", "limitedruledict", "[", "word", "]", "=", "v", "if", "v", "else", "convert", "(", "word", ",", "locale", ")", "ch", ".", "append", "(", "convert", "(", "delim", "[", "1", "]", ",", "locale", ",", "limitedruledict", ")", ")", "block", "=", "''", "elif", "nested", ":", "block", "+=", "frag", "else", ":", "ch", ".", "append", "(", "convert", "(", "frag", ",", "locale", ",", "ruledict", ")", ")", "if", "nested", ":", "# unbalanced", "ch", ".", "append", "(", "convert_for_mw", "(", "block", "+", "'}-'", "*", "nested", ",", "locale", ",", "ruledict", ")", ")", "return", "''", ".", "join", "(", "ch", ")" ]
Recognizes MediaWiki's human conversion format. Use locale='zh' for no conversion. Reference: (all tests passed) https://zh.wikipedia.org/wiki/Help:高级字词转换语法 https://www.mediawiki.org/wiki/Writing_systems/Syntax >>> print(convert_for_mw('在现代,机械计算-{}-机的应用已经完全被电子计算-{}-机所取代', 'zh-hk')) 在現代,機械計算機的應用已經完全被電子計算機所取代 >>> print(convert_for_mw('-{zh-hant:資訊工程;zh-hans:计算机工程学;}-是电子工程的一个分支,主要研究计算机软硬件和二者间的彼此联系。', 'zh-tw')) 資訊工程是電子工程的一個分支,主要研究計算機軟硬體和二者間的彼此聯繫。 >>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-hant')) 張國榮曾在英國里茲大學學習。 >>> print(convert_for_mw('張國榮曾在英國-{zh:利兹;zh-hans:利兹;zh-hk:列斯;zh-tw:里茲}-大学學習。', 'zh-sg')) 张国荣曾在英国利兹大学学习。 >>> convert_for_mw('-{zh-hant:;\\nzh-cn:}-', 'zh-tw') == '' True >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-tw')) 毫米(毫公分),符號mm,是長度單位和降雨量單位,港澳和大陸稱為毫米(台灣亦有使用,但較常使用名稱為毫公分)。 >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘}-;}-。', 'zh-cn')) 毫米(毫公分),符号mm,是长度单位和降雨量单位,台湾作公釐或公厘。 >>> print(convert_for_mw('毫米(毫公分),符號mm,是長度單位和降雨量單位,-{zh-hans:台湾作-{公釐}-或-{公厘}-;zh-hant:港澳和大陸稱為-{毫米}-(台灣亦有使用,但較常使用名稱為毫公分);zh-mo:台灣作-{公釐}-或-{公厘}-;zh-hk:台灣作-{公釐}-或-{公厘', 'zh-hk')) # unbalanced test 毫米(毫公分),符號mm,是長度單位和降雨量單位,台灣作公釐或公厘 >>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-tw')) 報頭的「參攷消息」四字摘自魯迅筆跡,「攷」是「考」的異體字,讀音kǎo,與「考」字相同。 >>> print(convert_for_mw('报头的“-{參攷消息}-”四字摘自鲁迅笔迹-{zh-hans:,“-{參}-”是“-{参}-”的繁体字,读音cān,与简体的“-{参}-”字相同;;zh-hant:,;}-“-{攷}-”是“考”的异体字,读音kǎo,与“考”字相同。', 'zh-cn')) 报头的“參攷消息”四字摘自鲁迅笔迹,“參”是“参”的繁体字,读音cān,与简体的“参”字相同;“攷”是“考”的异体字,读音kǎo,与“考”字相同。 >>> print(convert_for_mw('{{Col-break}}--&gt;', 'zh-hant')) {{Col-break}}--&gt;
[ "Recognizes", "MediaWiki", "s", "human", "conversion", "format", ".", "Use", "locale", "=", "zh", "for", "no", "conversion", "." ]
train
https://github.com/gumblex/zhconv/blob/925c0f9494f3439bc05526e7e89bb5f0ab3d185e/zhconv/zhconv.py#L292-L425
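Distilled from the doctests in this record, a minimal inline-rule example: a flagless `-{...}-` block picks the best variant for the requested locale via `fallback`, while plain fragments go through ordinary dictionary conversion.

```python
from zhconv.zhconv import convert_for_mw

text = '一-{zh-cn:软件;zh-tw:軟體}-二'
print(convert_for_mw(text, 'zh-tw'))  # 一軟體二
print(convert_for_mw(text, 'zh-cn'))  # 一软件二
```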
gumblex/zhconv
zhconv/zhconv.py
main
def main(): """ Simple stdin/stdout interface. """ if len(sys.argv) == 2 and sys.argv[1] in Locales: locale = sys.argv[1] convertfunc = convert elif len(sys.argv) == 3 and sys.argv[1] == '-w' and sys.argv[2] in Locales: locale = sys.argv[2] convertfunc = convert_for_mw else: thisfile = __file__ if __name__ == '__main__' else 'python -mzhconv' print("usage: %s [-w] {zh-cn|zh-tw|zh-hk|zh-sg|zh-hans|zh-hant|zh} < input > output" % thisfile) sys.exit(1) loaddict() ln = sys.stdin.readline() while ln: l = ln.rstrip('\r\n') if sys.version_info[0] < 3: l = unicode(l, 'utf-8') res = convertfunc(l, locale) if sys.version_info[0] < 3: print(res.encode('utf-8')) else: print(res) ln = sys.stdin.readline()
python
def main(): """ Simple stdin/stdout interface. """ if len(sys.argv) == 2 and sys.argv[1] in Locales: locale = sys.argv[1] convertfunc = convert elif len(sys.argv) == 3 and sys.argv[1] == '-w' and sys.argv[2] in Locales: locale = sys.argv[2] convertfunc = convert_for_mw else: thisfile = __file__ if __name__ == '__main__' else 'python -mzhconv' print("usage: %s [-w] {zh-cn|zh-tw|zh-hk|zh-sg|zh-hans|zh-hant|zh} < input > output" % thisfile) sys.exit(1) loaddict() ln = sys.stdin.readline() while ln: l = ln.rstrip('\r\n') if sys.version_info[0] < 3: l = unicode(l, 'utf-8') res = convertfunc(l, locale) if sys.version_info[0] < 3: print(res.encode('utf-8')) else: print(res) ln = sys.stdin.readline()
[ "def", "main", "(", ")", ":", "if", "len", "(", "sys", ".", "argv", ")", "==", "2", "and", "sys", ".", "argv", "[", "1", "]", "in", "Locales", ":", "locale", "=", "sys", ".", "argv", "[", "1", "]", "convertfunc", "=", "convert", "elif", "len", "(", "sys", ".", "argv", ")", "==", "3", "and", "sys", ".", "argv", "[", "1", "]", "==", "'-w'", "and", "sys", ".", "argv", "[", "2", "]", "in", "Locales", ":", "locale", "=", "sys", ".", "argv", "[", "2", "]", "convertfunc", "=", "convert_for_mw", "else", ":", "thisfile", "=", "__file__", "if", "__name__", "==", "'__main__'", "else", "'python -mzhconv'", "print", "(", "\"usage: %s [-w] {zh-cn|zh-tw|zh-hk|zh-sg|zh-hans|zh-hant|zh} < input > output\"", "%", "thisfile", ")", "sys", ".", "exit", "(", "1", ")", "loaddict", "(", ")", "ln", "=", "sys", ".", "stdin", ".", "readline", "(", ")", "while", "ln", ":", "l", "=", "ln", ".", "rstrip", "(", "'\\r\\n'", ")", "if", "sys", ".", "version_info", "[", "0", "]", "<", "3", ":", "l", "=", "unicode", "(", "l", ",", "'utf-8'", ")", "res", "=", "convertfunc", "(", "l", ",", "locale", ")", "if", "sys", ".", "version_info", "[", "0", "]", "<", "3", ":", "print", "(", "res", ".", "encode", "(", "'utf-8'", ")", ")", "else", ":", "print", "(", "res", ")", "ln", "=", "sys", ".", "stdin", ".", "readline", "(", ")" ]
Simple stdin/stdout interface.
[ "Simple", "stdin", "/", "stdout", "interface", "." ]
train
https://github.com/gumblex/zhconv/blob/925c0f9494f3439bc05526e7e89bb5f0ab3d185e/zhconv/zhconv.py#L449-L475
glasslion/django-qiniu-storage
qiniustorage/utils.py
bucket_lister
def bucket_lister(manager, bucket_name, prefix=None, marker=None, limit=None): """ A generator function for listing keys in a bucket. """ eof = False while not eof: ret, eof, info = manager.list(bucket_name, prefix=prefix, limit=limit, marker=marker) if ret is None: raise QiniuError(info) if not eof: marker = ret['marker'] for item in ret['items']: yield item
python
def bucket_lister(manager, bucket_name, prefix=None, marker=None, limit=None): """ A generator function for listing keys in a bucket. """ eof = False while not eof: ret, eof, info = manager.list(bucket_name, prefix=prefix, limit=limit, marker=marker) if ret is None: raise QiniuError(info) if not eof: marker = ret['marker'] for item in ret['items']: yield item
[ "def", "bucket_lister", "(", "manager", ",", "bucket_name", ",", "prefix", "=", "None", ",", "marker", "=", "None", ",", "limit", "=", "None", ")", ":", "eof", "=", "False", "while", "not", "eof", ":", "ret", ",", "eof", ",", "info", "=", "manager", ".", "list", "(", "bucket_name", ",", "prefix", "=", "prefix", ",", "limit", "=", "limit", ",", "marker", "=", "marker", ")", "if", "ret", "is", "None", ":", "raise", "QiniuError", "(", "info", ")", "if", "not", "eof", ":", "marker", "=", "ret", "[", "'marker'", "]", "for", "item", "in", "ret", "[", "'items'", "]", ":", "yield", "item" ]
A generator function for listing keys in a bucket.
[ "A", "generator", "function", "for", "listing", "keys", "in", "a", "bucket", "." ]
train
https://github.com/glasslion/django-qiniu-storage/blob/b046ec0b67ebcf8cd9eb09c60f7db4a7e4fab7ad/qiniustorage/utils.py#L17-L31
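A hedged wiring example against the qiniu SDK (the credentials and bucket name are placeholders); the generator keeps requesting pages until the SDK reports EOF, raising `QiniuError` if a page comes back as `None`.

```python
from qiniu import Auth, BucketManager
from qiniustorage.utils import bucket_lister

manager = BucketManager(Auth('<access_key>', '<secret_key>'))
for item in bucket_lister(manager, 'my-bucket', prefix='media/', limit=100):
    print(item['key'])  # each item is a dict from the Qiniu list API
```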
glasslion/django-qiniu-storage
qiniustorage/backends.py
get_qiniu_config
def get_qiniu_config(name, default=None): """ Get configuration variable from environment variable or django settings.py """ config = os.environ.get(name, getattr(settings, name, default)) if config is not None: if isinstance(config, six.string_types): return config.strip() else: return config else: raise ImproperlyConfigured( "Can't find config for '%s' either in environment " "variable or in settings.py" % name)
python
def get_qiniu_config(name, default=None): """ Get configuration variable from environment variable or django settings.py """ config = os.environ.get(name, getattr(settings, name, default)) if config is not None: if isinstance(config, six.string_types): return config.strip() else: return config else: raise ImproperlyConfigured( "Can't find config for '%s' either in environment " "variable or in settings.py" % name)
[ "def", "get_qiniu_config", "(", "name", ",", "default", "=", "None", ")", ":", "config", "=", "os", ".", "environ", ".", "get", "(", "name", ",", "getattr", "(", "settings", ",", "name", ",", "default", ")", ")", "if", "config", "is", "not", "None", ":", "if", "isinstance", "(", "config", ",", "six", ".", "string_types", ")", ":", "return", "config", ".", "strip", "(", ")", "else", ":", "return", "config", "else", ":", "raise", "ImproperlyConfigured", "(", "\"Can't find config for '%s' either in environment \"", "\"variable or in settings.py\"", "%", "name", ")" ]
Get configuration variable from environment variable or django settings.py
[ "Get", "configuration", "variable", "from", "environment", "variable", "or", "django", "settings", ".", "py" ]
train
https://github.com/glasslion/django-qiniu-storage/blob/b046ec0b67ebcf8cd9eb09c60f7db4a7e4fab7ad/qiniustorage/backends.py#L27-L41
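A sketch of the lookup order (environment first, then Django settings, then the default), assuming a configured Django settings module; string values are stripped on the way out.

```python
import os
from qiniustorage.backends import get_qiniu_config

os.environ['QINIU_BUCKET_NAME'] = '  my-bucket  '
assert get_qiniu_config('QINIU_BUCKET_NAME') == 'my-bucket'  # env wins, stripped
assert get_qiniu_config('QINIU_SECURE_URL', default=False) is False  # non-str default
```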
non-Jedi/gyr
gyr/api.py
MatrixASHttpAPI.register
def register(self, username=""): """Performs /register with type: m.login.application_service Args: username(str): Username to register. """ if not username: username = utils.mxid2localpart(self.identity) content = { "type": "m.login.application_service", "username": username, } return self._send("POST", "/register", content, api_path=MATRIX_V2_API_PATH)
python
def register(self, username=""): """Performs /register with type: m.login.application_service Args: username(str): Username to register. """ if not username: username = utils.mxid2localpart(self.identity) content = { "type": "m.login.application_service", "username": username, } return self._send("POST", "/register", content, api_path=MATRIX_V2_API_PATH)
[ "def", "register", "(", "self", ",", "username", "=", "\"\"", ")", ":", "if", "not", "username", ":", "username", "=", "utils", ".", "mxid2localpart", "(", "self", ".", "identity", ")", "content", "=", "{", "\"type\"", ":", "\"m.login.application_service\"", ",", "\"username\"", ":", "username", ",", "}", "return", "self", ".", "_send", "(", "\"POST\"", ",", "\"/register\"", ",", "content", ",", "api_path", "=", "MATRIX_V2_API_PATH", ")" ]
Performs /register with type: m.login.application_service Args: username(str): Username to register.
[ "Performs", "/", "register", "with", "type", ":", "m", ".", "login", ".", "application_service" ]
train
https://github.com/non-Jedi/gyr/blob/9f7bfe033b9d3bbfd3a9e8aea02e35526b53125e/gyr/api.py#L58-L71
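A heavily hedged sketch: only the `register` call comes from this record; the constructor arguments below are assumptions for illustration (gyr wraps matrix_client's HTTP API).

```python
from gyr.api import MatrixASHttpAPI

# base_url, token and identity are illustrative guesses, not documented here.
api = MatrixASHttpAPI('https://matrix.example.org', token='as_token',
                      identity='@_bridge:example.org')
api.register(username='bridgebot')  # explicit localpart
api.register()  # username defaults to mxid2localpart(self.identity)
```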
ocaballeror/LyricFetch
lyricfetch/cli.py
load_from_file
def load_from_file(filename): """ Load a list of filenames from an external text file. """ if os.path.isdir(filename): logger.error("Err: File '%s' is a directory", filename) return None if not os.path.isfile(filename): logger.error("Err: File '%s' does not exist", filename) return None try: with open(filename, 'r') as sourcefile: songs = [line.strip() for line in sourcefile] except IOError as error: logger.exception(error) return None songs = set(Song.from_filename(song) for song in songs) return songs.difference({None})
python
def load_from_file(filename): """ Load a list of filenames from an external text file. """ if os.path.isdir(filename): logger.error("Err: File '%s' is a directory", filename) return None if not os.path.isfile(filename): logger.error("Err: File '%s' does not exist", filename) return None try: with open(filename, 'r') as sourcefile: songs = [line.strip() for line in sourcefile] except IOError as error: logger.exception(error) return None songs = set(Song.from_filename(song) for song in songs) return songs.difference({None})
[ "def", "load_from_file", "(", "filename", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "filename", ")", ":", "logger", ".", "error", "(", "\"Err: File '%s' is a directory\"", ",", "filename", ")", "return", "None", "if", "not", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "logger", ".", "error", "(", "\"Err: File '%s' does not exist\"", ",", "filename", ")", "return", "None", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "sourcefile", ":", "songs", "=", "[", "line", ".", "strip", "(", ")", "for", "line", "in", "sourcefile", "]", "except", "IOError", "as", "error", ":", "logger", ".", "exception", "(", "error", ")", "return", "None", "songs", "=", "set", "(", "Song", ".", "from_filename", "(", "song", ")", "for", "song", "in", "songs", ")", "return", "songs", ".", "difference", "(", "{", "None", "}", ")" ]
Load a list of filenames from an external text file.
[ "Load", "a", "list", "of", "filenames", "from", "an", "external", "text", "file", "." ]
train
https://github.com/ocaballeror/LyricFetch/blob/86e62fb39c4c413ad7e1acf5bf0d28c9ed7c8fcb/lyricfetch/cli.py#L17-L35
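A small sketch: the function returns `None` when the input is unreadable, and otherwise a set of `Song` objects with failed parses (the `None`s) already filtered out.

```python
from lyricfetch.cli import load_from_file

with open('songs.txt', 'w') as f:  # hypothetical list file, one path per line
    f.write('music/artist - title.mp3\n')

songs = load_from_file('songs.txt')
if songs is None:
    print('songs.txt is missing or unreadable')
```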
ocaballeror/LyricFetch
lyricfetch/cli.py
parse_argv
def parse_argv(): """ Parse command line arguments. Settings will be stored in the global variables declared above. """ parser = argparse.ArgumentParser(description='Find lyrics for a set of mp3' ' files and embed them as metadata') parser.add_argument('-j', '--jobs', help='Number of parallel processes', type=int, metavar='N', default=1) parser.add_argument('-o', '--overwrite', help='Overwrite lyrics of songs' ' that already have them', action='store_true') parser.add_argument('-s', '--stats', help='Print a series of statistics at' ' the end of the execution', action='store_true') parser.add_argument('-v', '--verbose', help='Set verbosity level (pass it' ' up to three times)', action='count') parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true') group = parser.add_mutually_exclusive_group() group.add_argument('-r', '--recursive', help='Recursively search for' ' mp3 files', metavar='path', nargs='?', const='.') group.add_argument('--from-file', help='Read a list of files from a text' ' file', type=str) parser.add_argument('songs', help='The files/songs to search lyrics for', nargs='*') args = parser.parse_args() CONFIG['overwrite'] = args.overwrite CONFIG['print_stats'] = args.stats if args.verbose is None or args.verbose == 0: logger.setLevel(logging.CRITICAL) elif args.verbose == 1: logger.setLevel(logging.INFO) else: logger.setLevel(logging.DEBUG) if args.jobs <= 0: msg = 'Argument -j/--jobs should have a value greater than zero' parser.error(msg) else: CONFIG['jobcount'] = args.jobs songs = set() if args.from_file: songs = load_from_file(args.from_file) if not songs: raise ValueError('No file names found in file') elif args.recursive: mp3files = glob.iglob(args.recursive + '/**/*.mp3', recursive=True) songs = set(Song.from_filename(f) for f in mp3files) elif args.songs: if os.path.exists(args.songs[0]): parser = Song.from_filename else: parser = Song.from_string songs.update(map(parser, args.songs)) else: songs.add(get_current_song()) # Just in case some song constructors failed, remove all the Nones return songs.difference({None})
python
def parse_argv(): """ Parse command line arguments. Settings will be stored in the global variables declared above. """ parser = argparse.ArgumentParser(description='Find lyrics for a set of mp3' ' files and embed them as metadata') parser.add_argument('-j', '--jobs', help='Number of parallel processes', type=int, metavar='N', default=1) parser.add_argument('-o', '--overwrite', help='Overwrite lyrics of songs' ' that already have them', action='store_true') parser.add_argument('-s', '--stats', help='Print a series of statistics at' ' the end of the execution', action='store_true') parser.add_argument('-v', '--verbose', help='Set verbosity level (pass it' ' up to three times)', action='count') parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true') group = parser.add_mutually_exclusive_group() group.add_argument('-r', '--recursive', help='Recursively search for' ' mp3 files', metavar='path', nargs='?', const='.') group.add_argument('--from-file', help='Read a list of files from a text' ' file', type=str) parser.add_argument('songs', help='The files/songs to search lyrics for', nargs='*') args = parser.parse_args() CONFIG['overwrite'] = args.overwrite CONFIG['print_stats'] = args.stats if args.verbose is None or args.verbose == 0: logger.setLevel(logging.CRITICAL) elif args.verbose == 1: logger.setLevel(logging.INFO) else: logger.setLevel(logging.DEBUG) if args.jobs <= 0: msg = 'Argument -j/--jobs should have a value greater than zero' parser.error(msg) else: CONFIG['jobcount'] = args.jobs songs = set() if args.from_file: songs = load_from_file(args.from_file) if not songs: raise ValueError('No file names found in file') elif args.recursive: mp3files = glob.iglob(args.recursive + '/**/*.mp3', recursive=True) songs = set(Song.from_filename(f) for f in mp3files) elif args.songs: if os.path.exists(args.songs[0]): parser = Song.from_filename else: parser = Song.from_string songs.update(map(parser, args.songs)) else: songs.add(get_current_song()) # Just in case some song constructors failed, remove all the Nones return songs.difference({None})
[ "def", "parse_argv", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Find lyrics for a set of mp3'", "' files and embed them as metadata'", ")", "parser", ".", "add_argument", "(", "'-j'", ",", "'--jobs'", ",", "help", "=", "'Number of parallel processes'", ",", "type", "=", "int", ",", "metavar", "=", "'N'", ",", "default", "=", "1", ")", "parser", ".", "add_argument", "(", "'-o'", ",", "'--overwrite'", ",", "help", "=", "'Overwrite lyrics of songs'", "' that already have them'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'-s'", ",", "'--stats'", ",", "help", "=", "'Print a series of statistics at'", "' the end of the execution'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'-v'", ",", "'--verbose'", ",", "help", "=", "'Set verbosity level (pass it'", "' up to three times)'", ",", "action", "=", "'count'", ")", "parser", ".", "add_argument", "(", "'-d'", ",", "'--debug'", ",", "help", "=", "'Enable debug output'", ",", "action", "=", "'store_true'", ")", "group", "=", "parser", ".", "add_mutually_exclusive_group", "(", ")", "group", ".", "add_argument", "(", "'-r'", ",", "'--recursive'", ",", "help", "=", "'Recursively search for'", "' mp3 files'", ",", "metavar", "=", "'path'", ",", "nargs", "=", "'?'", ",", "const", "=", "'.'", ")", "group", ".", "add_argument", "(", "'--from-file'", ",", "help", "=", "'Read a list of files from a text'", "' file'", ",", "type", "=", "str", ")", "parser", ".", "add_argument", "(", "'songs'", ",", "help", "=", "'The files/songs to search lyrics for'", ",", "nargs", "=", "'*'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "CONFIG", "[", "'overwrite'", "]", "=", "args", ".", "overwrite", "CONFIG", "[", "'print_stats'", "]", "=", "args", ".", "stats", "if", "args", ".", "verbose", "is", "None", "or", "args", ".", "verbose", "==", "0", ":", "logger", ".", "setLevel", "(", "logging", ".", "CRITICAL", ")", "elif", "args", ".", "verbose", "==", "1", ":", "logger", ".", "setLevel", "(", "logging", ".", "INFO", ")", "else", ":", "logger", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "if", "args", ".", "jobs", "<=", "0", ":", "msg", "=", "'Argument -j/--jobs should have a value greater than zero'", "parser", ".", "error", "(", "msg", ")", "else", ":", "CONFIG", "[", "'jobcount'", "]", "=", "args", ".", "jobs", "songs", "=", "set", "(", ")", "if", "args", ".", "from_file", ":", "songs", "=", "load_from_file", "(", "args", ".", "from_file", ")", "if", "not", "songs", ":", "raise", "ValueError", "(", "'No file names found in file'", ")", "elif", "args", ".", "recursive", ":", "mp3files", "=", "glob", ".", "iglob", "(", "args", ".", "recursive", "+", "'/**/*.mp3'", ",", "recursive", "=", "True", ")", "songs", "=", "set", "(", "Song", ".", "from_filename", "(", "f", ")", "for", "f", "in", "mp3files", ")", "elif", "args", ".", "songs", ":", "if", "os", ".", "path", ".", "exists", "(", "args", ".", "songs", "[", "0", "]", ")", ":", "parser", "=", "Song", ".", "from_filename", "else", ":", "parser", "=", "Song", ".", "from_string", "songs", ".", "update", "(", "map", "(", "parser", ",", "args", ".", "songs", ")", ")", "else", ":", "songs", ".", "add", "(", "get_current_song", "(", ")", ")", "# Just in case some song constructors failed, remove all the Nones", "return", "songs", ".", "difference", "(", "{", "None", "}", ")" ]
Parse command line arguments. Settings will be stored in the global variables declared above.
[ "Parse", "command", "line", "arguments", ".", "Settings", "will", "be", "stored", "in", "the", "global", "variables", "declared", "above", "." ]
train
https://github.com/ocaballeror/LyricFetch/blob/86e62fb39c4c413ad7e1acf5bf0d28c9ed7c8fcb/lyricfetch/cli.py#L38-L99
ocaballeror/LyricFetch
lyricfetch/cli.py
main
def main(): """ Main function. """ msg = '' try: songs = parse_argv() if not songs: msg = 'No songs specified' except ValueError as error: msg = str(error) if msg: logger.error('%s: Error: %s', sys.argv[0], msg) return 1 logger.debug('Running with %s', songs) try: run(songs) except KeyboardInterrupt: print('Interrupted') return 1 return 0
python
def main(): """ Main function. """ msg = '' try: songs = parse_argv() if not songs: msg = 'No songs specified' except ValueError as error: msg = str(error) if msg: logger.error('%s: Error: %s', sys.argv[0], msg) return 1 logger.debug('Running with %s', songs) try: run(songs) except KeyboardInterrupt: print('Interrupted') return 1 return 0
[ "def", "main", "(", ")", ":", "msg", "=", "''", "try", ":", "songs", "=", "parse_argv", "(", ")", "if", "not", "songs", ":", "msg", "=", "'No songs specified'", "except", "ValueError", "as", "error", ":", "msg", "=", "str", "(", "error", ")", "if", "msg", ":", "logger", ".", "error", "(", "'%s: Error: %s'", ",", "sys", ".", "argv", "[", "0", "]", ",", "msg", ")", "return", "1", "logger", ".", "debug", "(", "'Running with %s'", ",", "songs", ")", "try", ":", "run", "(", "songs", ")", "except", "KeyboardInterrupt", ":", "print", "(", "'Interrupted'", ")", "return", "1", "return", "0" ]
Main function.
[ "Main", "function", "." ]
train
https://github.com/ocaballeror/LyricFetch/blob/86e62fb39c4c413ad7e1acf5bf0d28c9ed7c8fcb/lyricfetch/cli.py#L102-L124
taskcluster/slugid.py
slugid/slugid.py
decode
def decode(slug): """ Returns the uuid.UUID object represented by the given v4 or "nice" slug """ if sys.version_info.major != 2 and isinstance(slug, bytes): slug = slug.decode('ascii') slug = slug + '==' # base64 padding return uuid.UUID(bytes=base64.urlsafe_b64decode(slug))
python
def decode(slug): """ Returns the uuid.UUID object represented by the given v4 or "nice" slug """ if sys.version_info.major != 2 and isinstance(slug, bytes): slug = slug.decode('ascii') slug = slug + '==' # base64 padding return uuid.UUID(bytes=base64.urlsafe_b64decode(slug))
[ "def", "decode", "(", "slug", ")", ":", "if", "sys", ".", "version_info", ".", "major", "!=", "2", "and", "isinstance", "(", "slug", ",", "bytes", ")", ":", "slug", "=", "slug", ".", "decode", "(", "'ascii'", ")", "slug", "=", "slug", "+", "'=='", "# base64 padding", "return", "uuid", ".", "UUID", "(", "bytes", "=", "base64", ".", "urlsafe_b64decode", "(", "slug", ")", ")" ]
Returns the uuid.UUID object represented by the given v4 or "nice" slug
[ "Returns", "the", "uuid", ".", "UUID", "object", "represented", "by", "the", "given", "v4", "or", "nice", "slug" ]
train
https://github.com/taskcluster/slugid.py/blob/7c2c58e79d8684a54c578302ad60b384e52bb09b/slugid/slugid.py#L24-L31
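A round-trip sketch with the package's `encode` counterpart: slugs drop the trailing `==` padding, which `decode` restores before handing the bytes to `uuid.UUID`.

```python
import uuid
import slugid

u = uuid.uuid4()
slug = slugid.encode(u)        # 22-character URL-safe base64, no padding
assert slugid.decode(slug) == u
```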
taskcluster/slugid.py
slugid/slugid.py
nice
def nice(): """ Returns a randomly generated uuid v4 compliant slug which conforms to a set of "nice" properties, at the cost of some entropy. Currently this means one extra fixed bit (the first bit of the uuid is set to 0) which guarantees the slug will begin with [A-Za-f]. For example such slugs don't require special handling when used as command line parameters (whereas non-nice slugs may start with `-` which can confuse command line tools). Potentially other "nice" properties may be added in future to further restrict the range of potential uuids that may be generated. """ rawBytes = bytearray(uuid.uuid4().bytes) rawBytes[0] = rawBytes[0] & 0x7f # Ensure slug starts with [A-Za-f] return _convert_bytes_to_slug(rawBytes)
python
def nice(): """ Returns a randomly generated uuid v4 compliant slug which conforms to a set of "nice" properties, at the cost of some entropy. Currently this means one extra fixed bit (the first bit of the uuid is set to 0) which guarantees the slug will begin with [A-Za-f]. For example such slugs don't require special handling when used as command line parameters (whereas non-nice slugs may start with `-` which can confuse command line tools). Potentially other "nice" properties may be added in future to further restrict the range of potential uuids that may be generated. """ rawBytes = bytearray(uuid.uuid4().bytes) rawBytes[0] = rawBytes[0] & 0x7f # Ensure slug starts with [A-Za-f] return _convert_bytes_to_slug(rawBytes)
[ "def", "nice", "(", ")", ":", "rawBytes", "=", "bytearray", "(", "uuid", ".", "uuid4", "(", ")", ".", "bytes", ")", "rawBytes", "[", "0", "]", "=", "rawBytes", "[", "0", "]", "&", "0x7f", "# Ensure slug starts with [A-Za-f]", "return", "_convert_bytes_to_slug", "(", "rawBytes", ")" ]
Returns a randomly generated uuid v4 compliant slug which conforms to a set of "nice" properties, at the cost of some entropy. Currently this means one extra fixed bit (the first bit of the uuid is set to 0) which guarantees the slug will begin with [A-Za-f]. For example such slugs don't require special handling when used as command line parameters (whereas non-nice slugs may start with `-` which can confuse command line tools). Potentially other "nice" properties may be added in future to further restrict the range of potential uuids that may be generated.
[ "Returns", "a", "randomly", "generated", "uuid", "v4", "compliant", "slug", "which", "conforms", "to", "a", "set", "of", "nice", "properties", "at", "the", "cost", "of", "some", "entropy", ".", "Currently", "this", "means", "one", "extra", "fixed", "bit", "(", "the", "first", "bit", "of", "the", "uuid", "is", "set", "to", "0", ")", "which", "guarantees", "the", "slug", "will", "begin", "with", "[", "A", "-", "Za", "-", "f", "]", ".", "For", "example", "such", "slugs", "don", "t", "require", "special", "handling", "when", "used", "as", "command", "line", "parameters", "(", "whereas", "non", "-", "nice", "slugs", "may", "start", "with", "-", "which", "can", "confuse", "command", "line", "tools", ")", "." ]
train
https://github.com/taskcluster/slugid.py/blob/7c2c58e79d8684a54c578302ad60b384e52bb09b/slugid/slugid.py#L41-L55
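Masking the top bit keeps the first six base64 bits in the range 0-31, i.e. the characters A-Z and a-f, so a nice slug can never start with `-` or `_`. A quick check:

```python
import slugid

slug = slugid.nice()
first = slug[0] if isinstance(slug, str) else chr(slug[0])  # str or bytes, by version
assert ('A' <= first <= 'Z') or ('a' <= first <= 'f')
```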
inodb/sufam
sufam/mutation.py
MutationsAtSinglePosition.filter_against_normal
def filter_against_normal(self, normal_mutations, maf_min=0.2, maf_count_threshold=20, count_min=1): """Filters mutations that are in the given normal""" assert(normal_mutations.chrom == self.chrom) assert(normal_mutations.pos == self.pos) assert(normal_mutations.ref == self.ref) def passes_normal_criteria(mut): return (mut.count >= maf_count_threshold and mut.maf > maf_min) or \ (mut.count < maf_count_threshold and mut.count > count_min) nms = normal_mutations muts = MutationsAtSinglePosition(self.chrom, self.pos, self.cov, self.ref) for snv in self.snvs: if not (snv in nms.snvs and passes_normal_criteria(nms.snvs[snv])): muts.add_snv(self.snvs[snv]) for dlt in self.deletions: if not (dlt in nms.deletions and passes_normal_criteria(nms.deletions[dlt])): muts.add_deletion(self.deletions[dlt]) for ins in self.insertions: if not (ins in nms.insertions and passes_normal_criteria(nms.insertions[ins])): muts.add_insertion(self.insertions[ins]) return muts
python
def filter_against_normal(self, normal_mutations, maf_min=0.2, maf_count_threshold=20, count_min=1): """Filters mutations that are in the given normal""" assert(normal_mutations.chrom == self.chrom) assert(normal_mutations.pos == self.pos) assert(normal_mutations.ref == self.ref) def passes_normal_criteria(mut): return (mut.count >= maf_count_threshold and mut.maf > maf_min) or \ (mut.count < maf_count_threshold and mut.count > count_min) nms = normal_mutations muts = MutationsAtSinglePosition(self.chrom, self.pos, self.cov, self.ref) for snv in self.snvs: if not (snv in nms.snvs and passes_normal_criteria(nms.snvs[snv])): muts.add_snv(self.snvs[snv]) for dlt in self.deletions: if not (dlt in nms.deletions and passes_normal_criteria(nms.deletions[dlt])): muts.add_deletion(self.deletions[dlt]) for ins in self.insertions: if not (ins in nms.insertions and passes_normal_criteria(nms.insertions[ins])): muts.add_insertion(self.insertions[ins]) return muts
[ "def", "filter_against_normal", "(", "self", ",", "normal_mutations", ",", "maf_min", "=", "0.2", ",", "maf_count_threshold", "=", "20", ",", "count_min", "=", "1", ")", ":", "assert", "(", "normal_mutations", ".", "chrom", "==", "self", ".", "chrom", ")", "assert", "(", "normal_mutations", ".", "pos", "==", "self", ".", "pos", ")", "assert", "(", "normal_mutations", ".", "ref", "==", "self", ".", "ref", ")", "def", "passes_normal_criteria", "(", "mut", ")", ":", "return", "(", "mut", ".", "count", ">=", "maf_count_threshold", "and", "mut", ".", "maf", ">", "maf_min", ")", "or", "(", "mut", ".", "count", "<", "maf_count_threshold", "and", "mut", ".", "count", ">", "count_min", ")", "nms", "=", "normal_mutations", "muts", "=", "MutationsAtSinglePosition", "(", "self", ".", "chrom", ",", "self", ".", "pos", ",", "self", ".", "cov", ",", "self", ".", "ref", ")", "for", "snv", "in", "self", ".", "snvs", ":", "if", "not", "(", "snv", "in", "nms", ".", "snvs", "and", "passes_normal_criteria", "(", "nms", ".", "snvs", "[", "snv", "]", ")", ")", ":", "muts", ".", "add_snv", "(", "self", ".", "snvs", "[", "snv", "]", ")", "for", "dlt", "in", "self", ".", "deletions", ":", "if", "not", "(", "dlt", "in", "nms", ".", "deletions", "and", "passes_normal_criteria", "(", "nms", ".", "deletions", "[", "dlt", "]", ")", ")", ":", "muts", ".", "add_deletion", "(", "self", ".", "deletions", "[", "dlt", "]", ")", "for", "ins", "in", "self", ".", "insertions", ":", "if", "not", "(", "ins", "in", "nms", ".", "insertions", "and", "passes_normal_criteria", "(", "nms", ".", "insertions", "[", "ins", "]", ")", ")", ":", "muts", ".", "add_insertion", "(", "self", ".", "insertions", "[", "ins", "]", ")", "return", "muts" ]
Filters mutations that are in the given normal
[ "Filters", "mutations", "that", "are", "in", "the", "given", "normal" ]
train
https://github.com/inodb/sufam/blob/d4e41c5478ca9ba58be44d95106885c096c90a74/sufam/mutation.py#L55-L81
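A self-contained restatement of the filtering criterion used above, without sufam's Mutation classes; the parameter names and defaults mirror the method signature:

```python
# Standalone restatement of passes_normal_criteria from filter_against_normal;
# a mutation seen in the normal sample is treated as germline (and removed from
# the tumour calls) when this returns True.
def passes_normal_criteria(count, maf, maf_min=0.2, maf_count_threshold=20, count_min=1):
    return (count >= maf_count_threshold and maf > maf_min) or \
           (count < maf_count_threshold and count > count_min)

print(passes_normal_criteria(count=30, maf=0.25))  # True: well-supported in the normal
print(passes_normal_criteria(count=5, maf=0.5))    # True: low coverage but >1 read
print(passes_normal_criteria(count=1, maf=0.1))    # False: kept as a somatic candidate
```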
inodb/sufam
sufam/mutation.py
Mutation.to_oncotator
def to_oncotator(self): """Returns mutation in oncotator input format. Assumes mutations have vcf/mpileup style positions.""" if self.type == ".": ref = self.ref alt = self.change start = self.pos end = self.pos elif self.type == "-": ref = self.change alt = "-" start = self.pos + 1 end = start + len(self.change) elif self.type == "+": ref = "-" alt = self.change start = self.pos end = start + len(self.change) else: raise(Exception("Unexpected mutation type: {}".format(self.type))) return "{chrom}\t{start}\t{end}\t{ref}\t{alt}".format(chrom=self.chrom, start=start, end=end, ref=ref, alt=alt)
python
def to_oncotator(self): """Returns mutation in oncotator input format. Assumes mutations have vcf/mpileup style positions.""" if self.type == ".": ref = self.ref alt = self.change start = self.pos end = self.pos elif self.type == "-": ref = self.change alt = "-" start = self.pos + 1 end = start + len(self.change) elif self.type == "+": ref = "-" alt = self.change start = self.pos end = start + len(self.change) else: raise(Exception("Unexpected mutation type: {}".format(self.type))) return "{chrom}\t{start}\t{end}\t{ref}\t{alt}".format(chrom=self.chrom, start=start, end=end, ref=ref, alt=alt)
[ "def", "to_oncotator", "(", "self", ")", ":", "if", "self", ".", "type", "==", "\".\"", ":", "ref", "=", "self", ".", "ref", "alt", "=", "self", ".", "change", "start", "=", "self", ".", "pos", "end", "=", "self", ".", "pos", "elif", "self", ".", "type", "==", "\"-\"", ":", "ref", "=", "self", ".", "change", "alt", "=", "\"-\"", "start", "=", "self", ".", "pos", "+", "1", "end", "=", "start", "+", "len", "(", "self", ".", "change", ")", "elif", "self", ".", "type", "==", "\"+\"", ":", "ref", "=", "\"-\"", "alt", "=", "self", ".", "change", "start", "=", "self", ".", "pos", "end", "=", "start", "+", "len", "(", "self", ".", "change", ")", "else", ":", "raise", "(", "Exception", "(", "\"Unexpected mutation type: {}\"", ".", "format", "(", "self", ".", "type", ")", ")", ")", "return", "\"{chrom}\\t{start}\\t{end}\\t{ref}\\t{alt}\"", ".", "format", "(", "chrom", "=", "self", ".", "chrom", ",", "start", "=", "start", ",", "end", "=", "end", ",", "ref", "=", "ref", ",", "alt", "=", "alt", ")" ]
Returns mutation in oncotator input format. Assumes mutations have vcf/mpileup style positions.
[ "Returns", "mutation", "in", "oncotator", "input", "format", ".", "Assumes", "mutations", "have", "vcf", "/", "mpileup", "style", "positions", "." ]
train
https://github.com/inodb/sufam/blob/d4e41c5478ca9ba58be44d95106885c096c90a74/sufam/mutation.py#L116-L137
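A runnable sketch of the coordinate conversion performed by `to_oncotator`, reproduced as a free function so it can be tried without sufam installed:

```python
# Standalone restatement of the position logic for the three mutation types:
# "." = SNV, "-" = deletion (1-based start shifts by one), "+" = insertion.
def to_oncotator(chrom, pos, ref, change, mtype):
    if mtype == ".":
        return "{}\t{}\t{}\t{}\t{}".format(chrom, pos, pos, ref, change)
    if mtype == "-":
        start = pos + 1
        return "{}\t{}\t{}\t{}\t{}".format(chrom, start, start + len(change), change, "-")
    if mtype == "+":
        return "{}\t{}\t{}\t{}\t{}".format(chrom, pos, pos + len(change), "-", change)
    raise Exception("Unexpected mutation type: {}".format(mtype))

print(to_oncotator("1", 100, "A", "T", "."))   # 1  100  100  A   T
print(to_oncotator("1", 100, "A", "CG", "-"))  # 1  101  103  CG  -
```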
non-Jedi/gyr
gyr/server.py
Application.add_handlers
def add_handlers(self, room_handler=None, transaction_handler=None, user_handler=None): """Adds routes to Application that use specified handlers.""" # Add all the normal matrix API routes if room_handler: room = resources.Room(room_handler, self.Api) self.add_route("/rooms/{room_alias}", room) if transaction_handler: transaction = resources.Transaction(transaction_handler, self.Api) self.add_route("/transactions/{txn_id}", transaction) if user_handler: user = resources.User(user_handler, self.Api) self.add_route("/users/{user_id}", user)
python
def add_handlers(self, room_handler=None, transaction_handler=None, user_handler=None): """Adds routes to Application that use specified handlers.""" # Add all the normal matrix API routes if room_handler: room = resources.Room(room_handler, self.Api) self.add_route("/rooms/{room_alias}", room) if transaction_handler: transaction = resources.Transaction(transaction_handler, self.Api) self.add_route("/transactions/{txn_id}", transaction) if user_handler: user = resources.User(user_handler, self.Api) self.add_route("/users/{user_id}", user)
[ "def", "add_handlers", "(", "self", ",", "room_handler", "=", "None", ",", "transaction_handler", "=", "None", ",", "user_handler", "=", "None", ")", ":", "# Add all the normal matrix API routes", "if", "room_handler", ":", "room", "=", "resources", ".", "Room", "(", "room_handler", ",", "self", ".", "Api", ")", "self", ".", "add_route", "(", "\"/rooms/{room_alias}\"", ",", "room", ")", "if", "transaction_handler", ":", "transaction", "=", "resources", ".", "Transaction", "(", "transaction_handler", ",", "self", ".", "Api", ")", "self", ".", "add_route", "(", "\"/transactions/{txn_id}\"", ",", "transaction", ")", "if", "user_handler", ":", "user", "=", "resources", ".", "User", "(", "user_handler", ",", "self", ".", "Api", ")", "self", ".", "add_route", "(", "\"/users/{user_id}\"", ",", "user", ")" ]
Adds routes to Application that use specified handlers.
[ "Adds", "routes", "to", "Application", "that", "use", "specified", "handlers", "." ]
train
https://github.com/non-Jedi/gyr/blob/9f7bfe033b9d3bbfd3a9e8aea02e35526b53125e/gyr/server.py#L34-L49
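A self-contained sketch of the conditional route-registration pattern `add_handlers` uses (no gyr or falcon dependency; a stub `Router` stands in for the Application, and the resource wrapper classes are elided):

```python
# Only the routes whose handler is actually supplied get mounted.
class Router:
    def __init__(self):
        self.routes = {}

    def add_route(self, template, resource):
        self.routes[template] = resource

    def add_handlers(self, room_handler=None, transaction_handler=None, user_handler=None):
        if room_handler:
            self.add_route("/rooms/{room_alias}", room_handler)
        if transaction_handler:
            self.add_route("/transactions/{txn_id}", transaction_handler)
        if user_handler:
            self.add_route("/users/{user_id}", user_handler)

router = Router()
router.add_handlers(room_handler=object())
print(sorted(router.routes))   # ['/rooms/{room_alias}'] -- no txn or user routes added
```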
HolmesNL/confidence
confidence/utils.py
_merge
def _merge(left, right, path=None, conflict=_Conflict.error): """ Merges values in place from *right* into *left*. :param left: mapping to merge into :param right: mapping to merge from :param path: `list` of keys processed before (used for error reporting only, should only need to be provided by recursive calls) :param conflict: action to be taken on merge conflict, raising an error or overwriting an existing value :return: *left*, for convenience """ path = path or [] conflict = _Conflict(conflict) for key in right: if key in left: if isinstance(left[key], Mapping) and isinstance(right[key], Mapping): # recurse, merge left and right dict values, update path for current 'step' _merge(left[key], right[key], path + [key], conflict=conflict) elif left[key] != right[key]: if conflict is _Conflict.error: # not both dicts we could merge, but also not the same, this doesn't work conflict_path = '.'.join(path + [key]) raise MergeConflictError('merge conflict at {}'.format(conflict_path), key=conflict_path) else: # overwrite left value with right value left[key] = right[key] # else: left[key] is already equal to right[key], no action needed else: # key not yet in left or not considering conflicts, simple addition of right's mapping to left left[key] = right[key] return left
python
def _merge(left, right, path=None, conflict=_Conflict.error): """ Merges values in place from *right* into *left*. :param left: mapping to merge into :param right: mapping to merge from :param path: `list` of keys processed before (used for error reporting only, should only need to be provided by recursive calls) :param conflict: action to be taken on merge conflict, raising an error or overwriting an existing value :return: *left*, for convenience """ path = path or [] conflict = _Conflict(conflict) for key in right: if key in left: if isinstance(left[key], Mapping) and isinstance(right[key], Mapping): # recurse, merge left and right dict values, update path for current 'step' _merge(left[key], right[key], path + [key], conflict=conflict) elif left[key] != right[key]: if conflict is _Conflict.error: # not both dicts we could merge, but also not the same, this doesn't work conflict_path = '.'.join(path + [key]) raise MergeConflictError('merge conflict at {}'.format(conflict_path), key=conflict_path) else: # overwrite left value with right value left[key] = right[key] # else: left[key] is already equal to right[key], no action needed else: # key not yet in left or not considering conflicts, simple addition of right's mapping to left left[key] = right[key] return left
[ "def", "_merge", "(", "left", ",", "right", ",", "path", "=", "None", ",", "conflict", "=", "_Conflict", ".", "error", ")", ":", "path", "=", "path", "or", "[", "]", "conflict", "=", "_Conflict", "(", "conflict", ")", "for", "key", "in", "right", ":", "if", "key", "in", "left", ":", "if", "isinstance", "(", "left", "[", "key", "]", ",", "Mapping", ")", "and", "isinstance", "(", "right", "[", "key", "]", ",", "Mapping", ")", ":", "# recurse, merge left and right dict values, update path for current 'step'", "_merge", "(", "left", "[", "key", "]", ",", "right", "[", "key", "]", ",", "path", "+", "[", "key", "]", ",", "conflict", "=", "conflict", ")", "elif", "left", "[", "key", "]", "!=", "right", "[", "key", "]", ":", "if", "conflict", "is", "_Conflict", ".", "error", ":", "# not both dicts we could merge, but also not the same, this doesn't work", "conflict_path", "=", "'.'", ".", "join", "(", "path", "+", "[", "key", "]", ")", "raise", "MergeConflictError", "(", "'merge conflict at {}'", ".", "format", "(", "conflict_path", ")", ",", "key", "=", "conflict_path", ")", "else", ":", "# overwrite left value with right value", "left", "[", "key", "]", "=", "right", "[", "key", "]", "# else: left[key] is already equal to right[key], no action needed", "else", ":", "# key not yet in left or not considering conflicts, simple addition of right's mapping to left", "left", "[", "key", "]", "=", "right", "[", "key", "]", "return", "left" ]
Merges values in place from *right* into *left*. :param left: mapping to merge into :param right: mapping to merge from :param path: `list` of keys processed before (used for error reporting only, should only need to be provided by recursive calls) :param conflict: action to be taken on merge conflict, raising an error or overwriting an existing value :return: *left*, for convenience
[ "Merges", "values", "in", "place", "from", "*", "right", "*", "into", "*", "left", "*", "." ]
train
https://github.com/HolmesNL/confidence/blob/e14d2d8769a01fa55676716f7a2f22714c2616d3/confidence/utils.py#L13-L46
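A runnable sketch of the merge semantics: the "overwrite" strategy re-implemented standalone (the original `_merge` additionally supports raising `MergeConflictError` via its `_Conflict` enum):

```python
from collections.abc import Mapping

def merge_overwrite(left, right):
    for key, value in right.items():
        if key in left and isinstance(left[key], Mapping) and isinstance(value, Mapping):
            merge_overwrite(left[key], value)   # recurse into nested mappings
        else:
            left[key] = value                   # add, or overwrite on conflict
    return left

print(merge_overwrite({'db': {'host': 'a', 'port': 1}}, {'db': {'host': 'b'}}))
# {'db': {'host': 'b', 'port': 1}} -- nested merge, conflicting leaf overwritten
```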
HolmesNL/confidence
confidence/utils.py
_split_keys
def _split_keys(mapping, separator='.', colliding=None): """ Recursively walks *mapping* to split keys that contain the separator into nested mappings. .. note:: Keys not of type `str` are not supported and will raise errors. :param mapping: the mapping to process :param separator: the character (sequence) to use as the separator between keys :return: a mapping where keys containing *separator* are split into nested mappings """ result = {} for key, value in mapping.items(): if isinstance(value, Mapping): # recursively split key(s) in value value = _split_keys(value, separator) # reject non-str keys, avoid complicating access patterns if not isinstance(key, str): raise ValueError('non-str type keys ({0}, {0.__class__.__module__}.{0.__class__.__name__}) ' 'not supported'.format(key)) if separator in key: # update key to be the first part before the separator key, rest = key.split(separator, 1) # use rest as the new key of value, recursively split that and update value value = _split_keys({rest: value}, separator) if colliding and key in colliding: # warn about configured keys colliding with Configuration members warnings.warn('key {key} collides with a named member, use get() method to retrieve the ' 'value for {key}'.format(key=key), UserWarning) # merge the result so far with the (possibly updated / fixed / split) current key and value _merge(result, {key: value}) return result
python
def _split_keys(mapping, separator='.', colliding=None): """ Recursively walks *mapping* to split keys that contain the separator into nested mappings. .. note:: Keys not of type `str` are not supported and will raise errors. :param mapping: the mapping to process :param separator: the character (sequence) to use as the separator between keys :return: a mapping where keys containing *separator* are split into nested mappings """ result = {} for key, value in mapping.items(): if isinstance(value, Mapping): # recursively split key(s) in value value = _split_keys(value, separator) # reject non-str keys, avoid complicating access patterns if not isinstance(key, str): raise ValueError('non-str type keys ({0}, {0.__class__.__module__}.{0.__class__.__name__}) ' 'not supported'.format(key)) if separator in key: # update key to be the first part before the separator key, rest = key.split(separator, 1) # use rest as the new key of value, recursively split that and update value value = _split_keys({rest: value}, separator) if colliding and key in colliding: # warn about configured keys colliding with Configuration members warnings.warn('key {key} collides with a named member, use get() method to retrieve the ' 'value for {key}'.format(key=key), UserWarning) # merge the result so far with the (possibly updated / fixed / split) current key and value _merge(result, {key: value}) return result
[ "def", "_split_keys", "(", "mapping", ",", "separator", "=", "'.'", ",", "colliding", "=", "None", ")", ":", "result", "=", "{", "}", "for", "key", ",", "value", "in", "mapping", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "Mapping", ")", ":", "# recursively split key(s) in value", "value", "=", "_split_keys", "(", "value", ",", "separator", ")", "# reject non-str keys, avoid complicating access patterns", "if", "not", "isinstance", "(", "key", ",", "str", ")", ":", "raise", "ValueError", "(", "'non-str type keys ({0}, {0.__class__.__module__}.{0.__class__.__name__}) '", "'not supported'", ".", "format", "(", "key", ")", ")", "if", "separator", "in", "key", ":", "# update key to be the first part before the separator", "key", ",", "rest", "=", "key", ".", "split", "(", "separator", ",", "1", ")", "# use rest as the new key of value, recursively split that and update value", "value", "=", "_split_keys", "(", "{", "rest", ":", "value", "}", ",", "separator", ")", "if", "colliding", "and", "key", "in", "colliding", ":", "# warn about configured keys colliding with Configuration members", "warnings", ".", "warn", "(", "'key {key} collides with a named member, use get() method to retrieve the '", "'value for {key}'", ".", "format", "(", "key", "=", "key", ")", ",", "UserWarning", ")", "# merge the result so far with the (possibly updated / fixed / split) current key and value", "_merge", "(", "result", ",", "{", "key", ":", "value", "}", ")", "return", "result" ]
Recursively walks *mapping* to split keys that contain the separator into nested mappings. .. note:: Keys not of type `str` are not supported and will raise errors. :param mapping: the mapping to process :param separator: the character (sequence) to use as the separator between keys :return: a mapping where keys containing *separator* are split into nested mappings
[ "Recursively", "walks", "*", "mapping", "*", "to", "split", "keys", "that", "contain", "the", "separator", "into", "nested", "mappings", "." ]
train
https://github.com/HolmesNL/confidence/blob/e14d2d8769a01fa55676716f7a2f22714c2616d3/confidence/utils.py#L49-L91
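A runnable sketch of the key-splitting idea: dotted keys become nested mappings (the collision warning and non-str key check from the original are omitted, and a shallow `update` stands in for `_merge`):

```python
def split_keys(mapping, separator='.'):
    result = {}
    for key, value in mapping.items():
        if isinstance(value, dict):
            value = split_keys(value, separator)
        if separator in key:
            key, rest = key.split(separator, 1)
            value = split_keys({rest: value}, separator)
        if isinstance(result.get(key), dict) and isinstance(value, dict):
            result[key].update(value)   # shallow stand-in for the original's _merge
        else:
            result[key] = value
    return result

print(split_keys({'db.host': 'localhost', 'db.port': 5432}))
# {'db': {'host': 'localhost', 'port': 5432}}
```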
tipsi/tipsi_tools
tipsi_tools/monitoring.py
log_mon_value
def log_mon_value(name, value=1, **kwargs): """ simplest monitoring function to be aggregated with sum """ message = '{} => {}'.format(name, value) log_mon.info({'metric_name': name, 'value': value, 'message': message, **kwargs})
python
def log_mon_value(name, value=1, **kwargs): """ simplest monitoring function to be aggregated with sum """ message = '{} => {}'.format(name, value) log_mon.info({'metric_name': name, 'value': value, 'message': message, **kwargs})
[ "def", "log_mon_value", "(", "name", ",", "value", "=", "1", ",", "*", "*", "kwargs", ")", ":", "message", "=", "'{} => {}'", ".", "format", "(", "name", ",", "value", ")", "log_mon", ".", "info", "(", "{", "'metric_name'", ":", "name", ",", "'value'", ":", "value", ",", "'message'", ":", "message", ",", "*", "*", "kwargs", "}", ")" ]
simplest monitoring function to be aggregated with sum
[ "simplest", "monitoring", "function", "to", "be", "aggregated", "with", "sum" ]
train
https://github.com/tipsi/tipsi_tools/blob/1aba960c9890ceef2fb5e215b98b1646056ee58e/tipsi_tools/monitoring.py#L11-L16
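A hedged usage sketch, assuming tipsi_tools is installed and its `log_mon` logger is configured to emit structured records to an aggregator that sums the `value` field:

```python
from tipsi_tools.monitoring import log_mon_value

log_mon_value('orders.created')                  # value defaults to 1
log_mon_value('queue.depth', 42, queue='jobs')   # extra kwargs ride along in the record
```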
alfredodeza/notario
notario/store.py
create_store
def create_store(): """ A helper for setting the _proxy and slapping the store object for us. :return: A thread-local storage as a dictionary """ new_storage = _proxy('store') _state.store = type('store', (object,), {}) new_storage.store = dict() return new_storage.store
python
def create_store(): """ A helper for setting the _proxy and slapping the store object for us. :return: A thread-local storage as a dictionary """ new_storage = _proxy('store') _state.store = type('store', (object,), {}) new_storage.store = dict() return new_storage.store
[ "def", "create_store", "(", ")", ":", "new_storage", "=", "_proxy", "(", "'store'", ")", "_state", ".", "store", "=", "type", "(", "'store'", ",", "(", "object", ",", ")", ",", "{", "}", ")", "new_storage", ".", "store", "=", "dict", "(", ")", "return", "new_storage", ".", "store" ]
A helper for setting the _proxy and slapping the store object for us. :return: A thread-local storage as a dictionary
[ "A", "helper", "for", "setting", "the", "_proxy", "and", "slapping", "the", "store", "object", "for", "us", "." ]
train
https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/store.py#L25-L35
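A hedged usage sketch, assuming notario is installed; the key stored is illustrative only:

```python
from notario.store import create_store

store = create_store()          # a plain dict, scoped to the current thread
store['seen'] = ['a']
print(store)                    # {'seen': ['a']}
```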
tipsi/tipsi_tools
tipsi_tools/drf/__init__.py
use_form
def use_form(form_class, request=None, **top_kwargs): """ Validate request (query_params or request body with args from url) with serializer and pass validated data dict to the view function instead of request object. """ def validated_form(request, **kwargs): # import ipdb; ipdb.set_trace() data = request.query_params.dict() if request.method in ['GET'] else request.data if isinstance(data, QueryDict): form = form_class(data={**data.dict(), **kwargs}) elif isinstance(data, dict): form = form_class(data={**data, **kwargs}) else: form = form_class(data=data, **kwargs) form.is_valid(raise_exception=True) return form if request: kwargs = {} if request.resolver_match: kwargs = {**request.resolver_match.kwargs} if top_kwargs: kwargs = {**kwargs, **top_kwargs} return validated_form(request, **kwargs).validated_data def wrap(func): def method_wrap(view, request, *args, **kwargs): form = validated_form(request, **kwargs) if hasattr(view, 'log'): form.log = view.log return func(view, form.validated_data, *args, **kwargs) def function_wrap(request, *args, **kwargs): form = validated_form(request, **kwargs) return func(form.validated_data, *args, **kwargs) def inner(*args, **kwargs): is_method = isinstance(args[0], APIView) return (method_wrap if is_method else function_wrap)(*args, **kwargs) return inner return wrap
python
def use_form(form_class, request=None, **top_kwargs): """ Validate request (query_params or request body with args from url) with serializer and pass validated data dict to the view function instead of request object. """ def validated_form(request, **kwargs): # import ipdb; ipdb.set_trace() data = request.query_params.dict() if request.method in ['GET'] else request.data if isinstance(data, QueryDict): form = form_class(data={**data.dict(), **kwargs}) elif isinstance(data, dict): form = form_class(data={**data, **kwargs}) else: form = form_class(data=data, **kwargs) form.is_valid(raise_exception=True) return form if request: kwargs = {} if request.resolver_match: kwargs = {**request.resolver_match.kwargs} if top_kwargs: kwargs = {**kwargs, **top_kwargs} return validated_form(request, **kwargs).validated_data def wrap(func): def method_wrap(view, request, *args, **kwargs): form = validated_form(request, **kwargs) if hasattr(view, 'log'): form.log = view.log return func(view, form.validated_data, *args, **kwargs) def function_wrap(request, *args, **kwargs): form = validated_form(request, **kwargs) return func(form.validated_data, *args, **kwargs) def inner(*args, **kwargs): is_method = isinstance(args[0], APIView) return (method_wrap if is_method else function_wrap)(*args, **kwargs) return inner return wrap
[ "def", "use_form", "(", "form_class", ",", "request", "=", "None", ",", "*", "*", "top_kwargs", ")", ":", "def", "validated_form", "(", "request", ",", "*", "*", "kwargs", ")", ":", "# import ipdb; ipdb.set_trace()", "data", "=", "request", ".", "query_params", ".", "dict", "(", ")", "if", "request", ".", "method", "in", "[", "'GET'", "]", "else", "request", ".", "data", "if", "isinstance", "(", "data", ",", "QueryDict", ")", ":", "form", "=", "form_class", "(", "data", "=", "{", "*", "*", "data", ".", "dict", "(", ")", ",", "*", "*", "kwargs", "}", ")", "elif", "isinstance", "(", "data", ",", "dict", ")", ":", "form", "=", "form_class", "(", "data", "=", "{", "*", "*", "data", ",", "*", "*", "kwargs", "}", ")", "else", ":", "form", "=", "form_class", "(", "data", "=", "data", ",", "*", "*", "kwargs", ")", "form", ".", "is_valid", "(", "raise_exception", "=", "True", ")", "return", "form", "if", "request", ":", "kwargs", "=", "{", "}", "if", "request", ".", "resolver_match", ":", "kwargs", "=", "{", "*", "*", "request", ".", "resolver_match", ".", "kwargs", "}", "if", "top_kwargs", ":", "kwargs", "=", "{", "*", "*", "kwargs", ",", "*", "*", "top_kwargs", "}", "return", "validated_form", "(", "request", ",", "*", "*", "kwargs", ")", ".", "validated_data", "def", "wrap", "(", "func", ")", ":", "def", "method_wrap", "(", "view", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "form", "=", "validated_form", "(", "request", ",", "*", "*", "kwargs", ")", "if", "hasattr", "(", "view", ",", "'log'", ")", ":", "form", ".", "log", "=", "view", ".", "log", "return", "func", "(", "view", ",", "form", ".", "validated_data", ",", "*", "args", ",", "*", "*", "kwargs", ")", "def", "function_wrap", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "form", "=", "validated_form", "(", "request", ",", "*", "*", "kwargs", ")", "return", "func", "(", "form", ".", "validated_data", ",", "*", "args", ",", "*", "*", "kwargs", ")", "def", "inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "is_method", "=", "isinstance", "(", "args", "[", "0", "]", ",", "APIView", ")", "return", "(", "method_wrap", "if", "is_method", "else", "function_wrap", ")", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "inner", "return", "wrap" ]
Validate request (query_params or request body with args from url) with serializer and pass validated data dict to the view function instead of request object.
[ "Validate", "request", "(", "query_params", "or", "request", "body", "with", "args", "from", "url", ")", "with", "serializer", "and", "pass", "validated", "data", "dict", "to", "the", "view", "function", "instead", "of", "request", "object", "." ]
train
https://github.com/tipsi/tipsi_tools/blob/1aba960c9890ceef2fb5e215b98b1646056ee58e/tipsi_tools/drf/__init__.py#L5-L49
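A hedged sketch of decorating a DRF view method with `use_form`, assuming Django REST Framework and tipsi_tools are installed; `OrderForm` and `OrderView` are hypothetical names introduced for the example:

```python
from rest_framework import serializers
from rest_framework.response import Response
from rest_framework.views import APIView
from tipsi_tools.drf import use_form

class OrderForm(serializers.Serializer):
    order_id = serializers.IntegerField()

class OrderView(APIView):
    @use_form(OrderForm)               # validates query params (GET) or body
    def get(self, data, *args, **kwargs):
        # the view receives validated_data (a plain dict), not the request object
        return Response({'order_id': data['order_id']})
```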
craigahobbs/chisel
src/chisel/request.py
request
def request(request_callback=None, **kwargs): """ Chisel request decorator """ if request_callback is None: return lambda fn: request(fn, **kwargs) else: return Request(request_callback, **kwargs).decorate_module(request_callback)
python
def request(request_callback=None, **kwargs): """ Chisel request decorator """ if request_callback is None: return lambda fn: request(fn, **kwargs) else: return Request(request_callback, **kwargs).decorate_module(request_callback)
[ "def", "request", "(", "request_callback", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "request_callback", "is", "None", ":", "return", "lambda", "fn", ":", "request", "(", "fn", ",", "*", "*", "kwargs", ")", "else", ":", "return", "Request", "(", "request_callback", ",", "*", "*", "kwargs", ")", ".", "decorate_module", "(", "request_callback", ")" ]
Chisel request decorator
[ "Chisel", "request", "decorator" ]
train
https://github.com/craigahobbs/chisel/blob/d306a9eae2ff757647c6ca1c933bc944efa5c326/src/chisel/request.py#L13-L21
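A hedged sketch of the two decorator forms the dispatch above enables (chisel request callbacks follow the WSGI convention; the `name` keyword shown is an assumption about `Request`'s accepted kwargs):

```python
import chisel

@chisel.request                    # bare form: request(request_callback)
def hello(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Hello']

@chisel.request(name='hello2')     # parametrized form: request(None, **kwargs)
def hello2(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Hello again']
```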
bierschenk/ode
examples/example_2d_orbit.py
dx_orbit_sys
def dx_orbit_sys(t, X): '''X = [ m1x, m1y, m2x, m2y, m3x, m3y, m4x, m4y, m1vx, m1vy, m2vx, m2vy, m3vx, m3vy, m4vx, m4vy ] ''' (m1x, m1y, m2x, m2y, m3x, m3y, m4x, m4y, m1vx, m1vy, m2vx, m2vy, m3vx, m3vy, m4vx, m4vy) = X m_moon1 = 7.342*(10**22) # kg m_moon2 = 7.342*(10**22) # kg m_moon3 = 7.342*(10**22) # kg m_moon4 = 7.342*(10**22) # kg G = 6.67408*(10**-11) # m**3 kg**−1 s**−2 dm12 = sqrt((m1x - m2x)**2 + (m1y - m2y)**2) dm13 = sqrt((m1x - m3x)**2 + (m1y - m3y)**2) dm14 = sqrt((m1x - m4x)**2 + (m1y - m4y)**2) dm23 = sqrt((m2x - m3x)**2 + (m2y - m3y)**2) dm24 = sqrt((m2x - m4x)**2 + (m2y - m4y)**2) dm34 = sqrt((m3x - m4x)**2 + (m3y - m4y)**2) f12 = G * m_moon1 * m_moon2 / (dm12 * dm12) f13 = G * m_moon1 * m_moon3 / (dm13 * dm13) f14 = G * m_moon1 * m_moon4 / (dm14 * dm14) f23 = G * m_moon2 * m_moon3 / (dm23 * dm23) f24 = G * m_moon2 * m_moon4 / (dm24 * dm24) f34 = G * m_moon3 * m_moon4 / (dm34 * dm34) dr12 = atan2(m2y - m1y, m2x - m1x) dr13 = atan2(m3y - m1y, m3x - m1x) dr14 = atan2(m4y - m1y, m4x - m1x) dr23 = atan2(m3y - m2y, m3x - m2x) dr24 = atan2(m4y - m2y, m4x - m2x) dr34 = atan2(m4y - m3y, m4x - m3x) f1x = f12 * cos(dr12) + f13 * cos(dr13) + f14 * cos(dr14) f1y = f12 * sin(dr12) + f13 * sin(dr13) + f14 * sin(dr14) f2x = f12 * cos(dr12 + pi) + f23 * cos(dr23) + f24 * cos(dr24) f2y = f12 * sin(dr12 + pi) + f23 * sin(dr23) + f24 * sin(dr24) f3x = f13 * cos(dr13 + pi) + f23 * cos(dr23 + pi) + f34 * cos(dr34) f3y = f13 * sin(dr13 + pi) + f23 * sin(dr23 + pi) + f34 * sin(dr34) f4x = f14 * cos(dr14 + pi) + f24 * cos(dr24 + pi) + f34 * cos(dr34 + pi) f4y = f14 * sin(dr14 + pi) + f24 * sin(dr24 + pi) + f34 * sin(dr34 + pi) dX = [ m1vx, m1vy, m2vx, m2vy, m3vx, m3vy, m4vx, m4vy, f1x / m_moon1, f1y / m_moon1, f2x / m_moon2, f2y / m_moon2, f3x / m_moon3, f3y / m_moon3, f4x / m_moon4, f4y / m_moon4, ] return dX
python
def dx_orbit_sys(t, X): '''X = [ m1x, m1y, m2x, m2y, m3x, m3y, m4x, m4y, m1vx, m1vy, m2vx, m2vy, m3vx, m3vy, m4vx, m4vy ] ''' (m1x, m1y, m2x, m2y, m3x, m3y, m4x, m4y, m1vx, m1vy, m2vx, m2vy, m3vx, m3vy, m4vx, m4vy) = X m_moon1 = 7.342*(10**22) # kg m_moon2 = 7.342*(10**22) # kg m_moon3 = 7.342*(10**22) # kg m_moon4 = 7.342*(10**22) # kg G = 6.67408*(10**-11) # m**3 kg**−1 s**−2 dm12 = sqrt((m1x - m2x)**2 + (m1y - m2y)**2) dm13 = sqrt((m1x - m3x)**2 + (m1y - m3y)**2) dm14 = sqrt((m1x - m4x)**2 + (m1y - m4y)**2) dm23 = sqrt((m2x - m3x)**2 + (m2y - m3y)**2) dm24 = sqrt((m2x - m4x)**2 + (m2y - m4y)**2) dm34 = sqrt((m3x - m4x)**2 + (m3y - m4y)**2) f12 = G * m_moon1 * m_moon2 / (dm12 * dm12) f13 = G * m_moon1 * m_moon3 / (dm13 * dm13) f14 = G * m_moon1 * m_moon4 / (dm14 * dm14) f23 = G * m_moon2 * m_moon3 / (dm23 * dm23) f24 = G * m_moon2 * m_moon4 / (dm24 * dm24) f34 = G * m_moon3 * m_moon4 / (dm34 * dm34) dr12 = atan2(m2y - m1y, m2x - m1x) dr13 = atan2(m3y - m1y, m3x - m1x) dr14 = atan2(m4y - m1y, m4x - m1x) dr23 = atan2(m3y - m2y, m3x - m2x) dr24 = atan2(m4y - m2y, m4x - m2x) dr34 = atan2(m4y - m3y, m4x - m3x) f1x = f12 * cos(dr12) + f13 * cos(dr13) + f14 * cos(dr14) f1y = f12 * sin(dr12) + f13 * sin(dr13) + f14 * sin(dr14) f2x = f12 * cos(dr12 + pi) + f23 * cos(dr23) + f24 * cos(dr24) f2y = f12 * sin(dr12 + pi) + f23 * sin(dr23) + f24 * sin(dr24) f3x = f13 * cos(dr13 + pi) + f23 * cos(dr23 + pi) + f34 * cos(dr34) f3y = f13 * sin(dr13 + pi) + f23 * sin(dr23 + pi) + f34 * sin(dr34) f4x = f14 * cos(dr14 + pi) + f24 * cos(dr24 + pi) + f34 * cos(dr34 + pi) f4y = f14 * sin(dr14 + pi) + f24 * sin(dr24 + pi) + f34 * sin(dr34 + pi) dX = [ m1vx, m1vy, m2vx, m2vy, m3vx, m3vy, m4vx, m4vy, f1x / m_moon1, f1y / m_moon1, f2x / m_moon2, f2y / m_moon2, f3x / m_moon3, f3y / m_moon3, f4x / m_moon4, f4y / m_moon4, ] return dX
[ "def", "dx_orbit_sys", "(", "t", ",", "X", ")", ":", "(", "m1x", ",", "m1y", ",", "m2x", ",", "m2y", ",", "m3x", ",", "m3y", ",", "m4x", ",", "m4y", ",", "m1vx", ",", "m1vy", ",", "m2vx", ",", "m2vy", ",", "m3vx", ",", "m3vy", ",", "m4vx", ",", "m4vy", ")", "=", "X", "m_moon1", "=", "7.342", "*", "(", "10", "**", "22", ")", "# kg", "m_moon2", "=", "7.342", "*", "(", "10", "**", "22", ")", "# kg", "m_moon3", "=", "7.342", "*", "(", "10", "**", "22", ")", "# kg", "m_moon4", "=", "7.342", "*", "(", "10", "**", "22", ")", "# kg", "G", "=", "6.67408", "*", "(", "10", "**", "-", "11", ")", "# m**3 kg**−1 s**−2", "dm12", "=", "sqrt", "(", "(", "m1x", "-", "m2x", ")", "**", "2", "+", "(", "m1y", "-", "m2y", ")", "**", "2", ")", "dm13", "=", "sqrt", "(", "(", "m1x", "-", "m3x", ")", "**", "2", "+", "(", "m1y", "-", "m3y", ")", "**", "2", ")", "dm14", "=", "sqrt", "(", "(", "m1x", "-", "m4x", ")", "**", "2", "+", "(", "m1y", "-", "m4y", ")", "**", "2", ")", "dm23", "=", "sqrt", "(", "(", "m2x", "-", "m3x", ")", "**", "2", "+", "(", "m2y", "-", "m3y", ")", "**", "2", ")", "dm24", "=", "sqrt", "(", "(", "m2x", "-", "m4x", ")", "**", "2", "+", "(", "m2y", "-", "m4y", ")", "**", "2", ")", "dm34", "=", "sqrt", "(", "(", "m3x", "-", "m4x", ")", "**", "2", "+", "(", "m3y", "-", "m4y", ")", "**", "2", ")", "f12", "=", "G", "*", "m_moon1", "*", "m_moon2", "/", "(", "dm12", "*", "dm12", ")", "f13", "=", "G", "*", "m_moon1", "*", "m_moon3", "/", "(", "dm13", "*", "dm13", ")", "f14", "=", "G", "*", "m_moon1", "*", "m_moon4", "/", "(", "dm14", "*", "dm14", ")", "f23", "=", "G", "*", "m_moon2", "*", "m_moon3", "/", "(", "dm23", "*", "dm23", ")", "f24", "=", "G", "*", "m_moon2", "*", "m_moon4", "/", "(", "dm24", "*", "dm24", ")", "f34", "=", "G", "*", "m_moon3", "*", "m_moon4", "/", "(", "dm34", "*", "dm34", ")", "dr12", "=", "atan2", "(", "m2y", "-", "m1y", ",", "m2x", "-", "m1x", ")", "dr13", "=", "atan2", "(", "m3y", "-", "m1y", ",", "m3x", "-", "m1x", ")", "dr14", "=", "atan2", "(", "m4y", "-", "m1y", ",", "m4x", "-", "m1x", ")", "dr23", "=", "atan2", "(", "m3y", "-", "m2y", ",", "m3x", "-", "m2x", ")", "dr24", "=", "atan2", "(", "m4y", "-", "m2y", ",", "m4x", "-", "m2x", ")", "dr34", "=", "atan2", "(", "m4y", "-", "m3y", ",", "m4x", "-", "m3x", ")", "f1x", "=", "f12", "*", "cos", "(", "dr12", ")", "+", "f13", "*", "cos", "(", "dr13", ")", "+", "f14", "*", "cos", "(", "dr14", ")", "f1y", "=", "f12", "*", "sin", "(", "dr12", ")", "+", "f13", "*", "sin", "(", "dr13", ")", "+", "f14", "*", "sin", "(", "dr14", ")", "f2x", "=", "f12", "*", "cos", "(", "dr12", "+", "pi", ")", "+", "f23", "*", "cos", "(", "dr23", ")", "+", "f24", "*", "cos", "(", "dr24", ")", "f2y", "=", "f12", "*", "sin", "(", "dr12", "+", "pi", ")", "+", "f23", "*", "sin", "(", "dr23", ")", "+", "f24", "*", "sin", "(", "dr24", ")", "f3x", "=", "f13", "*", "cos", "(", "dr13", "+", "pi", ")", "+", "f23", "*", "cos", "(", "dr23", "+", "pi", ")", "+", "f34", "*", "cos", "(", "dr34", ")", "f3y", "=", "f13", "*", "sin", "(", "dr13", "+", "pi", ")", "+", "f23", "*", "sin", "(", "dr23", "+", "pi", ")", "+", "f34", "*", "sin", "(", "dr34", ")", "f4x", "=", "f14", "*", "cos", "(", "dr14", "+", "pi", ")", "+", "f24", "*", "cos", "(", "dr24", "+", "pi", ")", "+", "f34", "*", "cos", "(", "dr34", "+", "pi", ")", "f4y", "=", "f14", "*", "sin", "(", "dr14", "+", "pi", ")", "+", "f24", "*", "sin", "(", "dr24", "+", "pi", ")", "+", "f34", "*", "sin", "(", "dr34", "+", "pi", ")", "dX", "=", "[", "m1vx", ",", "m1vy", ",", "m2vx", ",", "m2vy", ",", "m3vx", ",", 
"m3vy", ",", "m4vx", ",", "m4vy", ",", "f1x", "/", "m_moon1", ",", "f1y", "/", "m_moon1", ",", "f2x", "/", "m_moon2", ",", "f2y", "/", "m_moon2", ",", "f3x", "/", "m_moon3", ",", "f3y", "/", "m_moon3", ",", "f4x", "/", "m_moon4", ",", "f4y", "/", "m_moon4", ",", "]", "return", "dX" ]
X = [ m1x, m1y, m2x, m2y, m3x, m3y, m4x, m4y, m1vx, m1vy, m2vx, m2vy, m3vx, m3vy, m4vx, m4vy ]
[ "X", "=", "[", "m1x", "m1y", "m2x", "m2y", "m3x", "m3y", "m4x", "m4y", "m1vx", "m1vy", "m2vx", "m2vy", "m3vx", "m3vy", "m4vx", "m4vy", "]" ]
train
https://github.com/bierschenk/ode/blob/01fb714874926f0988a4bb250d2a0c8a2429e4f0/examples/example_2d_orbit.py#L7-L76
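A hedged driver sketch: stepping the derivative function above with a plain fixed-step Euler integrator (the example script presumably uses the ode package's own integrators; the initial state below is arbitrary and illustrative, and `dx_orbit_sys` is the function from this record):

```python
from math import sqrt, atan2, cos, sin, pi   # names dx_orbit_sys relies on

def euler_step(dx, t, X, dt):
    return [x + d * dt for x, d in zip(X, dx(t, X))]

# four moons on a 4e8 m square with small tangential velocities
X = [0, 0, 4e8, 0, 4e8, 4e8, 0, 4e8,
     0, 100, -100, 0, 0, -100, 100, 0]
t, dt = 0.0, 60.0
for _ in range(10):
    X = euler_step(dx_orbit_sys, t, X, dt)
    t += dt
print(X[0], X[1])   # moon 1 position after ten simulated minutes
```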
Parsely/redis-fluster
fluster/penalty_box.py
PenaltyBox.add
def add(self, client): """Add a client to the penalty box.""" if client.pool_id in self._client_ids: log.info("%r is already in the penalty box. Ignoring.", client) return release = time.time() + self._min_wait heapq.heappush(self._clients, (release, (client, self._min_wait))) self._client_ids.add(client.pool_id)
python
def add(self, client): """Add a client to the penalty box.""" if client.pool_id in self._client_ids: log.info("%r is already in the penalty box. Ignoring.", client) return release = time.time() + self._min_wait heapq.heappush(self._clients, (release, (client, self._min_wait))) self._client_ids.add(client.pool_id)
[ "def", "add", "(", "self", ",", "client", ")", ":", "if", "client", ".", "pool_id", "in", "self", ".", "_client_ids", ":", "log", ".", "info", "(", "\"%r is already in the penalty box. Ignoring.\"", ",", "client", ")", "return", "release", "=", "time", ".", "time", "(", ")", "+", "self", ".", "_min_wait", "heapq", ".", "heappush", "(", "self", ".", "_clients", ",", "(", "release", ",", "(", "client", ",", "self", ".", "_min_wait", ")", ")", ")", "self", ".", "_client_ids", ".", "add", "(", "client", ".", "pool_id", ")" ]
Add a client to the penalty box.
[ "Add", "a", "client", "to", "the", "penalty", "box", "." ]
train
https://github.com/Parsely/redis-fluster/blob/9fb3ccdc3e0b24906520cac1e933a775e8dfbd99/fluster/penalty_box.py#L21-L28
Parsely/redis-fluster
fluster/penalty_box.py
PenaltyBox.get
def get(self): """Get any clients ready to be used. :returns: Iterable of redis clients """ now = time.time() while self._clients and self._clients[0][0] < now: _, (client, last_wait) = heapq.heappop(self._clients) connect_start = time.time() try: client.echo("test") # reconnected if this succeeds. self._client_ids.remove(client.pool_id) yield client except (ConnectionError, TimeoutError): timer = time.time() - connect_start wait = min(int(last_wait * self._multiplier), self._max_wait) heapq.heappush(self._clients, (time.time() + wait, (client, wait))) log.info( "%r is still down after a %s second attempt to connect. Retrying in %ss.", client, timer, wait, )
python
def get(self): """Get any clients ready to be used. :returns: Iterable of redis clients """ now = time.time() while self._clients and self._clients[0][0] < now: _, (client, last_wait) = heapq.heappop(self._clients) connect_start = time.time() try: client.echo("test") # reconnected if this succeeds. self._client_ids.remove(client.pool_id) yield client except (ConnectionError, TimeoutError): timer = time.time() - connect_start wait = min(int(last_wait * self._multiplier), self._max_wait) heapq.heappush(self._clients, (time.time() + wait, (client, wait))) log.info( "%r is still down after a %s second attempt to connect. Retrying in %ss.", client, timer, wait, )
[ "def", "get", "(", "self", ")", ":", "now", "=", "time", ".", "time", "(", ")", "while", "self", ".", "_clients", "and", "self", ".", "_clients", "[", "0", "]", "[", "0", "]", "<", "now", ":", "_", ",", "(", "client", ",", "last_wait", ")", "=", "heapq", ".", "heappop", "(", "self", ".", "_clients", ")", "connect_start", "=", "time", ".", "time", "(", ")", "try", ":", "client", ".", "echo", "(", "\"test\"", ")", "# reconnected if this succeeds.", "self", ".", "_client_ids", ".", "remove", "(", "client", ".", "pool_id", ")", "yield", "client", "except", "(", "ConnectionError", ",", "TimeoutError", ")", ":", "timer", "=", "time", ".", "time", "(", ")", "-", "connect_start", "wait", "=", "min", "(", "int", "(", "last_wait", "*", "self", ".", "_multiplier", ")", ",", "self", ".", "_max_wait", ")", "heapq", ".", "heappush", "(", "self", ".", "_clients", ",", "(", "time", ".", "time", "(", ")", "+", "wait", ",", "(", "client", ",", "wait", ")", ")", ")", "log", ".", "info", "(", "\"%r is still down after a %s second attempt to connect. Retrying in %ss.\"", ",", "client", ",", "timer", ",", "wait", ",", ")" ]
Get any clients ready to be used. :returns: Iterable of redis clients
[ "Get", "any", "clients", "ready", "to", "be", "used", "." ]
train
https://github.com/Parsely/redis-fluster/blob/9fb3ccdc3e0b24906520cac1e933a775e8dfbd99/fluster/penalty_box.py#L30-L52
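A hedged lifecycle sketch covering both `add()` and `get()` above (the constructor arguments shown are assumptions mirroring the `_min_wait`/`_max_wait`/`_multiplier` attributes the methods use; `client` stands in for a fluster redis client exposing `pool_id` and `echo()`):

```python
box = PenaltyBox(min_wait=10, max_wait=300, multiplier=1.5)  # ctor args assumed

# On a ConnectionError while using a client:
#   active.remove(client)
#   box.add(client)   # queued with exponential backoff between reconnect probes

def reclaim(active):
    # polled periodically; yields only clients whose echo() ping now succeeds
    for healed in box.get():
        active.append(healed)
```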
alfredodeza/notario
notario/validators/types.py
string
def string(_object): """ Validates a given input is of type string. Example usage:: data = {'a' : 21} schema = (string, 21) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, basestring), "not of type string") return _validator(value) return decorated ensure(isinstance(_object, basestring), "not of type string")
python
def string(_object): """ Validates a given input is of type string. Example usage:: data = {'a' : 21} schema = (string, 21) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, basestring), "not of type string") return _validator(value) return decorated ensure(isinstance(_object, basestring), "not of type string")
[ "def", "string", "(", "_object", ")", ":", "if", "is_callable", "(", "_object", ")", ":", "_validator", "=", "_object", "@", "wraps", "(", "_validator", ")", "def", "decorated", "(", "value", ")", ":", "ensure", "(", "isinstance", "(", "value", ",", "basestring", ")", ",", "\"not of type string\"", ")", "return", "_validator", "(", "value", ")", "return", "decorated", "ensure", "(", "isinstance", "(", "_object", ",", "basestring", ")", ",", "\"not of type string\"", ")" ]
Validates a given input is of type string. Example usage:: data = {'a' : 21} schema = (string, 21) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function.
[ "Validates", "a", "given", "input", "is", "of", "type", "string", "." ]
train
https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L10-L34
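A hedged sketch of the two usage modes shared by this validator family (the `boolean`, `dictionary`, `array`, and `integer` validators below follow the same function-or-decorator pattern), assuming notario is installed:

```python
from notario import validate
from notario.validators import types

validate({'a': 'hello'}, ('a', types.string))   # plain validator: passes

@types.string                                   # decorator: type-check first,
def no_spaces(value):                           # then run the custom validator
    assert ' ' not in value, 'contains spaces'

validate({'a': 'hello'}, ('a', no_spaces))
```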
alfredodeza/notario
notario/validators/types.py
boolean
def boolean(_object): """ Validates a given input is of type boolean. Example usage:: data = {'a' : True} schema = ('a', boolean) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, bool), "not of type boolean") return _validator(value) return decorated ensure(isinstance(_object, bool), "not of type boolean")
python
def boolean(_object): """ Validates a given input is of type boolean. Example usage:: data = {'a' : True} schema = ('a', boolean) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, bool), "not of type boolean") return _validator(value) return decorated ensure(isinstance(_object, bool), "not of type boolean")
[ "def", "boolean", "(", "_object", ")", ":", "if", "is_callable", "(", "_object", ")", ":", "_validator", "=", "_object", "@", "wraps", "(", "_validator", ")", "def", "decorated", "(", "value", ")", ":", "ensure", "(", "isinstance", "(", "value", ",", "bool", ")", ",", "\"not of type boolean\"", ")", "return", "_validator", "(", "value", ")", "return", "decorated", "ensure", "(", "isinstance", "(", "_object", ",", "bool", ")", ",", "\"not of type boolean\"", ")" ]
Validates a given input is of type boolean. Example usage:: data = {'a' : True} schema = ('a', boolean) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function.
[ "Validates", "a", "given", "input", "is", "of", "type", "boolean", "." ]
train
https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L37-L62
alfredodeza/notario
notario/validators/types.py
dictionary
def dictionary(_object, *args): """ Validates a given input is of type dictionary. Example usage:: data = {'a' : {'b': 1}} schema = ('a', dictionary) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ error_msg = 'not of type dictionary' if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, dict), error_msg) return _validator(value) return decorated try: ensure(isinstance(_object, dict), error_msg) except AssertionError: if args: msg = 'did not pass validation against callable: dictionary' raise Invalid('', msg=msg, reason=error_msg, *args) raise
python
def dictionary(_object, *args): """ Validates a given input is of type dictionary. Example usage:: data = {'a' : {'b': 1}} schema = ('a', dictionary) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ error_msg = 'not of type dictionary' if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, dict), error_msg) return _validator(value) return decorated try: ensure(isinstance(_object, dict), error_msg) except AssertionError: if args: msg = 'did not pass validation against callable: dictionary' raise Invalid('', msg=msg, reason=error_msg, *args) raise
[ "def", "dictionary", "(", "_object", ",", "*", "args", ")", ":", "error_msg", "=", "'not of type dictionary'", "if", "is_callable", "(", "_object", ")", ":", "_validator", "=", "_object", "@", "wraps", "(", "_validator", ")", "def", "decorated", "(", "value", ")", ":", "ensure", "(", "isinstance", "(", "value", ",", "dict", ")", ",", "error_msg", ")", "return", "_validator", "(", "value", ")", "return", "decorated", "try", ":", "ensure", "(", "isinstance", "(", "_object", ",", "dict", ")", ",", "error_msg", ")", "except", "AssertionError", ":", "if", "args", ":", "msg", "=", "'did not pass validation against callable: dictionary'", "raise", "Invalid", "(", "''", ",", "msg", "=", "msg", ",", "reason", "=", "error_msg", ",", "*", "args", ")", "raise" ]
Validates a given input is of type dictionary. Example usage:: data = {'a' : {'b': 1}} schema = ('a', dictionary) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function.
[ "Validates", "a", "given", "input", "is", "of", "type", "dictionary", "." ]
train
https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L66-L98
alfredodeza/notario
notario/validators/types.py
array
def array(_object): """ Validates a given input is of type list. Example usage:: data = {'a' : [1,2]} schema = ('a', array) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, list), "not of type array") return _validator(value) return decorated ensure(isinstance(_object, list), "not of type array")
python
def array(_object): """ Validates a given input is of type list. Example usage:: data = {'a' : [1,2]} schema = ('a', array) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, list), "not of type array") return _validator(value) return decorated ensure(isinstance(_object, list), "not of type array")
[ "def", "array", "(", "_object", ")", ":", "if", "is_callable", "(", "_object", ")", ":", "_validator", "=", "_object", "@", "wraps", "(", "_validator", ")", "def", "decorated", "(", "value", ")", ":", "ensure", "(", "isinstance", "(", "value", ",", "list", ")", ",", "\"not of type array\"", ")", "return", "_validator", "(", "value", ")", "return", "decorated", "ensure", "(", "isinstance", "(", "_object", ",", "list", ")", ",", "\"not of type array\"", ")" ]
Validates a given input is of type list. Example usage:: data = {'a' : [1,2]} schema = ('a', array) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function.
[ "Validates", "a", "given", "input", "is", "of", "type", "list", "." ]
train
https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L101-L126
alfredodeza/notario
notario/validators/types.py
integer
def integer(_object): """ Validates a given input is of type int. Example usage:: data = {'a' : 21} schema = ('a', integer) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, int), "not of type int") return _validator(value) return decorated ensure(isinstance(_object, int), "not of type int")
python
def integer(_object): """ Validates a given input is of type int. Example usage:: data = {'a' : 21} schema = ('a', integer) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function. """ if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, int), "not of type int") return _validator(value) return decorated ensure(isinstance(_object, int), "not of type int")
[ "def", "integer", "(", "_object", ")", ":", "if", "is_callable", "(", "_object", ")", ":", "_validator", "=", "_object", "@", "wraps", "(", "_validator", ")", "def", "decorated", "(", "value", ")", ":", "ensure", "(", "isinstance", "(", "value", ",", "int", ")", ",", "\"not of type int\"", ")", "return", "_validator", "(", "value", ")", "return", "decorated", "ensure", "(", "isinstance", "(", "_object", ",", "int", ")", ",", "\"not of type int\"", ")" ]
Validates a given input is of type int. Example usage:: data = {'a' : 21} schema = ('a', integer) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating behavior will be triggered, otherwise it will act as a normal function.
[ "Validates", "a", "given", "input", "is", "of", "type", "int", "." ]
train
https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L129-L153
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.constant
def constant(cls, value: Value, dtype: tf.DType = tf.float32) -> 'TensorFluent': '''Returns a constant `value` TensorFluent with given `dtype`. Args: value: The constant value. dtype: The output's data type. Returns: A constant TensorFluent. ''' t = tf.constant(value, dtype=dtype) scope = [] # type: List batch = False return TensorFluent(t, scope, batch=batch)
python
def constant(cls, value: Value, dtype: tf.DType = tf.float32) -> 'TensorFluent': '''Returns a constant `value` TensorFluent with given `dtype`. Args: value: The constant value. dtype: The output's data type. Returns: A constant TensorFluent. ''' t = tf.constant(value, dtype=dtype) scope = [] # type: List batch = False return TensorFluent(t, scope, batch=batch)
[ "def", "constant", "(", "cls", ",", "value", ":", "Value", ",", "dtype", ":", "tf", ".", "DType", "=", "tf", ".", "float32", ")", "->", "'TensorFluent'", ":", "t", "=", "tf", ".", "constant", "(", "value", ",", "dtype", "=", "dtype", ")", "scope", "=", "[", "]", "# type: List", "batch", "=", "False", "return", "TensorFluent", "(", "t", ",", "scope", ",", "batch", "=", "batch", ")" ]
Returns a constant `value` TensorFluent with given `dtype`. Args: value: The constant value. dtype: The output's data type. Returns: A constant TensorFluent.
[ "Returns", "a", "constant", "value", "TensorFluent", "with", "given", "dtype", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L67-L82
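A hedged usage sketch (rddl2tf targets the TF 1.x `tf.distributions` API used above; the attribute access below follows the class's own conventions, e.g. the `scope.as_list()` call seen in the sampling ops):

```python
import tensorflow as tf
from rddl2tf.fluent import TensorFluent

two = TensorFluent.constant(2.0)        # scalar fluent, default dtype float32
print(two.batch, two.scope.as_list())   # False [] -- no batch dim, empty scope
```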
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.Bernoulli
def Bernoulli(cls, mean: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Bernoulli sampling op with given mean parameter. Args: mean: The mean parameter of the Bernoulli distribution. batch_size: The size of the batch (optional). Returns: The Bernoulli distribution and a TensorFluent sample drawn from the distribution. ''' probs = mean.tensor dist = tf.distributions.Bernoulli(probs=probs, dtype=tf.bool) batch = mean.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = mean.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
python
def Bernoulli(cls, mean: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Bernoulli sampling op with given mean parameter. Args: mean: The mean parameter of the Bernoulli distribution. batch_size: The size of the batch (optional). Returns: The Bernoulli distribution and a TensorFluent sample drawn from the distribution. ''' probs = mean.tensor dist = tf.distributions.Bernoulli(probs=probs, dtype=tf.bool) batch = mean.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = mean.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
[ "def", "Bernoulli", "(", "cls", ",", "mean", ":", "'TensorFluent'", ",", "batch_size", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "Tuple", "[", "Distribution", ",", "'TensorFluent'", "]", ":", "probs", "=", "mean", ".", "tensor", "dist", "=", "tf", ".", "distributions", ".", "Bernoulli", "(", "probs", "=", "probs", ",", "dtype", "=", "tf", ".", "bool", ")", "batch", "=", "mean", ".", "batch", "if", "not", "batch", "and", "batch_size", "is", "not", "None", ":", "t", "=", "dist", ".", "sample", "(", "batch_size", ")", "batch", "=", "True", "else", ":", "t", "=", "dist", ".", "sample", "(", ")", "scope", "=", "mean", ".", "scope", ".", "as_list", "(", ")", "return", "(", "dist", ",", "TensorFluent", "(", "t", ",", "scope", ",", "batch", "=", "batch", ")", ")" ]
Returns a TensorFluent for the Bernoulli sampling op with given mean parameter. Args: mean: The mean parameter of the Bernoulli distribution. batch_size: The size of the batch (optional). Returns: The Bernoulli distribution and a TensorFluent sample drawn from the distribution.
[ "Returns", "a", "TensorFluent", "for", "the", "Bernoulli", "sampling", "op", "with", "given", "mean", "parameter", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L85-L106
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.Uniform
def Uniform(cls, low: 'TensorFluent', high: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Uniform sampling op with given low and high parameters. Args: low: The low parameter of the Uniform distribution. high: The high parameter of the Uniform distribution. batch_size: The size of the batch (optional). Returns: The Uniform distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope. ''' if low.scope != high.scope: raise ValueError('Uniform distribution: parameters must have same scope!') dist = tf.distributions.Uniform(low.tensor, high.tensor) batch = low.batch or high.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = low.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
python
def Uniform(cls, low: 'TensorFluent', high: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Uniform sampling op with given low and high parameters. Args: low: The low parameter of the Uniform distribution. high: The high parameter of the Uniform distribution. batch_size: The size of the batch (optional). Returns: The Uniform distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope. ''' if low.scope != high.scope: raise ValueError('Uniform distribution: parameters must have same scope!') dist = tf.distributions.Uniform(low.tensor, high.tensor) batch = low.batch or high.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = low.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
[ "def", "Uniform", "(", "cls", ",", "low", ":", "'TensorFluent'", ",", "high", ":", "'TensorFluent'", ",", "batch_size", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "Tuple", "[", "Distribution", ",", "'TensorFluent'", "]", ":", "if", "low", ".", "scope", "!=", "high", ".", "scope", ":", "raise", "ValueError", "(", "'Uniform distribution: parameters must have same scope!'", ")", "dist", "=", "tf", ".", "distributions", ".", "Uniform", "(", "low", ".", "tensor", ",", "high", ".", "tensor", ")", "batch", "=", "low", ".", "batch", "or", "high", ".", "batch", "if", "not", "batch", "and", "batch_size", "is", "not", "None", ":", "t", "=", "dist", ".", "sample", "(", "batch_size", ")", "batch", "=", "True", "else", ":", "t", "=", "dist", ".", "sample", "(", ")", "scope", "=", "low", ".", "scope", ".", "as_list", "(", ")", "return", "(", "dist", ",", "TensorFluent", "(", "t", ",", "scope", ",", "batch", "=", "batch", ")", ")" ]
Returns a TensorFluent for the Uniform sampling op with given low and high parameters. Args: low: The low parameter of the Uniform distribution. high: The high parameter of the Uniform distribution. batch_size: The size of the batch (optional). Returns: The Uniform distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope.
[ "Returns", "a", "TensorFluent", "for", "the", "Uniform", "sampling", "op", "with", "given", "low", "and", "high", "parameters", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L109-L135
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.Normal
def Normal(cls, mean: 'TensorFluent', variance: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Normal sampling op with given mean and variance. Args: mean: The mean parameter of the Normal distribution. variance: The variance parameter of the Normal distribution. batch_size: The size of the batch (optional). Returns: The Normal distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope. ''' if mean.scope != variance.scope: raise ValueError('Normal distribution: parameters must have same scope!') loc = mean.tensor scale = tf.sqrt(variance.tensor) dist = tf.distributions.Normal(loc, scale) batch = mean.batch or variance.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = mean.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
python
def Normal(cls, mean: 'TensorFluent', variance: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Normal sampling op with given mean and variance. Args: mean: The mean parameter of the Normal distribution. variance: The variance parameter of the Normal distribution. batch_size: The size of the batch (optional). Returns: The Normal distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope. ''' if mean.scope != variance.scope: raise ValueError('Normal distribution: parameters must have same scope!') loc = mean.tensor scale = tf.sqrt(variance.tensor) dist = tf.distributions.Normal(loc, scale) batch = mean.batch or variance.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = mean.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
[ "def", "Normal", "(", "cls", ",", "mean", ":", "'TensorFluent'", ",", "variance", ":", "'TensorFluent'", ",", "batch_size", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "Tuple", "[", "Distribution", ",", "'TensorFluent'", "]", ":", "if", "mean", ".", "scope", "!=", "variance", ".", "scope", ":", "raise", "ValueError", "(", "'Normal distribution: parameters must have same scope!'", ")", "loc", "=", "mean", ".", "tensor", "scale", "=", "tf", ".", "sqrt", "(", "variance", ".", "tensor", ")", "dist", "=", "tf", ".", "distributions", ".", "Normal", "(", "loc", ",", "scale", ")", "batch", "=", "mean", ".", "batch", "or", "variance", ".", "batch", "if", "not", "batch", "and", "batch_size", "is", "not", "None", ":", "t", "=", "dist", ".", "sample", "(", "batch_size", ")", "batch", "=", "True", "else", ":", "t", "=", "dist", ".", "sample", "(", ")", "scope", "=", "mean", ".", "scope", ".", "as_list", "(", ")", "return", "(", "dist", ",", "TensorFluent", "(", "t", ",", "scope", ",", "batch", "=", "batch", ")", ")" ]
Returns a TensorFluent for the Normal sampling op with given mean and variance. Args: mean: The mean parameter of the Normal distribution. variance: The variance parameter of the Normal distribution. batch_size: The size of the batch (optional). Returns: The Normal distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope.
[ "Returns", "a", "TensorFluent", "for", "the", "Normal", "sampling", "op", "with", "given", "mean", "and", "variance", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L138-L166
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.Gamma
def Gamma(cls, shape: 'TensorFluent', scale: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Gamma sampling op with given shape and scale parameters. Args: shape: The shape parameter of the Gamma distribution. scale: The scale parameter of the Gamma distribution. batch_size: The size of the batch (optional). Returns: The Gamma distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope. ''' if shape.scope != scale.scope: raise ValueError('Gamma distribution: parameters must have same scope!') concentration = shape.tensor rate = 1 / scale.tensor dist = tf.distributions.Gamma(concentration, rate) batch = shape.batch or scale.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = shape.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
python
def Gamma(cls, shape: 'TensorFluent', scale: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Gamma sampling op with given shape and scale parameters. Args: shape: The shape parameter of the Gamma distribution. scale: The scale parameter of the Gamma distribution. batch_size: The size of the batch (optional). Returns: The Gamma distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope. ''' if shape.scope != scale.scope: raise ValueError('Gamma distribution: parameters must have same scope!') concentration = shape.tensor rate = 1 / scale.tensor dist = tf.distributions.Gamma(concentration, rate) batch = shape.batch or scale.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = shape.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
[ "def", "Gamma", "(", "cls", ",", "shape", ":", "'TensorFluent'", ",", "scale", ":", "'TensorFluent'", ",", "batch_size", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "Tuple", "[", "Distribution", ",", "'TensorFluent'", "]", ":", "if", "shape", ".", "scope", "!=", "scale", ".", "scope", ":", "raise", "ValueError", "(", "'Gamma distribution: parameters must have same scope!'", ")", "concentration", "=", "shape", ".", "tensor", "rate", "=", "1", "/", "scale", ".", "tensor", "dist", "=", "tf", ".", "distributions", ".", "Gamma", "(", "concentration", ",", "rate", ")", "batch", "=", "shape", ".", "batch", "or", "scale", ".", "batch", "if", "not", "batch", "and", "batch_size", "is", "not", "None", ":", "t", "=", "dist", ".", "sample", "(", "batch_size", ")", "batch", "=", "True", "else", ":", "t", "=", "dist", ".", "sample", "(", ")", "scope", "=", "shape", ".", "scope", ".", "as_list", "(", ")", "return", "(", "dist", ",", "TensorFluent", "(", "t", ",", "scope", ",", "batch", "=", "batch", ")", ")" ]
Returns a TensorFluent for the Gamma sampling op with given shape and scale parameters. Args: shape: The shape parameter of the Gamma distribution. scale: The scale parameter of the Gamma distribution. batch_size: The size of the batch (optional). Returns: The Gamma distribution and a TensorFluent sample drawn from the distribution. Raises: ValueError: If parameters do not have the same scope.
[ "Returns", "a", "TensorFluent", "for", "the", "Gamma", "sampling", "op", "with", "given", "shape", "and", "scale", "parameters", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L200-L229
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.Exponential
def Exponential(cls, mean: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Exponential sampling op with given mean parameter. Args: mean: The mean parameter of the Exponential distribution. batch_size: The size of the batch (optional). Returns: The Exponential distribution and a TensorFluent sample drawn from the distribution. ''' rate = 1 / mean.tensor dist = tf.distributions.Exponential(rate) batch = mean.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = mean.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
python
def Exponential(cls, mean: 'TensorFluent', batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']: '''Returns a TensorFluent for the Exponential sampling op with given mean parameter. Args: mean: The mean parameter of the Exponential distribution. batch_size: The size of the batch (optional). Returns: The Exponential distribution and a TensorFluent sample drawn from the distribution. ''' rate = 1 / mean.tensor dist = tf.distributions.Exponential(rate) batch = mean.batch if not batch and batch_size is not None: t = dist.sample(batch_size) batch = True else: t = dist.sample() scope = mean.scope.as_list() return (dist, TensorFluent(t, scope, batch=batch))
[ "def", "Exponential", "(", "cls", ",", "mean", ":", "'TensorFluent'", ",", "batch_size", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "Tuple", "[", "Distribution", ",", "'TensorFluent'", "]", ":", "rate", "=", "1", "/", "mean", ".", "tensor", "dist", "=", "tf", ".", "distributions", ".", "Exponential", "(", "rate", ")", "batch", "=", "mean", ".", "batch", "if", "not", "batch", "and", "batch_size", "is", "not", "None", ":", "t", "=", "dist", ".", "sample", "(", "batch_size", ")", "batch", "=", "True", "else", ":", "t", "=", "dist", ".", "sample", "(", ")", "scope", "=", "mean", ".", "scope", ".", "as_list", "(", ")", "return", "(", "dist", ",", "TensorFluent", "(", "t", ",", "scope", ",", "batch", "=", "batch", ")", ")" ]
Returns a TensorFluent for the Exponential sampling op with given mean parameter. Args: mean: The mean parameter of the Exponential distribution. batch_size: The size of the batch (optional). Returns: The Exponential distribution and a TensorFluent sample drawn from the distribution.
[ "Returns", "a", "TensorFluent", "for", "the", "Exponential", "sampling", "op", "with", "given", "mean", "parameter", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L232-L253
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.stop_gradient
def stop_gradient(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a copy of the input fluent with stop_gradient at tensor level. Args: x: The input fluent. Returns: A TensorFluent that stops backpropagation of gradient computations. ''' scope = x.scope.as_list() batch = x.batch return TensorFluent(tf.stop_gradient(x.tensor), scope, batch)
python
def stop_gradient(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a copy of the input fluent with stop_gradient at tensor level. Args: x: The input fluent. Returns: A TensorFluent that stops backpropagation of gradient computations. ''' scope = x.scope.as_list() batch = x.batch return TensorFluent(tf.stop_gradient(x.tensor), scope, batch)
[ "def", "stop_gradient", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "scope", "=", "x", ".", "scope", ".", "as_list", "(", ")", "batch", "=", "x", ".", "batch", "return", "TensorFluent", "(", "tf", ".", "stop_gradient", "(", "x", ".", "tensor", ")", ",", "scope", ",", "batch", ")" ]
Returns a copy of the input fluent with stop_gradient at tensor level. Args: x: The input fluent. Returns: A TensorFluent that stops backpropagation of gradient computations.
[ "Returns", "a", "copy", "of", "the", "input", "fluent", "with", "stop_gradient", "at", "tensor", "level", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L256-L267
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.stop_batch_gradient
def stop_batch_gradient(cls, x: 'TensorFluent', stop_batch: tf.Tensor) -> 'TensorFluent': '''Returns a copy of the input fluent with stop_gradient applied at batch level. Args: x: The input fluent. stop_batch: A boolean tf.Tensor with shape=(batch_size, ...) Returns: A TensorFluent that conditionally stops backpropagation of gradient computations. ''' scope = x.scope.as_list() batch = x.batch tensor = tf.where(stop_batch, tf.stop_gradient(x.tensor), x.tensor) return TensorFluent(tensor, scope, batch)
python
def stop_batch_gradient(cls, x: 'TensorFluent', stop_batch: tf.Tensor) -> 'TensorFluent': '''Returns a copy of the input fluent with stop_gradient applied at batch level. Args: x: The input fluent. stop_batch: A boolean tf.Tensor with shape=(batch_size, ...) Returns: A TensorFluent that conditionally stops backpropagation of gradient computations. ''' scope = x.scope.as_list() batch = x.batch tensor = tf.where(stop_batch, tf.stop_gradient(x.tensor), x.tensor) return TensorFluent(tensor, scope, batch)
[ "def", "stop_batch_gradient", "(", "cls", ",", "x", ":", "'TensorFluent'", ",", "stop_batch", ":", "tf", ".", "Tensor", ")", "->", "'TensorFluent'", ":", "scope", "=", "x", ".", "scope", ".", "as_list", "(", ")", "batch", "=", "x", ".", "batch", "tensor", "=", "tf", ".", "where", "(", "stop_batch", ",", "tf", ".", "stop_gradient", "(", "x", ".", "tensor", ")", ",", "x", ".", "tensor", ")", "return", "TensorFluent", "(", "tensor", ",", "scope", ",", "batch", ")" ]
Returns a copy of the input fluent with stop_gradient applied at batch level. Args: x: The input fluent. stop_batch: A boolean tf.Tensor with shape=(batch_size, ...) Returns: A TensorFluent that conditionally stops backpropagation of gradient computations.
[ "Returns", "a", "copy", "of", "the", "input", "fluent", "with", "stop_gradient", "applied", "at", "batch", "level", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L270-L283
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.abs
def abs(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the abs function. Args: x: The input fluent. Returns: A TensorFluent wrapping the abs function. ''' return cls._unary_op(x, tf.abs, tf.float32)
python
def abs(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the abs function. Args: x: The input fluent. Returns: A TensorFluent wrapping the abs function. ''' return cls._unary_op(x, tf.abs, tf.float32)
[ "def", "abs", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "abs", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the abs function. Args: x: The input fluent. Returns: A TensorFluent wrapping the abs function.
[ "Returns", "a", "TensorFluent", "for", "the", "abs", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L286-L295
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.exp
def exp(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the exp function. Args: x: The input fluent. Returns: A TensorFluent wrapping the exp function. ''' return cls._unary_op(x, tf.exp, tf.float32)
python
def exp(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the exp function. Args: x: The input fluent. Returns: A TensorFluent wrapping the exp function. ''' return cls._unary_op(x, tf.exp, tf.float32)
[ "def", "exp", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "exp", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the exp function. Args: x: The input fluent. Returns: A TensorFluent wrapping the exp function.
[ "Returns", "a", "TensorFluent", "for", "the", "exp", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L298-L307
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.log
def log(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the log function. Args: x: The input fluent. Returns: A TensorFluent wrapping the log function. ''' return cls._unary_op(x, tf.log, tf.float32)
python
def log(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the log function. Args: x: The input fluent. Returns: A TensorFluent wrapping the log function. ''' return cls._unary_op(x, tf.log, tf.float32)
[ "def", "log", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "log", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the log function. Args: x: The input fluent. Returns: A TensorFluent wrapping the log function.
[ "Returns", "a", "TensorFluent", "for", "the", "log", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L310-L319
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.sqrt
def sqrt(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the sqrt function. Args: x: The input fluent. Returns: A TensorFluent wrapping the sqrt function. ''' return cls._unary_op(x, tf.sqrt, tf.float32)
python
def sqrt(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the sqrt function. Args: x: The input fluent. Returns: A TensorFluent wrapping the sqrt function. ''' return cls._unary_op(x, tf.sqrt, tf.float32)
[ "def", "sqrt", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "sqrt", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the sqrt function. Args: x: The input fluent. Returns: A TensorFluent wrapping the sqrt function.
[ "Returns", "a", "TensorFluent", "for", "the", "sqrt", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L322-L331
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.cos
def cos(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the cos function. Args: x: The input fluent. Returns: A TensorFluent wrapping the cos function. ''' return cls._unary_op(x, tf.cos, tf.float32)
python
def cos(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the cos function. Args: x: The input fluent. Returns: A TensorFluent wrapping the cos function. ''' return cls._unary_op(x, tf.cos, tf.float32)
[ "def", "cos", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "cos", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the cos function. Args: x: The input fluent. Returns: A TensorFluent wrapping the cos function.
[ "Returns", "a", "TensorFluent", "for", "the", "cos", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L334-L343
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.sin
def sin(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the sin function. Args: x: The input fluent. Returns: A TensorFluent wrapping the sin function. ''' return cls._unary_op(x, tf.sin, tf.float32)
python
def sin(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the sin function. Args: x: The input fluent. Returns: A TensorFluent wrapping the sin function. ''' return cls._unary_op(x, tf.sin, tf.float32)
[ "def", "sin", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "sin", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the sin function. Args: x: The input fluent. Returns: A TensorFluent wrapping the sin function.
[ "Returns", "a", "TensorFluent", "for", "the", "sin", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L346-L355
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.tan
def tan(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the tan function. Args: x: The input fluent. Returns: A TensorFluent wrapping the tan function. ''' return cls._unary_op(x, tf.tan, tf.float32)
python
def tan(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the tan function. Args: x: The input fluent. Returns: A TensorFluent wrapping the tan function. ''' return cls._unary_op(x, tf.tan, tf.float32)
[ "def", "tan", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "tan", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the tan function. Args: x: The input fluent. Returns: A TensorFluent wrapping the tan function.
[ "Returns", "a", "TensorFluent", "for", "the", "tan", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L358-L367
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.acos
def acos(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the arccos function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arccos function. ''' return cls._unary_op(x, tf.acos, tf.float32)
python
def acos(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the arccos function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arccos function. ''' return cls._unary_op(x, tf.acos, tf.float32)
[ "def", "acos", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "acos", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the arccos function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arccos function.
[ "Returns", "a", "TensorFluent", "for", "the", "arccos", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L370-L379
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.asin
def asin(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the arcsin function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arcsin function. ''' return cls._unary_op(x, tf.asin, tf.float32)
python
def asin(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the arcsin function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arcsin function. ''' return cls._unary_op(x, tf.asin, tf.float32)
[ "def", "asin", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "asin", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the arcsin function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arcsin function.
[ "Returns", "a", "TensorFluent", "for", "the", "arcsin", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L382-L391
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.atan
def atan(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the arctan function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arctan function. ''' return cls._unary_op(x, tf.atan, tf.float32)
python
def atan(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the arctan function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arctan function. ''' return cls._unary_op(x, tf.atan, tf.float32)
[ "def", "atan", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "atan", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the arctan function. Args: x: The input fluent. Returns: A TensorFluent wrapping the arctan function.
[ "Returns", "a", "TensorFluent", "for", "the", "arctan", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L394-L403
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.round
def round(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the round function. Args: x: The input fluent. Returns: A TensorFluent wrapping the round function. ''' return cls._unary_op(x, tf.round, tf.float32)
python
def round(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the round function. Args: x: The input fluent. Returns: A TensorFluent wrapping the round function. ''' return cls._unary_op(x, tf.round, tf.float32)
[ "def", "round", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "round", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the round function. Args: x: The input fluent. Returns: A TensorFluent wrapping the round function.
[ "Returns", "a", "TensorFluent", "for", "the", "round", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L406-L415
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.ceil
def ceil(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the ceil function. Args: x: The input fluent. Returns: A TensorFluent wrapping the ceil function. ''' return cls._unary_op(x, tf.ceil, tf.float32)
python
def ceil(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the ceil function. Args: x: The input fluent. Returns: A TensorFluent wrapping the ceil function. ''' return cls._unary_op(x, tf.ceil, tf.float32)
[ "def", "ceil", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "ceil", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the ceil function. Args: x: The input fluent. Returns: A TensorFluent wrapping the ceil function.
[ "Returns", "a", "TensorFluent", "for", "the", "ceil", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L418-L427
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.floor
def floor(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the floor function. Args: x: The input fluent. Returns: A TensorFluent wrapping the floor function. ''' return cls._unary_op(x, tf.floor, tf.float32)
python
def floor(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the floor function. Args: x: The input fluent. Returns: A TensorFluent wrapping the floor function. ''' return cls._unary_op(x, tf.floor, tf.float32)
[ "def", "floor", "(", "cls", ",", "x", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_unary_op", "(", "x", ",", "tf", ".", "floor", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the floor function. Args: x: The input fluent. Returns: A TensorFluent wrapping the floor function.
[ "Returns", "a", "TensorFluent", "for", "the", "floor", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L430-L439
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.pow
def pow(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the pow function. Args: x: The first operand. y: The second operand. Returns: A TensorFluent wrapping the pow function. ''' return cls._binary_op(x, y, tf.pow, tf.float32)
python
def pow(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the pow function. Args: x: The first operand. y: The second operand. Returns: A TensorFluent wrapping the pow function. ''' return cls._binary_op(x, y, tf.pow, tf.float32)
[ "def", "pow", "(", "cls", ",", "x", ":", "'TensorFluent'", ",", "y", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_binary_op", "(", "x", ",", "y", ",", "tf", ".", "pow", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the pow function. Args: x: The first operand. y: The second operand. Returns: A TensorFluent wrapping the pow function.
[ "Returns", "a", "TensorFluent", "for", "the", "pow", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L442-L452
thiagopbueno/rddl2tf
rddl2tf/fluent.py
TensorFluent.max
def max(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the maximum function. Args: x: The first operand. y: The second operand. Returns: A TensorFluent wrapping the maximum function. ''' return cls._binary_op(x, y, tf.maximum, tf.float32)
python
def max(cls, x: 'TensorFluent', y: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the maximum function. Args: x: The first operand. y: The second operand. Returns: A TensorFluent wrapping the maximum function. ''' return cls._binary_op(x, y, tf.maximum, tf.float32)
[ "def", "max", "(", "cls", ",", "x", ":", "'TensorFluent'", ",", "y", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "return", "cls", ".", "_binary_op", "(", "x", ",", "y", ",", "tf", ".", "maximum", ",", "tf", ".", "float32", ")" ]
Returns a TensorFluent for the maximum function. Args: x: The first operand. y: The second operand. Returns: A TensorFluent wrapping the maximum function.
[ "Returns", "a", "TensorFluent", "for", "the", "maximum", "function", "." ]
train
https://github.com/thiagopbueno/rddl2tf/blob/f7c03d3a74d2663807c1e23e04eeed2e85166b71/rddl2tf/fluent.py#L455-L465
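pow and max close out this stretch via the shared _binary_op helper; a final sketch under the same assumptions as the sketches above:

import tensorflow as tf
from rddl2tf.fluent import TensorFluent

x = TensorFluent(tf.constant([2.0, 3.0]), [], batch=False)
y = TensorFluent(tf.constant([3.0, 2.0]), [], batch=False)
with tf.Session() as sess:
    print(sess.run(TensorFluent.pow(x, y).tensor))  # [8. 9.]
    print(sess.run(TensorFluent.max(x, y).tensor))  # [3. 3.]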