def create_package(self, path=None, name=None, mode=None):
"""
Create a tar file package of all the repository files and directories.
Only files and directories that are stored in the repository info
are stored in the package tar file.
**N.B. On some systems packaging requires root permissions.**
:Parameters:
#. path (None, string): The real absolute path where to create the package.
If None, it will be created in the same directory as the repository
If '.' or an empty string is passed, the current working directory will be used.
#. name (None, string): The name to give to the package file
If None, the package directory name will be used with the appropriate extension added.
#. mode (None, string): The writing mode of the tarfile.
If None, the best compression mode will be chosen automatically.
Available modes are ('w', 'w:', 'w:gz', 'w:bz2')
"""
# check mode
assert mode in (None, 'w', 'w:', 'w:gz', 'w:bz2'), 'unknown archive mode %s'%str(mode)
if mode is None:
mode = 'w:bz2'
# get root
if path is None:
root = os.path.split(self.__path)[0]
elif path.strip() in ('','.'):
root = os.getcwd()
else:
root = os.path.realpath( os.path.expanduser(path) )
assert os.path.isdir(root), 'absolute path %s is not a valid directory'%path
# get name
if name is None:
ext = mode.split(":")
if len(ext) == 2:
if len(ext[1]):
ext = "."+ext[1]
else:
ext = '.tar'
else:
ext = '.tar'
name = os.path.split(self.__path)[1]+ext
# save repository
self.save()
# create tar file
tarfilePath = os.path.join(root, name)
try:
tarHandler = tarfile.TarFile.open(tarfilePath, mode=mode)
except Exception as e:
raise Exception("Unable to create package (%s)"%e)
# walk directory and create empty directories
for directory in sorted(list(self.walk_directories_relative_path())):
t = tarfile.TarInfo( directory )
t.type = tarfile.DIRTYPE
tarHandler.addfile(t)
# walk files and add to tar
for file in self.walk_files_relative_path():
tarHandler.add(os.path.join(self.__path,file), arcname=file)
# save repository .pyrepinfo
tarHandler.add(os.path.join(self.__path,".pyrepinfo"), arcname=".pyrepinfo")
# close tar file
tarHandler.close() |
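# Usage sketch (not part of the original source): 'repo' is assumed to be an
# already loaded Repository instance; destination paths are hypothetical.
# With all arguments left as None, the package is created next to the repository
# directory, named after it, with the default 'w:bz2' compression.
repo.create_package(path=None, name=None, mode=None)
# Or write an uncompressed archive into an explicit directory:
repo.create_package(path='/tmp', name='backup.tar', mode='w:')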
def is_repository(self, path):
"""
Check if there is a Repository in path.
:Parameters:
#. path (string): The real path of the directory where to check if there is a repository.
:Returns:
#. result (boolean): Whether it's a repository or not.
"""
realPath = os.path.realpath( os.path.expanduser(path) )
if not os.path.isdir(realPath):
return False
if ".pyrepinfo" not in os.listdir(realPath):
return False
return True |
def get_directory_info(self, relativePath):
"""
get directory info from the Repository.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory.
:Returns:
#. info (None, dictionary): The directory information dictionary.
If None, it means an error has occurred.
#. error (string): The error message if any error occurred.
"""
relativePath = os.path.normpath(relativePath)
# if root directory
if relativePath in ('','.'):
return self, ""
currentDir = self.__path
dirInfoDict = self
for dir in relativePath.split(os.sep):
dirInfoDict = dict.__getitem__(dirInfoDict, "directories")
currentDir = os.path.join(currentDir, dir)
# check if path exists
if not os.path.exists(currentDir):
return None, "directory '%s' is not found"%currentDir
val = dirInfoDict.get(dir, None)
# check if directory is registered in repository
if val is None:
return None, "directory '%s' is not registered in PyrepInfo"%currentDir
dirInfoDict = val
return dirInfoDict, "" |
def get_parent_directory_info(self, relativePath):
"""
get parent directory info of a file or directory from the Repository.
:Parameters:
#. relativePath (string): The relative to the repository path of the file or directory of which the parent directory info is requested.
:Returns:
#. info (None, dictionary): The directory information dictionary.
If None, it means an error has occurred.
#. error (string): The error message if any error occurred.
"""
relativePath = os.path.normpath(relativePath)
# if root directory
if relativePath in ('','.'):
return self, "relativePath is empty pointing to the repository itself."
# split path
parentDirPath, _ = os.path.split(relativePath)
# get parent directory info
return self.get_directory_info(parentDirPath) |
def get_file_info(self, relativePath, name=None):
"""
get file information dict from the repository given its relative path and name.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory where the file is.
#. name (string): The file name.
If None is given, name will be split from relativePath.
:Returns:
#. info (None, dictionary): The file information dictionary.
If None, it means an error has occurred.
#. errorMessage (string): The error message if any error occurred.
"""
# normalize relative path and name
relativePath = os.path.normpath(relativePath)
if relativePath == '.':
relativePath = ''
assert name != '.pyrepinfo', "'.pyrepinfo' can't be a file name."
if name is None:
assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
relativePath,name = os.path.split(relativePath)
# initialize message
errorMessage = ""
# get directory info
dirInfoDict, errorMessage = self.get_directory_info(relativePath)
if dirInfoDict is None:
return None, errorMessage
# get file info
fileInfo = dict.__getitem__(dirInfoDict, "files").get(name, None)
if fileInfo is None:
errorMessage = "file %s does not exist in relative path '%s'"%(name, relativePath)
return fileInfo, errorMessage |
def get_file_info_by_id(self, id):
"""
Given an id, get the corresponding file info as the following:\n
(relative path joined with file name, file info dict)
:Parameters:
#. id (string): The file unique id string.
:Returns:
#. relativePath (string): The file relative path joined with file name.
If None, it means file was not found.
#. info (None, dictionary): The file information dictionary.
If None, it means file was not found.
"""
for path, info in self.walk_files_info():
if info['id']==id:
return path, info
# none was found
return None, None |
def get_file_relative_path_by_id(self, id):
"""
Given an id, get the corresponding file info relative path joined with file name.
:Parameters:
#. id (string): The file unique id string.
:Returns:
#. relativePath (string): The file relative path joined with file name.
If None, it means file was not found.
"""
for path, info in self.walk_files_info():
if info['id']==id:
return path
# none was found
return None |
def get_file_relative_path_by_name(self, name, skip=0):
"""
Get file relative path given the file name. If the same file name exists in different
directories in the repository, this method returns all or some of the matching
files according to the skip value.
:Parameters:
#. name (string): The file name.
#. skip (None, integer): As file names can be identical, skip determines
the number of matching file names to skip before returning.\n
If None is given, a list of all matching files' relative paths will be returned.
:Returns:
#. relativePath (string, list): The file relative path.
If None, it means file was not found.\n
If skip is None a list of all found files relative paths will be returned.
"""
if skip is None:
paths = []
else:
paths = None
for path, info in self.walk_files_info():
_, n = os.path.split(path)
if n==name:
if skip is None:
paths.append(path)
elif skip>0:
skip -= 1
else:
paths = path
break
return paths |
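# Usage sketch (not part of the original source): 'repo' is an assumed loaded
# Repository instance that tracks a file called 'data.txt' in several directories.
first_match = repo.get_file_relative_path_by_name('data.txt', skip=0)     # first hit
second_match = repo.get_file_relative_path_by_name('data.txt', skip=1)    # second hit
all_matches = repo.get_file_relative_path_by_name('data.txt', skip=None)  # list of every hit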
def add_directory(self, relativePath, info=None):
"""
Adds a directory in the repository and creates its
attribute in the Repository with utc timestamp.
It ensures adding all the missing directories in the path.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory to add in the repository.
#. info (None, string, picklable object): Any random info about the folder.
:Returns:
#. info (dict): The directory info dict.
"""
path = os.path.normpath(relativePath)
# create directories
currentDir = self.path
currentDict = self
if path in ("","."):
return currentDict
save = False
for dir in path.split(os.sep):
dirPath = os.path.join(currentDir, dir)
# create directory
if not os.path.exists(dirPath):
os.mkdir(dirPath)
# create dictionary key
currentDict = dict.__getitem__(currentDict, "directories")
if currentDict.get(dir, None) is None:
save = True
currentDict[dir] = {"directories":{}, "files":{},
"timestamp":datetime.utcnow(),
"id":str(uuid.uuid1()),
"info": info} # INFO MUST BE SET ONLY FOR THE LAST DIRECTORY
currentDict = currentDict[dir]
currentDir = dirPath
# save repository
if save:
self.save()
# return currentDict
return currentDict |
def remove_directory(self, relativePath, removeFromSystem=False):
"""
Remove directory from repository.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory to remove from the repository.
#. removeFromSystem (boolean): Whether to also remove the directory and all files from the system.\n
Only files saved in the repository will be removed, along with directories left empty.
"""
# get parent directory info
relativePath = os.path.normpath(relativePath)
parentDirInfoDict, errorMessage = self.get_parent_directory_info(relativePath)
assert parentDirInfoDict is not None, errorMessage
# split path
path, name = os.path.split(relativePath)
if dict.__getitem__(parentDirInfoDict, 'directories').get(name, None) is None:
raise Exception("'%s' is not a registered directory in repository relative path '%s'"%(name, path))
# remove from system
if removeFromSystem:
# remove files
for rp in self.walk_files_relative_path(relativePath=relativePath):
ap = os.path.join(self.__path, relativePath, rp)
if not os.path.isfile(ap):
continue
if not os.path.exists(ap):
continue
if os.path.isfile(ap):
os.remove( ap )
# remove directories
for rp in self.walk_directories_relative_path(relativePath=relativePath):
ap = os.path.join(self.__path, relativePath, rp)
if not os.path.isdir(ap):
continue
if not os.path.exists(ap):
continue
if not len(os.listdir(ap)):
os.rmdir(ap)
# pop directory from repo
dict.__getitem__(parentDirInfoDict, 'directories').pop(name, None)
ap = os.path.join(self.__path, relativePath)
if os.path.isdir(ap):
if not len(os.listdir(ap)):
os.rmdir(ap)
# save repository
self.save() |
def move_directory(self, relativePath, relativeDestination, replace=False, verbose=True):
"""
Move a directory in the repository from one place to another. It ensures moving all the
files and subdirectories in the system.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory to be moved.
#. relativeDestination (string): The new relative to the repository path of the directory.
#. replace (boolean): Whether to replace existing files with the same name in the new created directory.
#. verbose (boolean): Whether to warn and inform about any abnormalities.
"""
# normalize path
relativePath = os.path.normpath(relativePath)
relativeDestination = os.path.normpath(relativeDestination)
# get files and directories
filesInfo = list( self.walk_files_info(relativePath=relativePath) )
dirsPath = list( self.walk_directories_relative_path(relativePath=relativePath) )
dirInfoDict, errorMessage = self.get_directory_info(relativePath)
assert dirInfoDict is not None, errorMessage
# remove directory info only
self.remove_directory(relativePath=relativePath, removeFromSystem=False)
# create new relative path
self.add_directory(relativeDestination)
# move files
for RP, info in filesInfo:
source = os.path.join(self.__path, relativePath, RP)
destination = os.path.join(self.__path, relativeDestination, RP)
# add directory
newDirRP, fileName = os.path.split(os.path.join(relativeDestination, RP))
dirInfoDict = self.add_directory( newDirRP )
# move file
if os.path.isfile(destination):
if replace:
os.remove(destination)
if verbose:
warnings.warn("file '%s' is copied replacing existing one in destination '%s'."%(fileName, newDirRP))
else:
if verbose:
warnings.warn("file '%s' is not copied because the same file exists in destination '%s'."%(fileName,destination))
continue
os.rename(source, destination)
# set file information
dict.__getitem__(dirInfoDict, "files")[fileName] = info
# save repository
self.save() |
def rename_directory(self, relativePath, newName, replace=False, verbose=True):
"""
Rename a directory in the repository. It ensures renaming the directory in the system.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory to be renamed.
#. newName (string): The new directory name.
#. replace (boolean): Whether to force renaming when new name exists in the system.
It fails when new folder name is registered in repository.
#. verbose (boolean): Whether to warn and inform about any abnormalities.
"""
# normalize path
relativePath = os.path.normpath(relativePath)
parentDirInfoDict, errorMessage = self.get_parent_directory_info(relativePath)
assert parentDirInfoDict is not None, errorMessage
# split path
parentDirPath, dirName = os.path.split(relativePath)
# get real path
realPath = os.path.join(self.__path, relativePath)
assert os.path.isdir( realPath ), "directory '%s' is not found in system"%realPath
# check directory in repository
assert dirName in dict.__getitem__(parentDirInfoDict, "directories"), "directory '%s' is not found in repository relative path '%s'"%(dirName, parentDirPath)
# assert directory new name doesn't exist in repository
assert newName not in dict.__getitem__(parentDirInfoDict, "directories"), "directory '%s' already exists in repository, relative path '%s'"%(newName, parentDirPath)
# check new directory in system
newRealPath = os.path.join(self.__path, parentDirPath, newName)
if os.path.isdir( newRealPath ):
if replace:
shutil.rmtree(newRealPath)
if verbose:
warnings.warn( "directory '%s' already exists in system, it is therefore deleted."%newRealPath )
else:
raise Exception( "directory '%s' already exists in system"%newRealPath )
# rename directory
os.rename(realPath, newRealPath)
dict.__setitem__( dict.__getitem__(parentDirInfoDict, "directories"),
newName,
dict.__getitem__(parentDirInfoDict, "directories").pop(dirName) )
# save repository
self.save() |
def rename_file(self, relativePath, name, newName, replace=False, verbose=True):
"""
Rename a file in the repository. It ensures renaming the file in the system.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory where the file is located.
#. name (string): The file name.
#. newName (string): The file new name.
#. replace (boolean): Whether to force renaming when the new file name exists in the system.
It fails when the new file name is registered in the repository.
#. verbose (boolean): Whether to warn and inform about any abnormalities.
"""
# normalize path
relativePath = os.path.normpath(relativePath)
if relativePath == '.':
relativePath = ''
dirInfoDict, errorMessage = self.get_directory_info(relativePath)
assert dirInfoDict is not None, errorMessage
# check directory in repository
assert name in dict.__getitem__(dirInfoDict, "files"), "file '%s' is not found in repository relative path '%s'"%(name, relativePath)
# get real path
realPath = os.path.join(self.__path, relativePath, name)
assert os.path.isfile(realPath), "file '%s' is not found in system"%realPath
# assert directory new name doesn't exist in repository
assert newName not in dict.__getitem__(dirInfoDict, "files"), "file '%s' already exists in repository relative path '%s'"%(newName, relativePath)
# check new directory in system
newRealPath = os.path.join(self.__path, relativePath, newName)
if os.path.isfile( newRealPath ):
if replace:
os.remove(newRealPath)
if verbose:
warnings.warn( "file '%s' already exists in system, it is now replaced by '%s' because 'replace' flag is True."%(newRealPath,realPath) )
else:
raise Exception( "file '%s' already exists in system but not registered in repository."%newRealPath )
# rename file
os.rename(realPath, newRealPath)
dict.__setitem__( dict.__getitem__(dirInfoDict, "files"),
newName,
dict.__getitem__(dirInfoDict, "files").pop(name) )
# save repository
self.save() |
def remove_file(self, relativePath, name=None, removeFromSystem=False):
"""
Remove file from repository.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory where the file is located.
#. name (string): The file name.
If None is given, name will be split from relativePath.
#. removeFromSystem (boolean): Whether to also remove the file from the system.
"""
# get relative path normalized
relativePath = os.path.normpath(relativePath)
if relativePath == '.':
relativePath = ''
assert name != '.pyrepinfo', "'.pyrepinfo' is not allowed as file name in main repository directory"
assert name != '.pyrepstate', "'.pyrepstate' is not allowed as file name in main repository directory"
assert name != '.pyreplock', "'.pyreplock' is not allowed as file name in main repository directory"
if name is None:
assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
relativePath, name = os.path.split(relativePath)
# get file info dict
dirInfoDict, errorMessage = self.get_directory_info(relativePath)
assert dirInfoDict is not None, errorMessage
# check directory in repository
assert name in dict.__getitem__(dirInfoDict, "files"), "file '%s' is not found in repository relative path '%s'"%(name, relativePath)
# remove file from repo
dict.__getitem__(dirInfoDict, "files").pop(name)
# remove file from system
if removeFromSystem:
ap = os.path.join(self.__path, relativePath, name )
if os.path.isfile(ap):
os.remove( ap )
# save repository
self.save() |
def dump_copy(self, path, relativePath, name=None,
description=None,
replace=False, verbose=False):
"""
Copy an existing system file to the repository and create its
attribute in the Repository with utc timestamp.
:Parameters:
#. path (str): The full path of the file to copy into the repository.
#. relativePath (str): The relative to the repository path of the directory where the file should be dumped.
If relativePath does not exist, it will be created automatically.
#. name (string): The file name.
If None is given, name will be split from path.
#. description (None, string, picklable object): Any random description about the file.
#. replace (boolean): Whether to replace any existing file with the same name.
#. verbose (boolean): Whether to warn and inform about any abnormalities.
"""
relativePath = os.path.normpath(relativePath)
if relativePath == '.':
relativePath = ''
if name is None:
_,name = os.path.split(path)
# ensure directory added
self.add_directory(relativePath)
# get real path
realPath = os.path.join(self.__path, relativePath)
# get directory info dict
dirInfoDict, errorMessage = self.get_directory_info(relativePath)
assert dirInfoDict is not None, errorMessage
if name in dict.__getitem__(dirInfoDict, "files"):
if not replace:
if verbose:
warnings.warn("a file with the name '%s' is already defined in repository dictionary info. Set replace flag to True if you want to replace the existing file"%(name))
return
# convert dump and pull methods to strings
dump = "raise Exception(\"dump is ambiguous for copied file '$FILE_PATH' \")"
pull = "raise Exception(\"pull is ambiguous for copied file '$FILE_PATH' \")"
# dump file
try:
shutil.copyfile(path, os.path.join(realPath,name))
except Exception as e:
if verbose:
warnings.warn(e)
return
# set info
klass = None
# save the new file to the repository
dict.__getitem__(dirInfoDict, "files")[name] = {"dump":dump,
"pull":pull,
"timestamp":datetime.utcnow(),
"id":str(uuid.uuid1()),
"class": klass,
"description":description}
# save repository
self.save() |
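# Usage sketch (not part of the original source): copy a file that already exists on
# disk into the repository under 'inputs'. 'repo' is an assumed loaded Repository
# instance and '/tmp/settings.ini' is a hypothetical source file.
repo.dump_copy('/tmp/settings.ini', relativePath='inputs', replace=True, verbose=True)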
def dump_file(self, value, relativePath, name=None,
description=None, klass=None,
dump=None, pull=None,
replace=False, ACID=None, verbose=False):
"""
Dump a file using its value to the system and creates its
attribute in the Repository with utc timestamp.
:Parameters:
#. value (object): The value of a file to dump and add to the repository. It is any python object or file.
#. relativePath (str): The relative to the repository path of the directory where the file should be dumped.
If relativePath does not exist, it will be created automatically.
#. name (string): The file name.
If None is given, name will be split from relativePath.
#. description (None, string, picklable object): Any random description about the file.
#. klass (None, class): The dumped object class. If None is given,
klass will be automatically set to value.__class__
#. dump (None, string): The dumping method.
If None it will be set automatically to pickle and therefore the object must be pickleable.
If a string is given, the string should include all the necessary imports
and a '$FILE_PATH' that replaces the absolute file path when the dumping will be performed.\n
e.g. "import numpy as np; np.savetxt(fname='$FILE_PATH', X=value, fmt='%.6e')"
#. pull (None, string): The pulling method.
If None it will be set automatically to pickle and therefore the object must be pickleable.
If a string is given, the string should include all the necessary imports,
a '$FILE_PATH' that replaces the absolute file path when the dumping will be performed
and finally a PULLED_DATA variable.\n
e.g "import numpy as np; PULLED_DATA=np.loadtxt(fname='$FILE_PATH')"
#. replace (boolean): Whether to replace any existing file with the same name if existing.
#. ACID (None, boolean): Whether to ensure the ACID (Atomicity, Consistency, Isolation, Durability)
properties of the repository upon dumping a file. This is ensured by dumping the file in
a temporary path first and then moving it to the desired path.
If None is given, repository ACID property will be used.
#. verbose (boolean): Whether to warn and inform about any abnormalities.
"""
# check ACID
if ACID is None:
ACID = self.__ACID
assert isinstance(ACID, bool), "ACID must be boolean"
# check name and path
relativePath = os.path.normpath(relativePath)
if relativePath == '.':
relativePath = ''
assert name != '.pyrepinfo', "'.pyrepinfo' is not allowed as file name in main repository directory"
assert name != '.pyrepstate', "'.pyrepstate' is not allowed as file name in main repository directory"
assert name != '.pyreplock', "'.pyreplock' is not allowed as file name in main repository directory"
if name is None:
assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
relativePath,name = os.path.split(relativePath)
# ensure directory added
self.add_directory(relativePath)
# get real path
realPath = os.path.join(self.__path, relativePath)
# get directory info dict
dirInfoDict, errorMessage = self.get_directory_info(relativePath)
assert dirInfoDict is not None, errorMessage
if name in dict.__getitem__(dirInfoDict, "files"):
if not replace:
if verbose:
warnings.warn("a file with the name '%s' is already defined in repository dictionary info. Set replace flag to True if you want to replace the existing file"%(name))
return
# convert dump and pull methods to strings
if dump is None:
dump=DEFAULT_DUMP
if pull is None:
pull=DEFAULT_PULL
# get savePath
if ACID:
savePath = os.path.join(tempfile.gettempdir(), str(uuid.uuid1()))
else:
savePath = os.path.join(realPath,name)
# dump file
try:
exec( dump.replace("$FILE_PATH", str(savePath)) )
except Exception as e:
message = "unable to dump the file (%s)"%e
if 'pickle.dump(' in dump:
message += '\nmore info: %s'%str(get_pickling_errors(value))
raise Exception( message )
# copy if ACID
if ACID:
try:
shutil.copyfile(savePath, os.path.join(realPath,name))
except Exception as e:
os.remove(savePath)
if verbose:
warnings.warn(e)
return
os.remove(savePath)
# set info
if klass is None and value is not None:
klass = value.__class__
if klass is not None:
assert inspect.isclass(klass), "klass must be a class definition"
# MUST TRY PICKLING KLASS TEMPORARILY FIRST
# save the new file to the repository
dict.__getitem__(dirInfoDict, "files")[name] = {"dump":dump,
"pull":pull,
"timestamp":datetime.utcnow(),
"id":str(uuid.uuid1()),
"class": klass,
"description":description}
# save repository
self.save() |
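# Usage sketch (not part of the original source): dump a numpy array with custom
# dump/pull code strings. 'repo' is an assumed loaded Repository instance; the
# '$FILE_PATH' placeholder is substituted with the absolute file path at run time.
import numpy as np
value = np.linspace(0., 1., 11)
repo.dump_file(value, relativePath='arrays', name='ramp.txt',
               dump="import numpy as np; np.savetxt(fname='$FILE_PATH', X=value, fmt='%.6e')",
               pull="import numpy as np; PULLED_DATA=np.loadtxt(fname='$FILE_PATH')",
               replace=True)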
def update_file(self, value, relativePath, name=None,
description=False, klass=False,
dump=False, pull=False,
ACID=None, verbose=False):
"""
Update the value and the utc timestamp of a file that is already in the Repository.\n
If the file is not registered in the repository, an error will be thrown.\n
If the file is missing in the system, it will be regenerated as the dump method is called.
:Parameters:
#. value (object): The value of the file to update. It is any python object or a file.
#. relativePath (str): The relative to the repository path of the directory where the file should be dumped.
#. name (None, string): The file name.
If None is given, name will be split from relativePath.
#. description (False, string, picklable object): Any random description about the file.
If False is given, the description info won't be updated,
otherwise it will be updated to the given description value.
#. klass (False, class): The dumped object class. If False is given,
the class info won't be updated, otherwise it will be updated to the given klass value.
#. dump (False, string): The new dump method. If False is given, the old one will be used.
#. pull (False, string): The new pull method. If False is given, the old one will be used.
#. ACID (None, boolean): Whether to ensure the ACID (Atomicity, Consistency, Isolation, Durability)
properties of the repository upon dumping a file. This is ensured by dumping the file in
a temporary path first and then moving it to the desired path.
If None is given, repository ACID property will be used.
#. verbose (boolean): Whether to warn and inform about any abnormalities.
"""
# check ACID
if ACID is None:
ACID = self.__ACID
assert isinstance(ACID, bool), "ACID must be boolean"
# get relative path normalized
relativePath = os.path.normpath(relativePath)
if relativePath == '.':
relativePath = ''
assert name != '.pyrepinfo', "'.pyrepinfo' is not allowed as file name in main repository directory"
assert name != '.pyrepstate', "'.pyrepstate' is not allowed as file name in main repository directory"
assert name != '.pyreplock', "'.pyreplock' is not allowed as file name in main repository directory"
if name is None:
assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
relativePath,name = os.path.split(relativePath)
# get file info dict
fileInfoDict, errorMessage = self.get_file_info(relativePath, name)
assert fileInfoDict is not None, errorMessage
# get real path
realPath = os.path.join(self.__path, relativePath)
# check if file exists
if verbose:
if not os.path.isfile( os.path.join(realPath, name) ):
warnings.warn("file '%s' is in repository but does not exist in the system. It is therefore being recreated."%os.path.join(realPath, name))
# convert dump and pull methods to strings
if not dump:
dump = fileInfoDict["dump"]
if not pull:
pull = fileInfoDict["pull"]
# get savePath
if ACID:
savePath = os.path.join(tempfile.gettempdir(), name)
else:
savePath = os.path.join(realPath,name)
# dump file
try:
exec( dump.replace("$FILE_PATH", str(savePath)) )
except Exception as e:
message = "unable to dump the file (%s)"%e
if 'pickle.dump(' in dump:
message += '\nmore info: %s'%str(get_pickling_errors(value))
raise Exception( message )
# copy if ACID
if ACID:
try:
shutil.copyfile(savePath, os.path.join(realPath,name))
except Exception as e:
os.remove(savePath)
if verbose:
warnings.warn(e)
return
os.remove(savePath)
# update timestamp
fileInfoDict["timestamp"] = datetime.utcnow()
if description is not False:
fileInfoDict["description"] = description
if klass is not False:
assert inspect.isclass(klass), "klass must be a class definition"
fileInfoDict["class"] = klass
# save repository
self.save() |
def pull_file(self, relativePath, name=None, pull=None, update=True):
"""
Pull a file's data from the Repository.
:Parameters:
#. relativePath (string): The relative to the repository path of the directory where the file should be pulled.
#. name (string): The file name.
If None is given, name will be split from relativePath.
#. pull (None, string): The pulling method.
If None, the pull method saved in the file info will be used.
If a string is given, the string should include all the necessary imports,
a '$FILE_PATH' that replaces the absolute file path when the dumping will be performed
and finally a PULLED_DATA variable.
e.g "import numpy as np; PULLED_DATA=np.loadtxt(fname='$FILE_PATH')"
#. update (boolean): If pull is not None, whether to update the pull method stored in the file info with the given pull method.
:Returns:
#. data (object): The pulled data from the file.
"""
# get relative path normalized
relativePath = os.path.normpath(relativePath)
if relativePath == '.':
relativePath = ''
assert name != '.pyrepinfo', "pulling '.pyrepinfo' from main repository directory is not allowed."
assert name != '.pyrepstate', "pulling '.pyrepstate' from main repository directory is not allowed."
assert name != '.pyreplock', "pulling '.pyreplock' from main repository directory is not allowed."
if name is None:
assert len(relativePath), "name must be given when relative path is given as empty string or as a simple dot '.'"
relativePath,name = os.path.split(relativePath)
# get file info
fileInfo, errorMessage = self.get_file_info(relativePath, name)
assert fileInfo is not None, errorMessage
# get absolute path
realPath = os.path.join(self.__path, relativePath)
assert os.path.exists(realPath), "relative path '%s' within repository '%s' does not exist"%(relativePath, self.__path)
# file path
fileAbsPath = os.path.join(realPath, name)
assert os.path.isfile(fileAbsPath), "file '%s' does not exist in absolute path '%s'"%(name,realPath)
if pull is None:
pull = fileInfo["pull"]
# try to pull file
try:
namespace = {}
namespace.update( globals() )
exec( pull.replace("$FILE_PATH", str(os.path.join(realPath,name)) ), namespace )
except Exception as e:
m = pull.replace("$FILE_PATH", str(os.path.join(realPath,name)) )
raise Exception( "unable to pull data using '%s' from file (%s)"%(m,e) )
# update
if update:
fileInfo["pull"] = pull
# return data
return namespace['PULLED_DATA'] |
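# Usage sketch (not part of the original source): pull the array dumped above back
# into memory. If 'pull' is omitted, the pull code stored with the file is used;
# passing update=False keeps the stored pull method unchanged.
data = repo.pull_file(relativePath='arrays', name='ramp.txt', update=False)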
def ensure_str(value):
"""
Ensure value is string.
"""
if isinstance(value, six.string_types):
return value
else:
return six.text_type(value) |
def ensure_list(path_or_path_list):
"""
Pre-process input argument, whether it is:
1. abspath
2. Path instance
3. string
4. list or set of any of them
It returns a list of paths.
:return path_or_path_list: always returns a list of paths as strings
**Chinese documentation**
Pre-process the input argument.
"""
if isinstance(path_or_path_list, (tuple, list, set)):
return [ensure_str(path) for path in path_or_path_list]
else:
return [ensure_str(path_or_path_list), ] |
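# Minimal behaviour sketch (not part of the original source): any single value is
# wrapped in a list, while tuples/lists/sets are converted element-wise to strings.
assert ensure_list("/tmp/a.txt") == ["/tmp/a.txt"]
assert ensure_list(["/tmp/a.txt", "/tmp/b.txt"]) == ["/tmp/a.txt", "/tmp/b.txt"]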
def stream(self, report):
"""Stream reports to application logs"""
with self.ClientSession() as session:
lines = []
for job in report['traces']:
key = '%s:%s' % (self.name, job)
for minute in report['traces'][job]:
for k, v in report['traces'][job][minute].items():
lines.append('# TYPE %s_%s gauge' % (key, k))
lines.append('%s_%s %0.2f' % (key, k, v))
# An empty line is required at the end of the payload
lines.append("")
data = "\n".join(lines)
logger.info(data)
yield from session.post(self.url, data=bytes(data.encode('utf-8'))) |
def stream(self, report):
"""Stream reports to application logs"""
payload = {
"agent": {
"host": report['instance']['hostname'],
"version": "1.0.0"
},
"components": [
{
"name": self.name,
"guid": "com.darwinmonroy.aiometrics",
"duration": 60,
"metrics": {
'Component/{}'.format(key): {
"total": metric['count'] * metric['avg'],
"count": metric['count'],
"min": metric['min'],
"max": metric['max'],
"sum_of_squares": metric['min']**2 + metric['max']**2,
} for key, metric in report['traces'].items()
}
}
]
}
with self.ClientSession() as session:
try:
r = yield from session.post(
'https://platform-api.newrelic.com/platform/v1/metrics',
data=json.dumps(payload),
headers=(
('X-License-Key', self.license_key),
('Content-Type', 'application/json'),
('Accept', 'application/json'),
)
)
r.close()
except Exception as e:
# No exception should affect the execution of the main
# program, so we must explicitly silence any error caused
# by the streaming of metrics
# TODO: consider the implementation of a retry logic
logger.exception(e) |
def stats(cls, traces):
"""Build per minute stats for each key"""
data = {}
stats = {}
# Group traces by key and minute
for trace in traces:
key = trace['key']
if key not in data:
data[key] = []
stats[key] = {}
data[key].append(trace['total_time'])
cls._traces.pop(trace['id'])
for key in data:
times = data[key]
stats[key] = dict(
count=len(times),
max=max(times),
min=min(times),
avg=sum(times)/len(times)
)
return stats |
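# Worked example (not part of the original source) of the per-key reduction that
# `stats` performs: group total_time values by key, then compute count/max/min/avg.
# The real classmethod additionally pops each trace id from the class-level
# `_traces` cache, so it must be fed traces that are registered there.
times = {'fetch': [0.25, 0.75], 'store': [1.00]}
per_key = {key: dict(count=len(v), max=max(v), min=min(v), avg=sum(v) / len(v))
           for key, v in times.items()}
# per_key == {'fetch': {'count': 2, 'max': 0.75, 'min': 0.25, 'avg': 0.5},
#             'store': {'count': 1, 'max': 1.0, 'min': 1.0, 'avg': 1.0}}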
def no_error_extract_data_from_file(self, file_path: str) -> Iterable[DataSourceType]:
"""
Proxy for `extract_data_from_file` that suppresses any errors and instead just returns an empty list.
:param file_path: see `extract_data_from_file`
:return: see `extract_data_from_file`
"""
try:
return self.extract_data_from_file(file_path)
except Exception as e:
logging.warning(e)
return [] |
def _load_all_in_directory(self) -> Dict[str, Iterable[DataSourceType]]:
"""
Loads all of the data from the files in directory location.
:return: an origin map of all the loaded data
"""
origin_mapped_data = dict() # type: Dict[str, Iterable[DataSourceType]]
for file_path in glob.iglob("%s/**/*" % self._directory_location, recursive=True):
if self.is_data_file(file_path):
origin_mapped_data[file_path] = self.no_error_extract_data_from_file(file_path)
return origin_mapped_data |
def _extract_data_from_origin_map(origin_mapped_data: Dict[str, Iterable[DataSourceType]]) \
-> Iterable[DataSourceType]:
"""
Extracts the data from a data origin map.
:param origin_mapped_data: a map containing the origin of the data as the key string and the data as the value
:return: the data contained within the map
"""
data = []
for _, data_item in origin_mapped_data.items():
data.extend(data_item)
return data |
def start(self):
"""
Monitors data kept in files in the predefined directory in a new thread.
Note: Due to the underlying library, it may take a few milliseconds after this method is started for changes to
start being noticed.
"""
with self._status_lock:
if self._running:
raise RuntimeError("Already running")
self._running = True
# Cannot re-use Observer after stopped
self._observer = Observer()
self._observer.schedule(self._event_handler, self._directory_location, recursive=True)
self._observer.start()
# Load all in directory afterwards to ensure no undetected changes between loading all and observing
self._origin_mapped_data = self._load_all_in_directory() |
def stop(self):
"""
Stops monitoring the predefined directory.
"""
with self._status_lock:
if self._running:
assert self._observer is not None
self._observer.stop()
self._running = False
self._origin_mapped_data = dict() |
def _on_file_created(self, event: FileSystemEvent):
"""
Called when a file in the monitored directory has been created.
:param event: the file system event
"""
if not event.is_directory and self.is_data_file(event.src_path):
assert event.src_path not in self._origin_mapped_data
self._origin_mapped_data[event.src_path] = self.no_error_extract_data_from_file(event.src_path)
self.notify_listeners(FileSystemChange.CREATE) |
def _on_file_modified(self, event: FileSystemEvent):
"""
Called when a file in the monitored directory has been modified.
:param event: the file system event
"""
if not event.is_directory and self.is_data_file(event.src_path):
assert event.src_path in self._origin_mapped_data
self._origin_mapped_data[event.src_path] = self.no_error_extract_data_from_file(event.src_path)
self.notify_listeners(FileSystemChange.MODIFY) |
def _on_file_deleted(self, event: FileSystemEvent):
"""
Called when a file in the monitored directory has been deleted.
:param event: the file system event
"""
if not event.is_directory and self.is_data_file(event.src_path):
assert event.src_path in self._origin_mapped_data
del(self._origin_mapped_data[event.src_path])
self.notify_listeners(FileSystemChange.DELETE) |
def _on_file_moved(self, event: FileSystemMovedEvent):
"""
Called when a file in the monitored directory has been moved.
Breaks the move down into a delete and a create (which it is sometimes detected as!).
:param event: the file system event
"""
if not event.is_directory and self.is_data_file(event.src_path):
delete_event = FileSystemEvent(event.src_path)
delete_event.event_type = EVENT_TYPE_DELETED
self._on_file_deleted(delete_event)
create_event = FileSystemEvent(event.dest_path)
create_event.event_type = EVENT_TYPE_CREATED
self._on_file_created(create_event) |
def tear_down(self):
"""
Tears down all temp files and directories.
"""
while len(self._temp_directories) > 0:
directory = self._temp_directories.pop()
shutil.rmtree(directory, ignore_errors=True)
while len(self._temp_files) > 0:
file = self._temp_files.pop()
try:
os.remove(file)
except OSError:
pass |
def create_temp_directory(self, **mkdtemp_kwargs) -> str:
"""
Creates a temp directory.
:param mkdtemp_kwargs: named arguments to be passed to `tempfile.mkdtemp`
:return: the location of the temp directory
"""
kwargs = {**self.default_mkdtemp_kwargs, **mkdtemp_kwargs}
location = tempfile.mkdtemp(**kwargs)
self._temp_directories.add(location)
return location |
def create_temp_file(self, **mkstemp_kwargs) -> Tuple[int, str]:
"""
Creates a temp file.
:param mkstemp_kwargs: named arguments to be passed to `tempfile.mkstemp`
:return: tuple where the first element is the file handle and the second is the location of the temp file
"""
kwargs = {**self.default_mkstemp_kwargs, **mkstemp_kwargs}
handle, location = tempfile.mkstemp(**kwargs)
self._temp_files.add(location)
return handle, location |
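# Usage sketch (not part of the original source): 'manager' stands for an instance of
# the temp-management class these methods belong to (its name is not shown here).
import os
scratch_dir = manager.create_temp_directory(prefix='example-')
handle, scratch_file = manager.create_temp_file(suffix='.tmp')
os.close(handle)      # close the OS-level handle returned by tempfile.mkstemp
manager.tear_down()   # removes every temp file and directory created above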
def change(self,
new_abspath=None,
new_dirpath=None,
new_dirname=None,
new_basename=None,
new_fname=None,
new_ext=None):
"""
Return a new :class:`pathlib_mate.pathlib2.Path` object with updated information.
"""
if new_abspath is not None:
p = self.__class__(new_abspath)
return p
if (new_dirpath is None) and (new_dirname is not None):
new_dirpath = os.path.join(self.parent.dirpath, new_dirname)
elif (new_dirpath is not None) and (new_dirname is None):
new_dirpath = new_dirpath
elif (new_dirpath is None) and (new_dirname is None):
new_dirpath = self.dirpath
elif (new_dirpath is not None) and (new_dirname is not None):
raise ValueError("Cannot have both new_dirpath and new_dirname!")
if new_basename is None:
if new_fname is None:
new_fname = self.fname
if new_ext is None:
new_ext = self.ext
new_basename = new_fname + new_ext
else:
if new_fname is not None or new_ext is not None:
raise ValueError("Cannot have new_basename together with "
"new_fname or new_ext!")
return self.__class__(new_dirpath, new_basename) |
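# Usage sketch (not part of the original source), assuming the usual pathlib_mate
# import; each call returns a new Path instance and leaves the original untouched.
from pathlib_mate import Path

p = Path("/tmp/report.txt")
p.change(new_ext=".csv")           # -> Path("/tmp/report.csv")
p.change(new_fname="summary")      # -> Path("/tmp/summary.txt")
p.change(new_basename="data.bin")  # -> Path("/tmp/data.bin")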
def is_not_exist_or_allow_overwrite(self, overwrite=False):
"""
Test whether the target file does not exist, or it exists but overwrite
is allowed.
"""
if self.exists() and overwrite is False:
return False
else: # pragma: no cover
return True |
def moveto(self,
new_abspath=None,
new_dirpath=None,
new_dirname=None,
new_basename=None,
new_fname=None,
new_ext=None,
overwrite=False,
makedirs=False):
"""
An advanced :meth:`pathlib_mate.pathlib2.Path.rename` method providing the ability to rename by
each component of a path. A new ``Path`` instance is returned.
**Chinese documentation**
Advanced rename function, allowing renaming based on each component of the path. Like the
os.rename method, the parent directory must already exist.
"""
self.assert_exists()
p = self.change(
new_abspath=new_abspath,
new_dirpath=new_dirpath,
new_dirname=new_dirname,
new_basename=new_basename,
new_fname=new_fname,
new_ext=new_ext,
)
if p.is_not_exist_or_allow_overwrite(overwrite=overwrite):
# only move if the two paths differ
if self.abspath != p.abspath:
if makedirs:
parent = p.parent
if not parent.exists():
os.makedirs(parent.abspath)
self.rename(p)
return p |
def copyto(self,
new_abspath=None,
new_dirpath=None,
new_dirname=None,
new_basename=None,
new_fname=None,
new_ext=None,
overwrite=False,
makedirs=False):
"""
Copy this file to other place.
"""
self.assert_exists()
p = self.change(
new_abspath=new_abspath,
new_dirpath=new_dirpath,
new_dirname=new_dirname,
new_basename=new_basename,
new_fname=new_fname,
new_ext=new_ext,
)
if p.is_not_exist_or_allow_overwrite(overwrite=overwrite):
# only copy if the two paths differ
if self.abspath != p.abspath:
try:
shutil.copy(self.abspath, p.abspath)
except IOError as e:
if makedirs:
os.makedirs(p.parent.abspath)
shutil.copy(self.abspath, p.abspath)
else:
raise e
return p |
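# Usage sketch (not part of the original source): copy an existing file into an
# archive directory, creating missing parent directories on the first IOError.
from pathlib_mate import Path

src = Path("/tmp/report.csv")  # hypothetical existing file
dst = src.copyto(new_dirpath="/tmp/archive", makedirs=True, overwrite=True)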
def _create_client(base_url: str, tls: TLSConfig=False) -> Optional[APIClient]:
"""
Creates a Docker client with the given details.
:param base_url: the base URL of the Docker daemon
:param tls: the Docker daemon's TLS config (if any)
:return: the created client else None if unable to connect the client to the daemon
"""
try:
client = APIClient(base_url=base_url, tls=tls, version="auto")
return client if client.ping() else None
except:
return None |
def create_client() -> APIClient:
"""
Creates a Docker client.
Will raise a `ConnectionError` if the Docker daemon is not accessible.
:return: the Docker client
"""
global _client
client = _client()
if client is None:
# First try looking at the environment variables for specification of the daemon's location
docker_environment = kwargs_from_env(assert_hostname=False)
if "base_url" in docker_environment:
client = _create_client(docker_environment.get("base_url"), docker_environment.get("tls"))
if client is None:
raise ConnectionError(
"Could not connect to the Docker daemon specified by the `DOCKER_X` environment variables: %s"
% docker_environment)
else:
logging.info("Connected to Docker daemon specified by the environment variables")
else:
# Let's see if the Docker daemon is accessible via the UNIX socket
client = _create_client("unix://var/run/docker.sock")
if client is not None:
logging.info("Connected to Docker daemon running on UNIX socket")
else:
raise ConnectionError(
"Cannot connect to Docker - is the Docker daemon running? `$DOCKER_HOST` should be set or the "
"daemon should be accessible via the standard UNIX socket.")
_client = weakref.ref(client)
assert isinstance(client, APIClient)
return client |
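# Usage sketch (not part of the original source): obtain a docker-py APIClient via the
# helper above and query the daemon; raises ConnectionError when no daemon is reachable.
client = create_client()
print(client.version().get("ApiVersion"))  # value depends on the local Docker daemon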
def get_dump_method(dump, protocol=-1):
"""Get dump function code string"""
if dump is None:
dump = 'pickle'
if dump.startswith('pickle'):
if dump == 'pickle':
proto = protocol
else:
proto = dump.strip('pickle')
try:
proto = int(proto)
assert proto>=-1
except:
raise Exception("protocol must be an integer >=-1")
code = """
try:
import cPickle as pickle
except:
import pickle
with open('$FILE_PATH', 'wb') as fd:
pickle.dump( value, fd, protocol=%i )
fd.flush()
os.fsync(fd.fileno())
"""%proto
elif dump.startswith('dill'):
if dump == 'dill':
proto = 2
else:
proto = dump.strip('dill')
try:
proto = int(proto)
assert proto>=-1
except:
raise Exception("protocol must be an integer >=-1")
code = """
import dill
with open('$FILE_PATH', 'wb') as fd:
dill.dump( value, fd, protocol=%i )
fd.flush()
os.fsync(fd.fileno())
"""%proto
elif dump == 'json':
code = """
import json
with open('$FILE_PATH', 'wb') as fd:
json.dump( value,fd, ensure_ascii=True, indent=4 )
fd.flush()
os.fsync(fd.fileno())
"""
elif dump == 'numpy':
code = """
import numpy
with open('$FILE_PATH', 'wb') as fd:
numpy.save(file=fd, arr=value)
fd.flush()
os.fsync(fd.fileno())
"""
elif dump == 'numpy_text':
code = """
import numpy
numpy.savetxt(fname='$FILE_PATH', X=value, fmt='%.6e')
"""
else:
assert isinstance(dump, basestring), "dump must be None or a string"
assert '$FILE_PATH' in dump, "string dump code must include '$FILE_PATH'"
code = dump
# return
return code |
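# Usage sketch (not part of the original source): inspect the code strings generated
# for two dump specifications; both still contain the '$FILE_PATH' placeholder that
# the repository substitutes with the real file path at dump time.
print(get_dump_method('pickle2'))  # pickle-based dump code using protocol 2
print(get_dump_method('numpy'))    # numpy.save-based dump code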
def get_pull_method(pull):
"""Get pull function code string"""
if pull is None or pull.startswith('pickle'):
code = """
import os
try:
import cPickle as pickle
except:
import pickle
with open('$FILE_PATH', 'rb') as fd:
PULLED_DATA = pickle.load( fd )
"""
elif pull.startswith('dill'):
code = """
import dill
with open('$FILE_PATH', 'rb') as fd:
PULLED_DATA = dill.load( fd )
"""
elif pull == 'json':
code = """
import json
with open('$FILE_PATH', 'rb') as fd:
PULLED_DATA = json.load(fd)
"""
elif pull == 'numpy':
code = """
import numpy
with open('$FILE_PATH', 'rb') as fd:
PULLED_DATA=numpy.load(file=fd)
"""
elif pull == 'numpy_text':
code = """
import numpy
with open('$FILE_PATH', 'rb') as fd:
PULLED_DATA=numpy.loadtxt(fname=fd)
"""
else:
assert isinstance(pull, basestring), "pull must be None or a string"
assert 'PULLED_DATA' in pull, "string pull code must include 'PULLED_DATA'"
assert '$FILE_PATH' in pull, "string pull code must include '$FILE_PATH'"
code = pull
# return
return code |
def path_required(func):
"""Decorate methods when repository path is required."""
@wraps(func)
def wrapper(self, *args, **kwargs):
if self.path is None:
warnings.warn('Must load (Repository.load_repository) or initialize (Repository.create_repository) the repository first !')
return
return func(self, *args, **kwargs)
return wrapper |
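# Usage sketch (not part of the original source): guard a method so it only warns and
# returns None when the owning object's `path` is still None (nothing loaded/created).
class _Example(object):
    def __init__(self):
        self.path = None  # no repository loaded or created yet

    @path_required
    def describe(self):
        print("repository at %s" % self.path)

_Example().describe()  # emits the warning; the decorated body never runs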
def __clean_before_after(self, stateBefore, stateAfter, keepNoneEmptyDirectory=True):
"""clean repository given before and after states"""
# prepare after for faster search
errors = []
afterDict = {}
[afterDict.setdefault(list(aitem)[0],[]).append(aitem) for aitem in stateAfter]
# loop before
for bitem in reversed(stateBefore):
relaPath = list(bitem)[0]
basename = os.path.basename(relaPath)
btype = bitem[relaPath]['type']
alist = afterDict.get(relaPath, [])
aitem = [a for a in alist if a[relaPath]['type']==btype]
if len(aitem)>1:
errors.append("Multiple '%s' of type '%s' were found in '%s', this should never have happened. Please report issue"%(basename,btype,relaPath))
continue
if not len(aitem):
removeDirs = []
removeFiles = []
if btype == 'dir':
if not len(relaPath):
errors.append("Removing main repository directory is not allowed")
continue
removeDirs.append(os.path.join(self.__path,relaPath))
removeFiles.append(os.path.join(self.__path,relaPath,self.__dirInfo))
removeFiles.append(os.path.join(self.__path,relaPath,self.__dirLock))
elif btype == 'file':
removeFiles.append(os.path.join(self.__path,relaPath))
removeFiles.append(os.path.join(self.__path,relaPath,self.__fileInfo%basename))
removeFiles.append(os.path.join(self.__path,relaPath,self.__fileLock%basename))
else:
### MUST VERIFY THAT ONCE pyrepobjectdir IS IMPLEMENTED
removeDirs.append(os.path.join(self.__path,relaPath))
removeFiles.append(os.path.join(self.__path,relaPath,self.__fileInfo%basename))
# remove files
for fpath in removeFiles:
if os.path.isfile(fpath):
try:
os.remove(fpath)
except Exception as err:
errors.append("Unable to clean file '%s' (%s)"%(fpath, str(err)))
# remove directories
for dpath in removeDirs:
if os.path.isdir(dpath):
if not keepNoneEmptyDirectory or not len(os.listdir(dpath)):
try:
shutil.rmtree(dpath)
except Exception as err:
errors.append("Unable to clean directory '%s' (%s)"%(dpath, str(err)))
# return result and errors list
return len(errors)==0, errors |
def get_stats(self):
"""
Get repository descriptive stats
:Returns:
#. numberOfDirectories (integer): Number of directories in repository
#. numberOfFiles (integer): Number of files in repository
"""
if self.__path is None:
return 0,0
nfiles = 0
ndirs = 0
for fdict in self.get_repository_state():
fdname = list(fdict)[0]
if fdname == '':
continue
if fdict[fdname].get('pyrepfileinfo', False):
nfiles += 1
elif fdict[fdname].get('pyrepdirinfo', False):
ndirs += 1
else:
raise Exception('Not sure what to do next. Please report issue')
return ndirs,nfiles |
def reset(self):
"""Reset repository instance.
"""
self.__path = None
self.__repo = {'repository_unique_name': str(uuid.uuid1()),
'create_utctime': time.time(),
'last_update_utctime': None,
'pyrep_version': str(__version__),
'repository_information': '',
'walk_repo': []} |
def is_repository(self, path):
"""
Check if there is a Repository in path.
:Parameters:
#. path (string): The real path of the directory where to check if
there is a repository.
:Returns:
#. result (boolean): Whether it's a repository or not.
"""
if path.strip() in ('','.'):
path = os.getcwd()
repoPath = os.path.realpath( os.path.expanduser(path) )
if os.path.isfile( os.path.join(repoPath,self.__repoFile) ):
return True
else:
try:
from .OldRepository import Repository
REP = Repository()
result = REP.is_repository(repoPath)
except:
return False
else:
if result:
warnings.warn("This is an old repository version 2.x.y! Make sure to start using repositories 3.x.y ")
return result |
def load_repository(self, path, verbose=True, ntrials=3):
"""
Load repository from a directory path and update the current instance.
First, a new style repository load will be attempted. If that fails, an old
style repository load will be tried.
:Parameters:
#. path (string): The path of the directory from where to load
the repository from. If '.' or an empty string is passed,
the current working directory will be used.
#. verbose (boolean): Whether to be verbose about abnormalities
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes
are accessing the same repository components, different processes
can alter repository components between successive lock releases
of some other process. A bigger number of trials lowers the
likelihood of failure due to simultaneous alteration by multiple
processes.
:Returns:
#. repository (pyrep.Repository): returns self repository with loaded data.
"""
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
repo = None
for _trial in range(ntrials):
try:
self.__load_repository(path=path, verbose=verbose)
except Exception as err1:
try:
from .OldRepository import Repository
REP = Repository(path)
except Exception as err2:
#traceback.print_exc()
error = "Unable to load repository using neither new style (%s) nor old style (%s)"%(err1, err2)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
error = None
repo = REP
break
else:
error = None
repo = self
break
# check and return
assert error is None, error
return repo |
def create_repository(self, path, info=None, description=None, replace=True, allowNoneEmpty=True, raiseError=True):
"""
create a repository in a directory. This method ensures the creation of
the directory in the system if it is missing.\n
**N.B. If replace is True and existing repository is found in path, create_repository erases all existing files and directories in path.**
:Parameters:
#. path (string): The real absolute path where to create the Repository.
If '.' or an empty string is passed, the current working directory will be used.
#. description (None, str): Repository main directory information.
#. info (None, object): Repository information. It can
be None or any pickle writable type of data.
#. replace (boolean): Whether to replace existing repository.
#. allowNoneEmpty (boolean): Allow creating repository in a non-empty
directory.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
:Returns:
#. success (boolean): Whether creating repository was successful
#. message (None, str): Any returned message.
"""
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert isinstance(allowNoneEmpty, bool), "allowNoneEmpty must be boolean"
assert isinstance(replace, bool), "replace must be boolean"
assert isinstance(path, basestring), "path must be string"
if info is None:
info = ''
try:
pickle.dumps(info)
except Exception as err:
raise Exception("info must be None or any pickle writable type of data (%s)"%str(err))
#assert isinstance(info, basestring), "info must be None or a string"
if description is None:
description = ''
assert isinstance(description, basestring), "description must be None or a string"
# get real path
if path.strip() in ('','.'):
path = os.getcwd()
realPath = os.path.realpath( os.path.expanduser(path) )
# reset if replace is set to True
message = []
if self.is_repository(realPath):
if not replace:
message.append("A pyrep Repository already exists in the given path '%s' set replace to True if you need to proceed."%path)
return False, message
else:
message.append("Existing pyrep repository in the given path '%s' has been replaced."%path)
try:
for _df in os.listdir(realPath):
_p = os.path.join(realPath, _df)
if os.path.isdir(_p):
shutil.rmtree( _p )
else:
os.remove(_p)
except Exception as err:
message.append("Unable to clean and remove existing repository before creating (%s)"%(str(err)))
return False, '\n'.join(message)
if not os.path.isdir(realPath):
os.makedirs(realPath)
elif len(os.listdir(realPath)) and not allowNoneEmpty:
return False, "Not allowed to create repository in a non empty directory"
# reset repository
oldRepo = self.__repo
self.reset()
self.__path = realPath.rstrip(os.sep)
self.__repo['repository_information'] = info
# save repository
saved = self.save(description=description)
if not saved:
self.__repo = oldRepo
message.append("Absolute path and directories might be created but no pyrep Repository is created.")
return False, '\n'.join(message)
# return
return True, '\n'.join(message) |
def remove_repository(self, path=None, removeEmptyDirs=True):
"""
Remove repository from path along with all repository tracked files.
:Parameters:
#. path (None, string): The path the repository to remove.
#. removeEmptyDirs (boolean): Whether to remove remaining empty
directories.
"""
assert isinstance(removeEmptyDirs, bool), "removeEmptyDirs must be boolean"
if path is not None:
if path != self.__path:
repo = Repository()
repo.load_repository(path)
else:
repo = self
else:
repo = self
assert repo.path is not None, "path is not given and repository is not initialized"
# remove repo files and directories
for fdict in reversed(repo.get_repository_state()):
relaPath = list(fdict)[0]
realPath = os.path.join(repo.path, relaPath)
path, name = os.path.split(realPath)
if fdict[relaPath]['type'] == 'file':
if os.path.isfile(realPath):
os.remove(realPath)
if os.path.isfile(os.path.join(repo.path,path,self.__fileInfo%name)):
os.remove(os.path.join(repo.path,path,self.__fileInfo%name))
if os.path.isfile(os.path.join(repo.path,path,self.__fileLock%name)):
os.remove(os.path.join(repo.path,path,self.__fileLock%name))
if os.path.isfile(os.path.join(repo.path,path,self.__fileClass%name)):
os.remove(os.path.join(repo.path,path,self.__fileClass%name))
elif fdict[relaPath]['type'] == 'dir':
if os.path.isfile(os.path.join(realPath,self.__dirInfo)):
os.remove(os.path.join(realPath,self.__dirInfo))
if os.path.isfile(os.path.join(realPath,self.__dirLock)):
os.remove(os.path.join(realPath,self.__dirLock))
if not len(os.listdir(realPath)) and removeEmptyDirs:
shutil.rmtree( realPath )
# remove repo information file
if os.path.isfile(os.path.join(repo.path,self.__repoFile)):
os.remove(os.path.join(repo.path,self.__repoFile))
if os.path.isfile(os.path.join(repo.path,self.__repoLock)):
os.remove(os.path.join(repo.path,self.__repoLock)) |
def save(self, description=None, raiseError=True, ntrials=3):
"""
Save repository '.pyreprepo' to disk and create (if missing) or
update (if description is not None) '.pyrepdirinfo'.
:Parameters:
#. description (None, str): Repository main directory information.
If given will be replaced.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes
are accessing the same repository components, different processes
can alter repository components between successive lock releases
of some other process. A bigger number of trials lowers the
likelihood of failure due to simultaneous alteration by multiple
processes.
:Returns:
#. success (bool): Whether saving was successful.
#. error (None, string): Fail to save repository message in case
saving is not successful. If success is True, error will be None.
"""
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
# get description
if description is not None:
assert isinstance(description, basestring), "description must be None or a string"
dirInfoPath = os.path.join(self.__path, self.__dirInfo)
if description is None and not os.path.isfile(dirInfoPath):
description = ''
# create and acquire lock
LR = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(self.__path, self.__repoLock))
acquired, code = LR.acquire_lock()
# check if acquired.
m = "code %s. Unable to aquire the lock when calling 'save'. You may try again!"%(code,)
if not acquired:
assert not raiseError, Exception(m)
return False, m
# save repository
for _trial in range(ntrials):
try:
# open file
repoInfoPath = os.path.join(self.__path, self.__repoFile)
error = None
self.__save_dirinfo(description=description, dirInfoPath=dirInfoPath)
# load and update repository info if existing
if os.path.isfile(repoInfoPath):
repo = self.__load_repository_pickle_file(repoInfoPath)
self.__repo['walk_repo'] = repo['walk_repo']
# create repository
with open(repoInfoPath, 'wb') as fd:
self.__repo["last_update_utctime"] = time.time()
pickle.dump( self.__repo,fd, protocol=self._DEFAULT_PICKLE_PROTOCOL )
fd.flush()
os.fsync(fd.fileno())
except Exception as err:
error = "Unable to save repository (%s)"%err
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
break
# release lock
LR.release_lock()
# return
assert error is None or not raiseError, error
return error is None, error |
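# Hedged sketch of calling save with a new main directory description. With
# raiseError=False the method returns a (success, error) tuple instead of raising.
# The import path and repository location are assumptions.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
success, error = repo.save(description='repository of run results', raiseError=False, ntrials=5)
if not success:
    print("saving repository failed: %s" % error)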
def is_name_allowed(self, path):
"""
Get whether creating a file or a directory from the basename of the given
path is allowed.
:Parameters:
#. path (str): The absolute or relative path or simply the file
or directory name.
:Returns:
#. allowed (bool): Whether name is allowed.
#. message (None, str): Reason for the name to be forbidden.
"""
assert isinstance(path, basestring), "given path must be a string"
name = os.path.basename(path)
if not len(name):
return False, "empty name is not allowed"
# exact match
for em in [self.__repoLock,self.__repoFile,self.__dirInfo,self.__dirLock]:
if name == em:
return False, "name '%s' is reserved for pyrep internal usage"%em
# pattern match
for pm in [self.__fileInfo,self.__fileLock]:#,self.__objectDir]:
if name == pm or (name.endswith(pm[3:]) and name.startswith('.')):
return False, "name pattern '%s' is not allowed as result may be reserved for pyrep internal usage"%pm
# name is ok
return True, None |
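# Minimal sketch of is_name_allowed. The import path and repository location are
# assumptions; reserved internal names such as '.pyrepdirinfo' are rejected.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
allowed, reason = repo.is_name_allowed('results.txt')    # a normal name -> allowed
reserved, why = repo.is_name_allowed('.pyrepdirinfo')     # reserved name -> forbidden
if not reserved:
    print(why)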
def to_repo_relative_path(self, path, split=False):
"""
Given a path, return its path relative to the repository directory.
:Parameters:
#. path (str): Path as a string
#. split (boolean): Whether to split path to its components
:Returns:
#. relativePath (str, list): Relative path as a string or as a list
of components if split is True
"""
path = os.path.normpath(path)
if path == '.':
path = ''
path = path.split(self.__path)[-1].strip(os.sep)
if split:
return path.split(os.sep)
else:
return path |
def get_repository_state(self, relaPath=None):
"""
Get a list representation of the repository state along with useful
information. The state list is ordered relative to directory level.
:Parameters:
#. relaPath (None, str): relative directory path from where to
start. If None all repository representation is returned.
:Returns:
#. state (list): List representation of the repository.
List items are all dictionaries. Every dictionary has a single
key which is the file or the directory name and the value is a
dictionary of information including:
* 'type': the type of the tracked item, whether it's a file, dir, or objectdir
* 'exists': whether file or directory actually exists on disk
* 'pyrepfileinfo': In case of a file or an objectdir whether .%s_pyrepfileinfo exists
* 'pyrepdirinfo': In case of a directory whether .pyrepdirinfo exists
"""
state = []
def _walk_dir(relaPath, dirList):
dirDict = {'type':'dir',
'exists':os.path.isdir(os.path.join(self.__path,relaPath)),
'pyrepdirinfo':os.path.isfile(os.path.join(self.__path,relaPath,self.__dirInfo)),
}
state.append({relaPath:dirDict})
# loop files and dirobjects
for fname in sorted([f for f in dirList if isinstance(f, basestring)]):
relaFilePath = os.path.join(relaPath,fname)
realFilePath = os.path.join(self.__path,relaFilePath)
#if os.path.isdir(realFilePath) and df.startswith('.') and df.endswith(self.__objectDir[3:]):
# fileDict = {'type':'objectdir',
# 'exists':True,
# 'pyrepfileinfo':os.path.isfile(os.path.join(self.__path,relaPath,self.__fileInfo%fname)),
# }
#else:
# fileDict = {'type':'file',
# 'exists':os.path.isfile(realFilePath),
# 'pyrepfileinfo':os.path.isfile(os.path.join(self.__path,relaPath,self.__fileInfo%fname)),
# }
fileDict = {'type':'file',
'exists':os.path.isfile(realFilePath),
'pyrepfileinfo':os.path.isfile(os.path.join(self.__path,relaPath,self.__fileInfo%fname)),
}
state.append({relaFilePath:fileDict})
# loop directories
#for ddict in sorted([d for d in dirList if isinstance(d, dict) and len(d)], key=lambda k: list(k)[0]):
for ddict in sorted([d for d in dirList if isinstance(d, dict)], key=lambda k: list(k)[0]):
dirname = list(ddict)[0]
_walk_dir(relaPath=os.path.join(relaPath,dirname), dirList=ddict[dirname])
# call recursive _walk_dir
if relaPath is None:
_walk_dir(relaPath='', dirList=self.__repo['walk_repo'])
else:
assert isinstance(relaPath, basestring), "relaPath must be None or a str"
relaPath = self.to_repo_relative_path(path=relaPath, split=False)
spath = relaPath.split(os.sep)
dirList = self.__repo['walk_repo']
while len(spath):
dirname = spath.pop(0)
dList = [d for d in dirList if isinstance(d, dict)]
if not len(dList):
dirList = None
break
cDict = [d for d in dList if dirname in d]
if not len(cDict):
dirList = None
break
dirList = cDict[0][dirname]
if dirList is not None:
_walk_dir(relaPath=relaPath, dirList=dirList)
# return state list
return state |
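# Hedged sketch of get_repository_state: report tracked items that are missing
# from disk. Each state entry is a single-key dict mapping the relative path to
# an info dict with 'type' and 'exists' flags. Import path and paths are assumptions.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
for entry in repo.get_repository_state():
    relaPath = list(entry)[0]
    info = entry[relaPath]
    if not info['exists']:
        print("tracked %s '%s' is missing on disk" % (info['type'], relaPath))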
def get_file_info(self, relativePath):
"""
Get file information dict from the repository given its relative path.
:Parameters:
#. relativePath (string): The relative to the repository path of
the file.
:Returns:
#. info (None, dictionary): The file information dictionary.
If None, it means an error has occurred.
#. errorMessage (string): The error message if any error occurred.
"""
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
fileName = os.path.basename(relativePath)
isRepoFile,fileOnDisk, infoOnDisk, classOnDisk = self.is_repository_file(relativePath)
if not isRepoFile:
return None, "file is not a registered repository file."
if not infoOnDisk:
return None, "file is a registered repository file but info file missing"
fileInfoPath = os.path.join(self.__path,os.path.dirname(relativePath),self.__fileInfo%fileName)
try:
with open(fileInfoPath, 'rb') as fd:
info = pickle.load(fd)
except Exception as err:
return None, "Unable to read file info from disk (%s)"%str(err)
return info, '' |
def is_repository_file(self, relativePath):
"""
Check whether a given relative path is a repository file path
:Parameters:
#. relativePath (string): File relative path
:Returns:
#. isRepoFile (boolean): Whether file is a repository file.
#. isFileOnDisk (boolean): Whether file is found on disk.
#. isFileInfoOnDisk (boolean): Whether file info is found on disk.
#. isFileClassOnDisk (boolean): Whether file class is found on disk.
"""
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
if relativePath == '':
return False, False, False, False
relaDir, name = os.path.split(relativePath)
fileOnDisk = os.path.isfile(os.path.join(self.__path, relativePath))
infoOnDisk = os.path.isfile(os.path.join(self.__path,os.path.dirname(relativePath),self.__fileInfo%name))
classOnDisk = os.path.isfile(os.path.join(self.__path,os.path.dirname(relativePath),self.__fileClass%name))
cDir = self.__repo['walk_repo']
if len(relaDir):
for dirname in relaDir.split(os.sep):
dList = [d for d in cDir if isinstance(d, dict)]
if not len(dList):
cDir = None
break
cDict = [d for d in dList if dirname in d]
if not len(cDict):
cDir = None
break
cDir = cDict[0][dirname]
if cDir is None:
return False, fileOnDisk, infoOnDisk, classOnDisk
#if name not in cDir:
if str(name) not in [str(i) for i in cDir]:
return False, fileOnDisk, infoOnDisk, classOnDisk
# this is a repository registered file. check whether all is on disk
return True, fileOnDisk, infoOnDisk, classOnDisk |
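# Hedged sketch of is_repository_file: the four returned booleans can be used to
# diagnose the tracking state of a file. Import path and relative path are assumptions.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
isRepoFile, onDisk, infoOnDisk, classOnDisk = repo.is_repository_file('data/results.txt')
if isRepoFile and not (onDisk and infoOnDisk and classOnDisk):
    print("file is tracked but some of its repository files are missing on disk")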
def walk_files_path(self, relativePath="", fullPath=False, recursive=False):
"""
Walk the repository relative path and yield file relative/full path.
:Parameters:
#. relativePath (string): The relative path from which to start the walk.
#. fullPath (boolean): Whether to return full or relative paths.
#. recursive (boolean): Whether to walk all directories' files recursively.
"""
assert isinstance(fullPath, bool), "fullPath must be boolean"
assert isinstance(recursive, bool), "recursive must be boolean"
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
dirList = self.__get_repository_directory(relativePath=relativePath)
assert dirList is not None, "given relative path '%s' is not a repository directory"%relativePath
# walk recursive function
def _walk(rpath, dlist,recursive):
# walk files
for fname in dlist:
if isinstance(fname, basestring):
if fullPath:
yield os.path.join(self.__path, rpath, fname)
else:
yield os.path.join(rpath, fname)
if recursive:
for ddict in dlist:
if isinstance(ddict, dict):
dname = list(ddict)[0]
for p in _walk(rpath=os.path.join(rpath,dname), dlist=ddict[dname],recursive=recursive):
yield p
# walk all files
return _walk(rpath=relativePath, dlist=dirList, recursive=recursive) |
def walk_files_info(self, relativePath="", fullPath=False, recursive=False):
"""
Walk the repository relative path and yield tuple of two items where
first item is file relative/full path and second item is file info.
If file info is not found on disk, second item will be None.
:Parameters:
#. relativePath (string): The relative path from which to start the walk.
#. fullPath (boolean): Whether to return full or relative paths.
#. recursive (boolean): Whether to walk all directories' files recursively.
"""
assert isinstance(fullPath, bool), "fullPath must be boolean"
assert isinstance(recursive, bool), "recursive must be boolean"
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
for relaPath in self.walk_files_path(relativePath=relativePath, fullPath=False, recursive=recursive):
fpath, fname = os.path.split(relaPath)
fileInfoPath = os.path.join(self.__path,fpath,self.__fileInfo%fname)
if os.path.isfile(fileInfoPath):
with open(fileInfoPath, 'rb') as fd:
info = pickle.load(fd)
else:
info = None
if fullPath:
yield (os.path.join(self.__path, relaPath), info)
else:
yield (relaPath, info) |
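# Hedged sketch of walk_files_info: list every tracked file with its stored
# description. The info item can be None when the file info is missing on disk.
# Import path and repository location are assumptions.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
for relaPath, info in repo.walk_files_info(relativePath="", recursive=True):
    description = '' if info is None else info.get('description', '')
    print("%s : %s" % (relaPath, description))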
def walk_directories_info(self, relativePath="", fullPath=False, recursive=False):
"""
Walk the repository relative path and yield tuple of two items where
first item is directory relative/full path and second item is directory
info. If directory file info is not found on disk, second item will be None.
:Parameters:
#. relativePath (string): The relative path from which to start the walk.
#. fullPath (boolean): Whether to return full or relative paths.
#. recursive (boolean): Whether to walk all directories' files recursively.
"""
assert isinstance(fullPath, bool), "fullPath must be boolean"
assert isinstance(recursive, bool), "recursive must be boolean"
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
# walk directories
for dpath in self.walk_directories_path(relativePath=relativePath, fullPath=False, recursive=recursive):
dirInfoPath = os.path.join(self.__path,dpath,self.__dirInfo)
if os.path.isfile(dirInfoPath):
with open(dirInfoPath, 'rb') as fd:
info = pickle.load(fd)
else:
info = None
if fullPath:
yield (os.path.join(self.__path, dpath), info)
else:
yield (dpath, info) |
def create_package(self, path=None, name=None, mode=None):
"""
Create a tar file package of all the repository files and directories.
Only files and directories that are tracked in the repository
are stored in the package tar file.
**N.B. On some systems packaging requires root permissions.**
:Parameters:
#. path (None, string): The real absolute path where to create the
package. If None, it will be created in the same directory as
the repository. If '.' or an empty string is passed, the current
working directory will be used.
#. name (None, string): The name to give to the package file
If None, the package directory name will be used with the
appropriate extension added.
#. mode (None, string): The writing mode of the tarfile.
If None, the best compression mode will be chosen automatically.
Available modes are ('w', 'w:', 'w:gz', 'w:bz2')
"""
# check mode
assert mode in (None, 'w', 'w:', 'w:gz', 'w:bz2'), 'unknown archive mode %s'%str(mode)
if mode is None:
#mode = 'w:bz2'
mode = 'w:'
# get root
if path is None:
root = os.path.split(self.__path)[0]
elif path.strip() in ('','.'):
root = os.getcwd()
else:
root = os.path.realpath( os.path.expanduser(path) )
assert os.path.isdir(root), 'absolute path %s is not a valid directory'%path
# get name
if name is None:
ext = mode.split(":")
if len(ext) == 2:
if len(ext[1]):
ext = "."+ext[1]
else:
ext = '.tar'
else:
ext = '.tar'
name = os.path.split(self.__path)[1]+ext
# create tar file
tarfilePath = os.path.join(root, name)
try:
tarHandler = tarfile.TarFile.open(tarfilePath, mode=mode)
except Exception as e:
raise Exception("Unable to create package (%s)"%e)
# walk directory and create empty directories
for dpath in sorted(list(self.walk_directories_path(recursive=True))):
t = tarfile.TarInfo( dpath )
t.type = tarfile.DIRTYPE
tarHandler.addfile(t)
tarHandler.add(os.path.join(self.__path,dpath,self.__dirInfo), arcname=os.path.join(dpath,self.__dirInfo))
# walk files and add to tar
for fpath in self.walk_files_path(recursive=True):
relaPath, fname = os.path.split(fpath)
# keep relative paths as archive names so extraction recreates the repository layout
tarHandler.add(os.path.join(self.__path,fpath), arcname=fpath)
tarHandler.add(os.path.join(self.__path,relaPath,self.__fileInfo%fname), arcname=os.path.join(relaPath,self.__fileInfo%fname))
tarHandler.add(os.path.join(self.__path,relaPath,self.__fileClass%fname), arcname=os.path.join(relaPath,self.__fileClass%fname))
# save repository information file
tarHandler.add(os.path.join(self.__path,self.__repoFile), arcname=self.__repoFile)
# close tar file
tarHandler.close() |
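# Hedged sketch of create_package: build a gzip compressed tar of the tracked
# content in the current working directory. Import path and repository location
# are assumptions; the package name defaults to the repository directory name
# with the mode extension appended.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
repo.create_package(path='.', mode='w:gz')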
def add_directory(self, relativePath, description=None, clean=False,
raiseError=True, ntrials=3):
"""
Add a directory to the repository and create its attribute in the
Repository with utc timestamp. It ensures adding all the missing
directories in the path.
:Parameters:
#. relativePath (string): The relative to the repository path to
where directory must be added.
#. description (None, string): Any random description about the
added directory.
#. clean (boolean): Whether to remove existing non repository
tracked files and folders in all created directory chain tree.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
:Returns:
#. success (boolean): Whether adding the directory was successful.
#. message (None, string): Reason why directory was not added or
random information.
"""
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert isinstance(relativePath, basestring), "relativePath must be a string"
if description is not None:
assert isinstance(description, basestring), "description must be None or a string"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
# normalise path
path = self.to_repo_relative_path(path=relativePath, split=False)
# whether to replace
if self.is_repository_directory(path):
return True, "Directory is already tracked in repository"
# check whether name is allowed
allowed, reason = self.is_name_allowed(path)
if not allowed:
if raiseError:
raise Exception(reason)
return False, reason
# lock repository and get __repo updated from disk
LR = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(self.__path, self.__repoLock))
acquired, code = LR.acquire_lock()
if not acquired:
m = "code %s. Unable to aquire the lock to add directory. You may try again!"%(code,)
if raiseError:
raise Exception(m)
return False,m
# load repository info
for _trial in range(ntrials):
try:
repo = self.__load_repository_pickle_file(os.path.join(self.__path, self.__repoFile))
self.__repo['walk_repo'] = repo['walk_repo']
except Exception as err:
error = str(err)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
error = None
break
if error is not None:
_ = LR.release_lock()
assert not raiseError, Exception(error)
return False, error
# create directories
error = None
posList = self.__repo['walk_repo']
dirPath = self.__path
spath = path.split(os.sep)
for idx, name in enumerate(spath):
# create and acquire lock.
LD = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(dirPath, self.__dirLock))
acquired, code = LD.acquire_lock()
if not acquired:
error = "Code %s. Unable to aquire the lock when adding '%s'. All prior directories were added. You may try again, to finish adding directory"%(code,dirPath)
break
# add to directory
for _trial in range(ntrials):
try:
dirPath = os.path.join(dirPath, name)
riPath = os.path.join(dirPath, self.__dirInfo)
dList = [d for d in posList if isinstance(d, dict)]
dList = [d for d in dList if name in d]
# clean directory
if not len(dList) and clean and os.path.exists(dirPath):
try:
shutil.rmtree( dirPath, ignore_errors=True )
except Exception as err:
error = "Unable to clean directory '%s' (%s)"%(dirPath, err)
break
# create directory
if not os.path.exists(dirPath):
try:
os.mkdir(dirPath)
except Exception as err:
error = "Unable to create directory '%s' (%s)"%(dirPath, err)
break
# create and dump dirinfo
self.__save_dirinfo(description=[None, description][idx==len(spath)-1],
dirInfoPath=riPath, create=True)
# update directory list
if not len(dList):
rsd = {name:[]}
posList.append(rsd)
posList = rsd[name]
else:
assert len(dList) == 1, "Same directory name dict is found twice. This shouldn't have happened. Report issue"
posList = dList[0][name]
except Exception as err:
LD.release_lock()
error = "Unable to create directory '%s' info file (%s)"%(dirPath, str(err))
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
LD.release_lock()
break
if error is not None:
break
# save __repo
if error is None:
try:
_, error = self.__save_repository_pickle_file(lockFirst=False, raiseError=False)
except Exception as err:
error = str(err)
pass
try:
LD.release_lock()
except:
pass
try:
LR.release_lock()
except:
pass
# check and return
assert error is None or not raiseError, error
return error is None, error |
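# Hedged sketch of add_directory: every missing intermediate directory in the
# chain is created and tracked. Import path, paths and description are assumptions.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
success, message = repo.add_directory('experiments/run_01/plots',
                                      description='plots of the first run',
                                      raiseError=False)
if not success:
    print("unable to add directory: %s" % message)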
def remove_directory(self, relativePath, clean=False, raiseError=True, ntrials=3):
"""
Remove directory from repository tracking.
:Parameters:
#. relativePath (string): The relative to the repository path of the
directory to remove from the repository.
#. clean (boolean): Whether to os remove directory. If False only
tracked files will be removed along with left empty directories.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
:Returns:
#. success (boolean): Whether removing the directory was successful.
#. reason (None, string): Reason why directory was not removed.
"""
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert isinstance(clean, bool), "clean must be boolean"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
# normalise path
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
parentPath, dirName = os.path.split(relativePath)
# check if this is main repository directory
if relativePath == '':
return False, "Removing main repository directory is not allowed"
# check if this is a repository directory
if not self.is_repository_directory(relativePath):
return False, "Given relative path '%s' is not a repository path"%relativePath
# check if directory actually exists on disk
realPath = os.path.join(self.__path,relativePath)
if not os.path.isdir(realPath):
error = "Repository relative directory '%s' seems to be missing. call maintain_repository to fix all issues"
assert not raiseError, error
return False, error
# get and acquire lock
LD = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(self.__path,parentPath,self.__dirLock))
acquired, code = LD.acquire_lock()
if not acquired:
error = "Code %s. Unable to aquire the lock when adding '%s'. All prior directories were added. You may try again, to finish adding directory"%(code,realPath)
assert not raiseError, error
return False, error
# lock repository and get __repo updated from disk
LR = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(self.__path, self.__repoLock))
acquired, code = LR.acquire_lock()
if not acquired:
LD.release_lock()
m = "code %s. Unable to aquire the repository lock. You may try again!"%(code,)
assert raiseError, Exception(m)
return False,m
# remove directory
for _trial in range(ntrials):
error = None
try:
dirList = self.__get_repository_parent_directory(relativePath=relativePath)
assert dirList is not None, "Given relative path '%s' is not a repository directory"%(relativePath,)
stateBefore = self.get_repository_state(relaPath=parentPath)
_files = [f for f in dirList if isinstance(f, basestring)]
_dirs = [d for d in dirList if isinstance(d, dict)]
# keep only directory dicts that are not the one being removed
_dirs = [d for d in _dirs if dirName not in d]
_ = [dirList.pop(0) for _ in range(len(dirList))]
dirList.extend(_files)
dirList.extend(_dirs)
if clean:
shutil.rmtree(realPath)
else:
stateAfter = self.get_repository_state(relaPath=parentPath)
success, errors = self.__clean_before_after(stateBefore=stateBefore, stateAfter=stateAfter, keepNoneEmptyDirectory=True)
assert success, "\n".join(errors)
except Exception as err:
error = str(err)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
break
# return
if error is None:
_, error = self.__save_repository_pickle_file(lockFirst=False, raiseError=False)
LD.release_lock()
LR.release_lock()
# check and return
assert error is None or not raiseError, "Unable to remove directory after %i trials '%s' (%s)"%(relativePath, ntrials, error,)
return error is None, error |
def rename_directory(self, relativePath, newName, raiseError=True, ntrials=3):
"""
Rename a directory in the repository. It ensures renaming the directory on the file system as well.
:Parameters:
#. relativePath (string): The relative to the repository path of
the directory to be renamed.
#. newName (string): The new directory name.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
:Returns:
#. success (boolean): Whether renaming the directory was successful.
#. message (None, string): Some explanatory message or error reason
why directory was not renamed.
"""
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
parentPath, dirName = os.path.split(relativePath)
if relativePath == '':
error = "Renaming main repository directory is not allowed"
assert not raiseError, error
return False, error
realPath = os.path.join(self.__path,relativePath)
newRealPath = os.path.join(os.path.dirname(realPath), newName)
if os.path.isdir(newRealPath):
error = "New directory path '%s' already exist"%(newRealPath,)
assert not raiseError, error
return False, error
# get directory parent list
LD = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(self.__path,parentPath, self.__dirLock))
acquired, code = LD.acquire_lock()
if not acquired:
error = "Code %s. Unable to aquire repository lock when renaming '%s'. All prior directories were added. You may try again, to finish adding the directory"%(code,dirPath)
assert not raiseError, error
return False, error
error = None
# lock repository and get __repo updated from disk
LR = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(self.__path, self.__repoLock))
acquired, code = LR.acquire_lock()
if not acquired:
LD.release_lock()
m = "Code %s. Unable to aquire directory lock when renaming '%s'. All prior directories were added. You may try again, to finish adding the directory"%(code,dirPath)
assert raiseError, Exception(m)
return False,m
# load repository info
for _trial in range(ntrials):
try:
repo = self.__load_repository_pickle_file(os.path.join(self.__path, self.__repoFile))
self.__repo['walk_repo'] = repo['walk_repo']
except Exception as err:
error = str(err)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
error = None
break
if error is not None:
LD.release_lock()
LR.release_lock()
assert not raiseError, Exception(error)
return False, error
# rename directory
for _trial in range(ntrials):
error = None
try:
dirList = self.__get_repository_parent_directory(relativePath=relativePath)
assert dirList is not None, "Given relative path '%s' is not a repository directory"%(relativePath,)
# change dirName in dirList
_dirDict = [nd for nd in dirList if isinstance(nd,dict)]
_dirDict = [nd for nd in _dirDict if dirName in nd]
assert len(_dirDict) == 1, "This should not have happened. Directory not found in repository. Please report issue"
# rename directory
os.rename(realPath, newRealPath)
# update dirList
_dirDict[0][newName] = _dirDict[0][dirName]
_dirDict[0].pop(dirName)
# update and dump dirinfo
self.__save_dirinfo(description=None, dirInfoPath=parentPath, create=False)
except Exception as err:
error = str(err)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
error = None
break
if error is None:
_, error = self.__save_repository_pickle_file(lockFirst=False, raiseError=False)
LR.release_lock()
LD.release_lock()
# check and return
assert error is None or not raiseError, "Unable to rename directory '%s' to '%s' after %i trials (%s)"%(relativePath, newName, ntrials, error,)
return error is None, error |
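# Hedged sketch of rename_directory, assuming 'experiments/run_01' is already a
# tracked repository directory. Import path and repository location are assumptions.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
success, message = repo.rename_directory('experiments/run_01', 'run_01_archived',
                                          raiseError=False)
if not success:
    print("unable to rename directory: %s" % message)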
def copy_directory(self, relativePath, newRelativePath,
overwrite=False, raiseError=True, ntrials=3):
"""
Copy a directory in the repository. New directory must not exist.
:Parameters:
#. relativePath (string): The relative to the repository path of
the directory to be copied.
#. newRelativePath (string): The new directory relative path.
#. overwrite (boolean): Whether to overwrite existing but not tracked
directory in repository.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
:Returns:
#. success (boolean): Whether copying the directory was successful.
#. message (None, string): Some explanatory message or error reason
why directory was not copied.
"""
#from distutils.dir_util import copy_tree
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert isinstance(overwrite, bool), "overwrite must be boolean"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
if relativePath == '':
m = "Copying to repository main directory is not possible"
assert not raiseError, m
return False, m
realPath = os.path.join(self.__path,relativePath)
parentRealPath, dirName = os.path.split(realPath)
parentRelativePath = os.path.dirname(relativePath)
if not self.is_repository_directory(relativePath):
m = "Directory '%s' is not a tracked repository directory"%(relativePath)
assert not raiseError, m
return False, m
newRelativePath = self.to_repo_relative_path(path=newRelativePath, split=False)
newRealPath = os.path.join(self.__path,newRelativePath)
newParentRealPath, newDirName = os.path.split(newRealPath)
newParentRelativePath = os.path.dirname(newRelativePath)
if realPath == newRealPath:
m = "Copying to the same directory is not possible"
assert not raiseError, m
return False, m
if self.is_repository_directory(newRelativePath):
m = "Directory '%s' is a tracked repository directory"%(newRelativePath)
assert not raiseError, m
return False, m
if os.path.isdir(newRealPath):
if overwrite:
try:
shutil.rmtree(newRealPath)
except Exception as err:
assert not raiseError, str(err)
return False, str(err)
else:
error = "New directory path '%s' already exist on disk. Set overwrite to True"%(newRealPath,)
assert not raiseError, error
return False, error
# add directory
try:
success, reason = self.add_directory(newParentRelativePath, raiseError=False, ntrials=ntrials)
except Exception as err:
reason = "Unable to add directory (%s)"%(str(err))
success = False
if not success:
assert not raiseError, reason
return False, reason
# lock repository and get __repo updated from disk
LR = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(self.__path, self.__repoLock))
acquired, code = LR.acquire_lock()
if not acquired:
m = "code %s. Unable to aquire the repository lock. You may try again!"%(code,)
assert raiseError, Exception(m)
return False,m
try:
repo = self.__load_repository_pickle_file(os.path.join(self.__path, self.__repoFile))
self.__repo['walk_repo'] = repo['walk_repo']
except Exception as err:
LR.release_lock()
assert not raiseError, Exception(str(err))
return False, str(err)
# create locks
L0 = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(parentRealPath, self.__dirLock))
acquired, code = L0.acquire_lock()
if not acquired:
LR.release_lock()
error = "Code %s. Unable to aquire the lock when adding '%s'. All prior directories were added. You may try again, to finish adding directory"%(code,dirPath)
assert not raiseError, error
return False, error
L1 = None
if parentRealPath != newParentRealPath:
L1 = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(newParentRealPath, self.__dirLock))
acquired, code = L1.acquire_lock()
if not acquired:
L0.release_lock()
LR.release_lock()
error = "Code %s. Unable to aquire the lock when adding '%s'. All prior directories were added. You may try again, to finish adding directory"%(code,dirPath)
assert not raiseError, error
return False, error
# get directory parent list
error = None
for _trial in range(ntrials):
try:
# make sure again because sometimes, when multiple processes are working on the same repo things can happen in between
assert self.is_repository_directory(relativePath), "Directory '%s' is not anymore a tracked repository directory"%(relativePath)
assert not self.is_repository_directory(newRelativePath), "Directory '%s' has become a tracked repository directory"%(newRelativePath)
dirList = self.__get_repository_parent_directory(relativePath=relativePath)
assert dirList is not None, "Given relative path '%s' is not a repository directory"%(relativePath,)
newDirList = self.__get_repository_parent_directory(relativePath=newRelativePath)
assert newDirList is not None, "Given new relative path '%s' parent directory is not a repository directory"%(newRelativePath,)
# change dirName in dirList
_dirDict = [nd for nd in dirList if isinstance(nd,dict)]
_dirDict = [nd for nd in _dirDict if dirName in nd]
assert len(_dirDict) == 1, "This should not have happened. Directory not found in repository. Please report issue"
_newDirDict = [nd for nd in newDirList if isinstance(nd,dict)]
_newDirDict = [nd for nd in _newDirDict if newDirName in nd]
assert len(_newDirDict) == 0, "This should not have happened. New directory is found in repository. Please report issue"
# try to copy directory
_newDirDict = copy.deepcopy(_dirDict[0])
if dirName != newDirName:
_newDirDict[newDirName] = _newDirDict.pop(dirName)
copy_tree(realPath, newRealPath)
# update newDirList
newDirList.append(_newDirDict)
# update and dump dirinfo
self.__save_dirinfo(description=None, dirInfoPath=newParentRelativePath, create=False)
except Exception as err:
error = str(err)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
error = None
break
if error is None:
_, error = self.__save_repository_pickle_file(lockFirst=False, raiseError=False)
LR.release_lock()
L0.release_lock()
if L1 is not None:
L1.release_lock()
# check and return
assert error is None or not raiseError, "Unable to copy directory '%s' to '%s' after %i trials (%s)"%(relativePath, newRelativePath, ntrials, error,)
return error is None, error |
def dump_file(self, value, relativePath,
description=None,
dump=None, pull=None,
replace=False, raiseError=True, ntrials=3):
"""
Dump a file to the system using its value and create its
attribute in the Repository with utc timestamp.
:Parameters:
#. value (object): The value of a file to dump and add to the
repository. It is any python object or file.
#. relativePath (str): The relative to the repository path to where
to dump the file.
#. description (None, string): Any description about the file.
#. dump (None, string): The dumping method.
If None it will be set automatically to pickle and therefore the
object must be pickleable. If a string is given, it can be a
keyword ('json','pickle','dill') or a compilable string of code to
dump the data. The string code must include all the necessary
imports and a '$FILE_PATH' that replaces the absolute file path
when the dumping will be performed.\n
e.g. "import numpy as np; np.savetxt(fname='$FILE_PATH', X=value, fmt='%.6e')"
#. pull (None, string): The pulling method. If None it will be set
automatically to pickle and therefore the object must be
pickleable. If a string is given, it can be a keyword
('json','pickle','dill') or a compilable string of code to pull
the data. The string code must include all the necessary imports,
a '$FILE_PATH' that replaces the absolute file path when the
dumping will be performed and finally a PULLED_DATA variable.\n
e.g "import numpy as np; PULLED_DATA=np.loadtxt(fname='$FILE_PATH')"
#. replace (boolean): Whether to replace any existing file.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
:Returns:
#. success (boolean): Whether dumping the file was successful.
#. message (None, string): Some explanatory message or error reason
why the file was not dumped.
"""
# check arguments
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert isinstance(replace, bool), "replace must be boolean"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
if description is None:
description = ''
assert isinstance(description, basestring), "description must be None or a string"
# convert dump and pull methods to strings
if pull is None and dump is not None:
if dump.startswith('pickle') or dump.startswith('dill') or dump.startswith('numpy') or dump =='json':
pull = dump
dump = get_dump_method(dump, protocol=self._DEFAULT_PICKLE_PROTOCOL)
pull = get_pull_method(pull)
# check name and path
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
savePath = os.path.join(self.__path,relativePath)
fPath, fName = os.path.split(savePath)
# check if name is allowed
success, reason = self.is_name_allowed(savePath)
if not success:
assert not raiseError, reason
return False, reason
# ensure directory added
try:
success, reason = self.add_directory(fPath, raiseError=False, ntrials=ntrials)
except Exception as err:
reason = "Unable to add directory (%s)"%(str(err))
success = False
if not success:
assert not raiseError, reason
return False, reason
# lock repository
LR = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(self.__path, self.__repoLock))
acquired, code = LR.acquire_lock()
if not acquired:
m = "code %s. Unable to aquire the repository lock. You may try again!"%(code,)
assert raiseError, Exception(m)
return False,m
# lock file
LF = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(fPath,self.__fileLock%fName))
acquired, code = LF.acquire_lock()
if not acquired:
LR.release_lock()
error = "Code %s. Unable to aquire the lock when adding '%s'"%(code,relativePath)
assert not raiseError, error
return False, error
# load repository info
for _trial in range(ntrials):
try:
repo = self.__load_repository_pickle_file(os.path.join(self.__path, self.__repoFile))
self.__repo['walk_repo'] = repo['walk_repo']
except Exception as err:
error = str(err)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
error = None
break
if error is not None:
LR.release_lock()
LF.release_lock()
assert not raiseError, Exception(error)
return False, error
# dump file
for _trial in range(ntrials):
error = None
try:
isRepoFile, fileOnDisk, infoOnDisk, classOnDisk = self.is_repository_file(relativePath)
if isRepoFile:
assert replace, "file is a registered repository file. set replace to True to replace"
fileInfoPath = os.path.join(self.__path,os.path.dirname(relativePath),self.__fileInfo%fName)
if isRepoFile and fileOnDisk:
with open(fileInfoPath, 'rb') as fd:
info = pickle.load(fd)
assert info['repository_unique_name'] == self.__repo['repository_unique_name'], "it seems that file was created by another repository"
info['last_update_utctime'] = time.time()
else:
info = {'repository_unique_name':self.__repo['repository_unique_name']}
info['create_utctime'] = info['last_update_utctime'] = time.time()
info['dump'] = dump
info['pull'] = pull
info['description'] = description
# get parent directory list if file is new and not being replaced
if not isRepoFile:
dirList = self.__get_repository_directory(fPath)
# dump file
#exec( dump.replace("$FILE_PATH", str(savePath)) )
my_exec( dump.replace("$FILE_PATH", str(savePath)), locals=locals(), globals=globals(), description='dump' )
# update info
with open(fileInfoPath, 'wb') as fd:
pickle.dump( info,fd, protocol=self._DEFAULT_PICKLE_PROTOCOL)
fd.flush()
os.fsync(fd.fileno())
# update class file
fileClassPath = os.path.join(self.__path,os.path.dirname(relativePath),self.__fileClass%fName)
with open(fileClassPath, 'wb') as fd:
if value is None:
klass = None
else:
klass = value.__class__
pickle.dump(klass , fd, protocol=self._DEFAULT_PICKLE_PROTOCOL )
fd.flush()
os.fsync(fd.fileno())
# add to repo if file is new and not being replaced
if not isRepoFile:
dirList.append(fName)
except Exception as err:
error = "unable to dump the file (%s)"%(str(err),)
try:
if 'pickle.dump(' in dump:
mi = get_pickling_errors(value)
if mi is not None:
error += '\nmore info: %s'%str(mi)
except:
pass
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
error = None
break
# save repository
if error is None:
_, error = self.__save_repository_pickle_file(lockFirst=False, raiseError=False)
# release locks
LR.release_lock()
LF.release_lock()
assert not raiseError or error is None, "unable to dump file '%s' after %i trials (%s)"%(relativePath, ntrials, error,)
return error is None, error |
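# Hedged sketch of dump_file using the custom dump/pull code strings shown in the
# docstring above. The numpy array, import path, paths and description are assumptions.
import numpy as np
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
data = np.arange(10, dtype=float)
success, message = repo.dump_file(
    value=data,
    relativePath='experiments/run_01/data.txt',
    description='ten float values',
    dump="import numpy as np; np.savetxt(fname='$FILE_PATH', X=value, fmt='%.6e')",
    pull="import numpy as np; PULLED_DATA=np.loadtxt(fname='$FILE_PATH')",
    replace=True, raiseError=False)
if not success:
    print("unable to dump file: %s" % message)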
def update_file(self, value, relativePath, description=False,
dump=False, pull=False, raiseError=True, ntrials=3):
"""
Update the value of a file that is already in the Repository.\n
If the file is not registered in the repository, an error will be thrown.\n
If the file is missing on the system, it will be regenerated when the dump
method is called.
Unlike dump_file, update_file won't block the whole repository but only
the file being updated.
:Parameters:
#. value (object): The value of a file to update.
#. relativePath (str): The relative to the repository path of the
file to be updated.
#. description (False, string): Any random description about the file.
If False is given, the description info won't be updated,
otherwise it will be update to what description argument value is.
#. dump (False, string): The new dump method. If False is given,
the old one will be used.
#. pull (False, string): The new pull method. If False is given,
the old one will be used.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
:Returns:
#. success (boolean): Whether updating the file was successful.
#. message (None, string): Some explanatory message or error reason
why the file was not updated.
"""
# check arguments
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert description is False or description is None or isinstance(description, basestring), "description must be False, None or a string"
assert dump is False or dump is None or isinstance(dump, basestring), "dump must be False, None or a string"
assert pull is False or pull is None or isinstance(pull, basestring), "pull must be False, None or a string"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
# get name and path
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
savePath = os.path.join(self.__path,relativePath)
fPath, fName = os.path.split(savePath)
# get locker
LF = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(fPath,self.__fileLock%fName))
acquired, code = LF.acquire_lock()
if not acquired:
error = "Code %s. Unable to aquire the lock to update '%s'"%(code,relativePath)
assert not raiseError, error
return False, error
# update file
for _trial in range(ntrials):
message = []
updated = False
try:
# check file in repository
isRepoFile, fileOnDisk, infoOnDisk, classOnDisk = self.is_repository_file(relativePath)
assert isRepoFile, "file '%s' is not registered in repository, no update can be performed."%(relativePath,)
# get file info
if not fileOnDisk:
assert description is not False, "file '%s' is not found on disk, description must be provided"%(relativePath,)
assert dump is not False, "file '%s' is not found on disk, dump must be provided"%(relativePath,)
assert pull is not False, "file '%s' is not found on disk, pull must be provided"%(relativePath,)
info = {}
info['repository_unique_name'] = self.__repo['repository_unique_name']
info['create_utctime'] = info['last_update_utctime'] = time.time()
else:
with open(os.path.join(fPath,self.__fileInfo%fName), 'rb') as fd:
info = pickle.load(fd)
info['last_update_utctime'] = time.time()
if not fileOnDisk:
message.append("file %s is registered in repository but it was found on disk prior to updating"%relativePath)
if not infoOnDisk:
message.append("%s is not found on disk prior to updating"%self.__fileInfo%fName)
if not classOnDisk:
message.append("%s is not found on disk prior to updating"%self.__fileClass%fName)
# get dump and pull
if (description is False) or (dump is False) or (pull is False):
if description is False:
description = info['description']
elif description is None:
description = ''
if dump is False:
dump = info['dump']
elif dump is None:
dump = get_dump_method(dump, protocol=self._DEFAULT_PICKLE_PROTOCOL)
if pull is False:
pull = info['pull']
elif pull is None:
pull = get_pull_method(pull)
# update dump, pull and description
info['dump'] = dump
info['pull'] = pull
info['description'] = description
# dump file
my_exec( dump.replace("$FILE_PATH", str(savePath)), locals=locals(), globals=globals(), description='update' )
#exec( dump.replace("$FILE_PATH", str(savePath)) )
# remove file if exists
_path = os.path.join(fPath,self.__fileInfo%fName)
# update info
with open(_path, 'wb') as fd:
pickle.dump( info,fd, protocol=self._DEFAULT_PICKLE_PROTOCOL )
fd.flush()
os.fsync(fd.fileno())
# update class file
fileClassPath = os.path.join(self.__path,os.path.dirname(relativePath),self.__fileClass%fName)
with open(fileClassPath, 'wb') as fd:
if value is None:
klass = None
else:
klass = value.__class__
pickle.dump(klass , fd, protocol=self._DEFAULT_PICKLE_PROTOCOL )
fd.flush()
os.fsync(fd.fileno())
except Exception as err:
message.append(str(err))
updated = False
try:
if 'pickle.dump(' in dump:
mi = get_pickling_errors(value)
if mi is not None:
message.append('more info: %s'%str(mi))
except:
pass
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], '\n'.join(message)))
else:
updated = True
break
# release lock
LF.release_lock()
# check and return
assert updated or not raiseError, "Unable to update file '%s' (%s)"%(relativePath, '\n'.join(message),)
return updated, '\n'.join(message) |
def pull_file(self, relativePath, pull=None, update=True, ntrials=3):
"""
Pull a file's data from the Repository.
:Parameters:
#. relativePath (string): The relative to the repository path from
where to pull the file.
#. pull (None, string): The pulling method.
If None, the pull method saved in the file info will be used.
If a string is given, the string should include all the necessary
imports, a '$FILE_PATH' that replaces the absolute file path when
the dumping will be performed and finally a PULLED_DATA variable.
e.g "import numpy as np; PULLED_DATA=np.loadtxt(fname='$FILE_PATH')"
#. update (boolean): If pull is not None, whether to update the pull
method stored in the file info with the given pull method.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
:Returns:
#. data (object): The pulled data from the file.
"""
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
# check name and path
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
realPath = os.path.join(self.__path,relativePath)
fPath, fName = os.path.split(realPath)
# check whether it's a repository file
isRepoFile,fileOnDisk, infoOnDisk, classOnDisk = self.is_repository_file(relativePath)
if not isRepoFile:
fileOnDisk = ["",". File itself is found on disk"][fileOnDisk]
infoOnDisk = ["",". %s is found on disk"%self.__fileInfo%fName][infoOnDisk]
classOnDisk = ["",". %s is found on disk"%self.__fileClass%fName][classOnDisk]
assert False, "File '%s' is not a repository file%s%s%s"%(relativePath,fileOnDisk,infoOnDisk,classOnDisk)
assert fileOnDisk, "File '%s' is registered in repository but the file itself was not found on disk"%(relativePath,)
if not infoOnDisk:
if pull is not None:
warnings.warn("'%s' was not found on disk but pull method is given"%(self.__fileInfo%fName))
else:
raise Exception("File '%s' is registered in repository but the '%s' was not found on disk and pull method is not specified"%(relativePath,(self.__fileInfo%fName)))
# lock repository
LF = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(fPath,self.__fileLock%fName))
acquired, code = LF.acquire_lock()
if not acquired:
error = "Code %s. Unable to aquire the lock when adding '%s'"%(code,relativePath)
return False, error
# pull file
for _trial in range(ntrials):
error = None
try:
# get pull method
if pull is not None:
pull = get_pull_method(pull)
else:
with open(os.path.join(fPath,self.__fileInfo%fName), 'rb') as fd:
info = pickle.load(fd)
pull = info['pull']
# try to pull file
#namespace = {}
#namespace.update( globals() )
#exec( pull.replace("$FILE_PATH", str(realPath) ), namespace )
my_exec( pull.replace("$FILE_PATH", str(realPath) ), locals=locals(), globals=globals(), description='pull' )
except Exception as err:
LF.release_lock()
m = str(pull).replace("$FILE_PATH", str(realPath) )
error = "Unable to pull data using '%s' from file (%s)"%(m,err)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
break
LF.release_lock()
assert error is None, "After %i trials, %s"%(ntrials, error)
# return data
return locals()['PULLED_DATA'] |
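# Hedged sketch of pull_file, assuming a file was previously dumped at the given
# relative path (as in the dump_file sketch above). With pull=None the pull
# method stored in the file info is used. Import path and paths are assumptions.
from pyrep import Repository

repo = Repository()
repo.load_repository('/tmp/my_repo')
data = repo.pull_file('experiments/run_01/data.txt')
print(data)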
def rename_file(self, relativePath, newRelativePath,
force=False, raiseError=True, ntrials=3):
"""
Rename a file in the repository. It ensures renaming the file on the file system as well.
:Parameters:
#. relativePath (string): The relative to the repository path of
the file that needs to be renamed.
#. newRelativePath (string): The new relative to the repository path
of where to move and rename the file.
#. force (boolean): Whether to force renaming even when another
repository file exists. In this case old repository file
will be removed from the repository and the system as well.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
:Returns:
#. success (boolean): Whether renaming the file was successful.
#. message (None, string): Some explanatory message or error reason
why the file was not renamed.
"""
assert isinstance(raiseError, bool), "raiseError must be boolean"
assert isinstance(force, bool), "force must be boolean"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
# check old name and path
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
realPath = os.path.join(self.__path,relativePath)
fPath, fName = os.path.split(realPath)
# check new name and path
newRelativePath = self.to_repo_relative_path(path=newRelativePath, split=False)
newRealPath = os.path.join(self.__path,newRelativePath)
nfPath, nfName = os.path.split(newRealPath)
# lock old file
LO = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(fPath,self.__fileLock%fName))
acquired, code = LO.acquire_lock()
if not acquired:
error = "Code %s. Unable to aquire the lock for old file '%s'"%(code,relativePath)
assert not raiseError, error
return False, error
# add directory
try:
success, reason = self.add_directory(nfPath, raiseError=False, ntrials=ntrials)
except Exception as err:
reason = "Unable to add directory (%s)"%(str(err))
success = False
if not success:
LO.release_lock()
assert not raiseError, reason
return False, reason
# create new file lock
LN = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(nfPath,self.__fileLock%nfName))
acquired, code = LN.acquire_lock()
if not acquired:
LO.release_lock()
error = "Code %s. Unable to aquire the lock for new file path '%s'"%(code,newRelativePath)
assert not raiseError, error
return False, error
# rename file
for _trial in range(ntrials):
renamed = False
error = None
try:
# check whether it's a repository file
isRepoFile,fileOnDisk, infoOnDisk, classOnDisk = self.is_repository_file(relativePath)
assert isRepoFile, "file '%s' is not a repository file"%(relativePath,)
assert fileOnDisk, "file '%s' is found on disk"%(relativePath,)
assert infoOnDisk, "%s is found on disk"%self.__fileInfo%fName
assert classOnDisk, "%s is found on disk"%self.__fileClass%fName
# get new file path
nisRepoFile,nfileOnDisk,ninfoOnDisk,nclassOnDisk = self.is_repository_file(newRelativePath)
assert not nisRepoFile or force, "New file path is a registered repository file, set force to True to proceed regardless"
# get parent directories list
oDirList = self.__get_repository_directory(fPath)
nDirList = self.__get_repository_directory(nfPath)
# remove new file and all repository files from disk
if os.path.isfile(newRealPath):
os.remove(newRealPath)
if os.path.isfile(os.path.join(nfPath,self.__fileInfo%nfName)):
os.remove(os.path.join(nfPath,self.__fileInfo%nfName))
if os.path.isfile(os.path.join(nfPath,self.__fileClass%nfName)):
os.remove(os.path.join(nfPath,self.__fileClass%nfName))
# move old file to new path
os.rename(realPath, newRealPath)
os.rename(os.path.join(fPath,self.__fileInfo%fName), os.path.join(nfPath,self.__fileInfo%nfName))
os.rename(os.path.join(fPath,self.__fileClass%fName), os.path.join(nfPath,self.__fileClass%nfName))
# update list
findex = oDirList.index(fName)
oDirList.pop(findex)
# update new list
if nfName not in nDirList:
nDirList.append(nfName)
except Exception as err:
renamed = False
error = str(err)
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], str(error)))
else:
renamed = True
break
# release locks
LO.release_lock()
LN.release_lock()
# always clean old file lock
try:
if os.path.isfile(os.path.join(fPath,self.__fileLock%fName)):
os.remove(os.path.join(fPath,self.__fileLock%fName))
except:
pass
# return
assert renamed or not raiseError, "Unable to rename file '%s' to '%s' after %i trials (%s)"%(relativePath, newRelativePath, ntrials, error,)
#assert renamed or not raiseError, '\n'.join(message)
return renamed, error |
def remove_file(self, relativePath, removeFromSystem=False,
raiseError=True, ntrials=3):
"""
Remove file from repository.
:Parameters:
#. relativePath (string): The relative to the repository path of the
file to remove.
#. removeFromSystem (boolean): Whether to remove file from disk as
well.
#. raiseError (boolean): Whether to raise encountered error instead
of returning failure.
#. ntrials (int): After acquiring all locks, ntrials is the maximum
number of trials allowed before failing.
In rare cases, when multiple processes are accessing the same
repository components, different processes can alter repository
components between successive lock releases of some other process.
A bigger number of trials lowers the likelihood of failure due to
simultaneous alteration by multiple processes.
"""
assert isinstance(raiseError, bool), "removeFromSystem must be boolean"
assert isinstance(removeFromSystem, bool), "removeFromSystem must be boolean"
assert isinstance(ntrials, int), "ntrials must be integer"
assert ntrials>0, "ntrials must be >0"
# check name and path
relativePath = self.to_repo_relative_path(path=relativePath, split=False)
realPath = os.path.join(self.__path,relativePath)
fPath, fName = os.path.split(realPath)
# lock repository
LF = Locker(filePath=None, lockPass=str(uuid.uuid1()), lockPath=os.path.join(fPath,self.__fileLock%fName))
acquired, code = LF.acquire_lock()
if not acquired:
error = "Code %s. Unable to aquire the lock when adding '%s'"%(code,relativePath)
assert not raiseError, error
return False, error
# remove file
for _trial in range(ntrials):
removed = False
message = []
try:
# check whether it's a repository file
isRepoFile,fileOnDisk, infoOnDisk, classOnDisk = self.is_repository_file(relativePath)
if not isRepoFile:
message("File '%s' is not a repository file"%(relativePath,))
if fileOnDisk:
message.append("File itself is found on disk")
if infoOnDisk:
message.append("%s is found on disk"%self.__fileInfo%fName)
if classOnDisk:
message.append("%s is found on disk"%self.__fileClass%fName)
else:
dirList = self.__get_repository_directory(fPath)
findex = dirList.index(fName)
dirList.pop(findex)
                    if removeFromSystem and os.path.isfile(realPath):
                        os.remove(realPath)
if os.path.isfile(os.path.join(fPath,self.__fileInfo%fName)):
os.remove(os.path.join(fPath,self.__fileInfo%fName))
if os.path.isfile(os.path.join(fPath,self.__fileClass%fName)):
os.remove(os.path.join(fPath,self.__fileClass%fName))
except Exception as err:
removed = False
message.append(str(err))
if self.DEBUG_PRINT_FAILED_TRIALS: print("Trial %i failed in Repository.%s (%s). Set Repository.DEBUG_PRINT_FAILED_TRIALS to False to mute"%(_trial, inspect.stack()[1][3], '\n'.join(message)))
else:
removed = True
break
# release lock
LF.release_lock()
# always clean
try:
if os.path.isfile(os.path.join(fPath,self.__fileLock%fName)):
os.remove(os.path.join(fPath,self.__fileLock%fName))
except:
pass
# check and return
assert removed or not raiseError, "Unable to remove file '%s' after %i trials (%s)"%(relativePath, ntrials, '\n'.join(message),)
return removed, '\n'.join(message) |
def rename(self, key: Any, new_key: Any):
"""
Renames an item in this collection as a transaction.
        Will overwrite if the new key name already exists.
:param key: the current name of the item
:param new_key: the new name that the item should have
"""
if new_key == key:
return
required_locks = [self._key_locks[key], self._key_locks[new_key]]
ordered_required_locks = sorted(required_locks, key=lambda x: id(x))
for lock in ordered_required_locks:
lock.acquire()
try:
if key not in self._data:
raise KeyError("Attribute to rename \"%s\" does not exist" % key)
self._data[new_key] = self[key]
del self._data[key]
finally:
for lock in required_locks:
lock.release() |
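The sorted lock acquisition above is a standard deadlock-avoidance technique: every caller acquires the two key locks in the same global order (here, ordered by id()), so two concurrent renames touching the same pair of keys can never each hold one lock while waiting for the other. A minimal stand-alone sketch of the pattern (hypothetical names, not part of the collection above):

import threading

lock_a = threading.Lock()
lock_b = threading.Lock()

def swap(data, key_1, key_2):
    # Acquire both locks in a consistent global order (by id) so that two
    # concurrent swaps of the same pair of keys cannot deadlock.
    ordered = sorted([lock_a, lock_b], key=lambda x: id(x))
    for lock in ordered:
        lock.acquire()
    try:
        data[key_1], data[key_2] = data[key_2], data[key_1]
    finally:
        for lock in reversed(ordered):
            lock.release()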
def get_text_fingerprint(text, hash_meth, encoding="utf-8"): # pragma: no cover
"""
    Return the hash value of a piece of text using the given hash method.
    The default encoding is 'utf-8'.
"""
m = hash_meth()
m.update(text.encode(encoding))
return m.hexdigest() |
def md5file(abspath, nbytes=0, chunk_size=DEFAULT_CHUNK_SIZE):
"""
    Return md5 hash value of a piece of a file.
    :param abspath: the absolute path to the file
    :param nbytes: only hash the first N bytes of the file. if 0 or None,
        hash the whole file
    Estimated processing time (CPU = i7-4600U 2.10GHz - 2.70GHz,
    RAM = 8.00 GB), roughly 0.25GB of data per second:
    - 0.59G - 2.43 sec
    - 1.3G - 5.68 sec
    - 1.9G - 7.72 sec
    - 2.5G - 10.32 sec
    - 3.9G - 16.0 sec
"""
return get_file_fingerprint(abspath, hashlib.md5, nbytes=nbytes, chunk_size=chunk_size) |
def sha256file(abspath, nbytes=0, chunk_size=DEFAULT_CHUNK_SIZE):
"""
    Return sha256 hash value of a piece of a file.
    :param abspath: the absolute path to the file
    :param nbytes: only hash the first N bytes of the file. if 0 or None,
        hash the whole file
"""
return get_file_fingerprint(abspath, hashlib.sha256, nbytes=nbytes, chunk_size=chunk_size) |
def sha512file(abspath, nbytes=0, chunk_size=DEFAULT_CHUNK_SIZE):
"""
    Return sha512 hash value of a piece of a file.
    :param abspath: the absolute path to the file
    :param nbytes: only hash the first N bytes of the file. if 0 or None,
        hash the whole file
"""
return get_file_fingerprint(abspath, hashlib.sha512, nbytes=nbytes, chunk_size=chunk_size) |
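md5file, sha256file and sha512file above all delegate to a get_file_fingerprint helper that is not shown in this section. A minimal sketch of what such a chunked-read helper could look like, assuming the signature implied by the calls above and a module-level DEFAULT_CHUNK_SIZE constant:

import hashlib

DEFAULT_CHUNK_SIZE = 1 << 20  # assumption: 1 MiB read buffer

def get_file_fingerprint(abspath, hash_meth, nbytes=0, chunk_size=DEFAULT_CHUNK_SIZE):
    """Hash the first ``nbytes`` of a file, or the whole file if nbytes is 0/None."""
    m = hash_meth()
    remaining = nbytes if nbytes else None  # None means "hash everything"
    with open(abspath, "rb") as f:
        while True:
            size = chunk_size if remaining is None else min(chunk_size, remaining)
            if size <= 0:
                break
            chunk = f.read(size)
            if not chunk:
                break
            m.update(chunk)
            if remaining is not None:
                remaining -= len(chunk)
    return m.hexdigest()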
def register(registerable: Any):
"""
Registers an object, notifying any listeners that may be interested in it.
:param registerable: the object to register
"""
listenable = registration_event_listenable_map[type(registerable)]
event = RegistrationEvent(registerable, RegistrationEvent.Type.REGISTERED)
listenable.notify_listeners(event) |
def unregister(registerable: Any):
"""
Unregisters an object, notifying any listeners that may be interested in it.
:param registerable: the object to unregister
"""
listenable = registration_event_listenable_map[type(registerable)]
event = RegistrationEvent(registerable, RegistrationEvent.Type.UNREGISTERED)
listenable.notify_listeners(event) |
def _load_module(path: str):
"""
Dynamically loads the python module at the given path.
:param path: the path to load the module from
"""
spec = spec_from_file_location(os.path.basename(path), path)
module = module_from_spec(spec)
spec.loader.exec_module(module) |
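_load_module executes the file but discards the resulting module object. When callers need to use what the file defines, a variant that returns the module is more convenient; a small sketch (the plugin path in the usage comment is hypothetical):

import os
from importlib.util import module_from_spec, spec_from_file_location

def load_module(path: str):
    """Like _load_module above, but returns the loaded module object."""
    spec = spec_from_file_location(os.path.basename(path), path)
    module = module_from_spec(spec)
    spec.loader.exec_module(module)
    return module

# Hypothetical usage:
#   plugin = load_module("/path/to/plugin.py")
#   plugin.setup()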
def is_empty(self, strict=True):
"""
        - If it's a file, check if it is an empty file (0 bytes of content).
- If it's a directory, check if there's no file and dir in it.
But if ``strict = False``, then only check if there's no file in it.
:param strict: only useful when it is a directory. if True, only
return True if this dir has no dir and file. if False, return True
if it doesn't have any file.
"""
if self.exists():
if self.is_file():
return self.size == 0
elif self.is_dir():
if strict:
return len(list(self.select(recursive=True))) == 0
else: # pragma: no cover
return len(list(self.select_file(recursive=True))) == 0
else: # pragma: no cover
msg = "'%s' is not either file or directory! (maybe simlink)" % self
raise EnvironmentError(msg)
else:
raise EnvironmentError("'%s' not exists!" % self) |
def auto_complete_choices(self, case_sensitive=False):
"""
        Behave like command-line auto-completion: find all items that share
        the same prefix as this path.
:param case_sensitive: toggle if it is case sensitive.
:return: list of :class:`pathlib_mate.pathlib2.Path`.
"""
self_basename = self.basename
self_basename_lower = self.basename.lower()
if case_sensitive: # pragma: no cover
def match(basename):
return basename.startswith(self_basename)
else:
def match(basename):
return basename.lower().startswith(self_basename_lower)
choices = list()
if self.is_dir():
choices.append(self)
for p in self.sort_by_abspath(self.select(recursive=False)):
choices.append(p)
else:
p_parent = self.parent
if p_parent.is_dir():
for p in self.sort_by_abspath(p_parent.select(recursive=False)):
if match(p.basename):
choices.append(p)
else: # pragma: no cover
raise ValueError("'%s' directory does not exist!" % p_parent)
return choices |
def print_big_dir(self, top_n=5):
"""
        Print the ``top_n`` biggest sub-directories in this directory.
"""
self.assert_is_dir_and_exists()
size_table = sorted(
[(p, p.dirsize) for p in self.select_dir(recursive=False)],
key=lambda x: x[1],
reverse=True,
)
for p, size in size_table[:top_n]:
print("{:<9} {:<9}".format(repr_data_size(size), p.abspath)) |
def print_big_file(self, top_n=5):
"""
        Print the ``top_n`` biggest files in this directory.
"""
self.assert_is_dir_and_exists()
size_table = sorted(
[(p, p.size) for p in self.select_file(recursive=True)],
key=lambda x: x[1],
reverse=True,
)
for p, size in size_table[:top_n]:
print("{:<9} {:<9}".format(repr_data_size(size), p.abspath)) |
def print_big_dir_and_big_file(self, top_n=5):
"""Print ``top_n`` big dir and ``top_n`` big file in each dir.
"""
self.assert_is_dir_and_exists()
size_table1 = sorted(
[(p, p.dirsize) for p in self.select_dir(recursive=False)],
key=lambda x: x[1],
reverse=True,
)
for p1, size1 in size_table1[:top_n]:
print("{:<9} {:<9}".format(repr_data_size(size1), p1.abspath))
size_table2 = sorted(
[(p, p.size) for p in p1.select_file(recursive=True)],
key=lambda x: x[1],
reverse=True,
)
for p2, size2 in size_table2[:top_n]:
print(" {:<9} {:<9}".format(
repr_data_size(size2), p2.abspath)) |
def file_stat_for_all(self, filters=all_true): # pragma: no cover
"""
        Find out how many files and directories, and the total size (including
        files in sub-folders), each folder and sub-folder contains.
        :returns: stat, a dict like ``{"directory path": {
            "file": number of files, "dir": number of directories,
            "size": total size in bytes}}``
"""
self.assert_is_dir_and_exists()
from collections import OrderedDict
stat = OrderedDict()
stat[self.abspath] = {"file": 0, "dir": 0, "size": 0}
for p in self.select(filters=filters, recursive=True):
if p.is_file():
size = p.size
while 1:
parent = p.parent
stat[parent.abspath]["file"] += 1
stat[parent.abspath]["size"] += size
if parent.abspath == self.abspath:
break
p = parent
elif p.is_dir():
stat[p.abspath] = {"file": 0, "dir": 0, "size": 0}
while 1:
parent = p.parent
stat[parent.abspath]["dir"] += 1
if parent.abspath == self.abspath:
break
p = parent
return stat |
def file_stat(self, filters=all_true):
"""Find out how many files, directorys and total size (Include file in
it's sub-folder).
:returns: stat, a dict like ``{"file": number of files,
"dir": number of directorys, "size": total size in bytes}``
**中文文档**
返回一个目录中的文件, 文件夹, 大小的统计数据。
"""
self.assert_is_dir_and_exists()
stat = {"file": 0, "dir": 0, "size": 0}
for p in self.select(filters=filters, recursive=True):
if p.is_file():
stat["file"] += 1
stat["size"] += p.size
elif p.is_dir():
stat["dir"] += 1
return stat |
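To make the returned counters concrete, the same aggregate can be reproduced with a plain os.walk; a small self-contained demonstration in a throwaway directory (all paths here are temporary and hypothetical):

import os
import tempfile

def plain_file_stat(root):
    # Count files, directories and total bytes under root, recursively,
    # producing the same shape of dict as file_stat() above.
    stat = {"file": 0, "dir": 0, "size": 0}
    for folder, dirnames, filenames in os.walk(root):
        stat["dir"] += len(dirnames)
        stat["file"] += len(filenames)
        stat["size"] += sum(
            os.path.getsize(os.path.join(folder, name)) for name in filenames)
    return stat

tmp = tempfile.mkdtemp()
os.mkdir(os.path.join(tmp, "sub"))
with open(os.path.join(tmp, "sub", "a.txt"), "wb") as f:
    f.write(b"hello")
print(plain_file_stat(tmp))  # -> {'file': 1, 'dir': 1, 'size': 5}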
def mirror_to(self, dst): # pragma: no cover
"""
        Create a new folder having exactly the same structure as this
        directory. However, every file is just an empty file (0 bytes) with
        the same file name as the original.
        :param dst: destination directory. It must not exist before you
            execute this.
"""
self.assert_is_dir_and_exists()
src = self.abspath
dst = os.path.abspath(dst)
if os.path.exists(dst): # pragma: no cover
raise Exception("distination already exist!")
folder_to_create = list()
file_to_create = list()
for current_folder, _, file_list in os.walk(self.abspath):
current_folder = current_folder.replace(src, dst)
try:
os.mkdir(current_folder)
except: # pragma: no cover
pass
for basename in file_list:
abspath = os.path.join(current_folder, basename)
with open(abspath, "wb") as _:
pass |
def execute_pyfile(self, py_exe=None): # pragma: no cover
"""
        Execute every ``.py`` file in this directory as a main script.
        :param py_exe: str, python command or python executable path.
"""
import subprocess
self.assert_is_dir_and_exists()
if py_exe is None:
if six.PY2:
py_exe = "python2"
elif six.PY3:
py_exe = "python3"
for p in self.select_by_ext(".py"):
            subprocess.Popen([py_exe, p.abspath]) |
def trail_space(self, filters=lambda p: p.ext == ".py"): # pragma: no cover
"""
        Trim trailing white space at the end of each line for every selected
        file (by default, every ``.py`` file).
"""
self.assert_is_dir_and_exists()
for p in self.select_file(filters):
try:
with open(p.abspath, "rb") as f:
lines = list()
for line in f:
lines.append(line.decode("utf-8").rstrip())
with open(p.abspath, "wb") as f:
f.write("\n".join(lines).encode("utf-8"))
except Exception as e: # pragma: no cover
raise e |
def autopep8(self, **kwargs): # pragma: no cover
"""
        Automatically convert the Python code in this directory to pep8-styled
        code, improving its readability and consistency.
        :param kwargs: arguments for the ``autopep8.fix_code`` method.
"""
self.assert_is_dir_and_exists()
for p in self.select_by_ext(".py"):
with open(p.abspath, "rb") as f:
code = f.read().decode("utf-8")
formatted_code = autopep8.fix_code(code, **kwargs)
with open(p.abspath, "wb") as f:
f.write(formatted_code.encode("utf-8")) |
def notify_listeners(self, data: Optional[_ListenableDataType]=_NO_DATA_MARKER):
"""
Notify event listeners, passing them the given data (if any).
:param data: the data to pass to the event listeners
"""
for listener in self._listeners:
if data is not Listenable._NO_DATA_MARKER:
listener(data)
else:
listener() |
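notify_listeners compares against a private sentinel (_NO_DATA_MARKER) instead of None so that None itself remains a valid payload. A minimal sketch of the kind of Listenable class this method would sit in (class layout inferred from the snippet, not the library's actual code):

from typing import Callable, Generic, List, TypeVar

_ListenableDataType = TypeVar("_ListenableDataType")

class Listenable(Generic[_ListenableDataType]):
    # Sentinel that distinguishes "no data supplied" from "data is None".
    _NO_DATA_MARKER = object()

    def __init__(self):
        self._listeners: List[Callable] = []

    def add_listener(self, listener: Callable):
        self._listeners.append(listener)

    def notify_listeners(self, data=_NO_DATA_MARKER):
        for listener in self._listeners:
            if data is not Listenable._NO_DATA_MARKER:
                listener(data)
            else:
                listener()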
def size(self):
"""
File size in bytes.
"""
try:
return self._stat.st_size
except: # pragma: no cover
self._stat = self.stat()
return self.size |
def mtime(self):
"""
        Get the most recent modification time as a POSIX timestamp.
"""
try:
return self._stat.st_mtime
except: # pragma: no cover
self._stat = self.stat()
return self.mtime |
def atime(self):
"""
        Get the most recent access time as a POSIX timestamp.
"""
try:
return self._stat.st_atime
except: # pragma: no cover
self._stat = self.stat()
return self.atime |
def ctime(self):
"""
        Get the creation time (metadata change time on Unix) as a POSIX timestamp.
"""
try:
return self._stat.st_ctime
except: # pragma: no cover
self._stat = self.stat()
return self.ctime |
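size, mtime, atime and ctime all use the same lazy caching idiom: try the cached self._stat first, and on the first access fall back to a single stat() call before retrying. A stand-alone sketch of that idiom (a hypothetical class, not the library's own, catching AttributeError explicitly rather than a bare except):

import os

class CachedStat:
    """Cache one os.stat_result per path and expose a field lazily."""

    def __init__(self, path):
        self.path = path

    def stat(self):
        return os.stat(self.path)

    @property
    def size(self):
        try:
            return self._stat.st_size   # use the cached stat result if present
        except AttributeError:
            self._stat = self.stat()    # populate the cache exactly once
            return self.size            # retry now that the cache exists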
def get_terminal_size(default_cols=80, default_rows=25):
"""Return current terminal size (cols, rows) or a default if detect fails.
This snippet comes from color ls by Chuck Blake:
http://pdos.csail.mit.edu/~cblake/cls/cls.py
"""
def ioctl_GWINSZ(fd):
"""Get (cols, rows) from a putative fd to a tty."""
try:
rows_cols = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
return tuple(reversed(rows_cols))
except:
return None
# Try std in/out/err...
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
# ...or ctty...
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
# ...or fall back to defaults
cr = (int(os.environ.get('COLUMNS', default_cols)),
int(os.environ.get('LINES', default_rows)))
return cr |
def get_metainfo(scriptfile,
keywords=['author', 'contact', 'copyright', 'download', 'git', 'subversion', 'version', 'website'],
special={},
                 first_line_pattern=r'^(?P<progname>.+?)(\s+v(?P<version>\S+))?$',
keyword_pattern_template=r'^\s*%(pretty)s:\s*(?P<%(keyword)s>\S.+?)\s*$',
prettify = lambda kw: kw.capitalize().replace('_', ' ')):
"""Dumb helper for pulling metainfo from a script __doc__ string.
Returns a metainfo dict with command, description, progname and the given
keywords (if present).
This function will only make minimal efforts to succeed. If you need
anything else: roll your own.
    The docstring needs to be multiline, and the closing quotes need to be
    first on a line, optionally preceded by whitespace.
    The first non-whitespace line is re.search'ed using first_line_pattern,
    which by default expects (version optional, containing no whitespace):
    PROGNAME [vVERSION]
The next non-whitespace, non-keyword line is expected to be the program
description.
The following lines are re.search'ed against a keyword:pattern dict which
is constructed using
keyword_pattern % dict(pretty=prettify(keyword), keyword=keyword)
    Default prettify is keyword.capitalize().replace('_', ' '). For example,
    the keyword "license" will match the following line:
        License: The MIT license.
    and set the "license" metainfo to "The MIT license.".
Any keyword:pattern pairs that need special treatment can be supplied with
special.
"""
patterns = dict((kw, re.compile(keyword_pattern_template % dict(pretty=prettify(kw), keyword=kw))) for kw in keywords)
patterns.update(special)
metainfo = dict()
if scriptfile[-4:] in ['.pyc', '.pyo']:
scriptfile = scriptfile[:-1]
script = open(scriptfile)
closer = ''
    for line in script:
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        if line[:3] in ('"""', "'''"):
            closer = line[:3]
            line = line[3:].strip()
            break
        raise ValueError('file contains no docstring')
    if not line:
        for line in script:
            line = line.strip()
            if line:
                break
    g = re.search(first_line_pattern, line).groupdict()
metainfo['progname'] = g['progname']
if g['version']:
metainfo['version'] = g['version']
for line in script:
if line.strip().startswith(closer):
break
for keyword, pattern in patterns.items():
m = pattern.search(line)
if m:
metainfo[keyword] = m.group(keyword)
break
        else:
            if line.strip() and 'description' not in metainfo:
                metainfo['description'] = line.strip()
return metainfo |
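To make the keyword matching concrete, here is how the default keyword_pattern_template expands for a single keyword and what it captures from a docstring line (the keyword and the example line are hypothetical):

import re

keyword = 'author'
prettify = lambda kw: kw.capitalize().replace('_', ' ')
pattern = re.compile(r'^\s*%(pretty)s:\s*(?P<%(keyword)s>\S.+?)\s*$'
                     % dict(pretty=prettify(keyword), keyword=keyword))

line = "    Author: Jane Doe   "
match = pattern.search(line)
print(match.group('author'))   # -> Jane Doe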
def unusedoptions(self, sections):
"""Lists options that have not been used to format other values in
their sections.
Good for finding out if the user has misspelled any of the options.
"""
unused = set([])
for section in _list(sections):
if not self.has_section(section):
continue
options = self.options(section)
raw_values = [self.get(section, option, raw=True) for option in options]
for option in options:
formatter = "%(" + option + ")s"
for raw_value in raw_values:
if formatter in raw_value:
break
else:
unused.add(option)
return list(unused) |
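An illustration of what counts as "unused" here: an option is used only when another raw value in the same section interpolates it via %(option)s. A self-contained sketch of the same check with the standard ConfigParser (section and option names are hypothetical):

from configparser import ConfigParser

cfg = ConfigParser()
cfg.read_string("""
[server]
host = example.org
port = 8080
url = http://%(host)s:%(port)s/
timeout = 30
""")

section = 'server'
options = cfg.options(section)
raw_values = [cfg.get(section, option, raw=True) for option in options]
unused = [option for option in options
          if not any("%(" + option + ")s" in raw for raw in raw_values)]
# 'host' and 'port' are interpolated by 'url'; 'url' and 'timeout' are not
# used to build any other value, so they are reported as unused.
print(unused)   # -> ['url', 'timeout']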
def parse(self, argv, usedname, location):
"""Consume and process arguments and store the result.
ARGS:
argv <list str>:
The argument list to parse.
usedname <str>:
The string used by the user to invoke the option.
location <str>:
                A user friendly string describing where the parser got this
                data from.
"""
try:
value = self.format.parse(argv)
        except formats.BadNumberOfArguments as e:
            raise BadNumberOfArguments(usedname, e.required, e.supplied)
        except formats.BadArgument as e:
            raise BadArgument(usedname, e.argument, e.message)
if self.recurring:
self.value.append(value)
else:
self.value = value
self.location = location |
def parsestr(self, argsstr, usedname, location):
"""Parse a string lexically and store the result.
ARGS:
argsstr <str>:
The string to parse.
usedname <str>:
The string used by the user to invoke the option.
location <str>:
                A user friendly string describing where the parser got this
                data from.
"""
try:
value = self.format.parsestr(argsstr)
        except formats.BadNumberOfArguments as e:
            raise BadNumberOfArguments(usedname, e.required, e.supplied)
        except formats.BadArgument as e:
            raise BadArgument(usedname, e.argument, e.message)
if self.recurring:
self.value.append(value)
else:
self.value = value
self.location = location |
def parse(self, argv):
"""Consume and process arguments and store the result.
argv is the list of arguments to parse (will be modified).
        Recurring PositionalArguments get a list as .value.
Optional PositionalArguments that do not get any arguments to parse get
None as .value, or [] if recurring.
"""
if not argv and self.optional:
self.value = [] if self.recurring else None
return
try:
value = self.format.parse(argv)
if not self.recurring:
self.value = value
return
self.value = [value]
while argv:
self.value.append(self.format.parse(argv))
        except formats.BadNumberOfArguments as e:
            raise BadNumberOfArguments(self.displayname, e.required, e.given)
        except formats.BadArgument as e:
            raise BadArgument(self.displayname, e.argument, e.details) |