Dataset viewer header: search is not available for this dataset. The `text` column contains one Python function per row (string lengths range from 75 to 104k characters).
def rgb_color(self):
    """Return the current color as a list [R, G, B], each component 0-255."""
    self.update()
    rgb = (self._red, self._green, self._blue)
    return list(rgb)
def turn_on(self):
    """Turn the bulb on at full (100%) brightness."""
    cmd = "C {},,,,100,\r\n".format(self._zid)
    resp = self._hub.send_command(cmd)
    _LOGGER.debug("Turn on %s: %s", repr(cmd), resp)
    return resp
def set_brightness(self, brightness):
    """Set the bulb's brightness (color channels left unchanged)."""
    cmd = "C {},,,,{},\r\n".format(self._zid, brightness)
    resp = self._hub.send_command(cmd)
    _LOGGER.debug("Set brightness %s: %s", repr(cmd), resp)
    return resp
def set_all(self, red, green, blue, brightness):
    """Set both color (R, G, B) and brightness of the bulb in one command."""
    cmd = "C {},{},{},{},{},\r\n".format(
        self._zid, red, green, blue, brightness)
    resp = self._hub.send_command(cmd)
    _LOGGER.debug("Set all %s: %s", repr(cmd), resp)
    return resp
def update(self):
    """Refresh light state from the hub; mark this light offline when the
    hub returns no bulbs."""
    bulbs = self._hub.get_lights()
    if bulbs:
        return
    _LOGGER.debug("%s is offline, send command failed", self._zid)
    self._online = False
def functional(ifunctional):
    """Make *ifunctional* dual-use: fun(fn) -> function, or
    fun(fn, args...) -> immediate call of the transformed fn(args...).

    :param ifunctional: function transformer
    :return: dispatching wrapper
    """
    @wraps(ifunctional)
    def wrapper(fn, *args, **kw):
        transformed = ifunctional(fn)
        if not (args or kw):
            return transformed
        return transformed(*args, **kw)
    return wrapper
def tuple_arg(fn):
    """Wrap *fn* so every positional arg is tuplefied:
    f(1, 2, 3) becomes f((1,), (2,), (3,)).

    :param fn: function to wrap
    :return: wrapped function
    """
    @wraps(fn)
    def wrapped(*args, **kwargs):
        return fn(*map(tuplefy, args), **kwargs)
    return wrapped
def tuple_args(fn):
    """Normalize positional args into a single tuple parameter.

    Both fun(p1, p2, ..., **kw) and fun([p1, p2, ...], **kw) end up as
    fn((p1, p2, ...), **kw).

    Example::

        @tuple_args
        def f(args, **kwargs):
            for d in args:
                print(d)

        f(1, 2, 3)  # same as f([1, 2, 3])

    :param function fn:
    :return: wrapped function
    """
    @wraps(fn)
    def wrapped(*args, **kwargs):
        if len(args) == 1:
            sole = args[0]
            if isinstance(sole, tuple):
                return fn(sole, **kwargs)
            if isinstance(sole, list):
                return fn(tuple(sole), **kwargs)
        return fn(args, **kwargs)
    return wrapped
def unpack_args(classfun, nth=0):
    """Decorator factory: when the wrapped callable receives exactly
    nth + 1 positional args and args[nth] is a tuple or list, unpack it
    into separate positional args.

    :param classfun: truthy when decorating a method (forces nth=1 to
        skip ``self``)
    :param nth: index of the packed argument (0 for plain functions)
    :return: decorator
    """
    if classfun:
        nth = 1

    def deco(fn):
        def wrapped(*args, **kwargs):
            if len(args) == nth + 1 and isinstance(args[nth], (tuple, list)):
                # Convert the packed arg to a tuple before concatenating.
                # The original ``tuple(args[:nth] + args[nth])`` raised
                # TypeError whenever a *list* was passed (tuple + list).
                args = args[:nth] + tuple(args[nth])
            return fn(*args, **kwargs)
        return wrapped
    return deco
def optional_str(deco):
    """Allow *deco* to be used bare or with a single optional string arg.

    :param deco: decorator factory taking at most one string
    :return: dispatching wrapper
    """
    @wraps(deco)
    def dispatcher(*args, **kwargs):
        bare_function = (
            not kwargs
            and len(args) == 1
            and args[0] is not None
            and not isinstance(args[0], str)
        )
        if bare_function:
            # used as @deco with no arguments: args[0] is the function
            return deco()(args[0])
        # used as @deco('...') / deco(...) with explicit arguments
        return deco(*args, **kwargs)
    return dispatcher
def patchmethod(*cls, **kwargs):
    """Attach the decorated function as a method on every class in *cls*.

    Usage::

        @patchmethod(A)                 # installed under the function's name
        def sample(self): ...

        @patchmethod(A, name='member')  # installed under an explicit name
        def sample(self): ...

    :param cls: target classes
    :param kwargs: optional ``name`` giving the attribute name
    :return: the function, unchanged (so it stays usable directly)
    """
    def wrap(fun):
        attr = kwargs.pop('name', None) or fun.__name__
        for target in cls:
            setattr(target, attr, fun)
        return fun
    return wrap
def patchproperty(*cls, **kwargs):
    """Attach the decorated function as a read-only property on each class.

    Example::

        class B(A):
            pass

        @patchproperty(B)
        def prop(self):
            return 'hello'

    :param cls: target classes
    :param kwargs: optional ``property`` key giving the attribute name
    :return: the original function
    """
    def wrap(fun):
        attr = kwargs.pop('property', None) or fun.__name__
        descriptor = property(fun)
        for target in cls:
            setattr(target, attr, descriptor)
        return fun
    return wrap
def on_interrupt(handler, reraise=False):
    """Context for handling KeyboardInterrupt (SIGINT) with a callback.

    ex)
        with on_interrupt(handler):
            critical_work_to_prevent()

    While the context is active, SIGINT invokes *handler* instead of
    raising; the (sig, frame) pair is stashed on ``handler.signal``.
    On exit the previous SIGINT handler is restored and, when requested,
    the deferred interrupt is re-delivered to it.

    NOTE(review): this is a generator (it ``yield``s); it is presumably
    wrapped with @contextlib.contextmanager at the definition site —
    confirm against the full file.

    :param function handler: zero-arg callback; its truthy return value
        requests re-raising on exit (stored as ``handler._reraise``)
    :param bool reraise: force re-delivery of the interrupt on exit
    :return: context manager yielding *handler*
    """
    def _handler(sig, frame):
        # record the interrupt, then let the callback decide on re-raise
        handler.signal = (sig, frame)
        handler._reraise = handler()

    handler._reraise = False
    handler.signal = None
    # swap in our handler, remembering whatever was installed before
    oldhandler = signal.getsignal(signal.SIGINT)
    signal.signal(signal.SIGINT, _handler)
    yield handler
    signal.signal(signal.SIGINT, oldhandler)
    # re-deliver the deferred interrupt to the original handler if asked
    if (reraise or handler._reraise) and handler.signal:
        oldhandler(*handler.signal)
def interrupt_guard(msg='', reraise=True):
    """Context guarding a critical section against KeyboardInterrupt.

    ex)
        with interrupt_guard('need long time'):
            critical_work_to_prevent()

    :param str msg: message printed when an interrupt arrives
    :param reraise: re-raise the interrupt when the context exits
    :return: context
    """
    def report():
        print(msg)

    return on_interrupt(report, reraise=reraise)
def is_main_alive():
    """Return True when the MainThread exists and is alive.

    :rtype: bool
    """
    main = next(
        (t for t in threading.enumerate() if t.name == 'MainThread'),
        None)
    if main is None:
        print('MainThread not found')
        return False
    return main.is_alive()
def retrieve_document(file_path, directory='sec_filings'):
    """Download an EDGAR filing over FTP and store it under *directory*.

    The file is written as UTF-8 text, with '/' in the remote path
    replaced by '_' in the local name. The directory is created when
    missing.

    :param file_path: remote path on the SEC FTP server (begins with 'edgar')
    :param directory: destination directory (default 'sec_filings')
    """
    ftp = FTP('ftp.sec.gov', timeout=None)
    try:
        ftp.login()
        if not os.path.exists(directory):
            os.makedirs(directory)
        name = file_path.replace('/', '_')
        # Spool the binary transfer to a temp file, then decode to text.
        with tempfile.TemporaryFile() as temp:
            ftp.retrbinary('RETR %s' % file_path, temp.write)
            temp.seek(0)
            with open('{}/{}'.format(directory, name), 'w+') as f:
                f.write(temp.read().decode("utf-8"))
    finally:
        # Always release the control connection — the original leaked it
        # whenever login/retrbinary raised. (Also removed the dead
        # ``records = temp`` / ``retry = False`` statements that read a
        # closed temp handle, and the no-op ``f.closed`` expression.)
        ftp.close()
def action(self, item):
    """Execute one queued job; intended to be overridden.

    :param item: (callable, args, kwargs) triple
    :return: the callable's result
    """
    fn, fn_args, fn_kwargs = item
    return fn(*fn_args, **fn_kwargs)
def push_job(self, fun, *args, **kwargs):
    """Enqueue a job, blocking until queue space is available.

    NOTE(review): the original docstring claimed "non-blocking", which
    contradicts ``block=True`` below; the non-blocking variant is
    ``put_job``.

    :param fun: callable to run
    :param args: positional args for *fun*
    :param kwargs: keyword args for *fun*
    :return: result of ``self.put``
    """
    assert callable(fun)
    return self.put((fun, args, kwargs), block=True)
def put_job(self, fun, *args, **kwargs):
    """Enqueue a job without blocking.

    Accepts either ``put_job(fun, *args, **kwargs)`` or a pre-packed
    triple: ``put_job((fun, args, kwargs))``.

    :param fun: callable, or a (fun, args, kwargs) tuple/list
    :return: result of ``self.put``
    """
    packed = not args and not kwargs and isinstance(fun, (tuple, list))
    if packed:
        # ex) q.put_job([fun, args, kwargs])
        fun, args, kwargs = fun
    assert callable(fun)
    return self.put((fun, args, kwargs), block=False)
def add_flag(*args, **kwargs):
    """Define command-line flag(s) on the module-level ``flag`` object.

    Supported forms::

        add_flag(flagname, default_value, help='...', **kwargs)
        add_flag([(flagname, default_value, help), ...])
        add_flag(gpu=1, verbose=False)   # defaults only, no help text

    ex) add_flag('gpu', 1, help='CUDA_VISIBLE_DEVICES')
    """
    single_list = len(args) == 1 and isinstance(args[0], (list, tuple))
    if single_list:
        for spec in args[0]:
            flag.add_flag(*spec)
    elif args:
        flag.add_flag(*args, **kwargs)
    else:
        for name, default in kwargs.items():
            flag.add_flag(name, default)
def run(main=None, argv=None, **flags):
    """
    :param main: main or sys.modules['__main__'].main
    :param argv: argument list used in argument parse
    :param flags: flags to define with defaults
    :return:
    """
    """Runs the program with an optional 'main' function and 'argv' list."""
    import sys as _sys
    import inspect
    main = main or _sys.modules['__main__'].main
    # Show the leading part of main's docstring (up to the first :param)
    # as the parser usage text.
    if main.__doc__:
        docstring = main.__doc__.split(':param')[0]
        _parser.usage = 'from docstring \n {}'.format(docstring)
    # getfullargspec is Python 3; fall back to getargspec on Python 2.
    try:
        a = inspect.getfullargspec(main)
    except AttributeError:
        a = inspect.getargspec(main)
    # a is a namedtuple(args, varargs, keywords, defaults)
    # Register main's keyword defaults as flags.
    if a.defaults:
        kwargs = dict(zip(reversed(a.args), reversed(a.defaults)))
        add_flag(**kwargs)
    else:
        kwargs = dict()
    # Arguments without defaults become positional command-line args.
    if a.defaults is None:
        nargs = len(a.args)
    else:
        nargs = len(a.args) - len(a.defaults)
    posargs = a.args[:nargs]
    flag.add_args(posargs)
    add_flag(**flags)
    # Extract the args from the optional `argv` list.
    args = argv[1:] if argv else None
    # Parse the known flags from that list, or from the command
    # line otherwise.
    unparsed, kw = flag._parse_flags_kw(args=args)
    d = flag.__dict__['__flags']
    # Positional values come from the parsed flag store, in order,
    # followed by anything the parser could not consume.
    args = [d[k] for k in posargs]
    args += unparsed
    kwargs.update({k: d[k] for k in kwargs.keys()})
    kwargs.update(kw)
    # Call the main function, passing through any arguments,
    # with parsed flags as kwargs, and exit with its return value.
    _sys.exit(main(*args, **kwargs))
def mkdir_if_not(filepath, ispath=False):
    """Create the directory part of *filepath* when it does not exist.

    :param filepath: file path (or a directory path when ispath=True)
    :param ispath: treat *filepath* itself as the directory
    :return: *filepath*, unchanged
    """
    directory = filepath if ispath else os.path.split(filepath)[0]
    if not directory:
        return filepath
    if not os.path.exists(directory):
        try:
            os.makedirs(directory)
        except FileExistsError as e:
            # lost a creation race with another process — just log it
            logg.warn(str(e))
    return filepath
def readlines(filepath):
    """Read a text file and return its non-empty lines, stripped.

    :param filepath: path to the text file
    :return: list[line]
    """
    with open(filepath, 'rt') as f:
        stripped = (line.strip() for line in f)
        return [line for line in stripped if line]
def readtxt(filepath):
    """Return the entire contents of a text file as one string.

    :param filepath: path to the text file
    """
    with open(filepath, 'rt') as f:
        # read() returns the whole file directly — no need to join
        # readlines() as the original did.
        return f.read()
def savefile(obj, filepath, compress=True):
    """Persist *obj* to *filepath*, overwriting any existing file.

    The object is dumped to a temporary sibling file first and then
    renamed into place, so an interrupted write cannot leave a truncated
    target file.

    :param obj: picklable object
    :param str filepath: destination path
    :param compress: use joblib compression
    :return: *obj*, unchanged
    """
    try:
        import cPickle as pickle
    except Exception:
        import pickle
    import joblib

    tmpfile = filepath + '.tmp'
    mkdir_if_not(tmpfile)
    if compress:
        joblib.dump(obj, tmpfile, compress=3, cache_size=100,
                    protocol=pickle.HIGHEST_PROTOCOL)
    else:
        joblib.dump(obj, tmpfile, compress=0)
    os.rename(tmpfile, filepath)
    return obj
def loadfile(filepath, mmap_mode=None):
    """Load an object previously saved with :func:`savefile`.

    :param filepath: source path
    :param mmap_mode: one of {None, 'r+', 'r', 'w+', 'c'}; see joblib.load
    :return: the loaded object, or None when the file cannot be read
    """
    import joblib
    try:
        loaded = joblib.load(filepath, mmap_mode=mmap_mode)
    except IOError:
        return None
    return loaded
def load_or_run(filepath, fun, *args, **kwargs):
    """Load a cached result from *filepath*, or run *fun* and cache it.

    ex) res = load_or_run('file_loadorsave', funlongtime, ..., force=False)

    :param filepath: cache file; relative names are placed under
        /tmp/snipy/load_or_run/
    :param fun: callable producing the result
    :keyword force: recompute even when the cache file exists
    :keyword compress: forwarded to savefile / controls mmap on load
    :return: the cached or freshly computed result
    """
    force = kwargs.pop('force', False)
    compress = kwargs.pop('compress', True)
    # Relative-path check. The original used `or`, which is always True
    # (no string starts with both '/' and '~'), so absolute and '~' paths
    # were prefixed too; `and` expresses the intent.
    if not filepath.startswith('/') and not filepath.startswith('~'):
        filepath = os.path.join('/tmp/snipy/load_or_run/', filepath)
    if not force and os.path.exists(filepath):
        # cached result exists — load it
        mmap_mode = 'r+' if not compress else None
        return loadfile(filepath, mmap_mode=mmap_mode)
    res = fun(*args, **kwargs)
    savefile(res, filepath, compress=compress)
    return res
def fnmatches(fname, patterns, matchfun=None):
    """Yield whether *fname* matches each pattern in turn.

    :param str fname: file name
    :param patterns: list of filename patterns; see fnmatch.fnmatch
    :param matchfun: match predicate(name, pattern); defaults to
        fnmatch.fnmatch (the body already handled None — the parameter is
        now optional to match)
    :rtype: generator of bool
    """
    import fnmatch
    matchfun = matchfun or fnmatch.fnmatch
    for pattern in patterns:
        yield matchfun(fname, pattern)
def listdir(p, match='*', exclude='', listtype='file', matchfun=None):
    """List entries of a directory (NOT recursive), filtered by pattern.

    :param p: directory path
    :param match: include pattern(s)
    :param exclude: exclude pattern(s)
    :param listtype: 'file' | 'filepath' | 'dir' | 'dirpath';
        anything else lists every entry (file or folder) by name
    :param matchfun: match fun (default fnmatch.fnmatch),
        True/False = matchfun(name, pattern)
    :rtype: filtered generator of names/paths
    """
    generators = {
        'file': listfile,
        'filepath': listfilepath,
        'dir': listfolder,
        'dirpath': listfolderpath,
    }
    source = generators.get(listtype)
    if source is not None:
        gen = source(p)
    else:
        # any entry, file or folder
        gen = (entry.name for entry in scandir.scandir(p))
    return filter_pattern(gen, match, exclude, matchfun)
def listfile(p):
    """Yield names of regular files in directory *p*; silently empty on
    OSError (e.g. missing or unreadable directory)."""
    try:
        for entry in scandir.scandir(p):
            if not entry.is_file():
                continue
            yield entry.name
    except OSError:
        return
def listfilepath(p):
    """Yield full paths of regular files in directory *p*."""
    for entry in scandir.scandir(p):
        if not entry.is_file():
            continue
        yield entry.path
def listfolder(p):
    """Yield names of subdirectories of *p* (folders only)."""
    for entry in scandir.scandir(p):
        if not entry.is_dir():
            continue
        yield entry.name
def listfolderpath(p):
    """Yield full paths of subdirectories of *p* (folders only)."""
    for entry in scandir.scandir(p):
        if not entry.is_dir():
            continue
        yield entry.path
def _pred_pattern(match='*', exclude='', patterntype='fnmatch'):
    """Build a name predicate from include/exclude patterns (internal).

    '*' with no exclude accepts everything; otherwise the predicate is
    "matches include AND not matches exclude".
    """
    def as_matcher(patterns):
        patterns = [patterns] if _is_str(patterns) else patterns
        return get_match_fun(patterns, patterntype)

    if match == '*':
        if not exclude:
            return lambda name: True
        excluded = as_matcher(exclude)
        return lambda name: not excluded(name)

    included = as_matcher(match)
    if not exclude:
        return lambda name: included(name)
    excluded = as_matcher(exclude)
    return lambda name: included(name) and not excluded(name)
def findfolder(toppath, match='*', exclude=''):
    """Recursively yield folder paths under *toppath* whose names satisfy
    the match/exclude patterns.

    :type toppath: str
    :type match: str or list(str)
    :type exclude: str or list(str)
    :rtype: generator for path str
    """
    pred = _pred_pattern(match, exclude)
    # walkfolder is already a generator of paths; the original wrapped it
    # in a pass-through generator expression, which added nothing.
    return walkfolder(toppath, pred)
def walkfolder(toppath, pred):
    """Recursively yield paths of folders whose name satisfies *pred*.

    Folders failing *pred* are neither yielded nor descended into.

    :type toppath: str
    :type pred: function(str) => bool
    """
    for entry in scandir.scandir(toppath):
        if entry.is_dir() and pred(entry.name):
            yield entry.path
            for sub in walkfolder(entry.path, pred):
                yield sub
def tempfolder(prefix=''):
    """Create a uniquely-named temporary folder and return its path."""
    import uuid
    unique = '{}{}'.format(prefix, uuid.uuid4())
    path = os.path.join(tempdir(), unique)
    return mkdir_if_not(path, ispath=True)
def imsize(fname):
    """Return image size as (height, width).

    :param fname: image file path
    """
    from PIL import Image
    width, height = Image.open(fname).size
    return height, width
def intersect(self, other):
    """Return a DictObj holding the items of *self* whose keys also
    exist in *other*.

    :type other: dict
    :rtype: DictObj
    """
    shared = (k for k in self if k in other)
    return DictObj({k: self[k] for k in shared})
def from_dict(dic):
    """Recursively convert a plain dict into a DictObj.

    :param dic: source dict (nested dicts are converted too)
    :return: DictObj
    """
    converted = {key: DictObj.convert_ifdic(value)
                 for key, value in dic.items()}
    return DictObj(converted)
def _clean_up(paths): """ Clean up after ourselves, removing created files. @param {[String]} A list of file paths specifying the files we've created during run. Will all be deleted. @return {None} """ print('Cleaning up') # Iterate over the given paths, unlinking them for path in paths: print('Removing %s' % path) os.unlink(path)
def _create_index_file(
        root_dir, location, image_files, dirs, force_no_processing=False):
    """
    Create an index file in the given location, supplying known lists of
    present image files and subdirectories.
    @param {String} root_dir - The root directory of the entire crawl. Used
        to ascertain whether the given location is the top level.
    @param {String} location - The current directory of the crawl. The index
        file will be created here.
    @param {[String]} image_files - A list of image file names in the
        location. These will be displayed in the index file's gallery.
    @param {[String]} dirs - The subdirectories of the location directory.
        These will be displayed as links further down the file structure.
    @param {Boolean=False} force_no_processing - If True, do not attempt to
        actually process thumbnails, PIL images or anything. Simply index
        <img> tags with original file src attributes.
    @return {String} The full path (location plus filename) of the newly
        created index file. Intended for usage cleaning up created files.
    """
    # Put together HTML as a list of the lines we'll want to include
    # Issue #2 exists to do this better than HTML in-code
    header_text = \
        'imageMe: ' + location + ' [' + str(len(image_files)) + ' image(s)]'
    html = [
        '<!DOCTYPE html>',
        '<html>',
        '    <head>',
        # NOTE: the original list was missing a comma here, silently fusing
        # the <title> and <style> entries onto one line of output.
        '        <title>imageMe</title>',
        '        <style>',
        '            html, body {margin: 0;padding: 0;}',
        '            .header {text-align: right;}',
        '            .content {',
        '                padding: 3em;',
        '                padding-left: 4em;',
        '                padding-right: 4em;',
        '            }',
        '            .image {max-width: 100%; border-radius: 0.3em;}',
        '            td {width: ' + str(100.0 / IMAGES_PER_ROW) + '%;}',
        '        </style>',
        '    </head>',
        '    <body>',
        '    <div class="content">',
        '    <h2 class="header">' + header_text + '</h2>'
    ]
    # Populate the present subdirectories - this includes '..' unless we're
    # at the top level
    directories = []
    if root_dir != location:
        directories = ['..']
    directories += dirs
    if len(directories) > 0:
        html.append('<hr>')
    # For each subdirectory, include a link to its index file
    for directory in directories:
        link = directory + '/' + INDEX_FILE_NAME
        html += [
            '    <h3 class="header">',
            '    <a href="' + link + '">' + directory + '</a>',
            '    </h3>'
        ]
    # Populate the image gallery table; row_count tracks how many cells the
    # currently-open <tr> holds.
    html += ['<hr>', '<table>']
    row_count = 0
    for image_file in image_files:
        if row_count == 0:
            html.append('<tr>')
        img_src = _get_thumbnail_src_from_file(
            location, image_file, force_no_processing
        )
        link_target = _get_image_link_target_from_file(
            location, image_file, force_no_processing
        )
        html += [
            '    <td>',
            '    <a href="' + link_target + '">',
            '        <img class="image" src="' + img_src + '">',
            '    </a>',
            '    </td>'
        ]
        row_count += 1
        if row_count == IMAGES_PER_ROW:
            html.append('</tr>')
            row_count = 0
    # Close the final row only when it is actually open — the original
    # unconditionally appended '</tr>', emitting a stray close tag whenever
    # the last row was exactly full.
    if row_count:
        html.append('</tr>')
    html.append('</table>')
    html += [
        '    </div>',
        '  </body>',
        '</html>'
    ]
    # Actually create the file, now we've put together the HTML content
    index_file_path = _get_index_file_path(location)
    print('Creating index file %s' % index_file_path)
    with open(index_file_path, 'w') as index_file:
        index_file.write('\n'.join(html))
    # Return the path for cleaning up later
    return index_file_path
def _create_index_files(root_dir, force_no_processing=False):
    """
    Crawl the root directory downwards, generating an index HTML file in
    each directory on the way down.
    @param {String} root_dir - The top level directory to crawl down from.
        In normal usage, this will be '.'.
    @param {Boolean=False} force_no_processing - If True, do not attempt
        to process thumbnails, PIL images or anything. Simply index <img>
        tags with original file src attributes.
    @return {[String]} Full file paths of all created files.
    """
    created_files = []
    for here, dirs, files in os.walk(root_dir):
        print('Processing %s' % here)
        # sorted() rebinds locally only; the walk order itself is untouched
        subdirs = sorted(dirs)
        images = sorted(f for f in files if re.match(IMAGE_FILE_REGEX, f))
        created_files.append(
            _create_index_file(
                root_dir, here, images, subdirs, force_no_processing
            )
        )
    return created_files
def _get_image_from_file(dir_path, image_file):
    """
    Get an instance of PIL.Image from the given file.
    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within
        dir_path
    @return {PIL.Image} The image as a PIL Image, or None when PIL is not
        present or cannot process the given file type.
    """
    # No PIL, no image — bail out immediately
    if not PIL_ENABLED:
        return None
    path = os.path.join(dir_path, image_file)
    try:
        return Image.open(path)
    except IOError as exptn:
        print('Error loading image file %s: %s' % (path, exptn))
        return None
def _get_image_link_target_from_file(dir_path, image_file, force_no_processing=False):
    """
    Get the value to be used as the href for links from thumbnail images.
    For most image formats this is simply the image file name itself, but
    formats many browsers cannot display natively (tif) link to image data
    in another format instead.
    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @param {Boolean=False} force_no_processing - If True, do not attempt to
        process a thumbnail, PIL image or anything. Simply return the image
        filename as src.
    @return {String} The href to use.
    """
    # If we've specified to force no processing, just return the image filename
    if force_no_processing:
        return image_file
    # First try to get an image
    img = _get_image_from_file(dir_path, image_file)
    # Guard against None: _get_image_from_file returns None when PIL is
    # missing or the file is unreadable — the original dereferenced
    # img.format unconditionally and crashed with AttributeError.
    if img is not None and img.format.lower() in ['tif', 'tiff']:
        # Not displayable in-browser: return full-sized displayable data
        return _get_image_src_from_file(
            dir_path, image_file, force_no_processing
        )
    return image_file
def _get_image_src_from_file(dir_path, image_file, force_no_processing=False):
    """
    Get base-64 encoded data as a string for the given image file's full
    image, for use directly in HTML <img> tags, or a path to the original
    if image scaling is not supported. Full-sized counterpart of
    _get_thumbnail_src_from_file, for formats that cannot be displayed
    directly in-browser.
    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @param {Boolean=False} force_no_processing - If True, skip all
        processing and return the image filename as src.
    @return {String} The base-64 encoded image data string, or path to the
        file itself if not supported.
    """
    if force_no_processing:
        # TIF/TIFF cannot be shown raw in-browser; use the placeholder
        if image_file.endswith(('tif', 'tiff')):
            return UNSUPPORTED_IMAGE_TYPE_DATA
        return image_file
    img = _get_image_from_file(dir_path, image_file)
    return _get_src_from_image(img, image_file)
def _get_src_from_image(img, fallback_image_file): """ Get base-64 encoded data as a string for the given image. Fallback to return fallback_image_file if cannot get the image data or img is None. @param {Image} img - The PIL Image to get src data for @param {String} fallback_image_file - The filename of the image file, to be used when image data capture fails @return {String} The base-64 encoded image data string, or path to the file itself if not supported. """ # If the image is None, then we can't process, so we should return the # path to the file itself if img is None: return fallback_image_file # Target format should be the same as the original image format, unless it's # a TIF/TIFF, which can't be displayed by most browsers; we convert these # to jpeg target_format = img.format if target_format.lower() in ['tif', 'tiff']: target_format = 'JPEG' # If we have an actual Image, great - put together the base64 image string try: bytesio = io.BytesIO() img.save(bytesio, target_format) byte_value = bytesio.getvalue() b64 = base64.b64encode(byte_value) return 'data:image/%s;base64,%s' % (target_format.lower(), b64) except IOError as exptn: print('IOError while saving image bytes: %s' % exptn) return fallback_image_file
def _get_thumbnail_image_from_file(dir_path, image_file):
    """
    Get a PIL.Image from the given image file, scaled down to
    THUMBNAIL_WIDTH wide.
    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @return {PIL.Image} The thumbnail as a PIL Image, or None when the
        functionality is unavailable (no PIL, unreadable file, GIF input,
        or a resize failure). See _get_image_from_file for details.
    """
    img = _get_image_from_file(dir_path, image_file)
    # Unsupported source, or animated GIF (thumbnailing would drop frames)
    if img is None or img.format.lower() == 'gif':
        return None
    img_width, img_height = img.size
    # Scale so width becomes THUMBNAIL_WIDTH, preserving aspect ratio
    scale_ratio = THUMBNAIL_WIDTH / float(img_width)
    target_height = int(scale_ratio * img_height)
    try:
        img.thumbnail((THUMBNAIL_WIDTH, target_height), resample=RESAMPLE)
    except IOError as exptn:
        print('WARNING: IOError when thumbnailing %s/%s: %s' % (
            dir_path, image_file, exptn
        ))
        return None
    return img
def _get_thumbnail_src_from_file(dir_path, image_file, force_no_processing=False):
    """
    Get base-64 encoded data as a string for the given image file's
    thumbnail, for use directly in HTML <img> tags, or a path to the
    original if image scaling is not supported.
    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @param {Boolean=False} force_no_processing - If True, skip thumbnail /
        PIL processing and return the image filename as src.
    @return {String} The base-64 encoded image data string, or path to the
        file itself if not supported.
    """
    if force_no_processing:
        # TIF/TIFF cannot be shown raw in-browser; use the placeholder
        if image_file.endswith(('tif', 'tiff')):
            return UNSUPPORTED_IMAGE_TYPE_DATA
        return image_file
    thumb = _get_thumbnail_image_from_file(dir_path, image_file)
    return _get_src_from_image(thumb, image_file)
def _run_server():
    """
    Run the image server. This is blocking. Handles user KeyboardInterrupt
    and other exceptions appropriately and returns control once the server
    is stopped.
    @return {None}
    """
    port = _get_server_port()
    # Reuse the address so immediate re-runs of the script don't block
    # waiting for the old socket to be freed.
    SocketServer.TCPServer.allow_reuse_address = True
    server = SocketServer.TCPServer(
        ('', port), SimpleHTTPServer.SimpleHTTPRequestHandler
    )
    # Announce before serving (cheeky / optimistic, however you want to
    # look at it).
    print('Your images are at http://127.0.0.1:%d/%s' % (
        port,
        INDEX_FILE_NAME
    ))
    try:
        # Blocks until the server is killed
        server.serve_forever()
    except KeyboardInterrupt:
        # Expected shutdown path for ad-hoc command-line use
        print('User interrupted, stopping')
    except Exception as exptn:
        # Everything else: shutdowns via other signals and faults actually
        # starting the server in the first place
        print(exptn)
        print('Unhandled exception in server, stopping')
def serve_dir(dir_path):
    """
    Generate indexes and run the server from the given directory downwards.
    @param {String} dir_path - The directory path (absolute, or relative
        to CWD)
    @return {None}
    """
    # First pass: force no processing for fast page generation (serving of
    # large originals may be slow until thumbnails exist).
    print('Performing first pass index file generation')
    created_files = _create_index_files(dir_path, True)
    if PIL_ENABLED:
        # Regenerate with thumbnails in the background — slower, so it
        # runs while we are already serving.
        print('Performing PIL-enchanced optimised index file generation in background')
        background_indexer = BackgroundIndexFileGenerator(dir_path)
        background_indexer.run()
    # Blocks until the server is stopped
    _run_server()
    # Remove the generated index files so we don't litter the image dirs
    _clean_up(created_files)
def modulename(cls, depth=1):
    """Return the __name__ of the calling module, *depth* frames up
    (adjusted by cls.extra_depth)."""
    frame = sys._getframe(depth + cls.extra_depth)
    return frame.f_globals['__name__']
def deco_optional(decorator):
    """Make *decorator* usable both bare (@d) and with arguments (@d(...))."""
    @functools.wraps(decorator)
    def dispatcher(*args, **kwargs):
        bare = len(args) == 1 and not kwargs and inspect.isfunction(args[0])
        if bare:
            # @decorator with no parens: args[0] is the target function
            return decorator()(args[0])
        # @decorator(...) with explicit arguments
        return decorator(*args, **kwargs)
    return dispatcher
def optional(deco):
    """Allow *deco* to be applied bare or configured with keyword-only
    options.

    :param deco: decorator factory accepting only keyword arguments
    :return: dispatching wrapper
    """
    @functools.wraps(deco)
    def dispatcher(*args, **kwargs):
        decorator = deco(**kwargs)
        if not args:
            return decorator
        assert len(args) == 1
        return decorator(args[0])
    return dispatcher
def bindargs(fun, *argsbind, **kwbind):
    """Partially bind positional args, with placeholders for later ones.

    _ = bind.placeholder  # unbound placeholder (arg)
    f = bind(fun, _, _, arg3, kw=kw1, kw2=kw2), f(arg1, arg2)

    Call-time positionals fill the placeholder slots in order; any
    surplus call-time positionals are appended after the bound ones.
    Call-time keywords override the bound keywords.

    :param fun: function to bind
    :param argsbind: positional bindings, possibly containing placeholders
    :param kwbind: keyword bindings
    :return: wrapped function
    """
    assert argsbind
    argsb = list(argsbind)
    # indices of the placeholder slots, in order of appearance
    iargs = [i for i in range(len(argsbind)) if argsbind[i] is bind.placeholder]

    @functools.wraps(fun)
    def wrapped(*args, **kwargs):
        kws = kwbind.copy()
        # start from a fresh copy of the bound args each call
        args_this = [a for a in argsb]
        # fill placeholders left-to-right from the call-time positionals
        for i, arg in zip(iargs, args):
            args_this[i] = arg
        # any extra call-time positionals go after the bound ones
        args_this.extend(args[len(iargs):])
        kws.update(kwargs)
        return fun(*args_this, **kws)
    return wrapped
def bindkw(fun, **kwbind):
    """Return *fun* with the given keyword arguments pre-bound.

    ex)
        def fun(opt1, opt2):
            print(opt1, opt2)
        f = bind(fun, opt1=2, opt2=3)
        f()

    Call-time keywords override the bound ones.

    :param function fun:
    :param kwbind: keywords to bind
    :return: function
    """
    @functools.wraps(fun)
    def wrapped(*args, **kwargs):
        merged = dict(kwbind)
        merged.update(kwargs)
        return fun(*args, **merged)
    return wrapped
def default(fun, **kwdefault):
    """Return *fun* with replacement default keyword values.

    ex)
        def sample(a, b=1, c=1):
            print('from sample:', a, b, c)
            return a, b, c
        fun = default(sample, b=4, c=5)
        print(fun.default)  # the default-value dictionary
        fun(1)              # returns (1, 4, 5)

    The wrapper exposes (and reads, per call) the dictionary as
    ``wrapped.default``, so defaults can be tweaked afterwards.

    :param fun: function to wrap
    :param kwdefault: replacement defaults
    :return: wrapped function
    """
    @functools.wraps(fun)
    def wrapped(*args, **kwargs):
        effective = wrapped.default.copy()
        effective.update(kwargs)
        return fun(*args, **effective)

    wrapped.default = kwdefault
    return wrapped
def setup_once(initfn):
    """Run *initfn* before the first call of the decorated method, then
    rebind the method on the instance so later calls skip the setup. ::

        class B(object):
            def init(self, a):
                print('init call:', a)

            @setup_once(init)
            def mycall(self, a):
                print('real call:', a)

        b = B()
        b.mycall(222)   # init + real call
        b.mycall(333)   # real call only

    :param function initfn: unbound setup method of the same class
    :return: decorated method
    """
    init_name = initfn.__name__

    def wrap(method):
        method_name = method.__name__

        @functools.wraps(method)
        def wrapped(self, *args, **kwargs):
            @functools.wraps(method)
            def aftersetup(*a, **kw):
                return method(self, *a, **kw)

            setup = getattr(self, init_name)
            setup(*args, **kwargs)
            result = method(self, *args, **kwargs)
            # shadow the class attribute on this instance: no setup next time
            setattr(self, method_name, aftersetup)
            return result
        return wrapped
    return wrap
def static(**kwargs):
    """Attach static-like attributes to a function. USE carefully ^^

    Also publishes the decorated function under the global name ``static``
    in its own module, so the body can self-reference via ``static``.

    :param kwargs: attribute name/value pairs to set on the function
    :return: decorator
    """
    def wrap(fn):
        # fn.__globals__ works on both Python 2.6+ and Python 3; the
        # original fn.func_globals was removed in Python 3 and raised
        # AttributeError there.
        fn.__globals__['static'] = fn
        fn.__dict__.update(kwargs)
        return fn
    return wrap
def rand_crop(sz, *imagez):
    """Randomly crop all images to size *sz*.

    Assumes every image shares the same (H, W) and that sz is less than
    or equal to that size.

    :param sz: cropped image size (h, w)
    :param imagez: images to crop
    :return: cropped image(s), or the crop function bound to sz when no
        images are given
    """
    def _rand_crop(*imgz):
        h, w = imgz[0].shape[:2]
        assert h >= sz[0] and w >= sz[1]
        # pick a random top-left corner such that the window fits
        top = np.random.randint(h - sz[0]) if h > sz[0] else 0
        left = np.random.randint(w - sz[1]) if w > sz[1] else 0
        window = (slice(top, top + sz[0]), slice(left, left + sz[1]))
        cropped = tuple(img[window] for img in imgz)
        return tuple_or_not(*cropped)

    if imagez:
        return _rand_crop(*imagez)
    return _rand_crop
def rand_rotate(anglerange, *imagez):
    """Rotate image(s) by a uniform random angle drawn from *anglerange*.

    :param anglerange: (low, high) angle range
    :param imagez: images to rotate
    :return: rotated image(s), or the rotate function bound to the range
        when no images are given
    """
    low = anglerange[0]
    span = float(anglerange[1] - anglerange[0])

    def _rand_rotate(*imgz):
        angle = np.random.random(1)[0] * span + low
        rotated = tuple(rotate(img, angle) for img in imgz)
        return tuple_or_not(rotated)

    if imagez:
        return _rand_rotate(*imagez)
    return _rand_rotate
def blend_discrete(images, depthmask, depth=None):
    """Blend channel planes of *images* selected per-pixel by *depthmask*.

    :param images: array of shape [batch, h, w, channels]
    :param depthmask: int array of shape [batch, h, w]; each value picks
        the channel to copy for that pixel
    :param depth: number of channels to consider (default images.shape[3])
    :return: blended array of shape [batch, h, w, 1]
    """
    batch, height, width = images.shape[:3]
    depth = depth or images.shape[3]
    blend = np.empty(shape=(batch, height, width))
    for channel_index in range(depth):
        selected = (depthmask == channel_index)
        blend[selected] = images[..., channel_index][selected]
    return np.expand_dims(blend, axis=-1)
def rand_blend_mask(shape, rand=rand.uniform(-10, 10), **kwargs):
    """
    Build random blending masks from simplex noise.

    :param shape: (batch, h, w, ...) -- batch fixes the number of masks
    :param rand: callable drawing the z-seeds; default uniform over [-10, 10)
    :param kwargs: forwarded to snoise2dz (scale, octaves, ...)
    :return: noise masks shaped [batch, h, w]
    """
    zseed = rand(shape[0])
    return snoise2dz((shape[1], shape[2]), zseed, **kwargs)
def snoise2dvec(size, *params, **kwargs):
    """
    Vectorized snoise2d: one noise sheet per tuple of per-sheet parameters.

    :param size: (H, W) of each sheet
    :param params: parallel sequences of per-sheet snoise2d arguments
        (z, scale, octaves, persistence, ...)
    :param kwargs: shared keyword arguments forwarded to snoise2d
    :return: np.array stacking the sheets along axis 0
    """
    sheets = [snoise2d(size, *args, **kwargs) for args in zip(*params)]
    return np.stack(sheets, 0)
def snoise2d(size, z=0.0, scale=0.05, octaves=1, persistence=0.25, lacunarity=2.0):
    """
    2d simplex-noise sheet; `z` acts like a seed plane for snoise3.

    :param size: (H, W) of the output
    :param z: seed plane value
    :param scale: sample spacing; smaller scale -> larger apparent pattern
    :return: np.float32 array of shape `size` with values in [0, 1]
    """
    import noise
    data = np.empty(size, dtype='float32')
    for y in range(size[0]):
        for x in range(size[1]):
            v = noise.snoise3(x * scale, y * scale, z, octaves=octaves,
                              persistence=persistence, lacunarity=lacunarity)
            # fix: index [row, col]; the old data[x, y] went out of bounds
            # for non-square sizes and was transposed relative to
            # snoise2dz's [iz, y, x] layout
            data[y, x] = v
    data = data * 0.5 + 0.5  # map snoise range [-1, 1] into [0, 1]
    if __debug__:
        assert data.min() >= 0. and data.max() <= 1.0
    return data
def snoise2dz(size, z, scale=0.05, octaves=1, persistence=0.25, lacunarity=2.0):
    """
    Stack of 2d simplex-noise sheets, one per entry of `z` (z acts as seeds).
    The smaller `scale` is, the larger the apparent pattern.

    :param size: (H, W) of each sheet
    :param z: sequence of seed values; the output has len(z) sheets
    :param scale: sample spacing in noise space
    :return: np.float32 array [len(z), H, W] with values in [0, 1]
    """
    import noise
    z_l = len(z)
    data = np.empty((z_l, size[0], size[1]), dtype='float32')
    for iz in range(z_l):
        zvalue = z[iz]
        for y in range(size[0]):
            for x in range(size[1]):
                v = noise.snoise3(x * scale, y * scale, zvalue, octaves=octaves,
                                  persistence=persistence, lacunarity=lacunarity)
                data[iz, y, x] = v
    data = data * 0.5 + 0.5  # map snoise range [-1, 1] into [0, 1]
    if __debug__:
        assert data.min() >= 0. and data.max() <= 1.0
    return data
def rand_brightness(imagez, scale=1.0, randfun=rand.normal(0., .1), clamp=(0., 1.)):
    """
    Shift every image by one random per-sample brightness offset, clamped.

    NOTE: the shift is applied IN PLACE on the input arrays (`im += ...`).

    :param imagez: sequence of image batches, each [batch, h, w, c]
    :param scale: multiplier for the random offset
    :param randfun: random generator taking a shape; default normal(0, .1)
    :param clamp: (low, high) clamping range
    :return: tuple of the shifted images
    """
    lo, hi = clamp
    # one offset per batch element, broadcast over h, w, c
    shift = randfun((imagez[0].shape[0], 1, 1, 1)) * scale

    def _shift_clamp(im):
        im += shift
        im[im < lo] = lo
        im[im > hi] = hi
        return im

    return tuple(_shift_clamp(im) for im in imagez)
def elastic_transform(im, alpha=0.5, sigma=0.2, affine_sigma=1.): """ Based on https://gist.github.com/erniejunior/601cdf56d2b424757de5 elastic deformation of images as described in [Simard2003] """ # fixme : not implemented for multi channel ! import cv2 islist = isinstance(im, (tuple, list)) ima = im[0] if islist else im # image shape shape = ima.shape shape_size = shape[:2] # Random affine transform center_square = np.float32(shape_size) // 2 square_size = min(shape_size) // 3 pts1 = np.float32([center_square + square_size, [center_square[0] + square_size, center_square[1] - square_size], center_square - square_size]) pts2 = pts1 + np.random.uniform(-affine_sigma, affine_sigma, size=pts1.shape).astype(np.float32) M = cv2.getAffineTransform(pts1, pts2) if islist: res = [] for i, ima in enumerate(im): if i == 0: res.append(cv2.warpAffine(ima, M, shape_size[::-1], borderMode=cv2.BORDER_REFLECT_101)) else: res.append(cv2.warpAffine(ima, M, shape_size[::-1])) im = res else: ima = cv2.warpAffine(ima, M, shape_size[::-1], borderMode=cv2.BORDER_REFLECT_101) # ima = cv2.warpAffine(ima, M, shape_size[::-1]) # fast gaussian filter blur_size = int(4 * sigma) | 1 dx = cv2.GaussianBlur((np.random.rand(*shape) * 2 - 1), ksize=(blur_size, blur_size), sigmaX=sigma) * alpha dy = cv2.GaussianBlur((np.random.rand(*shape) * 2 - 1), ksize=(blur_size, blur_size), sigmaX=sigma) * alpha # remap x, y = np.meshgrid(np.arange(shape[1]), np.arange(shape[0])) map_x, map_y = (y + dy).astype('float32'), (x + dx).astype('float32') def remap(data): r = cv2.remap(data, map_y, map_x, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_REFLECT_101) return r[..., np.newaxis] if islist: return tuple([remap(ima) for ima in im]) else: return remap(ima)
def rotate_crop(centerij, sz, angle, img=None, mode='constant', **kwargs):
    """ rotate and crop around a center point.

    Crops a window large enough to survive any rotation
    (half-side = ceil(sqrt(sum(sz^2)))), rotates it, then center-crops to sz.
    If no img is given, a function bound to the arguments is returned.

    :param centerij: (i, j) center of the crop in the source image
    :param sz: (h, w) of the final crop
    :param angle: rotation angle in degrees (skimage.transform.rotate)
    :param img: [h, w, d] image or None
    :param mode: padding option passed to rotate
    :return: cropped image or function
    :raises NotImplementedError: when the center is closer to the top-left
        border than the enlarged crop needs (padding is not implemented)
    """
    from skimage import transform
    sz = np.array(sz)
    # half-side of a square that contains the sz-rectangle at any angle
    crop_half = int(np.ceil(np.sqrt(np.square(sz).sum())))
    # fix: the guard was inverted -- it raised for exactly the safe centers
    # and let unsafe ones fall through to negative slice starts
    if centerij[0] < crop_half or centerij[1] < crop_half:
        raise NotImplementedError  # TODO: pad the source image instead
    slicei = slice(centerij[0] - crop_half, centerij[0] + crop_half)
    slicej = slice(centerij[1] - crop_half, centerij[1] + crop_half)

    def _rotate_cropcenter(im):
        enoughcrop = im[slicei, slicej]
        rotated = transform.rotate(enoughcrop, angle, resize=False,
                                   preserve_range=True, mode=mode, **kwargs)
        return cropcenter(sz, rotated)

    if img is not None:
        return _rotate_cropcenter(img)
    return _rotate_cropcenter
def crop(img, center, sz, mode='constant'):
    """ crop a sz window from img centered at `center`, padding when the
    window falls outside the image.

    :param img: np.array [H, W, ...]
    :param center: (i, j) center of the crop
    :param sz: (h, w) of the crop
    :param mode: np.pad mode used when padding is needed
    :return: cropped (possibly padded) view/copy of shape sz
    """
    center = np.array(center)
    sz = np.array(sz)
    istart = (center - sz / 2.).astype('int32')
    iend = istart + sz
    imsz = img.shape[:2]
    if np.any(istart < 0) or np.any(iend > imsz):
        # np.pad requires NON-NEGATIVE widths: negate the underflow at the
        # start, keep the overflow at the end (the old np.minimum(0, istart)
        # produced negative widths and made np.pad raise)
        padwidth = [(max(0, -istart[0]), max(0, iend[0] - imsz[0])),
                    (max(0, -istart[1]), max(0, iend[1] - imsz[1]))]
        padwidth += [(0, 0)] * (len(img.shape) - 2)
        img = np.pad(img, padwidth, mode=mode)
        # padding shifted the origin, so clamp the start to 0
        istart = (max(0, istart[0]), max(0, istart[1]))
        return img[istart[0]:istart[0] + sz[0], istart[1]:istart[1] + sz[1]]
    return img[istart[0]:iend[0], istart[1]:iend[1]]
def cropcenter(sz, img=None):
    """ center-crop img to size sz; if no img, return the crop function.

    :param sz: target size, e.g. (H, W); may cover only leading dimensions
    :param img: optional image
    :return: cropped image, or a function bound to sz
    """
    l = len(sz)
    sz = np.array(sz)

    def wrapped(im):
        imsz = np.array(im.shape)
        # integer floor division: float start indices raise TypeError in
        # Python 3 slices
        s = (imsz[:l] - sz) // 2   # start index
        to = s + sz                # end index
        # index with a TUPLE of slices; list indexing is invalid in modern numpy
        slices = tuple(slice(a, b) for a, b in zip(s, to))
        return im[slices]

    if img is not None:
        return wrapped(img)
    return wrapped
def pad_if_need(sz_atleast, img, mode='constant'):
    """ pad img if needed to guarantee a minimum (H, W).

    The shortfall is split between the two sides (extra pixel on the
    bottom/right when odd).

    :param sz_atleast: [H, W] minimum size
    :param img: image np.array [H, W, ...]
    :param mode: str, np.pad mode
    :return: padded image, or the input unchanged if already large enough
    """
    imsz = img.shape[:2]  # assume img [H, W, ...]
    padneed = np.asarray((sz_atleast[0] - imsz[0], sz_atleast[1] - imsz[1]))
    if np.any(padneed > 0):
        # explicit integer division; the old float `/ 2` only worked via
        # implicit truncation when stored into the int16 padding array
        padding = np.zeros((img.ndim, 2), dtype='int16')
        padneed = np.maximum(padneed, 0)
        padding[:2, 0] = padneed // 2
        padding[:2, 1] = padneed - padneed // 2
        img = np.pad(img, padding, mode=mode)
    return img
def canny(img, threshold1=255/3, threshold2=255, **kwargs):
    """ Canny edge detection keeping a trailing channel axis.

    :param img: single image (2d/3d) or a batch (4d)
    :param threshold1: lower hysteresis threshold
    :param threshold2: upper hysteresis threshold
    :return: edge map, [..., H, W, 1]
    :raises ValueError: for inputs with more than 4 dimensions
    """
    import cv2

    def _edges(image):
        return cv2.Canny(image, threshold1, threshold2, **kwargs)

    if img.ndim <= 3:
        # single image
        edge = _edges(img)
        if edge.ndim == 2:
            edge = np.expand_dims(edge, 2)
        return edge
    if img.ndim == 4:
        # batch: run detection per sample
        edge = np.asarray([_edges(i) for i in img])
        if edge.ndim == 3:
            edge = np.expand_dims(edge, 3)
        return edge
    raise ValueError('above 5d?')
def guess_package_path(searchfrom):
    """ guess the directory that CONTAINS the top-most package by walking
    up from `searchfrom` (and, failing that, across its sibling folders)
    looking for __init__.py files.

    :param searchfrom: directory to start searching from
    :return: parent directory of the package root, or None if no package
        could be found
    """
    from snipy.io import fileutil
    current = searchfrom + '/'
    init_found = False
    pack_found = False

    # climb toward the filesystem root until a dir with __init__.py appears
    while not init_found and current != '/':
        current = os.path.dirname(current)
        initfile = os.path.join(current, '__init__.py')
        init_found = os.path.exists(initfile)

    if not init_found:
        # search for breadth: look one level up, across sibling folders
        searchfrom = dirname(searchfrom)
        for folder in fileutil.listfolder(searchfrom):
            current = os.path.join(searchfrom, folder)
            initfile = os.path.join(current, '__init__.py')
            init_found = os.path.exists(initfile)
            if init_found:
                break

    # keep climbing while __init__.py persists; the first miss is the
    # directory just above the package root
    while init_found:
        current = os.path.dirname(current)
        initfile = os.path.join(current, '__init__.py')
        init_found = os.path.exists(initfile)
    pack_found = not init_found

    return current if pack_found else None
def find_package_path(searchfrom):
    """ walk up from `searchfrom` to find the directory that CONTAINS the
    top-most package (the last ancestor holding an __init__.py).

    :param searchfrom: path to start from
    :return: parent directory of the package root, or None when no
        __init__.py is found on the way up
    """
    current = searchfrom + '/'
    init_found = False
    # climb until a directory with __init__.py shows up (or we hit root)
    while not init_found and current != '/':
        current = os.path.dirname(current)
        init_found = os.path.exists(os.path.join(current, '__init__.py'))
    if not init_found:
        return None
    # keep climbing while __init__.py persists; the first miss is the
    # directory just above the package root
    while init_found:
        current = os.path.dirname(current)
        init_found = os.path.exists(os.path.join(current, '__init__.py'))
    return current
def append_this_package_path(depth=1):
    """ append the caller's package root to sys.path.

    Intended to be used from a this_package.py helper::

        import snipy.this_package

    :param depth: stack depth of the caller whose package should be added
    """
    from .caller import caller
    logg.debug('caller module %s', caller.modulename(depth + 1))
    c = caller.abspath(depth + 1)
    logg.debug('caller path %s', c)

    # guess the package root from the caller's directory
    p = guess_package_path(dirname(c))
    if p:
        logg.debug('appending sys path %s', p)
        append_sys_path(p)
    else:
        # do some logging
        logg.debug('failed to guess package path for: %s', c)
def flows(args):
    """ coerce every element of `args` into a flow and collect them in a
    FlowList (a pipeline of iterator transforms).

    todo : add some example

    :param args: iterable of callables / iterator flows / itertools objects
    :return: FlowList
    """
    def flow_if_not(fun):
        if isinstance(fun, iterator):
            # already an iterator flow: pass through
            return fun
        elif isinstance(fun, type) and 'itertools' in str(fun.__class__):
            # itertools classes are usable as-is
            return fun
        else:
            try:
                return flow(fun)
            except AttributeError:
                # generator object has no attribute '__module__'
                return fun

    return FlowList(map(flow_if_not, args))
def forever(it):
    """ cycle over `it` endlessly by re-iterating it on every pass.

    An empty iterable terminates immediately instead of spinning forever.
    Uses `return` instead of `raise StopIteration` inside the generator:
    under PEP 479 (Python 3.7+) a StopIteration raised in a generator body
    becomes a RuntimeError.

    :param it: re-iterable (a plain generator is exhausted after one pass,
        so pass a container or something that yields fresh iterators)
    """
    while True:
        # fresh iterator each pass prevents an infinite empty loop
        i = iter(it)
        try:
            yield next(i)
        except StopIteration:
            # empty iterable: stop cleanly
            return
        while True:
            try:
                yield next(i)
            except StopIteration:
                break
def ibatch(size, iterable=None, rest=False):
    """ group an iterable into lists of `size` items.

    :param size: batch size
    :param iterable: if given, return the batch iterator directly;
        otherwise return a reusable flow
    :param rest: if True, also yield the final partial batch
    :return: iterator of lists, or a flow awaiting an iterable
    """
    @iterflow
    def exact_size(it):
        # drop the trailing partial batch; uses next()/return instead of
        # it.next()/raise StopIteration (py3 + PEP 479 compatibility)
        it = iter(it)
        while True:
            batch = []
            for _ in range(size):
                try:
                    batch.append(next(it))
                except StopIteration:
                    return
            yield batch

    @iterflow
    def at_most(it):
        # keep the trailing partial batch when non-empty
        it = iter(it)
        while True:
            data = []
            for _ in range(size):
                try:
                    data.append(next(it))
                except StopIteration:
                    if data:
                        yield data
                    return
            yield data

    ibatchit = at_most if rest else exact_size
    return ibatchit if iterable is None else ibatchit(iterable)
def batchzip(size, iterable=None, rest=False):
    """ batch then zip: group `size` items and transpose each batch.

    todo : add example

    :param size: batch size
    :param iterable: optional iterable to run through immediately
    :param rest: keep the final partial batch if True
    :return: composed flow, or its application to `iterable`
    """
    pipeline = ibatch(size, rest=rest) >> zipflow
    if iterable is None:
        return pipeline
    return pipeline(iterable)
def batchstack(size, iterable=None, rest=False):
    """ batch, zip, then vertically stack each zipped column into an array.

    todo : add example

    :param size: batch size
    :param iterable: optional iterable to run through immediately
    :param rest: keep the final partial batch if True
    :return: composed flow, or its application to `iterable`
    """
    def stack(data):
        import numpy as np
        return map(np.vstack, data)

    pipeline = batchzip(size, rest=rest) >> flow(stack)
    return pipeline if iterable is None else pipeline(iterable)
def shuffle(qsize=1024, iterable=None):
    """ approximate streaming shuffle with a fixed-size reservoir.

    Each incoming item is inserted at a random position in a buffer of up
    to `qsize` items; once the buffer is warm, a random item is popped per
    incoming item.  Larger qsize gives a better shuffle.

    :param qsize: reservoir size
    :param iterable: optional iterable to shuffle immediately
    :return: flow, or the shuffled iterator
    """
    @iterflow
    def shuffleit(it):
        from random import randrange
        q = []
        for i, d in enumerate(it):
            # random insertion keeps the buffer unordered
            q.insert(randrange(0, len(q) + 1), d)
            if i < qsize:
                continue  # warm-up: fill the reservoir first
            yield q.pop(randrange(0, len(q)))
        # source exhausted: drain the remaining buffered items
        while q:
            yield q.pop(randrange(0, len(q)))

    return shuffleit if iterable is None else shuffleit(iterable)
def to_permutation_matrix(matches):
    """Converts a permutation into a permutation matrix.

    `matches` is a dictionary whose keys are vertices and whose values are
    partners. For each vertex ``u`` and ``v``, entry (``u``, ``v``) in the
    returned matrix will be a ``1`` if and only if ``matches[u] == v``.

    Pre-condition: `matches` must be a permutation on an initial subset of
    the natural numbers.

    Returns a permutation matrix as a square NumPy array.

    """
    n = len(matches)
    P = np.zeros((n, n))
    # Vectorized form of
    #
    #     for (u, v) in matches.items():
    #         P[u, v] = 1
    #
    # NumPy advanced indexing needs a (rows, cols) pair; the previous
    # list-of-tuples index selected whole ROWS instead of single entries.
    rows, cols = zip(*matches.items())
    P[rows, cols] = 1
    return P
def four_blocks(topleft, topright, bottomleft, bottomright):
    """Convenience function that creates a block matrix with the specified
    blocks.

    Each argument must be a NumPy matrix. The two top matrices must have the
    same number of rows, as must the two bottom matrices. The two left
    matrices must have the same number of columns, as must the two right
    matrices.

    """
    # build the two half-rows first, then stack them
    # (uses this module's two-argument hstack/vstack helpers)
    top = hstack(topleft, topright)
    bottom = hstack(bottomleft, bottomright)
    return vstack(top, bottom)
def to_bipartite_matrix(A):
    """Returns the adjacency matrix of a bipartite graph whose biadjacency
    matrix is `A`.

    `A` must be a NumPy array.

    If `A` has **m** rows and **n** columns, then the returned matrix has
    **m + n** rows and columns.

    """
    m, n = A.shape
    # adjacency matrix [[0, A], [A^T, 0]] of the bipartite graph
    left_pad = zeros(m, m)
    right_pad = zeros(n, n)
    return four_blocks(left_pad, A, A.T, right_pad)
def to_pattern_matrix(D):
    """Returns the Boolean matrix in the same shape as `D` with ones exactly
    where there are nonzero entries in `D`.

    `D` must be a NumPy array.

    """
    # boolean mask keeps D's dtype while marking the nonzero positions
    result = np.zeros_like(D)
    result[D != 0] = 1
    return result
def birkhoff_von_neumann_decomposition(D):
    """Returns the Birkhoff--von Neumann decomposition of the doubly
    stochastic matrix `D`.

    The input `D` must be a square NumPy array representing a doubly
    stochastic matrix (that is, a matrix whose entries are nonnegative reals
    and whose row sums and column sums are all 1). Each doubly stochastic
    matrix is a convex combination of at most ``n ** 2`` permutation
    matrices, where ``n`` is the dimension of the input array.

    The returned value is a list of pairs whose length is at most
    ``n ** 2``. In each pair, the first element is a real number in the
    interval **(0, 1]** and the second element is a NumPy array representing
    a permutation matrix. This represents the doubly stochastic matrix as a
    convex combination of the permutation matrices.

    The input matrix may also be a scalar multiple of a doubly stochastic
    matrix, in which case the row sums and column sums must each be *c*, for
    some positive real number *c*. This may be useful in avoiding precision
    issues: given a doubly stochastic matrix that will have many entries
    close to one, multiply it by a large positive integer. The returned
    permutation matrices will be the same regardless of whether the given
    matrix is a doubly stochastic matrix or a scalar multiple of a doubly
    stochastic matrix, but in the latter case, the coefficients will all be
    scaled by the appropriate scalar multiple, and their sum will be that
    scalar instead of one.

    For example::

        >>> import numpy as np
        >>> from birkhoff import birkhoff_von_neumann_decomposition as decomp
        >>> D = np.ones((2, 2))
        >>> zipped_pairs = decomp(D)
        >>> coefficients, permutations = zip(*zipped_pairs)
        >>> coefficients
        (1.0, 1.0)
        >>> permutations[0]
        array([[ 1.,  0.],
               [ 0.,  1.]])
        >>> permutations[1]
        array([[ 0.,  1.],
               [ 1.,  0.]])
        >>> zipped_pairs = decomp(D / 2)  # halve each value in the matrix
        >>> coefficients, permutations = zip(*zipped_pairs)
        >>> coefficients  # will be half as large as before
        (0.5, 0.5)
        >>> permutations[0]  # will be the same as before
        array([[ 1.,  0.],
               [ 0.,  1.]])
        >>> permutations[1]
        array([[ 0.,  1.],
               [ 1.,  0.]])

    The returned list of pairs is given in the order computed by the
    algorithm (so in particular they are not sorted in any way).

    """
    m, n = D.shape
    if m != n:
        raise ValueError('Input matrix must be square ({} x {})'.format(m, n))
    indices = list(itertools.product(range(m), range(n)))
    # These two lists will store the result as we build it up each iteration.
    coefficients = []
    permutations = []
    # Create a copy of D so that we don't modify it directly. Cast the
    # entries of the matrix to floating point numbers, regardless of
    # whether they were integers.
    S = D.astype('float')
    while not np.all(S == 0):
        # Create an undirected graph whose adjacency matrix contains a 1
        # exactly where the matrix S has a nonzero entry.
        W = to_pattern_matrix(S)
        # Construct the bipartite graph whose left and right vertices both
        # represent the vertex set of the pattern graph (whose adjacency
        # matrix is ``W``).
        X = to_bipartite_matrix(W)
        # Convert the matrix of a bipartite graph into a NetworkX graph object.
        # NOTE(review): networkx 3.0 removed `from_numpy_matrix`; this code
        # requires networkx < 3 (or a switch to `from_numpy_array`) --
        # verify the pinned dependency.
        G = from_numpy_matrix(X)
        # Compute a perfect matching for this graph. The dictionary `M` has one
        # entry for each matched vertex (in both the left and the right vertex
        # sets), and the corresponding value is its partner.
        #
        # The bipartite maximum matching algorithm requires specifying
        # the left set of nodes in the bipartite graph. By construction,
        # the left set of nodes is {0, ..., n - 1} and the right set is
        # {n, ..., 2n - 1}; see `to_bipartite_matrix()`.
        left_nodes = range(n)
        M = maximum_matching(G, left_nodes)
        # However, since we have both a left vertex set and a right vertex set,
        # each representing the original vertex set of the pattern graph
        # (``W``), we need to convert any vertex greater than ``n`` to its
        # original vertex number. To do this,
        #
        #   - ignore any keys greater than ``n``, since they are already
        #     covered by earlier key/value pairs,
        #   - ensure that all values are less than ``n``.
        #
        M = {u: v % n for u, v in M.items() if u < n}
        # Convert that perfect matching to a permutation matrix.
        P = to_permutation_matrix(M)
        # Get the smallest entry of S corresponding to the 1 entries in the
        # permutation matrix.
        q = min(S[i, j] for (i, j) in indices if P[i, j] == 1)
        # Store the coefficient and the permutation matrix for later.
        coefficients.append(q)
        permutations.append(P)
        # Subtract P scaled by q. After this subtraction, S has a zero entry
        # where the value q used to live.
        S -= q * P
        # PRECISION ISSUE: There seems to be a problem with floating point
        # precision here, so we need to round down to 0 any entry that is very
        # small.
        S[np.abs(S) < TOLERANCE] = 0.0
    return list(zip(coefficients, permutations))
def bump_version(version, which=None):
    """Returns the result of incrementing `version`.

    If `which` is not specified, the "patch" part of the version number will
    be incremented. If `which` is specified, it must be ``'major'``,
    ``'minor'``, or ``'patch'``. If it is one of these three strings, the
    corresponding part of the version number will be incremented instead of
    the patch number.

    Returns a string representing the next version number.

    Example::

        >>> bump_version('2.7.1')
        '2.7.2'
        >>> bump_version('2.7.1', 'minor')
        '2.8.0'
        >>> bump_version('2.7.1', 'major')
        '3.0.0'

    """
    try:
        parts = [int(n) for n in version.split('.')]
    except ValueError:
        fail('Current version is not numeric')
    if len(parts) != 3:
        fail('Current version is not semantic versioning')
    # Determine where to increment the version number
    PARTS = {'major': 0, 'minor': 1, 'patch': 2}
    index = PARTS[which] if which in PARTS else 2
    # Increment the version number at that index and RESET the subsequent
    # parts to 0 (fix: the old code kept them, so bumping 'minor' on 2.7.1
    # produced 2.8.1 instead of the documented 2.8.0).
    before, middle = parts[:index], parts[index]
    middle += 1
    after = [0] * (len(parts) - index - 1)
    return '.'.join(str(n) for n in before + [middle] + after)
def get_version(filename, pattern):
    """Gets the current version from the specified file.

    This function assumes the file includes a string of the form::

        <pattern> = <version>

    """
    with open(filename) as f:
        # pass flags via the flags argument: a trailing inline-flag group
        # `(?sm)` raises ValueError on Python 3.11+ (inline flags must
        # appear at the start of the pattern)
        match = re.search(r"^(\s*%s\s*=\s*')(.+?)(')" % pattern, f.read(),
                          re.MULTILINE | re.DOTALL)
    if match:
        before, version, after = match.groups()
        return version
    fail('Could not find {} in {}'.format(pattern, filename))
def fail(message=None, exit_status=None):
    """Prints the specified message to stderr and exits the program with the
    specified exit status (any falsy status, including None, becomes 1).

    """
    sys.stderr.write('Error: {}\n'.format(message))
    sys.exit(exit_status or 1)
def git_tag(tag):
    """Creates a signed, annotated git tag for the given version string."""
    print('Tagging "{}"'.format(tag))
    message = '"Released version {}"'.format(tag)
    command = ['git', 'tag', '-s', '-m', message, tag]
    Popen(command).wait()
def initialize(self, templates_path, global_data):
    """initialize the jinja2 environment with templates' path

    parameters
      templates_path   str    directory holding the templates
      global_data      dict   global data available in every template
    """
    self.env = Environment(loader=FileSystemLoader(templates_path))
    # trim_blocks: drop the first newline after each block tag
    self.env.trim_blocks = True
    self.global_data = global_data
def render(self, template, **data):
    """Render data with template, return html unicodes.

    parameters
      template   str    the template's filename
      data       dict   the data to render

    Raises JinjaTemplateNotFound when the template is missing.
    """
    # layer the call data over a copy of the globals (globals untouched)
    context = self.global_data.copy()
    context.update(data)
    try:
        return self.env.get_template(template).render(**context)
    except TemplateNotFound:
        raise JinjaTemplateNotFound
def render_to(self, path, template, **data):
    """Render data with `template` and write the encoded result to `path`."""
    content = self.render(template, **data)
    with open(path, 'w') as out:
        out.write(content.encode(charset))
def render(template, **data):
    """Shortcut to render data with `template`.

    Wraps `renderer.render` and exits the program on a missing template
    instead of raising.
    """
    try:
        return renderer.render(template, **data)
    except JinjaTemplateNotFound as e:
        # log which template was missing, then abort with its exit code
        logger.error(e.__doc__ + ', Template: %r' % template)
        sys.exit(e.exit_code)
def replace_relative_url_to_absolute(self, content):
    """Rewrite every '../'-prefixed url in `content` into an absolute
    path rooted at <cwd>/./src/../ .
    """
    prefix = os.path.join(os.getcwd(), './src', '../')
    return content.replace('../', prefix)
def get_dataframe(self):
    """
    Get the DataFrame for this view.

    Defaults to using `self.dataframe`. Always go through this method
    rather than accessing `self.dataframe` directly, as `self.dataframe`
    gets evaluated only once and cached for all subsequent requests.
    Override it to serve different dataframes depending on the incoming
    request.
    """
    assert self.dataframe is not None, (
        "'%s' should either include a `dataframe` attribute, "
        "or override the `get_dataframe()` method."
        % self.__class__.__name__
    )
    return self.dataframe
def index_row(self, dataframe):
    """
    Indexes the row based on the request parameters.

    Selects the row named by the lookup URL kwarg and returns it as a
    one-row DataFrame (Series -> frame -> transpose).
    """
    key = self.kwargs[self.lookup_url_kwarg]
    return dataframe.loc[key].to_frame().T
def get_object(self):
    """
    Returns the row the view is displaying.

    You may want to override this if you need to provide non-standard
    queryset lookups. Eg if objects are referenced using multiple
    keyword arguments in the url conf.

    Raises Http404 when the lookup value does not index a row.
    """
    dataframe = self.filter_dataframe(self.get_dataframe())

    assert self.lookup_url_kwarg in self.kwargs, (
        'Expected view %s to be called with a URL keyword argument '
        'named "%s". Fix your URL conf, or set the `.lookup_field` '
        'attribute on the view correctly.' %
        (self.__class__.__name__, self.lookup_url_kwarg)
    )

    try:
        obj = self.index_row(dataframe)
    except (IndexError, KeyError, ValueError):
        # bad or missing index value -> 404 rather than a server error
        raise Http404

    # May raise a permission denied
    self.check_object_permissions(self.request, obj)

    return obj
def paginator(self):
    """
    The paginator instance associated with the view, or `None`.
    Instantiated lazily on first access and cached on the view.
    """
    if not hasattr(self, '_paginator'):
        cls = self.pagination_class
        self._paginator = cls() if cls is not None else None
    return self._paginator