signature (string, lengths 8 to 3.44k) | body (string, lengths 0 to 1.41M) | docstring (string, lengths 1 to 122k) | id (string, lengths 5 to 17) |
---|---|---|---|
async def listener(self, channel): | while True:<EOL><INDENT>message = await self.channel_layer.receive(channel)<EOL>if not message.get("<STR_LIT:type>", None):<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>scope = {"<STR_LIT:type>": "<STR_LIT>", "<STR_LIT>": channel}<EOL>instance_queue = self.get_or_create_application_instance(channel, scope)<EOL>await instance_queue.put(message)<EOL><DEDENT> | Single-channel listener | f1781:c0:m3 |
def parse_image_json(text): | image_details = json.loads(text)<EOL>if image_details.get('<STR_LIT>') is not None:<EOL><INDENT>try:<EOL><INDENT>image_details = image_details.get('<STR_LIT>')[<NUM_LIT:0>]<EOL><DEDENT>except IndexError:<EOL><INDENT>image_details = None<EOL><DEDENT><DEDENT>return image_details<EOL> | parses response output of AWS describe commands and returns the first (and only) item in array :param text: describe output :return: image json | f1786:m0 |
def __init__(self, config_save_dir, aws_profile, bucket, regions, ami_name, upload_file): | self.aws_project = aws_profile<EOL>self.aws_regions = regions<EOL>self.config_save_dir = config_save_dir<EOL>self.bucket_name = bucket<EOL>self.ami_name = ami_name<EOL>self.upload_file = upload_file<EOL> | Instantiate with common properties for all VM imports to AWS :param config_save_dir: where to save aws config files :param aws_project: which aws_project to upload to :param profile: which aws credential profile to use :param region: which aws region to import AMI into | f1786:c0:m0 |
def validate(self): | self.validate_regions()<EOL>self.validate_bucket()<EOL>self.validate_ec2_action()<EOL> | Call instance validation methods :return: | f1786:c0:m1 |
def validate_regions(self): | for region in self.aws_regions:<EOL><INDENT>if region not in aws_regions:<EOL><INDENT>print("<STR_LIT>".format(region))<EOL>print("<STR_LIT>".format(aws_regions))<EOL><DEDENT><DEDENT> | Validate the user specified regions are valid :return: | f1786:c0:m2 |
def validate_ec2_action(self): | import_cmd = '<STR_LIT>'.format(self.aws_project, self.aws_regions[<NUM_LIT:0>])<EOL>print("<STR_LIT>".format(import_cmd))<EOL>try:<EOL><INDENT>subprocess.check_output(shlex.split(import_cmd), stderr=subprocess.STDOUT)<EOL><DEDENT>except subprocess.CalledProcessError as e:<EOL><INDENT>if "<STR_LIT>" in e.output:<EOL><INDENT>print("<STR_LIT>")<EOL>return<EOL><DEDENT>print("<STR_LIT>".format(e.output))<EOL>print("<STR_LIT>")<EOL>sys.exit(<NUM_LIT:5>)<EOL><DEDENT> | Attempt to validate that the provided user has permissions to import an AMI :return: | f1786:c0:m3 |
def validate_bucket(self): | s3_check_cmd = "<STR_LIT>".format(self.bucket_name, self.aws_project,<EOL>self.aws_regions[<NUM_LIT:0>])<EOL>print("<STR_LIT>")<EOL>try:<EOL><INDENT>subprocess.check_output(shlex.split(s3_check_cmd))<EOL><DEDENT>except subprocess.CalledProcessError as e:<EOL><INDENT>print("<STR_LIT>".format(e))<EOL>print("<STR_LIT>".format(self.bucket_name))<EOL>sys.exit(<NUM_LIT:5>)<EOL><DEDENT> | Do a quick check to see if the s3 bucket is valid :return: | f1786:c0:m4 |
def get_image_id_by_name(self, ami_name, region='<STR_LIT>'): | image_details = None<EOL>detail_query_attempts = <NUM_LIT:0><EOL>while image_details is None:<EOL><INDENT>describe_cmd = "<STR_LIT>".format(ami_name, self.aws_project, region)<EOL>res = subprocess.check_output(shlex.split(describe_cmd))<EOL>print("<STR_LIT>".format(res))<EOL>image_details = parse_image_json(res)<EOL>if not image_details:<EOL><INDENT>if detail_query_attempts > <NUM_LIT:5>:<EOL><INDENT>print("<STR_LIT>")<EOL>raise Exception("<STR_LIT>")<EOL><DEDENT>time.sleep(<NUM_LIT:10>)<EOL>print("<STR_LIT>")<EOL>detail_query_attempts += <NUM_LIT:1><EOL><DEDENT><DEDENT>image_id = image_details['<STR_LIT>']<EOL>print("<STR_LIT>".format(image_id))<EOL>return image_id<EOL> | Locate an AMI image id by name in a particular region :param ami_name: ami name you need the id for :param region: the region the image exists in :return: id of the image | f1786:c0:m5 |
def copy_ami_to_new_name(self, ami_id, new_name, source_region='<STR_LIT>'): | new_image_ids = []<EOL>for region in self.aws_regions:<EOL><INDENT>copy_img_cmd = "<STR_LIT>".format(ami_id, self.aws_project, source_region, region, new_name)<EOL>res = subprocess.check_output(shlex.split(copy_img_cmd))<EOL>print("<STR_LIT>".format(res))<EOL>new_image_id = json.loads(res).get('<STR_LIT>')<EOL>new_image_ids.append((new_image_id, region))<EOL>print("<STR_LIT>".format(new_image_id))<EOL><DEDENT>print("<STR_LIT>".format(new_image_ids))<EOL>for tupp in new_image_ids:<EOL><INDENT>image_id = tupp[<NUM_LIT:0>]<EOL>image_region = tupp[<NUM_LIT:1>]<EOL>self.wait_for_copy_available(image_id, image_region)<EOL><DEDENT> | Copies an AMI from the default region and name to the desired name and region :param ami_id: ami id to copy :param new_name: name of the new ami to create :param source_region: the source region of the ami to copy | f1786:c0:m6 |
def deregister_image(self, ami_id, region='<STR_LIT>'): | deregister_cmd = "<STR_LIT>".format(self.aws_project, region, ami_id)<EOL>print("<STR_LIT>")<EOL>print("<STR_LIT>".format(deregister_cmd))<EOL>res = subprocess.check_output(shlex.split(deregister_cmd))<EOL>print("<STR_LIT>".format(res))<EOL>print("<STR_LIT>")<EOL> | Deregister an AMI by id :param ami_id: :param region: region to deregister from :return: | f1786:c0:m7 |
def wait_for_copy_available(self, image_id, region): | waiting = True<EOL>describe_image_cmd = "<STR_LIT>".format(self.aws_project, region, image_id)<EOL>while waiting:<EOL><INDENT>res = subprocess.check_output(shlex.split(describe_image_cmd))<EOL>print("<STR_LIT>".format(res))<EOL>image_json = parse_image_json(res)<EOL>image_state = image_json['<STR_LIT>']<EOL>if image_state == '<STR_LIT>':<EOL><INDENT>print("<STR_LIT>")<EOL>return<EOL><DEDENT>elif image_state == '<STR_LIT>':<EOL><INDENT>print("<STR_LIT>")<EOL>sys.exit(<NUM_LIT:5>)<EOL><DEDENT>else:<EOL><INDENT>print("<STR_LIT>".format(image_state))<EOL>print("<STR_LIT>")<EOL>time.sleep(<NUM_LIT:30>)<EOL><DEDENT><DEDENT> | Wait for the newly copied ami to become available :param image_id: image id to monitor :param region: region to monitor copy | f1786:c0:m8 |
def rename_image(self, ami_name, new_ami_name, source_region='<STR_LIT>'): | print("<STR_LIT>")<EOL>image_id = self.get_image_id_by_name(ami_name, source_region)<EOL>self.copy_ami_to_new_name(image_id, new_ami_name, source_region)<EOL>self.deregister_image(image_id, source_region)<EOL> | Method which renames an ami by copying to a new ami with a new name (only way this is possible in AWS) :param ami_name: :param new_ami_name: :return: | f1786:c0:m9 |
def create_config_file(self, vmdk_location, description): | description = description<EOL>format = "<STR_LIT>"<EOL>user_bucket = {<EOL>"<STR_LIT>": self.bucket_name,<EOL>"<STR_LIT>": vmdk_location<EOL>}<EOL>parent_obj = {'<STR_LIT>': description, '<STR_LIT>': format, '<STR_LIT>': user_bucket}<EOL>obj_list = [parent_obj]<EOL>temp_fd, temp_file = tempfile.mkstemp()<EOL>print('<STR_LIT>'.format(vmdk_location, temp_file))<EOL>with os.fdopen(temp_fd, '<STR_LIT:w>') as f:<EOL><INDENT>json.dump(obj_list, f)<EOL><DEDENT>return temp_fd, temp_file<EOL> | Create the aws import config file :param vmdk_location: location of downloaded VMDK :param description: description to use for config_file creation :return: config file descriptor, config file full path | f1786:c0:m10 |
def run_ec2_import(self, config_file_location, description, region='<STR_LIT>'): | import_cmd = "<STR_LIT>""<STR_LIT>".format(description, self.aws_project, region, config_file_location)<EOL>try:<EOL><INDENT>res = subprocess.check_output(shlex.split(import_cmd), stderr=subprocess.STDOUT)<EOL><DEDENT>except subprocess.CalledProcessError as e:<EOL><INDENT>print("<STR_LIT>")<EOL>print("<STR_LIT>".format(e.output))<EOL>sys.exit(<NUM_LIT:5>)<EOL><DEDENT>print("<STR_LIT>".format(res))<EOL>res_json = json.loads(res)<EOL>task_running, import_id = self.check_task_status_and_id(res_json)<EOL>return import_id<EOL> | Runs the command to import an uploaded vmdk to aws ec2 :param config_file_location: config file of import param location :param description: description to attach to the import task :return: the import task id for the given ami | f1786:c0:m11 |
def upload_to_s3(self, region='<STR_LIT>'): | s3_import_cmd = "<STR_LIT>".format(self.upload_file, self.bucket_name,<EOL>self.aws_project, region)<EOL>print("<STR_LIT>".format(self.bucket_name, s3_import_cmd))<EOL>s3_upload = subprocess.Popen(shlex.split(s3_import_cmd), stderr=subprocess.PIPE)<EOL>while True:<EOL><INDENT>progress = s3_upload.stderr.readline()<EOL>if progress == '<STR_LIT>' and s3_upload.poll() is not None:<EOL><INDENT>break<EOL><DEDENT>if progress:<EOL><INDENT>print (progress)<EOL><DEDENT><DEDENT>rc = s3_upload.poll()<EOL>if rc != <NUM_LIT:0>:<EOL><INDENT>raise subprocess.CalledProcessError(rc)<EOL><DEDENT>print("<STR_LIT>")<EOL> | Uploads the vmdk file to aws s3 :param file_location: location of vmdk :return: | f1786:c0:m12 |
def wait_for_import_to_complete(self, import_id, region='<STR_LIT>'): | task_running = True<EOL>while task_running:<EOL><INDENT>import_status_cmd = "<STR_LIT>".format(self.aws_project, region, import_id)<EOL>res = subprocess.check_output(shlex.split(import_status_cmd))<EOL>print("<STR_LIT>".format(res))<EOL>res_json = json.loads(res)<EOL>task_running, image_id = self.check_task_status_and_id(res_json)<EOL><DEDENT> | Monitors the status of aws import, waiting for it to complete, or error out :param import_id: id of import task to monitor | f1786:c0:m13 |
@staticmethod<EOL><INDENT>def check_task_status_and_id(task_json):<DEDENT> | if task_json.get('<STR_LIT>') is not None:<EOL><INDENT>task = task_json['<STR_LIT>'][<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>task = task_json<EOL><DEDENT>current_status = task['<STR_LIT>']<EOL>image_id = task['<STR_LIT>']<EOL>if current_status == '<STR_LIT>':<EOL><INDENT>print("<STR_LIT>".format(image_id))<EOL>return False, image_id<EOL><DEDENT>elif current_status == '<STR_LIT>':<EOL><INDENT>print("<STR_LIT>")<EOL>return False, None<EOL><DEDENT>elif current_status == '<STR_LIT>':<EOL><INDENT>print("<STR_LIT>")<EOL>return False, None<EOL><DEDENT>else:<EOL><INDENT>print("<STR_LIT>".format(image_id, current_status))<EOL>print("<STR_LIT>")<EOL>time.sleep(<NUM_LIT:30>)<EOL>return True, image_id<EOL><DEDENT> | Read status of import json and parse :param task_json: status json to parse :return: (stillRunning, imageId) | f1786:c0:m14 |
def import_vmdk(self): | <EOL>first_upload_region = self.aws_regions[<NUM_LIT:0>]<EOL>print("<STR_LIT>".format(first_upload_region))<EOL>self.upload_to_s3(region=first_upload_region)<EOL>description = "<STR_LIT>".format(os.path.basename(self.upload_file))<EOL>temp_fd, file_location = self.create_config_file(os.path.basename(self.upload_file), description)<EOL>import_id = self.run_ec2_import(file_location, description, first_upload_region)<EOL>self.wait_for_import_to_complete(import_id)<EOL>self.rename_image(import_id, self.ami_name, source_region=first_upload_region)<EOL>return import_id<EOL> | All actions necessary to import vmdk (calls s3 upload, and import to aws ec2) :param vmdk_location: location of vmdk to import. Can be provided as a string, or the result output of fabric execution :return: | f1786:c0:m15 |
def parse_args(): | parser = argparse.ArgumentParser(description="<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', type=str, nargs='<STR_LIT:+>', required=True,<EOL>help='<STR_LIT>'<EOL>'<STR_LIT>'.format(AWSUtilities.aws_regions))<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', type=str, required=True, help='<STR_LIT>')<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', type=str, required=True,<EOL>help='<STR_LIT>')<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', type=str, required=True,<EOL>help="<STR_LIT>")<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', type=str, required=False, help='<STR_LIT>'<EOL>'<STR_LIT>')<EOL>parser.add_argument('<STR_LIT>', '<STR_LIT>', type=str, default=tempfile.mkdtemp(),<EOL>help='<STR_LIT>')<EOL>args = parser.parse_args()<EOL>if not args.ami_name:<EOL><INDENT>args.ami_name = os.path.basename(args.vmdk_upload_file)<EOL><DEDENT>validate_args(args)<EOL>return args<EOL> | Argument parser and validator | f1787:m0 |
def validate_args(args): | <EOL>if not os.path.isdir(args.directory):<EOL><INDENT>print("<STR_LIT>".format(args.directory))<EOL>sys.exit(<NUM_LIT:5>)<EOL><DEDENT>try:<EOL><INDENT>args.vmdk_upload_file = args.vmdk_upload_file<EOL><DEDENT>except AttributeError:<EOL><INDENT>args.vmdk_upload_file = None<EOL><DEDENT>if args.vmdk_upload_file and not os.path.isfile(args.vmdk_upload_file):<EOL><INDENT>print("<STR_LIT>".format(args.vmdk_upload_file))<EOL>sys.exit(<NUM_LIT:5>)<EOL><DEDENT>aws_importer = AWSUtilities.AWSUtils(args.directory, args.aws_profile, args.s3_bucket,<EOL>args.aws_regions, args.ami_name, args.vmdk_upload_file)<EOL>aws_importer.validate()<EOL> | Perform necessary validation checks :param args: :return: | f1787:m1 |
def vmdk_to_ami(args): | aws_importer = AWSUtilities.AWSUtils(args.directory, args.aws_profile, args.s3_bucket,<EOL>args.aws_regions, args.ami_name, args.vmdk_upload_file)<EOL>aws_importer.import_vmdk()<EOL> | Calls methods to perform vmdk import :param args: :return: | f1787:m2 |
def resolve(self): | return os.path.expandvars(os.path.expanduser(self.fqpn))<EOL> | Resolve pathname shell variables and ~userdir | f1789:c0:m11 |
@property<EOL><INDENT>def parts(self):<DEDENT> | return os.path.split(self.fqpn)<EOL> | Return list of path parts | f1789:c0:m15 |
@property<EOL><INDENT>def parent(self):<DEDENT> | return Path(os.path.join(*self.parts[:-<NUM_LIT:1>]))<EOL> | Return Path to parent directory. | f1789:c0:m16 |
def moveVisibleCol(sheet, fromVisColIdx, toVisColIdx): | toVisColIdx = min(max(toVisColIdx, <NUM_LIT:0>), sheet.nVisibleCols)<EOL>fromColIdx = sheet.columns.index(sheet.visibleCols[fromVisColIdx])<EOL>toColIdx = sheet.columns.index(sheet.visibleCols[toVisColIdx])<EOL>moveListItem(sheet.columns, fromColIdx, toColIdx)<EOL>return toVisColIdx<EOL> | Move visible column to another visible index in sheet. | f1792:m0 |
def expand_cols_deep(sheet, cols, row, depth=<NUM_LIT:0>): | ret = []<EOL>for col in cols:<EOL><INDENT>newcols = _addExpandedColumns(col, row, sheet.columns.index(col))<EOL>if depth != <NUM_LIT:1>: <EOL><INDENT>ret.extend(expand_cols_deep(sheet, newcols, row, depth-<NUM_LIT:1>))<EOL><DEDENT><DEDENT>return ret<EOL> | expand all visible columns of containers to the given depth (0=fully) | f1793:m0 |
def load_pyobj(name, pyobj): | if isinstance(pyobj, list) or isinstance(pyobj, tuple):<EOL><INDENT>if getattr(pyobj, '<STR_LIT>', None): <EOL><INDENT>return SheetNamedTuple(name, pyobj)<EOL><DEDENT>else:<EOL><INDENT>return SheetList(name, pyobj)<EOL><DEDENT><DEDENT>elif isinstance(pyobj, dict):<EOL><INDENT>return SheetDict(name, pyobj)<EOL><DEDENT>elif isinstance(pyobj, object):<EOL><INDENT>return SheetObject(name, pyobj)<EOL><DEDENT>else:<EOL><INDENT>error("<STR_LIT>" % type(pyobj).__name__)<EOL><DEDENT> | Return Sheet object of appropriate type for given sources in `args`. | f1793:m6 |
def open_pyobj(path): | return load_pyobj(path.name, eval(path.read_text()))<EOL> | Provide wrapper for `load_pyobj`. | f1793:m7 |
def getPublicAttrs(obj): | return [k for k in dir(obj) if not k.startswith('<STR_LIT:_>') and not callable(getattr(obj, k))]<EOL> | Return all public attributes (not methods or `_`-prefixed) on object. | f1793:m8 |
def PyobjColumns(obj): | return [ColumnAttr(k, type(getattr(obj, k))) for k in getPublicAttrs(obj)]<EOL> | Return columns for each public attribute on an object. | f1793:m9 |
def AttrColumns(attrnames): | return [ColumnAttr(name) for name in attrnames]<EOL> | Return column names for all elements of list `attrnames`. | f1793:m10 |
def DictKeyColumns(d): | return [ColumnItem(k, k, type=deduceType(d[k])) for k in d.keys()]<EOL> | Return a list of Column objects from dictionary keys. | f1793:m11 |
def SheetList(name, src, **kwargs): | if not src:<EOL><INDENT>status('<STR_LIT>' + name)<EOL>return<EOL><DEDENT>if isinstance(src[<NUM_LIT:0>], dict):<EOL><INDENT>return ListOfDictSheet(name, source=src, **kwargs)<EOL><DEDENT>elif isinstance(src[<NUM_LIT:0>], tuple):<EOL><INDENT>if getattr(src[<NUM_LIT:0>], '<STR_LIT>', None): <EOL><INDENT>return ListOfNamedTupleSheet(name, source=src, **kwargs)<EOL><DEDENT><DEDENT>return ListOfPyobjSheet(name, source=src, **kwargs)<EOL> | Creates a Sheet from a list of homogenous dicts or namedtuples. | f1793:m12 |
def _defaggr(name, type, func): | func.type=type<EOL>func.__name__ = name<EOL>return func<EOL> | Define aggregator `name` that calls func(col, rows) | f1794:m0 |
def aggregator(name, func, *args, type=None): | def _func(col, rows): <EOL><INDENT>vals = list(col.getValues(rows))<EOL>try:<EOL><INDENT>return func(vals, *args)<EOL><DEDENT>except Exception as e:<EOL><INDENT>if len(vals) == <NUM_LIT:0>:<EOL><INDENT>return None<EOL><DEDENT>return e<EOL><DEDENT><DEDENT>aggregators[name] = _defaggr(name, type, _func)<EOL> | Define simple aggregator `name` that calls func(values) | f1794:m1 |
def _percentile(N, percent, key=lambda x:x): | if not N:<EOL><INDENT>return None<EOL><DEDENT>k = (len(N)-<NUM_LIT:1>) * percent<EOL>f = math.floor(k)<EOL>c = math.ceil(k)<EOL>if f == c:<EOL><INDENT>return key(N[int(k)])<EOL><DEDENT>d0 = key(N[int(f)]) * (c-k)<EOL>d1 = key(N[int(c)]) * (k-f)<EOL>return d0+d1<EOL> | Find the percentile of a list of values. @parameter N - is a list of values. Note N MUST BE already sorted. @parameter percent - a float value from 0.0 to 1.0. @parameter key - optional key function to compute value from each element of N. @return - the percentile of the values | f1794:m4 |
def addAggregators(cols, aggrnames): | for aggrname in aggrnames:<EOL><INDENT>aggrs = aggregators.get(aggrname)<EOL>aggrs = aggrs if isinstance(aggrs, list) else [aggrs]<EOL>for aggr in aggrs:<EOL><INDENT>for c in cols:<EOL><INDENT>if not hasattr(c, '<STR_LIT>'):<EOL><INDENT>c.aggregators = []<EOL><DEDENT>if aggr and aggr not in c.aggregators:<EOL><INDENT>c.aggregators += [aggr]<EOL><DEDENT><DEDENT><DEDENT><DEDENT> | add aggregator for each aggrname to each of cols | f1794:m7 |
@asyncthread<EOL>def fillNullValues(col, rows): | lastval = None<EOL>nullfunc = isNullFunc()<EOL>n = <NUM_LIT:0><EOL>rowsToFill = list(rows)<EOL>for r in Progress(col.sheet.rows, '<STR_LIT>'): <EOL><INDENT>try:<EOL><INDENT>val = col.getValue(r)<EOL><DEDENT>except Exception as e:<EOL><INDENT>val = e<EOL><DEDENT>if nullfunc(val) and r in rowsToFill:<EOL><INDENT>if lastval:<EOL><INDENT>col.setValue(r, lastval)<EOL>n += <NUM_LIT:1><EOL><DEDENT><DEDENT>else:<EOL><INDENT>lastval = val<EOL><DEDENT><DEDENT>col.recalc()<EOL>status("<STR_LIT>" % n)<EOL> | Fill null cells in col with the previous non-null value | f1801:m1 |
def saveSheets(fn, *vsheets, confirm_overwrite=False): | givenpath = Path(fn)<EOL>filetype = '<STR_LIT>'<EOL>basename, ext = os.path.splitext(fn)<EOL>if ext:<EOL><INDENT>filetype = ext[<NUM_LIT:1>:]<EOL><DEDENT>filetype = filetype or options.save_filetype<EOL>if len(vsheets) > <NUM_LIT:1>:<EOL><INDENT>if not fn.endswith('<STR_LIT:/>'): <EOL><INDENT>savefunc = getGlobals().get('<STR_LIT>' + filetype, None)<EOL>if savefunc:<EOL><INDENT>return savefunc(givenpath, *vsheets)<EOL><DEDENT><DEDENT>if not givenpath.exists():<EOL><INDENT>try:<EOL><INDENT>os.makedirs(givenpath.resolve(), exist_ok=True)<EOL><DEDENT>except FileExistsError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>assert givenpath.is_dir(), filetype + '<STR_LIT>'<EOL>savefunc = getGlobals().get('<STR_LIT>' + filetype) or fail('<STR_LIT>'+filetype)<EOL>if givenpath.exists():<EOL><INDENT>if confirm_overwrite:<EOL><INDENT>confirm('<STR_LIT>' % fn)<EOL><DEDENT><DEDENT>status('<STR_LIT>' % (len(vsheets), givenpath.fqpn))<EOL>for vs in vsheets:<EOL><INDENT>p = Path(os.path.join(givenpath.fqpn, vs.name+'<STR_LIT:.>'+filetype))<EOL>savefunc(p, vs)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>savefunc = getGlobals().get('<STR_LIT>' + filetype) or fail('<STR_LIT>'+filetype)<EOL>if givenpath.exists():<EOL><INDENT>if confirm_overwrite:<EOL><INDENT>confirm('<STR_LIT>' % fn)<EOL><DEDENT><DEDENT>status('<STR_LIT>' % (givenpath.fqpn, filetype))<EOL>savefunc(givenpath, vsheets[<NUM_LIT:0>])<EOL><DEDENT> | Save sheet `vs` with given filename `fn`. | f1801:m8 |
def openSource(p, filetype=None): | if not filetype:<EOL><INDENT>filetype = options.filetype<EOL><DEDENT>if isinstance(p, str):<EOL><INDENT>if '<STR_LIT>' in p:<EOL><INDENT>return openSource(UrlPath(p), filetype) <EOL><DEDENT>elif p == '<STR_LIT:->':<EOL><INDENT>return openSource(PathFd('<STR_LIT:->', vd().stdin), filetype)<EOL><DEDENT>else:<EOL><INDENT>return openSource(Path(p), filetype) <EOL><DEDENT><DEDENT>elif isinstance(p, UrlPath):<EOL><INDENT>openfunc = '<STR_LIT>' + p.scheme<EOL>return getGlobals()[openfunc](p, filetype=filetype)<EOL><DEDENT>elif isinstance(p, Path):<EOL><INDENT>if not filetype:<EOL><INDENT>filetype = p.suffix or '<STR_LIT>'<EOL><DEDENT>if os.path.isdir(p.resolve()):<EOL><INDENT>filetype = '<STR_LIT>'<EOL><DEDENT>openfunc = '<STR_LIT>' + filetype.lower()<EOL>if openfunc not in getGlobals():<EOL><INDENT>warning('<STR_LIT>' % openfunc)<EOL>filetype = '<STR_LIT>'<EOL>openfunc = '<STR_LIT>'<EOL><DEDENT>vs = getGlobals()[openfunc](p)<EOL><DEDENT>else: <EOL><INDENT>status('<STR_LIT>' % type(p))<EOL>vs = None<EOL><DEDENT>if vs:<EOL><INDENT>status('<STR_LIT>' % (p.name, filetype))<EOL><DEDENT>return vs<EOL> | calls open_ext(Path) or openurl_scheme(UrlPath, filetype) | f1801:m9 |
def open_vd(p): | vs = open_tsv(p)<EOL>vs.reload()<EOL>return vs<EOL> | Opens a .vd file as a .tsv file | f1801:m10 |
def open_txt(p): | with p.open_text() as fp:<EOL><INDENT>if options.delimiter in next(fp): <EOL><INDENT>return open_tsv(p) <EOL><DEDENT>return TextSheet(p.name, p)<EOL><DEDENT> | Create sheet from `.txt` file at Path `p`, checking whether it is TSV. | f1801:m11 |
def loadInternalSheet(klass, p, **kwargs): | vs = klass(p.name, source=p, **kwargs)<EOL>options._set('<STR_LIT>', '<STR_LIT:utf8>', vs)<EOL>if p.exists():<EOL><INDENT>vd.sheets.insert(<NUM_LIT:0>, vs)<EOL>vs.reload.__wrapped__(vs)<EOL>vd.sheets.pop(<NUM_LIT:0>)<EOL><DEDENT>return vs<EOL> | Load internal sheet of given klass. Internal sheets are always tsv. | f1801:m13 |
def anytype(r=None): | return r<EOL> | minimalist "any" passthrough type | f1802:m6 |
def error(s): | status(s, priority=<NUM_LIT:3>)<EOL>raise ExpectedException(s)<EOL> | Log an error and raise an exception. | f1802:m13 |
def chooseOne(choices): | ret = chooseMany(choices)<EOL>assert len(ret) == <NUM_LIT:1>, '<STR_LIT>'<EOL>return ret[<NUM_LIT:0>]<EOL> | Return one of `choices` elements (if list) or values (if dict). | f1802:m25 |
def chooseMany(choices): | if isinstance(choices, dict):<EOL><INDENT>choosed = input('<STR_LIT:/>'.join(choices.keys()) + '<STR_LIT>', completer=CompleteKey(choices)).split()<EOL>return [choices[c] for c in choosed]<EOL><DEDENT>else:<EOL><INDENT>return input('<STR_LIT:/>'.join(str(x) for x in choices) + '<STR_LIT>', completer=CompleteKey(choices)).split()<EOL><DEDENT> | Return list of `choices` elements (if list) or values (if dict). | f1802:m26 |
def regex_flags(): | return sum(getattr(re, f.upper()) for f in options.regex_flags)<EOL> | Return flags to pass to regex functions from options | f1802:m27 |
def asyncthread(func): | @functools.wraps(func)<EOL>def _execAsync(*args, **kwargs):<EOL><INDENT>return vd().execAsync(func, *args, **kwargs)<EOL><DEDENT>return _execAsync<EOL> | Function decorator, to make calls to `func()` spawn a separate thread if available. | f1802:m29 |
def wrmap(func, iterable, *args): | for it in iterable:<EOL><INDENT>try:<EOL><INDENT>yield func(it, *args)<EOL><DEDENT>except Exception as e:<EOL><INDENT>pass<EOL><DEDENT><DEDENT> | Same as map(func, iterable, *args), but ignoring exceptions. | f1802:m35 |
def wrapply(func, *args, **kwargs): | val = args[<NUM_LIT:0>]<EOL>if val is None:<EOL><INDENT>return TypedWrapper(func, None)<EOL><DEDENT>elif isinstance(val, TypedExceptionWrapper):<EOL><INDENT>tew = copy(val)<EOL>tew.forwarded = True<EOL>return tew<EOL><DEDENT>elif isinstance(val, TypedWrapper):<EOL><INDENT>return val<EOL><DEDENT>elif isinstance(val, Exception):<EOL><INDENT>return TypedWrapper(func, *args)<EOL><DEDENT>try:<EOL><INDENT>return func(*args, **kwargs)<EOL><DEDENT>except Exception as e:<EOL><INDENT>e.stacktrace = stacktrace()<EOL>return TypedExceptionWrapper(func, *args, exception=e)<EOL><DEDENT> | Like apply(), but which wraps Exceptions and passes through Wrappers (if first arg) | f1802:m36 |
def getattrdeep(obj, attr, *default): | attrs = attr.split('<STR_LIT:.>')<EOL>if default:<EOL><INDENT>getattr_default = lambda o,a,d=default[<NUM_LIT:0>]: getattr(o, a, d)<EOL><DEDENT>else:<EOL><INDENT>getattr_default = lambda o,a: getattr(o, a)<EOL><DEDENT>for a in attrs[:-<NUM_LIT:1>]:<EOL><INDENT>obj = getattr_default(obj, a)<EOL><DEDENT>return getattr_default(obj, attrs[-<NUM_LIT:1>])<EOL> | Return dotted attr (like "a.b.c") from obj, or default if any of the components are missing. | f1802:m38 |
def setattrdeep(obj, attr, val): | attrs = attr.split('<STR_LIT:.>')<EOL>for a in attrs[:-<NUM_LIT:1>]:<EOL><INDENT>obj = getattr(obj, a)<EOL><DEDENT>setattr(obj, attrs[-<NUM_LIT:1>], val)<EOL> | Set dotted attr (like "a.b.c") on obj to val. | f1802:m39 |
def ColumnAttr(name='<STR_LIT>', attr=None, **kwargs): | return Column(name,<EOL>expr=attr if attr is not None else name,<EOL>getter=lambda col,row: getattrdeep(row, col.expr),<EOL>setter=lambda col,row,val: setattrdeep(row, col.expr, val),<EOL>**kwargs)<EOL> | Column using getattr/setattr of given attr. | f1802:m40 |
def ColumnItem(name='<STR_LIT>', key=None, **kwargs): | return Column(name,<EOL>expr=key if key is not None else name,<EOL>getter=lambda col,row: getitemdef(row, col.expr),<EOL>setter=lambda col,row,val: setitem(row, col.expr, val),<EOL>**kwargs)<EOL> | Column using getitem/setitem of given key. | f1802:m42 |
def ArrayNamedColumns(columns): | return [ColumnItem(colname, i) for i, colname in enumerate(columns)]<EOL> | Return list of ColumnItems from given list of column names. | f1802:m43 |
def ArrayColumns(ncols): | return [ColumnItem('<STR_LIT>', i, width=<NUM_LIT:8>) for i in range(ncols)]<EOL> | Return list of ColumnItems for given row length. | f1802:m44 |
@functools.lru_cache(maxsize=<NUM_LIT>)<EOL>def clipstr(s, dispw): | w = <NUM_LIT:0><EOL>ret = '<STR_LIT>'<EOL>ambig_width = options.disp_ambig_width<EOL>for c in s:<EOL><INDENT>if c != '<STR_LIT:U+0020>' and unicodedata.category(c) in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>'): <EOL><INDENT>c = options.disp_oddspace<EOL><DEDENT>if c:<EOL><INDENT>c = c[<NUM_LIT:0>] <EOL>ret += c<EOL>eaw = unicodedata.east_asian_width(c)<EOL>if eaw == '<STR_LIT:A>': <EOL><INDENT>w += ambig_width<EOL><DEDENT>elif eaw in '<STR_LIT>': <EOL><INDENT>w += <NUM_LIT:2><EOL><DEDENT>elif not unicodedata.combining(c):<EOL><INDENT>w += <NUM_LIT:1><EOL><DEDENT><DEDENT>if w > dispw-len(options.disp_truncator)+<NUM_LIT:1>:<EOL><INDENT>ret = ret[:-<NUM_LIT:2>] + options.disp_truncator <EOL>w += len(options.disp_truncator)<EOL>break<EOL><DEDENT><DEDENT>return ret, w<EOL> | Return clipped string and width in terminal display characters. Note: width may differ from len(s) if East Asian chars are 'fullwidth'. | f1802:m46 |
def clipdraw(scr, y, x, s, attr, w=None, rtl=False): | if not scr:<EOL><INDENT>return <NUM_LIT:0><EOL><DEDENT>_, windowWidth = scr.getmaxyx()<EOL>dispw = <NUM_LIT:0><EOL>try:<EOL><INDENT>if w is None:<EOL><INDENT>w = windowWidth-<NUM_LIT:1><EOL><DEDENT>w = min(w, (x-<NUM_LIT:1>) if rtl else (windowWidth-x-<NUM_LIT:1>))<EOL>if w <= <NUM_LIT:0>: <EOL><INDENT>return <NUM_LIT:0><EOL><DEDENT>clipped, dispw = clipstr(str(s), w)<EOL>if rtl:<EOL><INDENT>scr.addstr(y, x-dispw-<NUM_LIT:1>, disp_column_fill*dispw, attr)<EOL> | Draw string `s` at (y,x)-(y,x+w), clipping with ellipsis char. if rtl, draw inside (x-w, x). Returns width drawn (max of w). | f1802:m47 |
def editText(scr, y, x, w, i=<NUM_LIT:0>, attr=curses.A_NORMAL, value='<STR_LIT>', fillchar='<STR_LIT:U+0020>', truncchar='<STR_LIT:->', unprintablechar='<STR_LIT:.>', completer=lambda text,idx: None, history=[], display=True): | ESC='<STR_LIT>'<EOL>ENTER='<STR_LIT>'<EOL>TAB='<STR_LIT>'<EOL>def until_get_wch():<EOL><INDENT>'<STR_LIT>'<EOL>ret = None<EOL>while not ret:<EOL><INDENT>try:<EOL><INDENT>ret = scr.get_wch()<EOL><DEDENT>except curses.error:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>return ret<EOL><DEDENT>def splice(v, i, s):<EOL><INDENT>'<STR_LIT>'<EOL>return v if i < <NUM_LIT:0> else v[:i] + s + v[i:]<EOL><DEDENT>def clean_printable(s):<EOL><INDENT>'<STR_LIT>'<EOL>return '<STR_LIT>'.join(c if c.isprintable() else ('<STR_LIT>' % ord(c)) for c in str(s))<EOL><DEDENT>def delchar(s, i, remove=<NUM_LIT:1>):<EOL><INDENT>'<STR_LIT>'<EOL>return s if i < <NUM_LIT:0> else s[:i] + s[i+remove:]<EOL><DEDENT>class CompleteState:<EOL><INDENT>def __init__(self, completer_func):<EOL><INDENT>self.comps_idx = -<NUM_LIT:1><EOL>self.completer_func = completer_func<EOL>self.former_i = None<EOL>self.just_completed = False<EOL><DEDENT>def complete(self, v, i, state_incr):<EOL><INDENT>self.just_completed = True<EOL>self.comps_idx += state_incr<EOL>if self.former_i is None:<EOL><INDENT>self.former_i = i<EOL><DEDENT>try:<EOL><INDENT>r = self.completer_func(v[:self.former_i], self.comps_idx)<EOL><DEDENT>except Exception as e:<EOL><INDENT>return v, i<EOL><DEDENT>if not r:<EOL><INDENT>return v, i<EOL><DEDENT>v = r + v[i:]<EOL>return v, len(v)<EOL><DEDENT>def reset(self):<EOL><INDENT>if self.just_completed:<EOL><INDENT>self.just_completed = False<EOL><DEDENT>else:<EOL><INDENT>self.former_i = None<EOL>self.comps_idx = -<NUM_LIT:1><EOL><DEDENT><DEDENT><DEDENT>class HistoryState:<EOL><INDENT>def __init__(self, history):<EOL><INDENT>self.history = history<EOL>self.hist_idx = None<EOL>self.prev_val = None<EOL><DEDENT>def up(self, v, i):<EOL><INDENT>if self.hist_idx is None:<EOL><INDENT>self.hist_idx = len(self.history)<EOL>self.prev_val = v<EOL><DEDENT>if self.hist_idx > <NUM_LIT:0>:<EOL><INDENT>self.hist_idx -= <NUM_LIT:1><EOL>v = self.history[self.hist_idx]<EOL><DEDENT>i = len(v)<EOL>return v, i<EOL><DEDENT>def down(self, v, i):<EOL><INDENT>if self.hist_idx is None:<EOL><INDENT>return v, i<EOL><DEDENT>elif self.hist_idx < len(self.history)-<NUM_LIT:1>:<EOL><INDENT>self.hist_idx += <NUM_LIT:1><EOL>v = self.history[self.hist_idx]<EOL><DEDENT>else:<EOL><INDENT>v = self.prev_val<EOL>self.hist_idx = None<EOL><DEDENT>i = len(v)<EOL>return v, i<EOL><DEDENT><DEDENT>history_state = HistoryState(history)<EOL>complete_state = CompleteState(completer)<EOL>insert_mode = True<EOL>first_action = True<EOL>v = str(value) <EOL>if i != <NUM_LIT:0>:<EOL><INDENT>first_action = False<EOL><DEDENT>left_truncchar = right_truncchar = truncchar<EOL>def rfind_nonword(s, a, b):<EOL><INDENT>if not s:<EOL><INDENT>return <NUM_LIT:0><EOL><DEDENT>while not s[b].isalnum() and b >= a: <EOL><INDENT>b -= <NUM_LIT:1><EOL><DEDENT>while s[b].isalnum() and b >= a:<EOL><INDENT>b -= <NUM_LIT:1><EOL><DEDENT>return b<EOL><DEDENT>while True:<EOL><INDENT>if display:<EOL><INDENT>dispval = clean_printable(v)<EOL><DEDENT>else:<EOL><INDENT>dispval = '<STR_LIT:*>' * len(v)<EOL><DEDENT>dispi = i <EOL>if len(dispval) < w: <EOL><INDENT>dispval += fillchar*(w-len(dispval)-<NUM_LIT:1>)<EOL><DEDENT>elif i == len(dispval): <EOL><INDENT>dispi = w-<NUM_LIT:1><EOL>dispval = left_truncchar + dispval[len(dispval)-w+<NUM_LIT:2>:] + fillchar<EOL><DEDENT>elif i >= len(dispval)-w//<NUM_LIT:2>: <EOL><INDENT>dispi = w-(len(dispval)-i)<EOL>dispval = left_truncchar + dispval[len(dispval)-w+<NUM_LIT:1>:]<EOL><DEDENT>elif i <= w//<NUM_LIT:2>: <EOL><INDENT>dispval = dispval[:w-<NUM_LIT:1>] + right_truncchar<EOL><DEDENT>else:<EOL><INDENT>dispi = w//<NUM_LIT:2> <EOL>k = <NUM_LIT:1> if w%<NUM_LIT:2>==<NUM_LIT:0> else <NUM_LIT:0> <EOL>dispval = left_truncchar + dispval[i-w//<NUM_LIT:2>+<NUM_LIT:1>:i+w//<NUM_LIT:2>-k] + right_truncchar<EOL><DEDENT>prew = clipdraw(scr, y, x, dispval[:dispi], attr, w)<EOL>clipdraw(scr, y, x+prew, dispval[dispi:], attr, w-prew+<NUM_LIT:1>)<EOL>scr.move(y, x+prew)<EOL>ch = vd().getkeystroke(scr)<EOL>if ch == '<STR_LIT>': continue<EOL>elif ch == '<STR_LIT>': insert_mode = not insert_mode<EOL>elif ch == '<STR_LIT>' or ch == '<STR_LIT>': i = <NUM_LIT:0><EOL>elif ch == '<STR_LIT>' or ch == '<STR_LIT>': i -= <NUM_LIT:1><EOL>elif ch in ('<STR_LIT>', '<STR_LIT>', ESC): raise EscapeException(ch)<EOL>elif ch == '<STR_LIT>' or ch == '<STR_LIT>': v = delchar(v, i)<EOL>elif ch == '<STR_LIT>' or ch == '<STR_LIT>': i = len(v)<EOL>elif ch == '<STR_LIT>' or ch == '<STR_LIT>': i += <NUM_LIT:1><EOL>elif ch in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>'): i -= <NUM_LIT:1>; v = delchar(v, i)<EOL>elif ch == TAB: v, i = complete_state.complete(v, i, +<NUM_LIT:1>)<EOL>elif ch == '<STR_LIT>': v, i = complete_state.complete(v, i, -<NUM_LIT:1>)<EOL>elif ch == ENTER: break<EOL>elif ch == '<STR_LIT>': v = v[:i] <EOL>elif ch == '<STR_LIT>': v = launchExternalEditor(v)<EOL>elif ch == '<STR_LIT>': v = str(value) <EOL>elif ch == '<STR_LIT>': v = delchar(splice(v, i-<NUM_LIT:2>, v[i-<NUM_LIT:1>]), i) <EOL>elif ch == '<STR_LIT>': v = v[i:]; i = <NUM_LIT:0> <EOL>elif ch == '<STR_LIT>': v = splice(v, i, until_get_wch()); i += <NUM_LIT:1> <EOL>elif ch == '<STR_LIT>': j = rfind_nonword(v, <NUM_LIT:0>, i-<NUM_LIT:1>); v = v[:j+<NUM_LIT:1>] + v[i:]; i = j+<NUM_LIT:1> <EOL>elif ch == '<STR_LIT>': suspend()<EOL>elif history and ch == '<STR_LIT>': v, i = history_state.up(v, i)<EOL>elif history and ch == '<STR_LIT>': v, i = history_state.down(v, i)<EOL>elif ch.startswith('<STR_LIT>'): pass<EOL>else:<EOL><INDENT>if first_action:<EOL><INDENT>v = '<STR_LIT>'<EOL><DEDENT>if insert_mode:<EOL><INDENT>v = splice(v, i, ch)<EOL><DEDENT>else:<EOL><INDENT>v = v[:i] + ch + v[i+<NUM_LIT:1>:]<EOL><DEDENT>i += <NUM_LIT:1><EOL><DEDENT>if i < <NUM_LIT:0>: i = <NUM_LIT:0><EOL>if i > len(v): i = len(v)<EOL>first_action = False<EOL>complete_state.reset()<EOL><DEDENT>return v<EOL> | A better curses line editing widget. | f1802:m51 |
def run(*sheetlist): | <EOL>os.putenv('<STR_LIT>', '<STR_LIT>')<EOL>ret = wrapper(cursesMain, sheetlist)<EOL>if ret:<EOL><INDENT>print(ret)<EOL><DEDENT> | Main entry point; launches vdtui with the given sheets already pushed (last one is visible) | f1802:m54 |
def cursesMain(_scr, sheetlist): | colors.setup()<EOL>for vs in sheetlist:<EOL><INDENT>vd().push(vs) <EOL><DEDENT>status('<STR_LIT>')<EOL>return vd().run(_scr)<EOL> | Populate VisiData object with sheets from a given list. | f1802:m55 |
def addGlobals(g): | globals().update(g)<EOL> | importers can call `addGlobals(globals())` to have their globals accessible to execstrings | f1802:m57 |
def getobj(self, objname): | return self.allobjs.get(objname)<EOL> | Inverse of objname(obj); returns obj if available | f1802:c2:m2 |
def set(self, k, v, obj='<STR_LIT>'): | if k not in self:<EOL><INDENT>self[k] = dict()<EOL><DEDENT>self[k][self.objname(obj)] = v<EOL>return v<EOL> | obj is a Sheet instance, or a Sheet [sub]class. obj="override" means override all; obj="default" means last resort. | f1802:c2:m3 |
def iter(self, obj=None): | if obj is None and vd:<EOL><INDENT>obj = vd.sheet<EOL><DEDENT>for o in self._mappings(obj):<EOL><INDENT>for k in self.keys():<EOL><INDENT>for o2 in self[k]:<EOL><INDENT>if o == o2:<EOL><INDENT>yield (k, o), self[k][o2]<EOL><DEDENT><DEDENT><DEDENT><DEDENT> | Iterate through all keys considering context of obj. If obj is None, uses the context of the top sheet. | f1802:c2:m7 |
def _get(self, k, obj=None): | opt = self._cache.get((k, obj), None)<EOL>if opt is None:<EOL><INDENT>opt = self._opts._get(k, obj)<EOL>self._cache[(k, obj or vd.sheet)] = opt<EOL><DEDENT>return opt<EOL> | Return Option object for k in context of obj. Cache result until any set(). | f1802:c5:m2 |
def status(self, *args, priority=<NUM_LIT:0>): | k = (priority, args)<EOL>self.statuses[k] = self.statuses.get(k, <NUM_LIT:0>) + <NUM_LIT:1><EOL>if self.statusHistory:<EOL><INDENT>prevpri, prevargs, prevn = self.statusHistory[-<NUM_LIT:1>]<EOL>if prevpri == priority and prevargs == args:<EOL><INDENT>self.statusHistory[-<NUM_LIT:1>][<NUM_LIT:2>] += <NUM_LIT:1><EOL>return True<EOL><DEDENT><DEDENT>self.statusHistory.append([priority, args, <NUM_LIT:1>])<EOL>return True<EOL> | Add status message to be shown until next action. | f1802:c7:m4 |
def addHook(self, hookname, hookfunc): | self.hooks[hookname].insert(<NUM_LIT:0>, hookfunc)<EOL> | Add hookfunc by hookname, to be called by corresponding `callHook`. | f1802:c7:m5 |
def callHook(self, hookname, *args, **kwargs): | r = []<EOL>for f in self.hooks[hookname]:<EOL><INDENT>try:<EOL><INDENT>r.append(f(*args, **kwargs))<EOL><DEDENT>except Exception as e:<EOL><INDENT>exceptionCaught(e)<EOL><DEDENT><DEDENT>return r<EOL> | Call all functions registered with `addHook` for the given hookname. | f1802:c7:m6 |
def execAsync(self, func, *args, **kwargs): | thread = threading.Thread(target=self.toplevelTryFunc, daemon=True, args=(func,)+args, kwargs=kwargs)<EOL>self.addThread(thread)<EOL>if self.sheets:<EOL><INDENT>currentSheet = self.sheets[<NUM_LIT:0>]<EOL>currentSheet.currentThreads.append(thread)<EOL><DEDENT>else:<EOL><INDENT>currentSheet = None<EOL><DEDENT>thread.sheet = currentSheet<EOL>thread.start()<EOL>return thread<EOL> | Execute `func(*args, **kwargs)` in a separate thread. | f1802:c7:m8 |
@staticmethod<EOL><INDENT>def toplevelTryFunc(func, *args, **kwargs):<DEDENT> | t = threading.current_thread()<EOL>t.name = func.__name__<EOL>ret = None<EOL>try:<EOL><INDENT>ret = func(*args, **kwargs)<EOL><DEDENT>except EscapeException as e: <EOL><INDENT>t.status += '<STR_LIT>'<EOL>status('<STR_LIT>' % t.name, priority=<NUM_LIT:2>)<EOL><DEDENT>except Exception as e:<EOL><INDENT>t.exception = e<EOL>exceptionCaught(e)<EOL><DEDENT>if t.sheet:<EOL><INDENT>t.sheet.currentThreads.remove(t)<EOL><DEDENT>return ret<EOL> | Thread entry-point for `func(*args, **kwargs)` with try/except wrapper | f1802:c7:m9 |
@property<EOL><INDENT>def unfinishedThreads(self):<DEDENT> | return [t for t in self.threads if getattr(t, '<STR_LIT>', None) is None]<EOL> | A list of unfinished threads (those without a recorded `endTime`). | f1802:c7:m10 |
def checkForFinishedThreads(self): | for t in self.unfinishedThreads:<EOL><INDENT>if not t.is_alive():<EOL><INDENT>t.endTime = time.process_time()<EOL>if getattr(t, '<STR_LIT:status>', None) is None:<EOL><INDENT>t.status = '<STR_LIT>'<EOL><DEDENT><DEDENT><DEDENT> | Mark terminated threads with endTime. | f1802:c7:m11 |
def sync(self, expectedThreads=<NUM_LIT:0>): | while len(self.unfinishedThreads) > expectedThreads:<EOL><INDENT>time.sleep(<NUM_LIT>)<EOL>self.checkForFinishedThreads()<EOL><DEDENT> | Wait for all but expectedThreads async threads to finish. | f1802:c7:m12 |
def editText(self, y, x, w, record=True, **kwargs): | v = self.callHook('<STR_LIT>') if record else None<EOL>if not v or v[<NUM_LIT:0>] is None:<EOL><INDENT>with EnableCursor():<EOL><INDENT>v = editText(self.scr, y, x, w, **kwargs)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>v = v[<NUM_LIT:0>]<EOL><DEDENT>if kwargs.get('<STR_LIT>', True):<EOL><INDENT>status('<STR_LIT>' % v)<EOL>self.callHook('<STR_LIT>', v) if record else None<EOL><DEDENT>return v<EOL> | Wrap global editText with `preedit` and `postedit` hooks. | f1802:c7:m14 |
def input(self, prompt, type='<STR_LIT>', defaultLast=False, **kwargs): | if type:<EOL><INDENT>histlist = list(self.lastInputs[type].keys())<EOL>ret = self._inputLine(prompt, history=histlist, **kwargs)<EOL>if ret:<EOL><INDENT>self.lastInputs[type][ret] = ret<EOL><DEDENT>elif defaultLast:<EOL><INDENT>histlist or fail("<STR_LIT>")<EOL>ret = histlist[-<NUM_LIT:1>]<EOL><DEDENT><DEDENT>else:<EOL><INDENT>ret = self._inputLine(prompt, **kwargs)<EOL><DEDENT>return ret<EOL> | Get user input, with history of `type`, defaulting to last history item if no input and defaultLast is True. | f1802:c7:m15 |
def _inputLine(self, prompt, **kwargs): | self.inInput = True<EOL>rstatuslen = self.drawRightStatus(self.scr, self.sheets[<NUM_LIT:0>])<EOL>attr = <NUM_LIT:0><EOL>promptlen = clipdraw(self.scr, self.windowHeight-<NUM_LIT:1>, <NUM_LIT:0>, prompt, attr, w=self.windowWidth-rstatuslen-<NUM_LIT:1>)<EOL>ret = self.editText(self.windowHeight-<NUM_LIT:1>, promptlen, self.windowWidth-promptlen-rstatuslen-<NUM_LIT:2>,<EOL>attr=colors.color_edit_cell,<EOL>unprintablechar=options.disp_unprintable,<EOL>truncchar=options.disp_truncator,<EOL>**kwargs)<EOL>self.inInput = False<EOL>return ret<EOL> | Add prompt to bottom of screen and get line of input from user. | f1802:c7:m16 |
def getkeystroke(self, scr, vs=None): | k = None<EOL>try:<EOL><INDENT>k = scr.get_wch()<EOL>self.drawRightStatus(scr, vs or self.sheets[<NUM_LIT:0>]) <EOL><DEDENT>except curses.error:<EOL><INDENT>return '<STR_LIT>' <EOL><DEDENT>if isinstance(k, str):<EOL><INDENT>if ord(k) >= <NUM_LIT:32> and ord(k) != <NUM_LIT>: <EOL><INDENT>return k<EOL><DEDENT>k = ord(k)<EOL><DEDENT>return curses.keyname(k).decode('<STR_LIT:utf-8>')<EOL> | Get keystroke and display it on status bar. | f1802:c7:m17 |
def exceptionCaught(self, exc=None, **kwargs): | if isinstance(exc, ExpectedException): <EOL><INDENT>return<EOL><DEDENT>self.lastErrors.append(stacktrace())<EOL>if kwargs.get('<STR_LIT:status>', True):<EOL><INDENT>status(self.lastErrors[-<NUM_LIT:1>][-<NUM_LIT:1>], priority=<NUM_LIT:2>) <EOL><DEDENT>if options.debug:<EOL><INDENT>raise<EOL><DEDENT> | Maintain list of most recent errors and return most recent one. | f1802:c7:m18 |
def drawLeftStatus(self, scr, vs): | cattr = CursesAttr(colors.color_status)<EOL>attr = cattr.attr<EOL>error_attr = cattr.update_attr(colors.color_error, <NUM_LIT:1>).attr<EOL>warn_attr = cattr.update_attr(colors.color_warning, <NUM_LIT:2>).attr<EOL>sep = options.disp_status_sep<EOL>try:<EOL><INDENT>lstatus = vs.leftStatus()<EOL>maxwidth = options.disp_lstatus_max<EOL>if maxwidth > <NUM_LIT:0>:<EOL><INDENT>lstatus = middleTruncate(lstatus, maxwidth//<NUM_LIT:2>)<EOL><DEDENT>y = self.windowHeight-<NUM_LIT:1><EOL>x = clipdraw(scr, y, <NUM_LIT:0>, lstatus, attr)<EOL>self.onMouse(scr, y, <NUM_LIT:0>, <NUM_LIT:1>, x,<EOL>BUTTON1_PRESSED='<STR_LIT>',<EOL>BUTTON3_PRESSED='<STR_LIT>',<EOL>BUTTON3_CLICKED='<STR_LIT>')<EOL>one = False<EOL>for (pri, msgparts), n in sorted(self.statuses.items(), key=lambda k: -k[<NUM_LIT:0>][<NUM_LIT:0>]):<EOL><INDENT>if x > self.windowWidth:<EOL><INDENT>break<EOL><DEDENT>if one: <EOL><INDENT>x += clipdraw(scr, y, x, sep, attr, self.windowWidth)<EOL><DEDENT>one = True<EOL>msg = composeStatus(msgparts, n)<EOL>if pri == <NUM_LIT:3>: msgattr = error_attr<EOL>elif pri == <NUM_LIT:2>: msgattr = warn_attr<EOL>elif pri == <NUM_LIT:1>: msgattr = warn_attr<EOL>else: msgattr = attr<EOL>x += clipdraw(scr, y, x, msg, msgattr, self.windowWidth)<EOL><DEDENT><DEDENT>except Exception as e:<EOL><INDENT>self.exceptionCaught(e)<EOL><DEDENT> | Draw left side of status bar. | f1802:c7:m21 |
def drawRightStatus(self, scr, vs): | rightx = self.windowWidth-<NUM_LIT:1><EOL>ret = <NUM_LIT:0><EOL>for rstatcolor in self.callHook('<STR_LIT>', vs):<EOL><INDENT>if rstatcolor:<EOL><INDENT>try:<EOL><INDENT>rstatus, coloropt = rstatcolor<EOL>rstatus = '<STR_LIT:U+0020>'+rstatus<EOL>attr = colors.get_color(coloropt).attr<EOL>statuslen = clipdraw(scr, self.windowHeight-<NUM_LIT:1>, rightx, rstatus, attr, rtl=True)<EOL>rightx -= statuslen<EOL>ret += statuslen<EOL><DEDENT>except Exception as e:<EOL><INDENT>self.exceptionCaught(e)<EOL><DEDENT><DEDENT><DEDENT>if scr:<EOL><INDENT>curses.doupdate()<EOL><DEDENT>return ret<EOL> | Draw right side of status bar. Return length displayed. | f1802:c7:m22 |
def rightStatus(self, sheet): | if sheet.currentThreads:<EOL><INDENT>gerund = ('<STR_LIT:U+0020>'+sheet.progresses[<NUM_LIT:0>].gerund) if sheet.progresses else '<STR_LIT>'<EOL>status = '<STR_LIT>' % (len(sheet), sheet.progressPct, gerund)<EOL><DEDENT>else:<EOL><INDENT>status = '<STR_LIT>' % (len(sheet), sheet.rowtype)<EOL><DEDENT>return status, '<STR_LIT>'<EOL> | Compose right side of status bar. | f1802:c7:m23 |
def run(self, scr): | global sheet<EOL>scr.timeout(int(options.curses_timeout))<EOL>with suppress(curses.error):<EOL><INDENT>curses.curs_set(<NUM_LIT:0>)<EOL><DEDENT>self.scr = scr<EOL>numTimeouts = <NUM_LIT:0><EOL>self.keystrokes = '<STR_LIT>'<EOL>while True:<EOL><INDENT>if not self.sheets:<EOL><INDENT>return<EOL><DEDENT>sheet = self.sheets[<NUM_LIT:0>]<EOL>threading.current_thread().sheet = sheet<EOL>try:<EOL><INDENT>sheet.draw(scr)<EOL><DEDENT>except Exception as e:<EOL><INDENT>self.exceptionCaught(e)<EOL><DEDENT>self.drawLeftStatus(scr, sheet)<EOL>self.drawRightStatus(scr, sheet) <EOL>keystroke = self.getkeystroke(scr, sheet)<EOL>if keystroke: <EOL><INDENT>numTimeouts = <NUM_LIT:0><EOL>if not self.prefixWaiting:<EOL><INDENT>self.keystrokes = '<STR_LIT>'<EOL><DEDENT>self.statuses.clear()<EOL>if keystroke == '<STR_LIT>':<EOL><INDENT>self.keystrokes = '<STR_LIT>'<EOL>clicktype = '<STR_LIT>'<EOL>try:<EOL><INDENT>devid, x, y, z, bstate = curses.getmouse()<EOL>sheet.mouseX, sheet.mouseY = x, y<EOL>if bstate & curses.BUTTON_CTRL:<EOL><INDENT>clicktype += "<STR_LIT>"<EOL>bstate &= ~curses.BUTTON_CTRL<EOL><DEDENT>if bstate & curses.BUTTON_ALT:<EOL><INDENT>clicktype += "<STR_LIT>"<EOL>bstate &= ~curses.BUTTON_ALT<EOL><DEDENT>if bstate & curses.BUTTON_SHIFT:<EOL><INDENT>clicktype += "<STR_LIT>"<EOL>bstate &= ~curses.BUTTON_SHIFT<EOL><DEDENT>keystroke = clicktype + curses.mouseEvents.get(bstate, str(bstate))<EOL>f = self.getMouse(scr, x, y, keystroke)<EOL>if f:<EOL><INDENT>if isinstance(f, str):<EOL><INDENT>for cmd in f.split():<EOL><INDENT>sheet.exec_keystrokes(cmd)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>f(y, x, keystroke)<EOL><DEDENT>self.keystrokes = keystroke<EOL>keystroke = '<STR_LIT>'<EOL><DEDENT><DEDENT>except curses.error:<EOL><INDENT>pass<EOL><DEDENT>except Exception as e:<EOL><INDENT>exceptionCaught(e)<EOL><DEDENT><DEDENT>self.keystrokes += keystroke<EOL><DEDENT>self.drawRightStatus(scr, sheet) <EOL>if not keystroke: <EOL><INDENT>pass<EOL><DEDENT>elif keystroke == '<STR_LIT>':<EOL><INDENT>return self.lastErrors and '<STR_LIT:\n>'.join(self.lastErrors[-<NUM_LIT:1>])<EOL><DEDENT>elif bindkeys._get(self.keystrokes):<EOL><INDENT>sheet.exec_keystrokes(self.keystrokes)<EOL>self.prefixWaiting = False<EOL><DEDENT>elif keystroke in self.allPrefixes:<EOL><INDENT>self.keystrokes = '<STR_LIT>'.join(sorted(set(self.keystrokes))) <EOL>self.prefixWaiting = True<EOL><DEDENT>else:<EOL><INDENT>status('<STR_LIT>' % (self.keystrokes))<EOL>self.prefixWaiting = False<EOL><DEDENT>self.checkForFinishedThreads()<EOL>self.callHook('<STR_LIT>')<EOL>catchapply(sheet.checkCursor)<EOL>time.sleep(<NUM_LIT:0>) <EOL>if vd.unfinishedThreads:<EOL><INDENT>scr.timeout(options.curses_timeout)<EOL><DEDENT>else:<EOL><INDENT>numTimeouts += <NUM_LIT:1><EOL>if numTimeouts > <NUM_LIT:1>:<EOL><INDENT>scr.timeout(-<NUM_LIT:1>)<EOL><DEDENT>else:<EOL><INDENT>scr.timeout(options.curses_timeout)<EOL><DEDENT><DEDENT><DEDENT> | Manage execution of keystrokes and subsequent redrawing of screen. | f1802:c7:m26 |
def replace(self, vs): | self.sheets.pop(<NUM_LIT:0>)<EOL>return self.push(vs)<EOL> | Replace top sheet with the given sheet `vs`. | f1802:c7:m27 |
def push(self, vs): | if vs:<EOL><INDENT>vs.vd = self<EOL>if vs in self.sheets:<EOL><INDENT>self.sheets.remove(vs)<EOL>self.sheets.insert(<NUM_LIT:0>, vs)<EOL><DEDENT>elif not vs.loaded:<EOL><INDENT>self.sheets.insert(<NUM_LIT:0>, vs)<EOL>vs.reload()<EOL>vs.recalc() <EOL><DEDENT>else:<EOL><INDENT>self.sheets.insert(<NUM_LIT:0>, vs)<EOL><DEDENT>if vs.precious and vs not in vs.vd.allSheets:<EOL><INDENT>vs.vd.allSheets[vs] = vs.name<EOL><DEDENT>return vs<EOL><DEDENT> | Move given sheet `vs` to index 0 of list `sheets`. | f1802:c7:m29 |
def __bool__(self): | return True<EOL> | an instantiated Sheet always tests true | f1802:c11:m4 |
def leftStatus(self): | return options.disp_status_fmt.format(sheet=self)<EOL> | Compose left side of status bar for this sheet (overridable). | f1802:c11:m7 |
def exec_command(self, cmd, args='<STR_LIT>', vdglobals=None, keystrokes=None): | global sheet<EOL>sheet = vd.sheets[<NUM_LIT:0>]<EOL>if not cmd:<EOL><INDENT>debug('<STR_LIT>' % keystrokes)<EOL>return True<EOL><DEDENT>if isinstance(cmd, CommandLog):<EOL><INDENT>cmd.replay()<EOL>return False<EOL><DEDENT>escaped = False<EOL>err = '<STR_LIT>'<EOL>if vdglobals is None:<EOL><INDENT>vdglobals = getGlobals()<EOL><DEDENT>if not self.vd:<EOL><INDENT>self.vd = vd()<EOL><DEDENT>self.sheet = self<EOL>try:<EOL><INDENT>self.vd.callHook('<STR_LIT>', self, cmd, '<STR_LIT>', keystrokes)<EOL>exec(cmd.execstr, vdglobals, LazyMap(self))<EOL><DEDENT>except EscapeException as e: <EOL><INDENT>status('<STR_LIT>')<EOL>escaped = True<EOL><DEDENT>except Exception as e:<EOL><INDENT>debug(cmd.execstr)<EOL>err = self.vd.exceptionCaught(e)<EOL>escaped = True<EOL><DEDENT>try:<EOL><INDENT>self.vd.callHook('<STR_LIT>', self.vd.sheets[<NUM_LIT:0>] if self.vd.sheets else None, escaped, err)<EOL><DEDENT>except Exception:<EOL><INDENT>self.vd.exceptionCaught(e)<EOL><DEDENT>catchapply(self.checkCursor)<EOL>self.vd.refresh()<EOL>return escaped<EOL> | Execute `cmd` tuple with `vdglobals` as globals and this sheet's attributes as locals. Returns True if user cancelled. | f1802:c11:m9 |
@name.setter<EOL><INDENT>def name(self, name):<DEDENT> | self._name = name.strip().replace('<STR_LIT:U+0020>', '<STR_LIT:_>')<EOL> | Set name without spaces. | f1802:c11:m11 |
@property<EOL><INDENT>def progressPct(self):<DEDENT> | if self.progressTotal != <NUM_LIT:0>:<EOL><INDENT>return int(self.progressMade*<NUM_LIT:100>/self.progressTotal)<EOL><DEDENT>return <NUM_LIT:0><EOL> | Percent complete as indicated by async actions. | f1802:c11:m14 |
def recalc(self): | pass<EOL> | Clear any calculated value caches. | f1802:c11:m15 |
@property<EOL><INDENT>def attr(self):<DEDENT> | return self.color | self.attributes<EOL> | the composed curses attr | f1802:c12:m2 |
def colorize(self, col, row, value=None): | <INDENT>colorstack = tuple(c.coloropt for c in self.getColorizers() if wrapply(c.func, self, col, row, value))<EOL><DEDENT>colorstack = []<EOL>for colorizer in self.getColorizers():<EOL><INDENT>try:<EOL><INDENT>r = colorizer.func(self, col, row, value)<EOL>if r:<EOL><INDENT>colorstack.append(colorizer.coloropt if colorizer.coloropt else r)<EOL><DEDENT><DEDENT>except Exception as e:<EOL><INDENT>exceptionCaught(e)<EOL><DEDENT><DEDENT>return colors.resolve_colors(tuple(colorstack))<EOL> | Returns curses attribute for the given col/row/value | f1802:c13:m5 |
def column(self, colregex): | for c in self.columns:<EOL><INDENT>if re.search(colregex, c.name, regex_flags()):<EOL><INDENT>return c<EOL><DEDENT><DEDENT> | Return first column whose Column.name matches colregex. | f1802:c13:m7 |
def recalc(self): | for c in self.columns:<EOL><INDENT>c.recalc(self)<EOL><DEDENT> | Clear caches and set col.sheet to this sheet for all columns. | f1802:c13:m8 |
def reload(self): | self.rows = []<EOL>for r in self.iterload():<EOL><INDENT>self.addRow(r)<EOL><DEDENT> | Loads rows and/or columns. Override in subclass. | f1802:c13:m9 |
def iterload(self): | for row in []:<EOL><INDENT>yield row<EOL><DEDENT> | Override this generator for loading, if columns can be predefined. | f1802:c13:m10 |
def __copy__(self): | cls = self.__class__<EOL>ret = cls.__new__(cls)<EOL>ret.__dict__.update(self.__dict__)<EOL>ret.rows = [] <EOL>ret.columns = [copy(c) for c in self.keyCols]<EOL>ret.setKeys(ret.columns)<EOL>ret.columns.extend(copy(c) for c in self.columns if c not in self.keyCols)<EOL>ret.recalc() <EOL>ret._selectedRows = {}<EOL>ret.topRowIndex = ret.cursorRowIndex = <NUM_LIT:0><EOL>ret.progresses = []<EOL>ret.currentThreads = []<EOL>ret.precious = True <EOL>return ret<EOL> | copy sheet design (no rows). deepcopy columns so their attributes (width, type, name) may be adjusted independently. | f1802:c13:m11 |
def __deepcopy__(self, memo): | ret = self.__copy__()<EOL>memo[id(self)] = ret<EOL>return ret<EOL> | same as __copy__ | f1802:c13:m12 |
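
The `body` cells above are flattened with structural tokens instead of real newlines and indentation, and their string and numeric literals are masked by placeholders such as `<STR_LIT>` and `<NUM_LIT>` (some of which, like `<STR_LIT:type>` or `<NUM_LIT:0>`, appear to carry the original value). The sketch below is a minimal, non-authoritative illustration of how such a body could be turned back into readable, still literal-masked source text; the token meanings (`<EOL>` as a line break, `<INDENT>`/`<DEDENT>` as one indentation level in or out) are inferred from the rows themselves, and the placeholders are deliberately left in place because the original literals are not recoverable from this table.

```python
def detokenize(body, indent_unit="    "):
    """Rebuild indented source text from a tokenized `body` cell.

    Assumes <EOL> marks a line break and <INDENT>/<DEDENT> adjust the
    indentation level; literal placeholders pass through unchanged.
    """
    level = 0
    lines = []
    for segment in body.split("<EOL>"):
        segment = segment.lstrip()
        # Consume any leading indentation tokens before emitting the line.
        while True:
            if segment.startswith("<INDENT>"):
                level += 1
                segment = segment[len("<INDENT>"):]
            elif segment.startswith("<DEDENT>"):
                level = max(0, level - 1)
                segment = segment[len("<DEDENT>"):]
            else:
                break
        segment = segment.strip()
        if segment:
            lines.append(indent_unit * level + segment)
    return "\n".join(lines)
```

Applied to the `body` of row f1786:m0, for example, this yields the indented body of `parse_image_json` with its literals still shown as `<STR_LIT>`/`<NUM_LIT:0>`.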
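Row f1794:m4 above describes a linear-interpolation percentile over an already-sorted list, and only its literals are masked, so a readable reconstruction is straightforward. The version below is a sketch under the assumption that the visible numeric placeholders stand for the ordinary constants they name; it is not the project's exact code.

```python
import math

def percentile(sorted_values, percent, key=lambda x: x):
    """Linear-interpolation percentile of an already-sorted list.

    percent is a float in [0.0, 1.0]; an empty list yields None.
    """
    if not sorted_values:
        return None
    k = (len(sorted_values) - 1) * percent
    f = math.floor(k)
    c = math.ceil(k)
    if f == c:                                   # k lands exactly on an element
        return key(sorted_values[int(k)])
    d0 = key(sorted_values[int(f)]) * (c - k)    # weight of the lower neighbour
    d1 = key(sorted_values[int(c)]) * (k - f)    # weight of the upper neighbour
    return d0 + d1
```

For example, `percentile([1, 2, 3, 4], 0.5)` returns 2.5, interpolating halfway between the two middle values.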
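Rows f1802:m38 and f1802:m39 implement dotted attribute access ("a.b.c") with an optional per-step default. A minimal sketch of the same idea, assuming the masked separator literal is simply ".":

```python
def getattrdeep(obj, attr, *default):
    """Return a dotted attribute such as "a.b.c", or default if any part is missing."""
    parts = attr.split(".")
    if default:
        get = lambda o, a, d=default[0]: getattr(o, a, d)
    else:
        get = getattr
    for part in parts[:-1]:
        obj = get(obj, part)
    return get(obj, parts[-1])


def setattrdeep(obj, attr, val):
    """Set a dotted attribute such as "a.b.c" on obj to val."""
    parts = attr.split(".")
    for part in parts[:-1]:
        obj = getattr(obj, part)
    setattr(obj, parts[-1], val)
```

With a default supplied, `getattrdeep(request, "user.profile.email", None)` (hypothetical objects) falls back to `None` at whichever step is missing instead of raising `AttributeError`.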
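Row f1802:m46 (`clipstr`) measures strings in terminal display cells rather than code points, which is why its docstring notes that the width may differ from `len(s)` for East Asian text. The sketch below isolates just the width-measuring part of that idea; the masked category test is assumed to cover the wide/fullwidth ("W"/"F") East Asian width classes, and the ambiguous-width setting is a plain parameter here rather than the `options.disp_ambig_width` setting used in the original.

```python
import unicodedata

def display_width(s, ambig_width=1):
    """Approximate number of terminal cells needed to display s."""
    width = 0
    for ch in s:
        eaw = unicodedata.east_asian_width(ch)
        if eaw == "A":            # ambiguous width: narrow or wide depending on terminal
            width += ambig_width
        elif eaw in ("W", "F"):   # wide / fullwidth characters occupy two cells
            width += 2
        elif not unicodedata.combining(ch):
            width += 1            # combining marks add no width
    return width
```

`display_width("データ")` reports 6 cells even though the string is only 3 characters long, which is exactly the mismatch the docstring warns about.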