Python
def stop(self):
    """Used for Ascend; stop profiling."""
    if self._has_started:
        self._has_started = False
    else:
        msg = "The profiler has not started, so it cannot be stopped."
        logger.error(msg)
        raise RuntimeError(msg)
    self._ascend_profiler.stop()
    self._stop_time = int(time.time() * 10000000)
    logger.info("Profiling: stop time: %d", self._stop_time)
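A minimal usage sketch for the start/stop pair above. The constructor arguments and the behavior of analyse() are assumptions drawn from this snippet's context, not a verified MindSpore release:

# Hedged sketch; exact constructor arguments vary across MindSpore versions.
from mindspore.profiler import Profiler

profiler = Profiler(output_path='./profiler_data')  # starts collection
# ... run some training steps ...
profiler.analyse()  # stops profiling and parses the collected data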
Python
def _gpu_analyse(self):
    """Collect and analyse GPU performance data."""
    self._dev_id = context.get_context("device_id")
    if GlobalComm.WORLD_COMM_GROUP == "nccl_world_group":
        self._dev_id = str(get_rank())
    self._gpu_profiler.stop()
    timeline_generator = self._generate_timeline()

    # Parse the minddata pipeline operator and queue for GPU.
    try:
        pipeline_parser = MinddataPipelineParser(self._output_path, self._dev_id, self._output_path)
        logger.info("Profiling: analyzing the minddata pipeline operator and queue for GPU.")
        pipeline_parser.parse()
    except ProfilerException as err:
        logger.warning(err.message)

    # Analyze minddata information.
    try:
        md_analyzer = MinddataProfilingAnalyzer(self._output_path, self._dev_id, self._output_path)
        logger.info("Profiling: analyzing the minddata information.")
        md_analyzer.analyze()
    except ProfilerException as err:
        logger.warning(err.message)

    # Analyse step trace info.
    try:
        logger.info("Profiling: analyzing the step trace info.")
        self._analyse_step_trace(
            is_training_mode_flag=timeline_generator.check_op_name('Gradients'),
            is_gpu_kernel_async_launch_flag=timeline_generator.is_gpu_kernel_async_launch()
        )
    except ProfilerException as err:
        logger.warning(err.message)

    logger.warning(
        '\nMemory Usage is not supported on GPU currently.\n'
        'Please run on Ascend if you would like to see memory analysis; '
        'otherwise, this warning can be ignored.'
    )
Python
def _analyse_step_trace(self, source_path=None, framework_parser=None, is_training_mode_flag=True,
                        is_gpu_kernel_async_launch_flag=False):
    """
    Analyse step trace data and save the result.

    Args:
        source_path (str): The directory that contains the step trace original data.
        framework_parser (FrameworkParser): The framework parser instance.
        is_training_mode_flag (bool): Whether in training mode or not.
        is_gpu_kernel_async_launch_flag (bool): Whether GPU kernels are launched asynchronously.
    """
    logger.info("Begin to parse step trace.")
    # Construct the output paths.
    dev_id = self._rank_id if self._device_target == "Ascend" else self._dev_id
    step_trace_intermediate_file_path = os.path.join(
        self._output_path,
        f'step_trace_raw_{dev_id}_detail_time.csv'
    )
    point_info_file_path = os.path.join(
        self._output_path,
        f'step_trace_point_info_{dev_id}.json'
    )
    step_trace_intermediate_file_path = validate_and_normalize_path(step_trace_intermediate_file_path)
    point_info_file_path = validate_and_normalize_path(point_info_file_path)
    if self._device_target == 'GPU':
        input_file_path = os.path.join(
            self._output_path,
            f'step_trace_profiling_{self._dev_id}.txt'
        )
        parser = GpuStepTraceParser(input_dir=input_file_path,
                                    output_file_path=step_trace_intermediate_file_path,
                                    is_training_mode=is_training_mode_flag,
                                    is_gpu_kernel_async_launch=is_gpu_kernel_async_launch_flag)
        parser.parse_and_save()
        point_info = parser.record_point_info(input_file_path, point_info_file_path)
    else:
        # Whether to keep the first step.
        skip_first_step_flag = framework_parser.check_op_name(INIT_OP_NAME)
        point_info = framework_parser.point_info
        # Recognize inference or training mode.
        is_training_mode_flag = framework_parser.check_op_name("Gradients")
        # Parse the step trace files and save the result to disk.
        source_path = validate_and_normalize_path(source_path)
        parser = AscendStepTraceParser(input_dir=source_path,
                                       output_file_path=step_trace_intermediate_file_path,
                                       job_id=self._job_id_env,
                                       skip_first_step=skip_first_step_flag,
                                       is_training_mode=is_training_mode_flag)
        parser.update_tag_op_type_map(point_info)
        parser.parse_and_save()
        point_info = parser.record_point_info(point_info, point_info_file_path)
    # Print the parser result.
    parser.show()
    logger.info("Finish saving the intermediate result: %s", step_trace_intermediate_file_path)
    logger.info("The point info is: %s", point_info)

    return point_info, is_training_mode_flag
Python
def _analyse_timeline(self, aicpu_parser, optime_parser, source_path):
    """
    Analyse and parse timeline info.

    Args:
        aicpu_parser (DataPreProcessParser): The parser instance for AI CPU operator
            execution time calculation.
        optime_parser (OPComputeTimeParser): The parser instance for AI Core operator
            execution time calculation.
    """
    timeline_analyser = AscendTimelineGenerator(self._output_path, self._dev_id,
                                                self._rank_id, self._rank_size)
    # Get framework info.
    integrator = Integrator(self._output_path, self._rank_id)
    aicore_detail_data = integrator.get_aicore_detail_data()
    aicore_detail_data_size = len(aicore_detail_data)
    col_names = ['op_name', 'op_type', 'avg_execution_time', 'subgraph',
                 'full_op_name', 'op_info']
    framework_info = {
        'col_name': col_names,
        'object': aicore_detail_data,
        'size': aicore_detail_data_size
    }

    all_reduce_info = integrator.query_for_all_reduce()

    # Get timeline info.
    logger.info('Start writing timeline info...')
    logger.info('Note: it could take a few minutes if you are training '
                'with a complex network or more than 10 steps.')
    # Add info into the timeline, such as AI CPU, AllReduce and framework info.
    aicpu_info = aicpu_parser.query_aicpu_data()
    min_cycle_counter = min(aicpu_parser.min_cycle_counter, optime_parser.min_cycle_counter)
    timeline_analyser.init_timeline(all_reduce_info, framework_info, aicpu_info,
                                    min_cycle_counter, source_path)
    size_limit = 100 * 1024 * 1024  # 100MB
    timeline_analyser.write_timeline(size_limit)
    timeline_analyser.write_timeline_summary()
Python
def _generate_timeline(self):
    """Used for GPU; generate timeline info and write it to a JSON format file."""
    try:
        size_limit = 100 * 1024 * 1024  # 100MB
        timeline_generator = GpuTimelineGenerator(self._output_path, self._dev_id)
        timeline_generator.init_timeline()
        timeline_generator.write_timeline(size_limit)
        timeline_generator.write_timeline_summary()
        return timeline_generator
    except (ProfilerIOException, ProfilerFileNotFoundException, RuntimeError) as err:
        logger.warning('Failed to write timeline data: %s', err)
        raise RuntimeError('Failed to write timeline data.') from err
Python
def _get_profiling_job_id(self):
    """Get the profiling job id, which was generated by the ada service.

    Returns:
        str, profiling job id.
    """
    job_id = ""
    job_dirs = filter(lambda item: item.startswith('JOB')
                      and os.path.isdir(os.path.join(self._output_path, item)),
                      os.listdir(self._output_path))
    sorted_job_dirs = sorted(job_dirs,
                             key=lambda x: os.path.getmtime(os.path.join(self._output_path, x)),
                             reverse=True)

    for dir_name in sorted_job_dirs:
        job_dir = os.path.join(self._output_path, dir_name)
        host_start_file_path = get_file_path(job_dir, "host_start.log")
        if host_start_file_path is None:
            logger.warning("Found profiling job path %s, but host_start.log does not exist; "
                           "the profiler will ignore this job dir.", job_dir)
            continue

        training_device_id = host_start_file_path.split('.')[-1]
        if self._dev_id != training_device_id:
            logger.warning("Found profiling job path %s, but it is not for the current training "
                           "device. The current training device id is %s, but the job path "
                           "device id is %s; the profiler will ignore this job dir.",
                           job_dir, self._dev_id, training_device_id)
            continue

        job_start_time = self._parse_host_start_log(host_start_file_path)
        if not job_start_time:
            logger.warning("Found profiling job path %s, but failed to get the job start info; "
                           "the profiler will ignore this job dir.", job_dir)
            continue

        if int(job_start_time) < self._start_time:
            logger.warning("Found profiling job path %s, but its start_time(%d) is earlier than "
                           "this training start_time(%d); the profiler will ignore this job dir.",
                           job_dir, int(job_start_time), self._start_time)
            continue  # The warning says this dir is ignored, so skip it.

        job_id = dir_name
        break

    if not job_id:
        msg = "Failed to get the profiling job. The output path is {}. Please check whether " \
              "a job dir (name starting with 'JOB') was generated in the output path, or the " \
              "device id from the job dir may mismatch the device_id in the current " \
              "process.".format(self._output_path)
        raise RuntimeError(msg)

    return job_id
Python
def _parse_host_start_log(input_file):
    """
    Parse the host start log file and get the start time of the job.

    Args:
        input_file (str): The file path of the host start log file.

    Returns:
        str, job start time.
    """
    job_start_time = ""
    with open(input_file) as f:
        for line in f:
            if "clock_realtime" in line:
                # The timestamp starts at index 16; the slice stops at
                # len(line) - 3, just past its last kept digit.
                job_start_time = line[16:len(line) - 3]
    return job_start_time
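To see what the fixed offsets do, here is a toy line run through the same slice. The real host_start.log is device-generated, so this layout is a hypothetical stand-in:

# Hypothetical log line; offsets 16 and len(line) - 3 assume the real layout.
line = "clock_realtime: 1622019095321\n"
print(line[16:len(line) - 3])  # -> 16220190953 (a prefix of the timestamp)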
Python
def _query_op_type_info(self):
    """
    Query AICORE operator type information.

    Returns:
        list[list], the AICORE operator type and execution time information.
    """
    integrator = Integrator(self._output_path, self._rank_id)
    return integrator.get_aicore_data()
Python
def _query_op_detail_info(self, op_type_order):
    """
    Query AICORE operator detail information.

    Args:
        op_type_order (list): The names of the op types, in order.

    Returns:
        dict, the AICORE operator detail information.
    """
    op_type_condition = {}
    if self._filt_optype_names:
        op_type_condition['not_in'] = self._filt_optype_names

    filter_condition = {
        'op_type': op_type_condition,
        'is_display_detail': False,
    }
    integrator = Integrator(self._output_path, self._rank_id)
    return integrator.query_and_sort_by_op_type(filter_condition, op_type_order)
Python
def _get_devid_rankid_and_devtarget(self):
    """Get the device id, rank id and device target of this training."""
    device_target = ""
    dev_id = ""
    rank_id = ""
    try:
        dev_id = str(context.get_context("device_id"))
        device_target = context.get_context("device_target")
    except ValueError as err:
        logger.error("Profiling: failed to get context, %s", err)

    if not dev_id or not dev_id.isdigit():
        dev_id = os.getenv('DEVICE_ID')
    if not dev_id or not dev_id.isdigit():
        dev_id = "0"
        logger.warning("Failed to get DEVICE_ID; using 0 instead.")

    if device_target and device_target not in ["Ascend", "GPU", "CPU"]:
        msg = "Profiling: unsupported backend: %s" % device_target
        raise RuntimeError(msg)

    rank_id = os.getenv("RANK_ID")
    if not rank_id or not rank_id.isdigit():
        rank_id = "0"
        logger.info("Failed to get RANK_ID; using 0 instead.")

    self._dev_id = dev_id
    self._device_target = device_target
    self._rank_id = rank_id
Python
def _get_output_path(self, kwargs):
    """Get the output path of profiling data."""
    if os.getenv("MS_DIAGNOSTIC_DATA_PATH") and kwargs.get("output_path") is not None:
        logger.warning("Both the parameter output_path and the environment variable "
                       "MS_DIAGNOSTIC_DATA_PATH are set; the profiling data will be saved "
                       "to the path given by the parameter output_path.")
    if kwargs.get("output_path") is None:
        if "output_path" in kwargs:
            kwargs.pop("output_path")
        # The environment variable is mainly set for the convenience of the cloud profiler.
        output_path = os.getenv("MS_DIAGNOSTIC_DATA_PATH")
        if output_path:
            self._output_path = validate_and_normalize_path(output_path)
        else:
            output_path = "data"
            self._output_path = validate_and_normalize_path(output_path)
    else:
        output_path = kwargs.pop("output_path")
        self._output_path = validate_and_normalize_path(output_path)

    self._output_path = os.path.join(self._output_path, "profiler")
    if not os.path.exists(self._output_path):
        os.makedirs(self._output_path, exist_ok=True)
        os.chmod(self._output_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
    else:
        logger.warning("The target dir already exists. There may be some old profiling data "
                       "in it, and it will be overwritten in the end.")
Python
def profile(network, profile_option):
    """
    Get the number of trainable parameters in the training network.

    Args:
        network (Cell): The training network.
        profile_option (ProfileOption): The profile option.

    Returns:
        dict, the key is the option name and the value is the result of the option.
    """
    result = dict()
    if not profile_option:
        raise ValueError("The parameter profile_option must be given a value from ProfileOption.")

    if profile_option == ProfileOption.trainable_parameters:
        if not isinstance(network, Cell):
            msg = "Profiling: the network should be an object of nn.Cell."
            raise ValueError(msg)
        param_nums = len(network.parameters_dict())
        result = {"trainable_parameters": param_nums}
    else:
        raise ValueError("Wrong option.")

    return result
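A hedged usage sketch for profile(); the tiny network below is illustrative only, and `profile`/`ProfileOption` are assumed to be in scope from the module above:

# Illustrative only; nn.Dense(4, 2) contributes a weight and a bias,
# so parameters_dict() has two entries.
from mindspore import nn

class TinyNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.dense = nn.Dense(4, 2)

    def construct(self, x):
        return self.dense(x)

print(profile(TinyNet(), ProfileOption.trainable_parameters))
# -> {'trainable_parameters': 2}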
Python
def arnoldi_iteration(k, A, M, V, H):
    """Perform a single (the k-th) step of the Arnoldi process."""
    v_ = V[..., k]
    v = M(A(v_))
    v, h = gram_schmidt(V, v)
    eps_v = _eps(v)
    _, v_norm_0 = _safe_normalize(v)
    tol = eps_v * v_norm_0
    unit_v, v_norm_1 = _safe_normalize(v, tol)
    V[..., k + 1] = unit_v
    h[k + 1] = v_norm_1
    H[k, :] = h
    breakdown = v_norm_1 == 0
    return V, H, breakdown
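For intuition, here is the same recurrence as a standalone NumPy sketch, independent of the gram_schmidt/_safe_normalize helpers above: it builds an orthonormal Krylov basis V and an upper Hessenberg H satisfying A @ V[:, :k] == V @ H.

import numpy as np

def arnoldi(A, b, k):
    n = b.size
    V = np.zeros((n, k + 1))
    H = np.zeros((k + 1, k))
    V[:, 0] = b / np.linalg.norm(b)
    for j in range(k):
        w = A @ V[:, j]
        for i in range(j + 1):           # modified Gram-Schmidt
            H[i, j] = V[:, i] @ w
            w -= H[i, j] * V[:, i]
        H[j + 1, j] = np.linalg.norm(w)
        if H[j + 1, j] == 0:             # breakdown: the Krylov space is invariant
            break
        V[:, j + 1] = w / H[j + 1, j]
    return V, H

A = np.array([[3., 2., 0.], [1., -1., 0.], [0., 5., 1.]])
b = np.array([2., 4., -1.])
V, H = arnoldi(A, b, 2)
print(np.allclose(A @ V[:, :2], V @ H))  # the Arnoldi relation holds -> True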
Python
def gmres_iter(A_mat_func, b, x0, r, r_norm, ptol, restart, M_mat_func):
    """Single iteration of the GMRES algorithm with restart."""
    V = mnp.pad(r[..., None], ((0, 0),) * r.ndim + ((0, restart),))
    dtype = mnp.result_type(b)
    # Use eye() to avoid constructing a singular matrix in case of early termination.
    R = mnp.eye(restart, restart + 1, dtype=dtype)
    givens = mnp.zeros((restart, 2), dtype=dtype)
    beta_vec = mnp.zeros((restart + 1), dtype=dtype)
    beta_vec[0] = r_norm

    k = 0
    err = r_norm
    while mnp.logical_and(mnp.less(k, restart), mnp.less(ptol, err)):
        V, H, _ = arnoldi_iteration(k, A_mat_func, M_mat_func, V, R)
        R[k, :], givens = givens_rotation(H[k, :], givens, k)
        beta_vec = rotate_vectors(beta_vec, k, givens[k, 0], givens[k, 1])
        err = mnp.absolute(beta_vec[k + 1])
        k = k + 1

    y = solve_triangular(R[:, :-1], beta_vec[:-1], trans='T', lower=True)
    dx = mnp.dot(V[:, :-1], y)
    x = x0 + dx
    r = M_mat_func(b - A_mat_func(x))
    r, r_norm = _safe_normalize(r)
    return x, r, r_norm
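The givens_rotation/rotate_vectors calls above apply plane rotations that zero out the subdiagonal of H one entry at a time, keeping the least-squares problem triangular. A self-contained NumPy illustration of one such rotation:

import numpy as np

# Rotate (a, b) so that the second component becomes zero.
a, b = 3.0, 4.0
r = np.hypot(a, b)                 # 5.0
c, s = a / r, b / r
G = np.array([[c, s], [-s, c]])
print(G @ np.array([a, b]))        # -> [5. 0.]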
Python
def gmres(A, b, x0=None, *, tol=1e-5, atol=0.0, restart=20, maxiter=None,
          M=None, solve_method='batched') -> (Tensor, int):
    """
    GMRES solves the linear system A x = b for x, given A and b.

    A is specified as a function performing A(vi) -> vf = A @ vi, and in principle
    need not have any particular special properties, such as symmetry. However,
    convergence is often slow for nearly symmetric operators.

    Args:
        A (Tensor or function): 2D Tensor or function that calculates the linear map
            (matrix-vector product) ``Ax`` when called like ``A(x)``. ``A`` must return
            a Tensor with the same structure and shape as its argument.
        b (Tensor): Right hand side of the linear system representing a single vector.
            Can be stored as a Tensor.

    Returns:
        x (Tensor): The converged solution. Has the same structure as ``b``.
        info (None): Placeholder for convergence information. In the future, MindSpore
            will report the number of iterations when convergence is not achieved,
            like SciPy.

    Other Parameters:
        x0 (Tensor, optional): Starting guess for the solution. Must have the same
            structure as ``b``. If this is unspecified, zeroes are used.
        tol, atol (float, optional): Tolerances for convergence,
            ``norm(residual) <= max(tol*norm(b), atol)``. We do not implement SciPy's
            "legacy" behavior, so MindSpore's tolerance will differ from SciPy unless
            you explicitly pass ``atol`` to SciPy's ``gmres``.
        restart (integer, optional): Size of the Krylov subspace ("number of
            iterations") built between restarts. GMRES works by approximating the true
            solution x as its projection into a Krylov space of this dimension - this
            parameter therefore bounds the maximum accuracy achievable from any guess
            solution. Larger values increase both number of iterations and iteration
            cost, but may be necessary for convergence. The algorithm terminates early
            if convergence is achieved before the full subspace is built.
            Default is 20.
        maxiter (integer): Maximum number of times to rebuild the size-``restart``
            Krylov space starting from the solution found at the last iteration. If
            GMRES halts or is very slow, decreasing this parameter may help.
            Default is infinite.
        M (Tensor or function): Preconditioner for A. The preconditioner should
            approximate the inverse of A. Effective preconditioning dramatically
            improves the rate of convergence, which implies that fewer iterations are
            needed to reach a given error tolerance.
        solve_method ('incremental' or 'batched'): The 'incremental' solve method
            builds a QR decomposition for the Krylov subspace incrementally during the
            GMRES process using Givens rotations. This improves numerical stability and
            gives a free estimate of the residual norm that allows for early
            termination within a single "restart". In contrast, the 'batched' solve
            method solves the least squares problem from scratch at the end of each
            GMRES iteration. It does not allow for early termination, but has much less
            overhead on GPUs.

    Supported Platforms:
        ``CPU`` ``GPU``

    Examples:
        >>> import numpy as onp
        >>> from mindspore.common import Tensor
        >>> import mindspore.numpy as mnp
        >>> from mindspore.scipy.sparse.linalg import gmres
        >>> A = Tensor(mnp.array([[3, 2, 0], [1, -1, 0], [0, 5, 1]], dtype=mnp.float32))
        >>> b = Tensor(onp.array([2, 4, -1], dtype=mnp.float32))
        >>> x, exitCode = gmres(A, b)
        >>> print(exitCode)  # 0 indicates successful convergence
        0
        >>> onp.allclose(mnp.dot(A, x).asnumpy(), b.asnumpy())
        True
    """
    if x0 is None:
        x0 = mnp.zeros_like(b)
    size = b.size
    if maxiter is None:
        maxiter = 10 * size  # copied from scipy
    if restart > size:
        restart = size
    x = x0

    if solve_method == 'incremental':
        if M is None:
            def M_mat_func(x):
                return x
        elif not callable(M):
            def M_mat_func(x):
                return mnp.dot(M, x)
        else:
            M_mat_func = M

        if not callable(A):
            def A_mat_func(x):
                return mnp.dot(A, x)
        else:
            A_mat_func = A

        _, b_norm = _safe_normalize(b)
        atol = mnp.maximum(tol * b_norm, atol)

        Mb = M_mat_func(b)
        _, Mb_norm = _safe_normalize(Mb)
        ptol = Mb_norm * mnp.minimum(1.0, atol / b_norm)

        # Iterative GMRES.
        r = M_mat_func(b - A_mat_func(x0))
        r, r_norm = _safe_normalize(r)
        k = 0
        while k < maxiter and r_norm > atol:
            x, r, r_norm = gmres_iter(
                A_mat_func, b, x, r, r_norm, ptol, restart, M_mat_func)
            k += 1
    elif solve_method == 'batched':
        if M is None:
            def identity(x):
                return x
            M = identity
        x = BatchedGmres(A, M)(b, x, tol, atol, restart, maxiter)
    else:
        raise ValueError("solve_method should be in ('incremental', 'batched'), but got {}."
                         .format(solve_method))

    _, x_norm = _safe_normalize(x)
    info = mnp.where(mnp.isnan(x_norm), _INT_NEG_ONE, _INT_ZERO)
    return x, info
Python
def createObjects(self):
    """
    You should override this and initialize all of your wpilib
    objects here (and not in your components, for example). This
    serves two purposes:

    - It puts all of your motor/sensor initialization in the same
      place, so that if you need to change a port/pin number it
      makes it really easy to find it. Additionally, if you want
      to create a simplified robot program to test a specific
      thing, it makes it really easy to copy/paste it elsewhere

    - It allows you to use the magic injection mechanism to share
      variables between components

    .. note:: Do not access your magic components in this function,
              as their instances have not been created yet. Do not
              create them either.
    """
    raise NotImplementedError
Python
def teleopPeriodic(self):
    """
    Periodic code for teleop mode should go here.

    Users should override this method for code which will be called
    periodically at a regular rate while the robot is in teleop mode.

    This code executes before the ``execute`` functions of all
    components are called.

    .. note:: If you want this function to be called in autonomous
              mode, set ``use_teleop_in_autonomous`` to True in your
              robot class.
    """
    func = self.teleopPeriodic.__func__
    if not hasattr(func, "firstRun"):
        self.logger.warning(
            "Default MagicRobot.teleopPeriodic() method... Override me!"
        )
        func.firstRun = False
Python
def disabledInit(self):
    """
    Initialization code for disabled mode may go here.

    Users may override this method for initialization code which will be
    called each time the robot enters disabled mode.

    .. note:: The ``on_disable`` functions of all components are called
              before this function is called.
    """
    pass
Python
def disabledPeriodic(self):
    """
    Periodic code for disabled mode should go here.

    Users should override this method for code which will be called
    periodically at a regular rate while the robot is in disabled mode.

    This code executes before the ``execute`` functions of all
    components are called.
    """
    func = self.disabledPeriodic.__func__
    if not hasattr(func, "firstRun"):
        self.logger.warning(
            "Default MagicRobot.disabledPeriodic() method... Override me!"
        )
        func.firstRun = False
Python
def testInit(self):
    """
    Initialization code for test mode should go here.

    Users should override this method for initialization code which will
    be called each time the robot enters test mode.
    """
    pass
Python
def robotPeriodic(self):
    """
    Periodic code for all modes should go here.

    Users may override this method, but overriding is not required. This
    function gets called last in each mode. You may use it for any code you
    need to run during all modes of the robot (e.g. NetworkTables updates).

    The default implementation will update SmartDashboard, LiveWindow and
    Shuffleboard.
    """
    watchdog = self.watchdog
    self.__sd_update()
    watchdog.addEpoch("SmartDashboard")
    self.__lv_update()
    watchdog.addEpoch("LiveWindow")
    self.__sf_update()
    watchdog.addEpoch("Shuffleboard")
Python
def onException(self, forceReport=False):
    """
    This function must *only* be called when an unexpected exception has
    occurred that would otherwise crash the robot code. Use this inside
    your :meth:`operatorActions` function.

    If the FMS is attached (e.g., during a real competition match), this
    function will return without raising an error. However, it will try to
    report one-off errors to the Driver Station so that they will be
    recorded in the Driver Station Log Viewer. Repeated errors may not get
    logged.

    Example usage::

        def teleopPeriodic(self):
            try:
                if self.joystick.getTrigger():
                    self.shooter.shoot()
            except:
                self.onException()

            try:
                if self.joystick.getRawButton(2):
                    self.ball_intake.run()
            except:
                self.onException()

            # and so on...

    :param forceReport: Always report the exception to the DS. Don't set
                        this to True
    """
    # If the FMS is not attached, crash the robot program.
    if not self.ds.isFMSAttached():
        raise

    # Otherwise, if the FMS is attached then try to report the error via
    # the driver station console. Maybe.
    now = wpilib.Timer.getFPGATimestamp()

    try:
        if (
            forceReport
            or (now - self.__last_error_report) > self.error_report_interval
        ):
            wpilib.DriverStation.reportError("Unexpected exception", True)
    except:
        pass  # ok, can't do anything here

    self.__last_error_report = now
Python
@contextlib.contextmanager  # needed for the ``with`` usage below; requires ``import contextlib``
def consumeExceptions(self, forceReport=False):
    """
    This returns a context manager which will consume any uncaught
    exceptions that might otherwise crash the robot.

    Example usage::

        def teleopPeriodic(self):
            with self.consumeExceptions():
                if self.joystick.getTrigger():
                    self.shooter.shoot()

            with self.consumeExceptions():
                if self.joystick.getRawButton(2):
                    self.ball_intake.run()

            # and so on...

    :param forceReport: Always report the exception to the DS. Don't set
                        this to True

    .. seealso:: :meth:`onException` for more details
    """
    try:
        yield
    except:
        self.onException(forceReport=forceReport)
Python
def autonomous(self):
    """
    MagicRobot will do The Right Thing and automatically load all
    autonomous mode routines defined in the autonomous folder.

    .. warning:: Internal API, don't override
    """
    self.__nt_put_mode("auto")
    self.__nt_put_is_ds_attached(self.ds.isDSAttached())

    self._on_mode_enable_components()

    auto_functions = (
        self._execute_components,
        self._update_feedback,
        self.robotPeriodic,
    )
    if self.use_teleop_in_autonomous:
        auto_functions = (self.teleopPeriodic,) + auto_functions

    self._automodes.run(
        self.control_loop_wait_time,
        auto_functions,
        self.onException,
        watchdog=self.watchdog,
    )

    self._on_mode_disable_components()
Python
def disabled(self):
    """
    This function is called in disabled mode. You should not override
    this function; rather, you should override the :meth:`disabledPeriodic`
    function instead.

    .. warning:: Internal API, don't override
    """
    watchdog = self.watchdog
    watchdog.reset()

    self.__nt_put_mode("disabled")
    ds_attached = None

    self._on_mode_disable_components()
    try:
        self.disabledInit()
    except:
        self.onException(forceReport=True)
    watchdog.addEpoch("disabledInit()")

    with NotifierDelay(self.control_loop_wait_time) as delay:
        while self.isDisabled():
            if ds_attached != self.ds.isDSAttached():
                ds_attached = not ds_attached
                self.__nt_put_is_ds_attached(ds_attached)

            hal.observeUserProgramDisabled()
            try:
                self.disabledPeriodic()
            except:
                self.onException()
            watchdog.addEpoch("disabledPeriodic()")

            self._update_feedback()
            self.robotPeriodic()
            watchdog.addEpoch("robotPeriodic()")
            # watchdog.disable()
            watchdog.printIfExpired()

            delay.wait()
            watchdog.reset()
Python
def operatorControl(self):
    """
    This function is called in teleoperated mode. You should not override
    this function; rather, you should override the :meth:`teleopPeriodic`
    function instead.

    .. warning:: Internal API, don't override
    """
    watchdog = self.watchdog
    watchdog.reset()

    self.__nt_put_mode("teleop")
    # Don't need to update this during teleop -- presumably will switch
    # modes when the DS is no longer attached.
    self.__nt_put_is_ds_attached(self.ds.isDSAttached())

    # Initialize things.
    self._on_mode_enable_components()

    try:
        self.teleopInit()
    except:
        self.onException(forceReport=True)
    watchdog.addEpoch("teleopInit()")

    observe = hal.observeUserProgramTeleop

    with NotifierDelay(self.control_loop_wait_time) as delay:
        while self.isOperatorControlEnabled():
            observe()
            try:
                self.teleopPeriodic()
            except:
                self.onException()
            watchdog.addEpoch("teleopPeriodic()")

            self._execute_components()
            self._update_feedback()
            self.robotPeriodic()
            watchdog.addEpoch("robotPeriodic()")
            # watchdog.disable()
            watchdog.printIfExpired()

            delay.wait()
            watchdog.reset()

    self._on_mode_disable_components()
Python
def test(self):
    """Called when the robot is in test mode."""
    watchdog = self.watchdog
    watchdog.reset()

    self.__nt_put_mode("test")
    self.__nt_put_is_ds_attached(self.ds.isDSAttached())

    try:
        self.testInit()
    except:
        self.onException(forceReport=True)
    watchdog.addEpoch("testInit()")

    with NotifierDelay(self.control_loop_wait_time) as delay:
        while self.isTest() and self.isEnabled():
            hal.observeUserProgramTest()
            try:
                self.testPeriodic()
            except:
                self.onException()
            watchdog.addEpoch("testPeriodic()")

            self._update_feedback()
            self.robotPeriodic()
            watchdog.addEpoch("robotPeriodic()")
            # watchdog.disable()
            watchdog.printIfExpired()

            delay.wait()
            watchdog.reset()
Python
def plot_model_history(model_history):
    """
    Plot the accuracy and loss curves given the model_history.
    """
    fig, axs = plt.subplots(1, 2, figsize=(15, 5))
    epochs = len(model_history.history['accuracy'])

    # Summarize history for accuracy.
    axs[0].plot(range(1, epochs + 1), model_history.history['accuracy'])
    axs[0].plot(range(1, epochs + 1), model_history.history['val_accuracy'])
    axs[0].set_title('Model Accuracy')
    axs[0].set_ylabel('Accuracy')
    axs[0].set_xlabel('Epoch')
    # The tick step belongs inside arange(); the original passed it as a
    # second argument to set_xticks, where it is not a step.
    axs[0].set_xticks(np.arange(1, epochs + 1, max(1, epochs // 10)))
    axs[0].legend(['train', 'val'], loc='best')

    # Summarize history for loss.
    axs[1].plot(range(1, epochs + 1), model_history.history['loss'])
    axs[1].plot(range(1, epochs + 1), model_history.history['val_loss'])
    axs[1].set_title('Model Loss')
    axs[1].set_ylabel('Loss')
    axs[1].set_xlabel('Epoch')
    axs[1].set_xticks(np.arange(1, epochs + 1, max(1, epochs // 10)))
    axs[1].legend(['train', 'val'], loc='best')

    fig.savefig('plot.png')
    plt.show()
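A minimal, self-contained way to exercise plot_model_history, assuming a Keras-style History object; the toy data and one-layer model here are placeholders chosen only to produce a History:

import numpy as np
from tensorflow import keras

# Toy data/model purely to produce a History object with the expected keys.
x = np.random.rand(64, 8)
y = (x.sum(axis=1) > 4).astype(int)
model = keras.Sequential([keras.layers.Dense(1, activation='sigmoid')])
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
history = model.fit(x, y, validation_split=0.25, epochs=10, verbose=0)

plot_model_history(history)  # writes plot.png and shows both panels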
Python
def timed_message(bot):
    """
    Spit out one of the pre-configured messages every [LRB]timers_delay minutes.
    """
    if 'index' not in bot.memory['timer']:
        # Because for some reason in setup() it doesn't trigger all the time?
        bot.memory['timer']['index'] = 0

    if not bot.memory['timer']['enabled']:
        # Timed messages are disabled.
        return NOLIMIT

    if 'delay' not in bot.memory['timer']:
        # Because for some reason in setup() it doesn't trigger all the time?
        bot.memory['timer']['delay'] = 999

    # Determine the delay.
    bot.memory['timer']['delay'] = bot.memory['timer']['delay'] + 1
    if bot.memory['timer']['delay'] < int(bot.config.LRB.timers_delay):
        # Not time yet.
        return NOLIMIT
    else:
        # It is time.
        bot.memory['timer']['delay'] = 0

    # Fetch the line. Query parameters must be a sequence; the original passed
    # a bare string, which only works by accident for single-digit ids.
    ret = bot.db.execute('SELECT message FROM lrb_timers WHERE id=?',
                         (str(bot.memory['timer']['index']),))
    msg = ret.fetchone()[0]

    # Move the index up one, or loop.
    bot.memory['timer']['index'] += 1
    count = bot.db.execute('SELECT COUNT(*) FROM lrb_timers').fetchone()[0]
    if bot.memory['timer']['index'] >= int(count):
        bot.memory['timer']['index'] = 0

    # Say!
    bot.msg(bot.config.LRB.channel, msg)
Python
def caps_detection(bot, trigger):
    """
    Automatically detect allcaps and act on it.
    """
    try:
        if trigger.admin:
            # This person is a moderator.
            return NOLIMIT
    except KeyError:
        # This potentially lets new-joiners shout for a little bit.
        # #blametwitch
        return NOLIMIT

    if len(trigger.group(0)) < 10:
        # This message was very short, could be something like "OMG",
        # let's not make a fuss about it.
        return NOLIMIT

    if isReg(bot, trigger.sender, trigger.nick):
        # This person is a regular.
        return NOLIMIT

    counter = Counter(trigger.group(0))
    caps = 0
    lowercase = 0
    for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ":
        caps += counter[c]
    for c in "abcdefghijklmnopqrstuvwxyz":
        lowercase += counter[c]

    if caps >= lowercase:
        bot.say('.ban ' + trigger.nick)
        bot.reply("Oi! I'm the only one allowed to ROAAAAAAAAAAAAR around here! (Caps purge)")
        bot.say('.unban ' + trigger.nick)
    else:
        return NOLIMIT
Python
def shoutout(bot, trigger):
    """ Share a little bit of twitch love. """
    if not trigger.admin:
        return bot.reply('I only let mods do shoutouts.')
    if trigger.group(2):
        try:
            query_url = 'https://api.twitch.tv/kraken/channels/{0}?api_version=4&client_id={1}'
            answer = web.get(query_url.format(trigger.group(2), bot.config.LRB.api_key))
        except Exception:
            return bot.reply("Couldn't contact the Twitch API servers. :( #BlameTwitch")
        try:
            data = json.loads(answer)
        except ValueError:
            return bot.reply("The Twitch API returned an invalid object. :( #BlameTwitch")
        try:
            replaceData = {'name': data['display_name'], 'link': data['url'], 'game': data['game']}
            return bot.say(bot.config.LRB.shoutmsg % replaceData)
        except (KeyError, TypeError):
            return bot.reply("The Twitch API be derp. :( #BlameTwitch")
    else:
        return NOLIMIT
Python
def link_detection(bot, trigger):
    """ Automatically detect links and act on it. """
    try:
        if trigger.admin:
            # Channel ops can link just fine.
            return NOLIMIT
    except KeyError:
        pass

    if trigger.nick in bot.memory['permitted_users']:
        bot.memory['permitted_users'].pop(trigger.nick, None)
        return NOLIMIT
    elif isReg(bot, trigger.sender, trigger.nick):
        return NOLIMIT  # Regulars can link just fine.
    else:
        bot.say('.ban ' + trigger.nick)
        bot.reply('Sharing knowledge is cool and all, but ask the mods before sending links, ok?')
        bot.say('.unban ' + trigger.nick)
Python
def tflite_inference(interpreter, indexes, img_arr):
    '''
    Receive an image array and run inference
    :param interpreter: tflite interpreter.
    :param indexes: tflite tensor indexes.
    :param img_arr: 3D numpy array, RGB order.
    :return: tuple of (bboxes, scores) read from the output tensors.
    '''
    input_data = np.array(img_arr, dtype=np.float32)
    interpreter.set_tensor(indexes[0], input_data)
    interpreter.invoke()
    bboxes = interpreter.get_tensor(indexes[1][0])
    scores = interpreter.get_tensor(indexes[1][1])
    return bboxes, scores
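A hedged usage sketch: indexes appears to pack the input tensor index and the two output tensor indexes; the model path and input shape below are placeholder assumptions, not from the original source:

import numpy as np
import tensorflow as tf

interpreter = tf.lite.Interpreter(model_path="detector.tflite")  # hypothetical model file
interpreter.allocate_tensors()
input_index = interpreter.get_input_details()[0]["index"]
output_indexes = [d["index"] for d in interpreter.get_output_details()]

img = np.zeros((1, 320, 320, 3), dtype=np.float32)  # dummy RGB batch; real shape depends on the model
bboxes, scores = tflite_inference(interpreter, (input_index, output_indexes), img)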
Python
def goals( self, timeout: Optional[int] = None, ) -> Tuple[bool, str, Optional[Goals], str]: """Get the current set of hypotheses and goals.""" assert self.xml is not None self.logger.debug("goals") response, err = self.call(self.xml.goal(), timeout=timeout) return ( isinstance(response, Ok), response.msg, response.val if isinstance(response, Ok) else None, err, )
Python
def do_option(
    self,
    cmd: str,
    in_script: bool,
    encoding: str = "utf-8",
    timeout: Optional[int] = None,
) -> Tuple[bool, str, Optional[Tuple[int, int]], str]:
    """Set or get an option."""
    assert self.xml is not None
    self.logger.debug("do_option: %s", cmd)
    vals, opt = self.xml.parse_option(cmd)

    if vals is None:
        response, err = self.call(
            self.xml.get_options(encoding=encoding),
            timeout=timeout,
        )

        if isinstance(response, Ok):
            optval = [
                (val, desc) for name, desc, val in response.val if name == opt
            ]

            if optval != []:
                ret = f"{optval[0][1]}: {optval[0][0]}"
            else:
                ret = "Invalid option name"
    else:
        errs = []
        for val in vals:
            response, err = self.call(
                self.xml.set_options(opt, val, encoding=encoding),
                timeout=timeout,
            )
            ret = response.msg
            errs.append(err)
            if isinstance(response, Ok):
                break
        err = "".join(errs)

    if isinstance(response, Ok) and in_script:
        # Hack to associate setting an option with a new state id by
        # executing a noop so it works correctly with rewinding
        success, _, _, _ = self.advance(self.xml.noop, encoding)
        assert success

    return (
        isinstance(response, Ok),
        ret if isinstance(response, Ok) else response.msg,
        None if isinstance(response, Ok) else response.loc,
        err,
    )
Python
def call( self, cmdtype_msg: Tuple[str, Optional[bytes]], timeout: Optional[int] = None, ) -> Tuple[Result, str]: """Send 'msg' to the Coqtop process and wait for the response.""" assert self.xml is not None # Check if Coqtop has stopped if not self.running(): raise CoqtopError("Coqtop is not running.") # Throw away any unread messages self.empty_out() # 'msg' can be None if a command does not exist for a particular # version and is being faked. # NOTE: It is important that the '_standardize' function being called # does not depend on the value it is passed since it is None cmd, msg = cmdtype_msg if msg is None: return self.xml.standardize(cmd, Ok(None)), self.collect_err() # Don't bother doing prettyxml if debugging isn't on if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(prettyxml(msg)) self.send_cmd(msg) with futures.ThreadPoolExecutor(1) as pool: try: timeout = timeout if timeout != 0 else None response, err = pool.submit(self.get_answer).result(timeout) except futures.TimeoutError: self.interrupt() response, err = TIMEOUT_ERR, "" return self.xml.standardize(cmd, response), err
Python
def drain_queue(q: BytesQueue) -> Iterator[bytes]: """Yield data from 'q' until it is empty.""" while not q.empty(): try: yield q.get_nowait() except Empty: return
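For example, assuming BytesQueue is a queue.Queue holding bytes:

from queue import Queue

q = Queue()
for chunk in (b"<value>", b"</value>"):
    q.put(chunk)
print(b"".join(drain_queue(q)))  # b'<value></value>'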
Python
def capture_dead(self) -> None: """Continually check if Coqtop has died.""" while self.running(): time.sleep(1) self.stop()
Python
def send_cmd(self, cmd: bytes) -> None: """Write to Coqtop's stdin.""" if self.coqtop is None: raise CoqtopError("coqtop must not be None in send_cmd()") if self.coqtop.stdin is None: raise CoqtopError("coqtop stdin must not be None in send_cmd()") self.coqtop.stdin.write(cmd) self.coqtop.stdin.flush()
Python
def interrupt(self) -> None: """Send a SIGINT signal to Coqtop.""" if self.coqtop is None: raise CoqtopError("Coqtop is not running.") self.coqtop.send_signal(signal.SIGINT)
Python
def toggle_debug(self) -> Optional[str]: """Enable or disable logging of debug messages.""" self.logger.removeHandler(self.handler) self.handler.flush() self.handler.close() if self.log is None: # Create unique log file fmt = logging.Formatter("%(asctime)s: %(message)s") self.log = NamedTemporaryFile( # pylint: disable=consider-using-with mode="w", prefix=f"coqtop_{datetime.datetime.now().strftime('%y%m%d_%H%M%S')}_", delete=False, ) self.handler = logging.StreamHandler(self.log) self.handler.setFormatter(fmt) self.logger.addHandler(self.handler) self.logger.setLevel(logging.DEBUG) else: # Clean up old logging self.log.close() # Set to null logging self.log = None self.handler = logging.NullHandler() self.logger.addHandler(self.handler) self.logger.setLevel(logging.CRITICAL) return self.log.name if self.log is not None else None
Python
def _unescape(cmd: bytes) -> bytes: """Replace escaped characters with the unescaped version.""" charmap = {b"&nbsp;": b" ", b"&apos;": b"'", b"&#40;": b"(", b"&#41;": b")"} for escape, unescape in charmap.items(): cmd = cmd.replace(escape, unescape) return cmd
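A quick check of the replacement table:

print(_unescape(b"f &#40;x&nbsp;: nat&#41; := x"))  # b'f (x : nat) := x'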
Python
def _parse_tagged_tokens( tags: Container[PPTag], xml: ET.Element, stack: Optional[List[PPTag]] = None, inner: bool = False, ) -> Iterator[Tuple[str, List[PPTag]]]: """Scrape an XML element into a stream of text tokens and stack of tags. Helper function to parse_tagged_tokens. Written to support richpp tags, and thus supports .start and .end tags used by Coqtop to highlight ranges that are not properly nested (i.e., <start.a/>...<start.b/>...<end.a/>...<end.b/> is allowed). This is somewhat documented here: https://github.com/coq/coq/blob/master/dev/doc/xml-protocol.md#highlighting-text Documentation neglects to mention the semantics of start. and end. tags that are not self-closing. Until we get clarification, we will interpret <start.a>foo</start.a>bar as <start.a/>foobar and <end.b>foo</end.b>bar as foobar<end.b/>. """ pop_after = None if stack is None: stack = [] # Check tag, see if we should modify stack if xml.tag.startswith("start."): _, _, tag = xml.tag.rpartition("start.") # assert(tag != "") if tag in tags: # start. tag: push onto stack stack.insert(0, tag) elif xml.tag.startswith("end."): _, _, tag = xml.tag.rpartition("end.") # assert(tag != "") if tag in tags: # end. tag: remove from stack (even if it's not at the top) pop_after = tag elif xml.tag in tags: # regular tag: push onto stack, but remember to pop it before xml.tail stack.insert(0, xml.tag) pop_after = xml.tag # Get text before first inner child if xml.text is not None: yield (xml.text, stack[:]) # Recurse on children, with modified stack for child in xml: yield from _parse_tagged_tokens(tags, child, stack, True) if pop_after is not None: stack.remove(pop_after) # Get trailing text up to start of next tag, unless this is the outermost tag if inner and xml.tail is not None: yield (xml.tail, stack[:])
Python
def parse_tagged_tokens( tags: Container[PPTag], xml: ET.Element, ) -> Iterator[TaggedToken]: """Scrape an XML element into a stream of text tokens and accompanying tags. Written to support richpp markup. Only considers tags specified by the tags parameter. """ token_acc, last_tag = "", None # Recursive helper _parse_tagged_tokens gives us tag stacks for token, tag_list in _parse_tagged_tokens(tags, xml): # Take top tag from tag stack, if any top_tag = tag_list[0] if tag_list != [] else None if top_tag == last_tag: # Join tokens whose top tag is the same token_acc += token else: yield (token_acc, last_tag) token_acc, last_tag = token, top_tag yield (token_acc, last_tag)
Python
def join_tagged_tokens(tagged_tokens: Iterable[TaggedToken]) -> str:
    """Join tokens from a tagged token stream.

    NOTE: For all tags and xml,
    join_tagged_tokens(parse_tagged_tokens(tags, xml)) == "".join(xml.itertext())
    """
    return "".join(s for s, _ in tagged_tokens)
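A hedged example of both helpers on a hand-written richpp-style element (the tag name is illustrative):

import xml.etree.ElementTree as ET

elem = ET.fromstring('<pp>x : <constr.reference>nat</constr.reference> |- True</pp>')
tokens = list(parse_tagged_tokens({"constr.reference"}, elem))
# [('x : ', None), ('nat', 'constr.reference'), (' |- True', None)]
print(join_tagged_tokens(tokens) == "".join(elem.itertext()))  # True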
Python
def partition_warnings(stderr: str) -> Tuple[str, str]: """Partition Coq stderr messages into warnings and errors. Warnings are assumed to have the following form: Warning: message_with_newlines [warning_type]\n Everything else is treated as an error message. """ warns: List[str] = [] errs: List[str] = [] # Strip whitespace and drop empty strings for msg in filter(None, map(str.strip, WARNING_RE.split(stderr))): (warns if WARNING_RE.fullmatch(msg) else errs).append(msg) return "\n".join(warns), "\n".join(errs)
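A hedged demo; WARNING_RE is defined elsewhere in the module, so the pattern below is only an illustrative stand-in matching the 'Warning: ... [type]' shape the docstring describes:

import re

WARNING_RE = re.compile(r"(Warning:[\s\S]*?\[[\w\-.,; ]+\])")  # illustrative stand-in

stderr = "Warning: foo is deprecated [deprecated]\nFile error: bar\n"
warns, errs = partition_warnings(stderr)
print(warns)  # Warning: foo is deprecated [deprecated]
print(errs)   # File error: bar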
Python
def prettyxml(xml: bytes) -> str: """Pretty print XML for debugging.""" xml = _unescape(xml) # No stubs for xml.dom.minidom return cast(str, parseString(xml).toprettyxml())
Python
def launch(self, filename: str, args: Iterable[str]) -> Tuple[str, ...]: """The command to launch coqtop with the appropriate arguments.""" # Find the executable try: coqs = ( p for p in ( which(pre + self.coq_prog + ext, path=self.coq_path) for pre in ("", "coq-prover.") for ext in ("", ".opt") ) if p is not None ) coq = next(coqs) except StopIteration as e: path = "$PATH" if self.coq_path is None else self.coq_path raise FindCoqtopError( f"Could not find {self.coq_prog} in {path}. Perhaps you need " "to set g:coqtail_coq_path or g:coqtail_coq_prog." ) from e # Confirm the version matches version = parse_version(extract_version(coq)) if version != self.version: raise FindCoqtopError( f"{coq} version does not match version reported by coqc.\n" f"Expected: {self.version} Got: {version}" ) return ( (coq,) + tuple(self.launch_args) + self.topfile(filename, args) + tuple(args) )
Python
def _to_py(self, xml: ET.Element) -> Any: """Parse an XML value into a corresponding Python type.""" try: return self._to_py_funcs[xml.tag](xml) except KeyError as e: raise unexpected(tuple(self._to_py_funcs), xml.tag) from e
Python
def _of_py(self, val: Any) -> ET.Element: """Construct an XML element from a corresponding Python type.""" try: return self._of_py_funcs[type(val).__name__](val) except KeyError as e: raise unexpected(tuple(self._of_py_funcs), type(val).__name__) from e
Python
def _build_xml( self, tag: str, val: Optional[str] = None, children: Any = sentinel, text: Optional[str] = None, attrs: Optional[Dict[str, str]] = None, ) -> ET.Element: """Construct an XML element with a given tag, value, and children.""" if attrs is None: attrs = {} if val is not None: attrs.update({"val": val}) # If children is a list then convert each element separately, if it is # a tuple, treat it as a single element if children is self.sentinel: children = () elif isinstance(children, list): children = [self._of_py(child) for child in children] else: children = (self._of_py(children),) xml = ET.Element(tag, attrs) xml.extend(children) xml.text = text return xml
Python
def raw_response(self, data: bytes) -> Optional[Result]: """Try to parse an XML response from Coqtop into an Ok or Err.""" res = None msgs: List[str] = [] try: xmls = ET.fromstring(b"<coqtoproot>" + _unescape(data) + b"</coqtoproot>") except ET.ParseError: # If not all data has been read, the XML might not be well-formed return None # Wait for a 'value' node and store any 'message' nodes for xml in xmls: if xml.tag == "value": res = self._to_response(xml) elif xml.tag in ("message", "feedback"): # _to_py is guaranteed to either return str or # a sequence of tagged tokens for message or feedback msg = self._to_py(xml) if isinstance(msg, list): msg = join_tagged_tokens(msg) # Sanity check if not isinstance(msg, str): raise unexpected((str,), type(msg)) msgs.append(msg.strip()) else: raise unexpected(("value", "message", "feedback"), xml.tag) if res is not None: # Error messages may be duplicated between the 'value' and # 'feedback' tags. # https://coq.discourse.group/t/avoiding-duplicate-error-messages-with-the-xml-protocol/411 msg = res.msg.strip() if msg not in msgs: msgs.insert(0, msg) res.msg = "\n\n".join(msg for msg in msgs if msg != "") return res
Python
def standardize(self, cmd: str, res: Result) -> Result: """Put the information in 'res' into a version-independent form.""" # By default return unchanged try: return self._standardize_funcs[cmd](res) except KeyError: return res
Python
def edit_at( self, state: int, steps: int, encoding: str = "utf-8", ) -> Tuple[str, Optional[bytes]]: """Create an XML string to move Coqtop to a specific location."""
Python
def query( self, query: str, state: int, encoding: str = "utf-8", ) -> Tuple[str, Optional[bytes]]: """Create an XML string to pose a query to Coqtop."""
Python
def parse_option(cmd: str) -> Tuple[Optional[Sequence[OptionArg]], str]: """Parse what option is being set/checked.""" # Assumes cmd is of the form 'Set|Unset|Test {option_name}' opts = cmd.strip(".").split() ty = opts[0] vals: Optional[Sequence[XMLInterfaceBase.OptionArg]] if ty == "Test": vals = None elif ty == "Set": val: XMLInterfaceBase.OptionArg if opts[-1][0].isdigit(): val = int(opts[-1]) opts = opts[:-1] elif opts[-1][-1] == '"': for idx, opt in enumerate(opts): if opt[0] == '"': val = " ".join(opts[idx:]).strip('"') opts = opts[:idx] else: val = True vals = (val,) elif ty == "Unset": # Don't know if the option expects a bool, option int, or option # str, so try all vals = (False, (None, "int"), (None, "str")) else: raise unexpected(("Set", "Unset", "Test"), ty) return vals, " ".join(opts[1:])
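Hedged examples, calling parse_option as the staticmethod its self-less signature suggests:

vals, opt = XMLInterfaceBase.parse_option("Set Printing Width 100.")
# vals == (100,), opt == "Printing Width"
vals, opt = XMLInterfaceBase.parse_option("Test Printing All.")
# vals is None, opt == "Printing All"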
Python
def _to_option_value(self, xml: ET.Element) -> "CoqOptionValue": """Expect: <option_value>bool | option int | string</option_value>""" ty = xml.get("val", None) if ty is not None: if ty.startswith("int"): ty = "int" elif ty.startswith("str"): ty = "str" else: ty = "bool" return self.CoqOptionValue(self._to_py(xml[0]), ty)
Python
def init(self, _encoding: str = "utf-8") -> Tuple[str, Optional[bytes]]: """Create an XML string to initialize Coqtop. Not a command in 8.4 so return dummy command. """ return ("Init", None)
Python
def add( self, cmd: str, state: int, encoding: str = "utf-8", ) -> Tuple[str, Optional[bytes]]: """Create an XML string to advance Coqtop. Attrs: verbose: bool - Verbose output id: int - The current state id Args: cmd: string - The command to evaluate """ return ( "Add", self._make_call( encoding, "interp", attrs={"verbose": "true", "id": str(state)}, arg=cmd, ), )
Python
def edit_at( self, _state: int, steps: int, encoding: str = "utf-8", ) -> Tuple[str, Optional[bytes]]: """Create an XML string to move Coqtop to a specific location. Attrs: steps: int - The number of steps to rewind """ return ( "Edit_at", self._make_call(encoding, "rewind", attrs={"steps": str(steps)}), )
Python
def goal(self, encoding: str = "utf-8") -> Tuple[str, Optional[bytes]]: """Create an XML string to check the current goal state. Args: _: unit - Empty arg """ return ("Goal", self._make_call(encoding, "goal", children=()))
Python
def _standardize_goal(self, res: Result) -> Result: """Standardize the info returned by 'Goal'. Return: fg: list Goal - The current goals bg: list (list Goal * list Goal) - Unfocused goals shelved: list Goal - Shelved goals (dummy value in 8.4) given_up: list Goal - Admitted goals (dummy value in 8.4) """ # pylint: disable=no-self-use if isinstance(res, Ok): opt_goals: XMLInterfaceBase.CoqOption = res.val if opt_goals is not None: goals: XMLInterface84.CoqGoals = opt_goals.val res.val = Goals( [Goal(g.hyp, g.ccl) for g in goals.fg], [ [Goal(g.hyp, g.ccl) for g in pre + post] for pre, post in goals.bg ], [], [], ) return res
Python
def status(self, encoding: str = "utf-8") -> Tuple[str, Optional[bytes]]: """Create an XML string to check Coqtop's status. Args: _: unit - Empty arg """ return ("Status", self._make_call(encoding, "status", children=()))
Python
def init(self, encoding: str = "utf-8") -> Tuple[str, Optional[bytes]]: """Create an XML string to initialize Coqtop. Args: option string - A Coq file to add to the LoadPath to do ? """ return ("Init", self._make_call(encoding, "Init", children=None))
Python
def add( self, cmd: str, state: int, encoding: str = "utf-8", ) -> Tuple[str, Optional[bytes]]: """Create an XML string to advance Coqtop. Args: cmd: string - The command to evaluate edit_id: int - The current edit id ? state_id: CoqStateId - The current state id verbose: bool - Verbose output """ return ( "Add", self._make_call( encoding, "Add", children=((cmd, -1), (self.CoqStateId(state), True)), ), )
Python
def edit_at( self, state: int, _steps: int, encoding: str = "utf-8", ) -> Tuple[str, Optional[bytes]]: """Create an XML string to move Coqtop to a specific location. Args: state_id: CoqStateId - The state id to move to """ return ( "Edit_at", self._make_call(encoding, "Edit_at", children=self.CoqStateId(state)), )
Python
def query( self, query: str, state: int, encoding: str = "utf-8", ) -> Tuple[str, Optional[bytes]]: """Create an XML string to pose a query to Coqtop. Args: query: string - The query to evaluate state_id: CoqStateId - The current state id """ return ( "Query", self._make_call( encoding, "Query", children=(query, self.CoqStateId(state)), ), )
Python
def goal(self, encoding: str = "utf-8") -> Tuple[str, Optional[bytes]]: """Create an XML string to check the current goal state. Args: _: unit - Empty arg """ return ("Goal", self._make_call(encoding, "Goal", children=()))
Python
def _standardize_goal(self, res: Result) -> Result: """Standardize the info returned by 'Goal'. Return: fg: list Goal - The current goals bg: list (list Goal * list Goal) - Unfocused goals shelved: list Goal - Shelved goals given_up: list Goal - Admitted goals """ # pylint: disable=no-self-use if isinstance(res, Ok): opt_goals: XMLInterfaceBase.CoqOption = res.val if opt_goals is not None: goals: XMLInterface85.CoqGoals = opt_goals.val res.val = Goals( [Goal(g.hyp, g.ccl) for g in goals.fg], [ [Goal(g.hyp, g.ccl) for g in pre + post] for pre, post in goals.bg ], [Goal(g.hyp, g.ccl) for g in goals.shelved], [Goal(g.hyp, g.ccl) for g in goals.given_up], ) return res
Python
def status(self, encoding: str = "utf-8") -> Tuple[str, Optional[bytes]]: """Create an XML string to check Coqtop's status. Args: force: bool - Force all pending evaluations """ return ("Status", self._make_call(encoding, "Status", children=True))
Python
def query( self, query: str, state: int, encoding: str = "utf-8", ) -> Tuple[str, Optional[bytes]]: """Create an XML string to pose a query to Coqtop. Args: route_id: CoqRouteId - The route id ? query: string - The query to evaluate state_id: CoqStateId - The current state id """ return ( "Query", self._make_call( encoding, "Query", (self.CoqRouteId(0), (query, self.CoqStateId(state))), ), )
Python
def topfile(filename: str, args: Iterable[str]) -> Tuple[str, ...]: """The command to set the top-level module name.""" return ( ("-topfile", filename) if all(arg not in args for arg in ("-top", "-topfile")) and XMLInterfaceBase.valid_module(filename) else () )
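For instance, assuming valid_module accepts 'Foo.v' (the filenames are hypothetical):

topfile("Foo.v", ["-Q", ".", "Lib"])  # ("-topfile", "Foo.v")
topfile("Foo.v", ["-top", "Bar"])     # (): the caller already set a top module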
Python
def find_coq(coq_path: Optional[str], coq_prog: Optional[str]) -> str: """Find the path to the Coq executable.""" coq_prog = "coqc" if coq_prog is None else coq_prog coq = which(coq_prog, path=coq_path) if coq is None: path = "$PATH" if coq_path is None else coq_path raise FindCoqtopError( f"Could not find {coq_prog} in {path}. Perhaps you need " "to set g:coqtail_coq_path or g:coqtail_coq_prog." ) return coq
Python
def parse_version(version: str) -> Tuple[int, int, int]: """Parse a version string into a 3-tuple.""" match = re.fullmatch(r"(\d+)\.(\d+)(?:(?:\.|pl)(\d+)|\+\w+\d*)?", version) if match is None: raise ValueError(f"Invalid version: {version}") major, minor, patch = match.groups() assert major is not None assert minor is not None patch = "0" if patch is None else patch return (int(major), int(minor), int(patch))
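A few concrete cases covering the dotted, 'pl', and suffixed forms the regex accepts:

assert parse_version("8.10.2") == (8, 10, 2)
assert parse_version("8.4pl6") == (8, 4, 6)
assert parse_version("8.13+beta1") == (8, 13, 0)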
Python
def XMLInterface( coq_path: Optional[str], coq_prog: Optional[str], ) -> Tuple[XMLInterfaceBase, Optional[str]]: """Return the appropriate XMLInterface class for the given version.""" coq = find_coq(coq_path, coq_prog) coq_path = str(Path(coq).parent) str_version = extract_version(coq) version = parse_version(str_version) for minVer, maxVer, xmlInt in XMLInterfaces: if minVer <= version < maxVer: return xmlInt(version, str_version, coq_path, coq_prog), None return ( XMLInterfaceLatest(version, str_version, coq_path, coq_prog), ".".join(map(str, XMLInterfaces[-1][0])), )
Python
def coq() -> Generator[Coqtop, None, None]: """Return a Coqtop for each version.""" ct = Coqtop() if isinstance(ct.start(None, None, "", [])[0], dict): yield ct ct.stop() else: pytest.fail("Failed to create Coqtop instance")
Python
def lines_and_highlights(
    tagged_tokens: Union[str, Iterable[TaggedToken]],
    line_no: int,
) -> Tuple[List[str], List[Highlight]]:
    """Convert a sequence of tagged tokens into lines and highlight positions.

    Note that matchaddpos()'s highlight positions are 1-indexed,
    but this function expects line_no to be 0-indexed.
    """
    # If tagged_tokens turns out to already be a string (which is the case for
    # older versions of Coq), just return it as is, with no highlights.
    if isinstance(tagged_tokens, str):
        return tagged_tokens.splitlines(), []

    lines: List[str] = []
    highlights: List[Highlight] = []
    line_no += 1  # Convert to 1-indexed per matchaddpos()'s spec
    line, index = "", 1

    for token, tag in tagged_tokens:
        # NOTE: Can't use splitlines or else tokens like ' =\n' won't properly
        # begin a new line
        for i, tok in enumerate(token.split("\n")):
            if i > 0:
                # Encountered a newline in token
                lines.append(line)
                line_no += 1
                line, index = "", 1

            tok_len = len(tok.encode("utf-8"))
            if tag is not None:
                highlights.append(Highlight(line_no, index, tok_len, tag))

            line += tok
            index += tok_len

    lines.append(line)
    return lines, highlights
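A hedged example; Highlight is the module's highlight tuple type and the tag string is illustrative:

lines, hls = lines_and_highlights([("1 subgoal\n", None), ("nat", "constr.reference")], 0)
print(lines)  # ['1 subgoal', 'nat']
print(hls)    # [Highlight(2, 1, 3, 'constr.reference')]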
Python
def sync(self, opts: VimOptions) -> Optional[str]: """Check if the buffer has been updated and rewind Coqtop if so.""" err = None newchange = self.changedtick if newchange != self.oldchange: newbuf = self.buffer if self.endpoints != []: eline, ecol = self.endpoints[-1] linediff = _find_diff(self.oldbuf, newbuf, eline + 1) if linediff is not None: try: coldiff = _find_diff( self.oldbuf[linediff], newbuf[linediff], ecol if linediff == eline else None, ) except IndexError: linediff = len(newbuf) - 1 coldiff = len(newbuf[-1]) if coldiff is not None: err = self.rewind_to(linediff, coldiff + 1, opts=opts) self.oldchange = newchange self.oldbuf = newbuf return err
Python
def start( self, coq_path: str, coq_prog: str, args: Iterable[str], opts: VimOptions, ) -> Tuple[Union[CT.VersionInfo, str], str]: """Start a new Coqtop instance.""" try: ver_or_err, stderr = self.coqtop.start( coq_path if coq_path != "" else None, coq_prog if coq_prog != "" else None, opts["filename"], args, timeout=opts["timeout"], ) self.print_stderr(stderr) except (ValueError, CT.CoqtopError) as e: ver_or_err, stderr = str(e), "" return ver_or_err, stderr
Python
def to_line(self, line: int, col: int, opts: VimOptions) -> Optional[str]: """Advance/rewind Coq to the specified position.""" self.sync(opts=opts) # Get the location of the last '.' eline, ecol = self.endpoints[-1] if self.endpoints != [] else (0, 0) # Check if should rewind or advance if (line, col) < (eline, ecol): return self.rewind_to(line, col + 2, opts=opts) unmatched = None buffer = self.buffer while True: try: to_send = _get_message_range(buffer, (eline, ecol)) except UnmatchedError as e: # Only report unmatched if it occurs after the desired position if e.range[0] <= (line, col): unmatched = e break except NoDotError: break if (line, col) < to_send["stop"]: break eline, ecol = to_send["stop"] ecol += 1 self.send_queue.append(to_send) failed_at, err = self.send_until_fail(buffer, opts=opts) if unmatched is not None and failed_at is None: # Only report unmatched if no other errors occurred first self.set_info(str(unmatched), reset=False) self.error_at = unmatched.range self.refresh(goals=False, opts=opts) return err
Python
def query( self, args: Iterable[str], opts: VimOptions, silent: bool = False, ) -> None: """Forward Coq query to Coqtop interface.""" success, msg, stderr = self.do_query(" ".join(args), opts=opts) if not success or not silent: self.set_info(msg, reset=True) self.print_stderr(stderr) self.refresh(goals=False, opts=opts)
Python
def endpoint(self, opts: VimOptions) -> Tuple[int, int]: """Return the end of the Coq checked section.""" # pylint: disable=unused-argument # opts is always passed by handle(). # Get the location of the last '.' line, col = self.endpoints[-1] if self.endpoints != [] else (0, 1) return (line + 1, col)
Python
def errorpoint(self, opts: VimOptions) -> Optional[Tuple[int, int]]: """Return the start of the error region.""" # pylint: disable=unused-argument # opts is always passed by handle(). if self.error_at is not None: line, col = self.error_at[0] return (line + 1, col + 1) return None
Python
def send_until_fail( self, buffer: Sequence[bytes], opts: VimOptions, ) -> Tuple[Optional[Tuple[int, int]], Optional[str]]: """Send all sentences in 'send_queue' until an error is encountered.""" scroll = len(self.send_queue) > 1 failed_at = None no_msgs = True self.error_at = None while self.send_queue: self.refresh(goals=False, force=False, scroll=scroll, opts=opts) to_send = self.send_queue.popleft() message = _between(buffer, to_send["start"], to_send["stop"]) no_comments, _ = _strip_comments(message) try: success, msg, err_loc, stderr = self.coqtop.dispatch( message.decode("utf-8"), no_comments.decode("utf-8"), encoding=opts["encoding"], timeout=opts["timeout"], ) except CT.CoqtopError as e: return None, str(e) if msg != "": self.set_info(msg, reset=no_msgs) no_msgs = False self.print_stderr(stderr) no_msgs = no_msgs and stderr == "" if success: line, col = to_send["stop"] self.endpoints.append((line, col + 1)) else: self.send_queue.clear() failed_at = to_send["start"] # Highlight error location assert err_loc is not None loc_s, loc_e = err_loc if loc_s == loc_e == -1: self.error_at = (to_send["start"], to_send["stop"]) else: line, col = to_send["start"] sline, scol = _pos_from_offset(col, message, loc_s) eline, ecol = _pos_from_offset(col, message, loc_e) self.error_at = ((line + sline, scol), (line + eline, ecol)) # Clear info if no messages if no_msgs: self.set_info("", reset=True) self.refresh(opts=opts, scroll=scroll) return failed_at, None
Python
def rewind_to(self, line: int, col: int, opts: VimOptions) -> Optional[str]: """Rewind to a specific location.""" # Count the number of endpoints after the specified location steps_too_far = sum(pos >= (line, col) for pos in self.endpoints) return self.rewind(steps_too_far, opts=opts)
Python
def do_query(self, query: str, opts: VimOptions) -> Tuple[bool, str, str]: """Execute a query and return the reply.""" # Ensure that the query ends in '.' if not query.endswith("."): query += "." try: success, msg, _, stderr = self.coqtop.dispatch( query, in_script=False, encoding=opts["encoding"], timeout=opts["timeout"], ) except CT.CoqtopError as e: return False, str(e), "" return success, msg, stderr
Python
def find_lib(self, lib: str, opts: VimOptions) -> Optional[str]:
    """Find the path to the .v file corresponding to the library 'lib'."""
    success, locate, _ = self.do_query(f"Locate Library {lib}.", opts=opts)
    if not success:
        return None

    path = re.search(r"file\s+(.*)\.vo", locate)
    return path.group(1) if path is not None else None
def find_qual( self, qual_tgt: str, tgt_type: str, opts: VimOptions, ) -> Optional[Tuple[str, str]]: """Find the Coq file containing the qualified name 'qual_tgt'.""" qual_comps = qual_tgt.split(".") base_name = qual_comps[-1] # If 'qual_comps' starts with Top or 'tgt_type' is Variable then # 'qual_tgt' is defined in the current file if qual_comps[0] == "Top" or tgt_type == "Variable": return opts["filename"], base_name # Find the longest prefix of 'qual_tgt' that matches a logical path in # 'path_map' for end in range(-1, -len(qual_comps), -1): path = self.find_lib(".".join(qual_comps[:end]), opts=opts) if path is not None: return path + ".v", base_name return None
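# The prefix search in 'find_qual' walks from the longest proper prefix of
# the qualified name down to the shortest: for 'Coq.Lists.List.map' it tries
# 'Coq.Lists.List', then 'Coq.Lists', then 'Coq'. A runnable check of the
# slicing (the example name is illustrative):
qual_comps = "Coq.Lists.List.map".split(".")
prefixes = [".".join(qual_comps[:end]) for end in range(-1, -len(qual_comps), -1)]
assert prefixes == ["Coq.Lists.List", "Coq.Lists", "Coq"]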
def find_def( self, target: str, opts: VimOptions, ) -> Optional[Tuple[str, List[str]]]: """Create patterns to jump to the definition of 'target'.""" # Get the fully qualified version of 'target' qual = self.qual_name(target, opts=opts) if qual is None: return None qual_tgt, tgt_type = qual # Find what file the definition is in and what type it is tgt = self.find_qual(qual_tgt, tgt_type, opts=opts) if tgt is None: return None tgt_file, tgt_name = tgt return tgt_file, get_searches(tgt_type, tgt_name)
def next_bullet(self, opts: VimOptions) -> Optional[str]: """Check the bullet expected for the next subgoal.""" success, show, _ = self.do_query("Show.", opts=opts) if not success: return None bmatch = re.search(r'(?:bullet |unfocusing with ")([-+*}]+)', show) return bmatch.group(1) if bmatch is not None else None
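# A runnable sketch of how the 'next_bullet' regex reads the output of
# 'Show.' (the sample strings are illustrative, not verbatim Coq output):
import re
pat = r'(?:bullet |unfocusing with ")([-+*}]+)'
m1 = re.search(pat, "Focus next goal with bullet -.")
m2 = re.search(pat, 'Try unfocusing with "}".')
assert m1 is not None and m1.group(1) == "-"
assert m2 is not None and m2.group(1) == "}"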
def highlights(self) -> Dict[str, Optional[str]]: """Vim match patterns for highlighting.""" matches: Dict[str, Optional[str]] = { "coqtail_checked": None, "coqtail_sent": None, "coqtail_error": None, } if self.endpoints != []: line, col = self.endpoints[-1] matches["coqtail_checked"] = matcher[: line + 1, :col] if self.send_queue: sline, scol = self.endpoints[-1] if self.endpoints != [] else (0, -1) eline, ecol = self.send_queue[-1]["stop"] matches["coqtail_sent"] = matcher[sline : eline + 1, scol:ecol] if self.error_at is not None: (sline, scol), (eline, ecol) = self.error_at matches["coqtail_error"] = matcher[sline : eline + 1, scol:ecol] return matches
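# 'matcher' is defined elsewhere in Coqtail and builds Vim match patterns
# from a pair of slices: matcher[lines, cols]. The stand-in below only
# illustrates that calling convention; it is not the real implementation.
class _MatcherSketch:
    def __getitem__(self, key):
        lines, cols = key
        return (lines.start, lines.stop, cols.start, cols.stop)

assert _MatcherSketch()[0:3, 2:7] == (0, 3, 2, 7)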
def toggle_debug(self, opts: VimOptions) -> None: """Enable or disable logging of debug messages.""" log = self.coqtop.toggle_debug() if log is None: msg = "Debugging disabled." self.log = "" else: msg = f"Debugging enabled. Log: {log}." self.log = log self.set_info(msg, reset=True) self.refresh(goals=False, opts=opts)
def buffer(self) -> List[bytes]: """The contents of this buffer.""" lines: List[str] = self.handler.vimcall( "getbufline", True, self.handler.bnum, 1, "$", ) return [line.encode("utf-8") for line in lines]
def parse_msgs(self) -> None: """Parse messages sent over a Vim channel.""" while not self.closed: try: msg = self.rfile.readline() msg_id, data = json.loads(msg) except (json.JSONDecodeError, ConnectionError): # Check if channel closed self.closed = True break if msg_id >= 0: # request from Vim bnum, func, args = data if func == "interrupt": self.interrupt() else: self.reqs.put((msg_id, bnum, func, args)) else: # response to a `vimeval` request # NOTE: Accessing self.resps concurrently creates a race # condition where defaultdict could construct a Queue twice with self.resp_lk: self.resps[-msg_id].put((msg_id, data))
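# The channel protocol is newline-delimited JSON: each message is a
# two-element array of [msg_id, payload]. Positive ids are requests from
# Vim; negative ids carry responses to 'vimeval'. A runnable decoding
# sketch (the payload values are hypothetical):
import json
msg_id, data = json.loads('[42, [1, "step", {"steps": 1}]]')
assert msg_id == 42
assert data == [1, "step", {"steps": 1}]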
def handle(self) -> None: """Forward requests from Vim to the appropriate Coqtail function.""" self.coq = Coqtail(self) self.closed = False # Requests from Vim (`s:call`) self.reqs: ReqQueue = Queue() # Responses to `vimeval` requests. # The key is the id of Vim's request that was being handled at the # moment of `vimeval` call. self.resps: DefaultDict[int, ResQueue] = ddict(Queue) self.resp_lk = threading.Lock() threading.Thread(target=self.parse_msgs, daemon=True).start() while not self.closed: try: self.working = False # pylint: disable=unpacking-non-sequence self.msg_id, self.bnum, func, args = self.get_msg() self.refresh_time = 0.0 self.working = True except EOFError: break handlers: Mapping[str, Callable[..., object]] = { "start": self.coq.start, "stop": self.coq.stop, "step": self.coq.step, "rewind": self.coq.rewind, "to_line": self.coq.to_line, "to_top": self.coq.to_top, "query": self.coq.query, "endpoint": self.coq.endpoint, "errorpoint": self.coq.errorpoint, "toggle_debug": self.coq.toggle_debug, "splash": self.coq.splash, "sync": self.coq.sync, "find_def": self.coq.find_def, "find_lib": self.coq.find_lib, "refresh": self.coq.refresh, } handler = handlers.get(func, None) try: ret = handler(**args) if handler is not None else None msg = [self.msg_id, {"buf": self.bnum, "ret": ret}] self.wfile.write(_to_jsonl(msg)) except (EOFError, ConnectionError): break try: del self.resps[self.msg_id] except KeyError: pass if func == "stop": break
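# Dispatch in 'handle' is a plain dict lookup, so unknown function names
# fall through to a None handler and a null 'ret'. A runnable sketch with
# stand-in handlers:
handlers = {"step": lambda steps: f"stepped {steps}"}
handler = handlers.get("step", None)
assert (handler(steps=2) if handler is not None else None) == "stepped 2"
assert handlers.get("no_such_func", None) is None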
def vimvar(self, var: str, val: Optional[Any] = None) -> Any: """Get or set the value of a Vim variable.""" return ( self.vimcall("getbufvar", True, self.bnum, var) if val is None else self.vimcall("setbufvar", True, self.bnum, var, val) )
def refresh( self, goals: bool = True, force: bool = True, scroll: bool = False, ) -> None: """Refresh the highlighting and auxiliary panels.""" # pylint: disable=attribute-defined-outside-init # refresh_time is defined in handle() when the connection is opened. if not force: cur_time = time.time() force = cur_time - self.refresh_time > self.refresh_rate self.refresh_time = cur_time if force: self.vimcall( "coqtail#panels#refresh", self.sync, self.bnum, self.coq.highlights, self.coq.panels(goals), scroll, )
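# The throttle in 'refresh' forces a redraw only when more than
# 'refresh_rate' seconds have elapsed since the last one. A runnable sketch
# of that check, with hypothetical values:
import time
refresh_rate = 0.5
refresh_time = time.time() - 1.0  # pretend the last refresh was 1s ago
assert time.time() - refresh_time > refresh_rate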
def interrupt(self) -> None: """Interrupt Coqtop and clear the request queue.""" if self.working: self.working = False while not self.reqs.empty(): try: msg_id, bnum, _, _ = self.reqs.get_nowait() msg = [msg_id, {"buf": bnum, "ret": None}] self.wfile.write(_to_jsonl(msg)) except Empty: break self.coq.coqtop.interrupt()
def send( handle: int, session: Optional[int], expr: str, reply_id: Optional[int] = None, returns: bool = True, ) -> bool: """Send a command request or reply on a channel.""" try: ch = ChannelManager.channels[handle] except KeyError: return False if reply_id is None and session is not None: if ChannelManager.sessions.get(handle, None) == session: return True ChannelManager.sessions[handle] = session msg_id = reply_id if reply_id is not None else next(ChannelManager.msg_id) ch.sendall(_to_jsonl([msg_id, expr])) if returns: ChannelManager.results[handle] = ChannelManager.pool.submit( ChannelManager._recv, ChannelManager.channels[handle], ) return True
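# '_to_jsonl' is defined elsewhere in this module; the version below is a
# plausible reconstruction, assuming the channel speaks UTF-8-encoded,
# newline-delimited JSON:
import json
def _to_jsonl_sketch(obj: object) -> bytes:
    return (json.dumps(obj) + "\n").encode("utf-8")

assert _to_jsonl_sketch([1, "ok"]) == b'[1, "ok"]\n'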
def poll(handle: int) -> Optional[str]: """Wait for a response on a channel.""" try: return ChannelManager.results[handle].result(timeout=0) except futures.TimeoutError: return None
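# 'poll' is non-blocking: Future.result(timeout=0) raises TimeoutError while
# the worker is still running. A runnable illustration with an ordinary
# future (the sleep duration is arbitrary):
from concurrent import futures
import time
with futures.ThreadPoolExecutor() as pool:
    fut = pool.submit(time.sleep, 0.2)
    try:
        fut.result(timeout=0)
        pending = False
    except futures.TimeoutError:
        pending = True
assert pending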