def _load_class(class_path, default):
    """ Loads the class from the class_path string """
    if class_path is None:
        return default

    component = class_path.rsplit('.', 1)
    result_processor = getattr(
        importlib.import_module(component[0]),
        component[1],
        default
    ) if len(component) > 1 else default

    return result_processor
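A minimal usage sketch of the helper above; `DefaultProcessor` is a hypothetical stand-in default, and the helper assumes `importlib` is imported at module level:

# Hypothetical illustration of _load_class.
class DefaultProcessor(object):
    pass

# "json.JSONEncoder" resolves via importlib to the stdlib JSONEncoder class;
# a None path (or a missing attribute) falls back to the supplied default.
assert _load_class("json.JSONEncoder", DefaultProcessor).__name__ == "JSONEncoder"
assert _load_class(None, DefaultProcessor) is DefaultProcessor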
def _is_iterable(item):
    """ Checks if an item is iterable (list, tuple, generator), but not string """
    # collections.Iterable was removed in Python 3.10; collections.abc.Iterable
    # is the forward-compatible spelling.
    return isinstance(item, collections.abc.Iterable) and not isinstance(item, six.string_types)
def _process_pagination_values(request):
    """ process pagination requests from request parameter """
    size = 20
    page = 0
    from_ = 0
    if "page_size" in request.POST:
        size = int(request.POST["page_size"])
        max_page_size = getattr(settings, "SEARCH_MAX_PAGE_SIZE", 100)
        # The parens below are superfluous, but make it much clearer to the reader what is going on
        if not (0 < size <= max_page_size):  # pylint: disable=superfluous-parens
            raise ValueError(_('Invalid page size of {page_size}').format(page_size=size))

        if "page_index" in request.POST:
            page = int(request.POST["page_index"])
            from_ = page * size
    return size, from_, page
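The `from_` offset is just `page * size`, so with hypothetical request values of `page_size=10` and `page_index=3` the engine skips the first 30 results:

# Illustrative arithmetic only; values stand in for request.POST entries.
size = 10           # request.POST["page_size"]
page = 3            # request.POST["page_index"] (zero-indexed)
from_ = page * size
assert from_ == 30  # results 0-29 are skipped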
def _process_field_values(request):
    """ Create separate dictionary of supported filter values provided """
    return {
        field_key: request.POST[field_key]
        for field_key in request.POST
        if field_key in course_discovery_filter_fields()
    }
def do_search(request, course_id=None):
    """
    Search view for http requests

    Args:
        request (required) - django request object
        course_id (optional) - course_id within which to restrict search

    Returns:
        http json response with the following fields
            "took" - how many seconds the operation took
            "total" - how many results were found
            "max_score" - maximum score from these results
            "results" - json array of result documents
        or
            "error" - displayable information about an error that occurred on the server

    POST Params:
        "search_string" (required) - text upon which to search
        "page_size" (optional) - how many results to return per page (defaults to 20, with maximum cutoff at 100)
        "page_index" (optional) - for which page (zero-indexed) to include results (defaults to 0)
    """
    # Setup search environment
    SearchInitializer.set_search_enviroment(request=request, course_id=course_id)

    results = {
        "error": _("Nothing to search")
    }
    status_code = 500

    search_term = request.POST.get("search_string", None)

    try:
        if not search_term:
            raise ValueError(_('No search term provided for search'))

        size, from_, page = _process_pagination_values(request)

        # Analytics - log search request
        track.emit(
            'edx.course.search.initiated',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
            }
        )

        results = perform_search(
            search_term,
            user=request.user,
            size=size,
            from_=from_,
            course_id=course_id
        )

        status_code = 200

        # Analytics - log search results before sending to browser
        track.emit(
            'edx.course.search.results_displayed',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
                "results_count": results["total"],
            }
        )

    except ValueError as invalid_err:
        results = {
            "error": six.text_type(invalid_err)
        }
        log.debug(six.text_type(invalid_err))

    except QueryParseError:
        results = {
            "error": _('Your query seems malformed. Check for unmatched quotes.')
        }

    # Allow for broad exceptions here - this is an entry point from external reference
    except Exception as err:  # pylint: disable=broad-except
        results = {
            "error": _('An error occurred when searching for "{search_string}"').format(search_string=search_term)
        }
        log.exception(
            'Search view exception when searching for %s for user %s: %r',
            search_term,
            request.user.id,
            err
        )

    return JsonResponse(results, status=status_code)
def course_discovery(request):
    """
    Search for courses

    Args:
        request (required) - django request object

    Returns:
        http json response with the following fields
            "took" - how many seconds the operation took
            "total" - how many results were found
            "max_score" - maximum score from these results
            "results" - json array of result documents
        or
            "error" - displayable information about an error that occurred on the server

    POST Params:
        "search_string" (optional) - text with which to search for courses
        "page_size" (optional) - how many results to return per page (defaults to 20, with maximum cutoff at 100)
        "page_index" (optional) - for which page (zero-indexed) to include results (defaults to 0)
    """
    results = {
        "error": _("Nothing to search")
    }
    status_code = 500

    search_term = request.POST.get("search_string", None)

    try:
        size, from_, page = _process_pagination_values(request)
        field_dictionary = _process_field_values(request)

        # Analytics - log search request
        track.emit(
            'edx.course_discovery.search.initiated',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
            }
        )

        results = course_discovery_search(
            search_term=search_term,
            size=size,
            from_=from_,
            field_dictionary=field_dictionary,
        )

        # Analytics - log search results before sending to browser
        track.emit(
            'edx.course_discovery.search.results_displayed',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
                "results_count": results["total"],
            }
        )

        status_code = 200

    except ValueError as invalid_err:
        results = {
            "error": six.text_type(invalid_err)
        }
        log.debug(six.text_type(invalid_err))

    except QueryParseError:
        results = {
            "error": _('Your query seems malformed. Check for unmatched quotes.')
        }

    # Allow for broad exceptions here - this is an entry point from external reference
    except Exception as err:  # pylint: disable=broad-except
        results = {
            "error": _('An error occurred when searching for "{search_string}"').format(search_string=search_term)
        }
        log.exception(
            'Search view exception when searching for %s for user %s: %r',
            search_term,
            request.user.id,
            err
        )

    return JsonResponse(results, status=status_code)
def _translate_hits(es_response):
    """ Provide resultset in our desired format from elasticsearch results """

    def translate_result(result):
        """ Any conversion from ES result syntax into our search engine syntax """
        translated_result = copy.copy(result)
        data = translated_result.pop("_source")

        translated_result.update({
            "data": data,
            "score": translated_result["_score"]
        })

        return translated_result

    def translate_facet(result):
        """ Any conversion from ES facet syntax into our search engine syntax """
        terms = {term["term"]: term["count"] for term in result["terms"]}
        return {
            "terms": terms,
            "total": result["total"],
            "other": result["other"],
        }

    results = [translate_result(hit) for hit in es_response["hits"]["hits"]]
    response = {
        "took": es_response["took"],
        "total": es_response["hits"]["total"],
        "max_score": es_response["hits"]["max_score"],
        "results": results,
    }

    if "facets" in es_response:
        response["facets"] = {facet: translate_facet(es_response["facets"][facet]) for facet in es_response["facets"]}

    return response
def _get_filter_field(field_name, field_value):
    """
    Return field to apply into filter: a ValueRange maps to a range query,
    an iterable to a terms match, and anything else to a single term match
    """
    filter_field = None
    if isinstance(field_value, ValueRange):
        range_values = {}
        if field_value.lower:
            range_values.update({"gte": field_value.lower_string})
        if field_value.upper:
            range_values.update({"lte": field_value.upper_string})
        filter_field = {
            "range": {
                field_name: range_values
            }
        }
    elif _is_iterable(field_value):
        filter_field = {
            "terms": {
                field_name: field_value
            }
        }
    else:
        filter_field = {
            "term": {
                field_name: field_value
            }
        }
    return filter_field
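For illustration, the three branches above produce filter fragments of the following shapes (field names and values are hypothetical):

_get_filter_field("org", ["MITx", "HarvardX"])
# -> {"terms": {"org": ["MITx", "HarvardX"]}}

_get_filter_field("language", "en")
# -> {"term": {"language": "en"}}

# A ValueRange with only a lower bound yields an open-ended range:
# _get_filter_field("start", ValueRange(lower=some_date))
# -> {"range": {"start": {"gte": "..."}}}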
def _process_field_queries(field_dictionary):
    """
    We have a field_dictionary - we want to match the values for an elasticsearch "match" query
    This is only potentially useful when trying to tune certain search operations
    """
    def field_item(field):
        """ format field match as "match" item for elasticsearch query """
        return {
            "match": {
                field: field_dictionary[field]
            }
        }

    return [field_item(field) for field in field_dictionary]
def _process_filters(filter_dictionary):
    """
    We have a filter_dictionary - this means that if the field is included and matches,
    then we can include, OR if the field is undefined, then we assume it is safe to include
    """
    def filter_item(field):
        """ format elasticsearch filter to pass if value matches OR field is not included """
        if filter_dictionary[field] is not None:
            return {
                "or": [
                    _get_filter_field(field, filter_dictionary[field]),
                    {
                        "missing": {
                            "field": field
                        }
                    }
                ]
            }

        return {
            "missing": {
                "field": field
            }
        }

    return [filter_item(field) for field in filter_dictionary]
def _process_exclude_dictionary(exclude_dictionary):
    """
    Based on values in the exclude_dictionary generate a list of term queries that
    will filter out unwanted results.
    """
    # not_properties will hold the generated term queries.
    not_properties = []
    for exclude_property in exclude_dictionary:
        exclude_values = exclude_dictionary[exclude_property]
        if not isinstance(exclude_values, list):
            exclude_values = [exclude_values]
        not_properties.extend([{"term": {exclude_property: exclude_value}} for exclude_value in exclude_values])

    # Returning a query segment with an empty list freaks out ElasticSearch,
    # so just return an empty segment.
    if not not_properties:
        return {}

    return {
        "not": {
            "filter": {
                "or": not_properties
            }
        }
    }
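A worked example of the segment this builds, with hypothetical ids and org values (term order follows dict insertion order on Python 3.7+):

segment = _process_exclude_dictionary({"_id": ["block-1", "block-2"], "org": "TestX"})
# segment == {
#     "not": {"filter": {"or": [
#         {"term": {"_id": "block-1"}},
#         {"term": {"_id": "block-2"}},
#         {"term": {"org": "TestX"}},
#     ]}}
# }
assert _process_exclude_dictionary({}) == {}  # empty input yields an empty segment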
def _process_facet_terms(facet_terms):
    """ We have a list of terms with which we return facets """
    elastic_facets = {}
    for facet in facet_terms:
        facet_term = {"field": facet}
        if facet_terms[facet]:
            for facet_option in facet_terms[facet]:
                facet_term[facet_option] = facet_terms[facet][facet_option]

        elastic_facets[facet] = {
            "terms": facet_term
        }

    return elastic_facets
def get_mappings(cls, index_name, doc_type):
    """ fetch mapped-items structure from cache """
    return cache.get(cls.get_cache_item_name(index_name, doc_type), {})
def set_mappings(cls, index_name, doc_type, mappings):
    """ set new mapped-items structure into cache """
    cache.set(cls.get_cache_item_name(index_name, doc_type), mappings)
def log_indexing_error(cls, indexing_errors):
    """ Logs indexing errors and raises a general ElasticSearch Exception """
    indexing_errors_log = []
    for indexing_error in indexing_errors:
        indexing_errors_log.append(str(indexing_error))
    raise exceptions.ElasticsearchException(', '.join(indexing_errors_log))
def _get_mappings(self, doc_type):
    """
    Interfaces with the elasticsearch mappings for the index
    prevents multiple loading of the same mappings from ES when called more than once

    Mappings format in elasticsearch is as follows:
    {
       "doc_type": {
          "properties": {
             "nested_property": {
                "properties": {
                   "an_analysed_property": {
                      "type": "string"
                   },
                   "another_analysed_property": {
                      "type": "string"
                   }
                }
             },
             "a_not_analysed_property": {
                "type": "string",
                "index": "not_analyzed"
             },
             "a_date_property": {
                "type": "date"
             }
          }
       }
    }

    We cache the properties of each doc_type, if they are not available, we'll load them again from Elasticsearch
    """
    # Try loading the mapping from the cache.
    mapping = ElasticSearchEngine.get_mappings(self.index_name, doc_type)

    # Fall back to Elasticsearch
    if not mapping:
        mapping = self._es.indices.get_mapping(
            index=self.index_name,
            doc_type=doc_type,
        ).get(self.index_name, {}).get('mappings', {}).get(doc_type, {})

        # Cache the mapping, if one was retrieved
        if mapping:
            ElasticSearchEngine.set_mappings(
                self.index_name,
                doc_type,
                mapping
            )
    return mapping
def _check_mappings(self, doc_type, body):
    """
    We want anything textually searchable to be indexed as analysed content, while
    the other fields are designed to be filters and only require an exact match. So,
    we set up the mappings for those fields as "not_analyzed" - this allows our
    filters to work faster because they only have to work off exact matches.
    """
    # Make fields other than content be indexed as unanalyzed terms - content
    # contains fields that are to be analyzed
    exclude_fields = ["content"]
    field_properties = getattr(settings, "ELASTIC_FIELD_MAPPINGS", {})

    def field_property(field_name, field_value):
        """
        Prepares field as property syntax for providing correct mapping desired for field

        The mappings format in elasticsearch is the same as shown in _get_mappings
        above; we can only add new properties, but the format is the same.
        """
        prop_val = None
        if field_name in field_properties:
            prop_val = field_properties[field_name]
        elif isinstance(field_value, dict):
            props = {fn: field_property(fn, field_value[fn]) for fn in field_value}
            prop_val = {"properties": props}
        else:
            prop_val = {
                "type": "string",
                "index": "not_analyzed",
            }
        return prop_val

    new_properties = {
        field: field_property(field, value)
        for field, value in body.items()
        if (field not in exclude_fields) and (field not in self._get_mappings(doc_type).get('properties', {}))
    }

    if new_properties:
        self._es.indices.put_mapping(
            index=self.index_name,
            doc_type=doc_type,
            body={
                doc_type: {
                    "properties": new_properties,
                }
            }
        )
        self._clear_mapping(doc_type)
def index(self, doc_type, sources, **kwargs):
    """
    Implements call to add documents to the ES index
    Note the call to _check_mappings which will setup fields with the desired mappings
    """
    try:
        actions = []
        for source in sources:
            self._check_mappings(doc_type, source)
            id_ = source['id'] if 'id' in source else None
            log.debug("indexing %s object with id %s", doc_type, id_)
            action = {
                "_index": self.index_name,
                "_type": doc_type,
                "_id": id_,
                "_source": source
            }
            actions.append(action)

        # bulk() returns a tuple with summary information:
        # number of successfully executed actions, and number of errors if stats_only is set to True.
        _, indexing_errors = bulk(
            self._es,
            actions,
            **kwargs
        )

        if indexing_errors:
            ElasticSearchEngine.log_indexing_error(indexing_errors)

    # Broad exception handler to protect around bulk call
    except Exception as ex:
        # log information and re-raise
        log.exception("error while indexing - %s", str(ex))
        raise
def remove(self, doc_type, doc_ids, **kwargs):
    """ Implements call to remove the documents from the index """
    try:
        # ignore is flagged as an unexpected-keyword-arg; ES python client documents that it can be used
        # pylint: disable=unexpected-keyword-arg
        actions = []
        for doc_id in doc_ids:
            log.debug("Removing document of type %s and index %s", doc_type, doc_id)
            action = {
                '_op_type': 'delete',
                "_index": self.index_name,
                "_type": doc_type,
                "_id": doc_id
            }
            actions.append(action)
        bulk(self._es, actions, **kwargs)
    except BulkIndexError as ex:
        valid_errors = [error for error in ex.errors if error['delete']['status'] != 404]

        if valid_errors:
            log.exception("An error occurred while removing documents from the index.")
            raise
def search(self,
           query_string=None,
           field_dictionary=None,
           filter_dictionary=None,
           exclude_dictionary=None,
           facet_terms=None,
           exclude_ids=None,
           use_field_match=False,
           **kwargs):  # pylint: disable=too-many-arguments, too-many-locals, too-many-branches, arguments-differ
    """
    Implements call to search the index for the desired content.

    Args:
        query_string (str): the string of values upon which to search within the
            content of the objects within the index

        field_dictionary (dict): dictionary of values which _must_ exist and
            _must_ match in order for the documents to be included in the results

        filter_dictionary (dict): dictionary of values which _must_ match if the
            field exists in order for the documents to be included in the results;
            documents for which the field does not exist may be included in the
            results if they are not otherwise filtered out

        exclude_dictionary(dict): dictionary of values all of which must not match
            in order for the documents to be included in the results; documents
            which have any of these fields and for which the value matches one of
            the specified values shall be filtered out of the result set

        facet_terms (dict): dictionary of terms to include within search facets
            list - key is the term desired to facet upon, and the value is a
            dictionary of extended information to include. Supported right now is
            a size specification for a cap upon how many facet results to return
            (can be an empty dictionary to use default size for underlying engine):

            e.g.
            {
                "org": {"size": 10},  # only show top 10 organizations
                "modes": {}
            }

        use_field_match (bool): flag to indicate whether to use elastic filtering
            or elastic matching for field matches - this is nothing but a
            potential performance tune for certain queries

        (deprecated) exclude_ids (list): list of id values to exclude from the
            results - useful for finding matches that aren't "one of these"

    Returns:
        dict object with results in the desired format
        {
            "took": 3,
            "total": 4,
            "max_score": 2.0123,
            "results": [
                {
                    "score": 2.0123,
                    "data": {
                        ...
                    }
                },
                {
                    "score": 0.0983,
                    "data": {
                        ...
                    }
                }
            ],
            "facets": {
                "org": {
                    "total": total_count,
                    "other": 1,
                    "terms": {
                        "MITx": 25,
                        "HarvardX": 18
                    }
                },
                "modes": {
                    "total": modes_count,
                    "other": 15,
                    "terms": {
                        "honor": 58,
                        "verified": 44,
                    }
                }
            }
        }

    Raises:
        ElasticsearchException when there is a problem with the response from elasticsearch

    Example usage:
        .search(
            "find the words within this string",
            {
                "must_have_field": "must_have_value for must_have_field"
            },
            {}
        )
    """
    log.debug("searching index with %s", query_string)

    elastic_queries = []
    elastic_filters = []

    # We have a query string, search all fields for matching text within the "content" node
    if query_string:
        if six.PY2:
            query_string = query_string.encode('utf-8').translate(None, RESERVED_CHARACTERS)
        else:
            query_string = query_string.translate(query_string.maketrans('', '', RESERVED_CHARACTERS))

        elastic_queries.append({
            "query_string": {
                "fields": ["content.*"],
                "query": query_string
            }
        })

    if field_dictionary:
        if use_field_match:
            elastic_queries.extend(_process_field_queries(field_dictionary))
        else:
            elastic_filters.extend(_process_field_filters(field_dictionary))

    if filter_dictionary:
        elastic_filters.extend(_process_filters(filter_dictionary))

    # Support deprecated argument of exclude_ids
    if exclude_ids:
        if not exclude_dictionary:
            exclude_dictionary = {}
        if "_id" not in exclude_dictionary:
            exclude_dictionary["_id"] = []
        exclude_dictionary["_id"].extend(exclude_ids)

    if exclude_dictionary:
        elastic_filters.append(_process_exclude_dictionary(exclude_dictionary))

    query_segment = {
        "match_all": {}
    }
    if elastic_queries:
        query_segment = {
            "bool": {
                "must": elastic_queries
            }
        }

    query = query_segment
    if elastic_filters:
        filter_segment = {
            "bool": {
                "must": elastic_filters
            }
        }
        query = {
            "filtered": {
                "query": query_segment,
                "filter": filter_segment,
            }
        }

    body = {"query": query}
    if facet_terms:
        facet_query = _process_facet_terms(facet_terms)
        if facet_query:
            body["facets"] = facet_query

    try:
        es_response = self._es.search(
            index=self.index_name,
            body=body,
            **kwargs
        )
    except exceptions.ElasticsearchException as ex:
        message = six.text_type(ex)
        if 'QueryParsingException' in message:
            log.exception("Malformed search query: %s", message)
            raise QueryParseError('Malformed search query.')
        else:
            # log information and re-raise
            log.exception("error while searching index - %s", str(message))
            raise

    return _translate_hits(es_response)
def get_search_engine(index=None):
    """ Returns the desired implementor (defined in settings) """
    search_engine_class = _load_class(getattr(settings, "SEARCH_ENGINE", None), None)
    return search_engine_class(index=index) if search_engine_class else None
def perform_search(
        search_term,
        user=None,
        size=10,
        from_=0,
        course_id=None):
    """ Call the search engine with the appropriate parameters """
    # field_, filter_ and exclude_dictionary(s) can be overridden by calling application
    # field_dictionary includes course if course_id provided
    (field_dictionary, filter_dictionary, exclude_dictionary) = SearchFilterGenerator.generate_field_filters(
        user=user,
        course_id=course_id
    )

    searcher = SearchEngine.get_search_engine(getattr(settings, "COURSEWARE_INDEX_NAME", "courseware_index"))
    if not searcher:
        raise NoSearchEngineError("No search engine specified in settings.SEARCH_ENGINE")

    results = searcher.search_string(
        search_term,
        field_dictionary=field_dictionary,
        filter_dictionary=filter_dictionary,
        exclude_dictionary=exclude_dictionary,
        size=size,
        from_=from_,
        doc_type="courseware_content",
    )

    # post-process the result
    for result in results["results"]:
        result["data"] = SearchResultProcessor.process_result(result["data"], search_term, user)

    results["access_denied_count"] = len([r for r in results["results"] if r["data"] is None])
    results["results"] = [r for r in results["results"] if r["data"] is not None]

    return results
def course_discovery_search(search_term=None, size=20, from_=0, field_dictionary=None):
    """ Course Discovery activities against the search engine index of course details """
    # We'll ignore the course-enrollment information in field and filter
    # dictionary, and use our own logic upon enrollment dates for these
    use_search_fields = ["org"]
    (search_fields, _, exclude_dictionary) = SearchFilterGenerator.generate_field_filters()
    use_field_dictionary = {}
    use_field_dictionary.update({field: search_fields[field] for field in search_fields if field in use_search_fields})
    if field_dictionary:
        use_field_dictionary.update(field_dictionary)
    if not getattr(settings, "SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING", False):
        use_field_dictionary["enrollment_start"] = DateRange(None, datetime.utcnow())

    searcher = SearchEngine.get_search_engine(getattr(settings, "COURSEWARE_INDEX_NAME", "courseware_index"))
    if not searcher:
        raise NoSearchEngineError("No search engine specified in settings.SEARCH_ENGINE")

    results = searcher.search(
        query_string=search_term,
        doc_type="course_info",
        size=size,
        from_=from_,
        # only show when enrollment start IS provided and is before now
        field_dictionary=use_field_dictionary,
        # show if no enrollment end is provided and has not yet been reached
        filter_dictionary={"enrollment_end": DateRange(datetime.utcnow(), None)},
        exclude_dictionary=exclude_dictionary,
        facet_terms=course_discovery_facets(),
    )

    return results
def strings_in_dictionary(dictionary):
    """ Used by default implementation for finding excerpt """
    strings = [value for value in six.itervalues(dictionary) if not isinstance(value, dict)]
    for child_dict in [dv for dv in six.itervalues(dictionary) if isinstance(dv, dict)]:
        strings.extend(SearchResultProcessor.strings_in_dictionary(child_dict))
    return strings
def find_matches(strings, words, length_hoped):
    """ Used by default property excerpt """
    lower_words = [w.lower() for w in words]

    def has_match(string):
        """ Do any of the words match within the string """
        lower_string = string.lower()
        for test_word in lower_words:
            if test_word in lower_string:
                return True
        return False

    shortened_strings = [textwrap.wrap(s) for s in strings]
    short_string_list = list(chain.from_iterable(shortened_strings))
    matches = [ms for ms in short_string_list if has_match(ms)]

    cumulative_len = 0
    break_at = None
    for idx, match in enumerate(matches):
        cumulative_len += len(match)
        if cumulative_len >= length_hoped:
            break_at = idx
            break

    return matches[0:break_at]
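A quick hypothetical call, showing that matching is case-insensitive and the result list is capped near `length_hoped` characters (each string is first wrapped into chunks of textwrap's default width, 70):

matches = SearchResultProcessor.find_matches(
    ["The quick brown fox", "nothing relevant here", "FOX again"],
    ["fox"],
    100,
)
# -> ["The quick brown fox", "FOX again"]
# (combined length 28 < 100, so no truncation occurs)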
def decorate_matches(match_in, match_word):
    """ decorate the matches within the excerpt """
    matches = re.finditer(match_word, match_in, re.IGNORECASE)
    for matched_string in set([match.group() for match in matches]):
        match_in = match_in.replace(
            matched_string,
            getattr(settings, "SEARCH_MATCH_DECORATION", u"<b>{}</b>").format(matched_string)
        )
    return match_in
def add_properties(self):
    """
    Called during post processing of result
    Any properties defined in your subclass will get exposed as members of the result json from the search
    """
    for property_name in [p[0] for p in inspect.getmembers(self.__class__) if isinstance(p[1], property)]:
        self._results_fields[property_name] = getattr(self, property_name, None)
def process_result(cls, dictionary, match_phrase, user):
    """
    Called from within search handler. Finds desired subclass and decides if the
    result should be removed and adds properties derived from the result information
    """
    result_processor = _load_class(getattr(settings, "SEARCH_RESULT_PROCESSOR", None), cls)
    srp = result_processor(dictionary, match_phrase)
    if srp.should_remove(user):
        return None
    try:
        srp.add_properties()
    # protect around any problems introduced by subclasses within their properties
    except Exception as ex:  # pylint: disable=broad-except
        log.exception("error processing properties for %s - %s: will remove from results",
                      json.dumps(dictionary, cls=DjangoJSONEncoder), str(ex))
        return None
    return dictionary
def excerpt(self):
    """
    Property to display a useful excerpt representing the matches within the results
    """
    if "content" not in self._results_fields:
        return None

    match_phrases = [self._match_phrase]
    if six.PY2:
        separate_phrases = [
            phrase.decode('utf-8')
            for phrase in shlex.split(self._match_phrase.encode('utf-8'))
        ]
    else:
        separate_phrases = [
            phrase
            for phrase in shlex.split(self._match_phrase)
        ]
    if len(separate_phrases) > 1:
        match_phrases.extend(separate_phrases)
    else:
        match_phrases = separate_phrases

    matches = SearchResultProcessor.find_matches(
        SearchResultProcessor.strings_in_dictionary(self._results_fields["content"]),
        match_phrases,
        DESIRED_EXCERPT_LENGTH
    )
    excerpt_text = ELLIPSIS.join(matches)

    for match_word in match_phrases:
        excerpt_text = SearchResultProcessor.decorate_matches(excerpt_text, match_word)

    return excerpt_text
def generate_field_filters(cls, **kwargs):
    """
    Called from within search handler
    Finds desired subclass and adds filter information based upon user information
    """
    generator = _load_class(getattr(settings, "SEARCH_FILTER_GENERATOR", None), cls)()
    return (
        generator.field_dictionary(**kwargs),
        generator.filter_dictionary(**kwargs),
        generator.exclude_dictionary(**kwargs),
    )
def set_search_enviroment(cls, **kwargs):
    """
    Called from within search handler
    Finds desired subclass and calls initialize method
    """
    initializer = _load_class(getattr(settings, "SEARCH_INITIALIZER", None), cls)()
    return initializer.initialize(**kwargs)
def _parse(self, filename):
    """Opens data file and for each line, calls _eat_name_line"""
    self.names = {}
    with codecs.open(filename, encoding="iso8859-1") as f:
        for line in f:
            if any(map(lambda c: 128 < ord(c) < 160, line)):
                line = line.encode("iso8859-1").decode("windows-1252")
            self._eat_name_line(line.strip())
def _eat_name_line(self, line):
    """Parses one line of data file"""
    if line[0] not in "#=":
        parts = line.split()
        country_values = line[30:-1]
        name = map_name(parts[1])
        if not self.case_sensitive:
            name = name.lower()

        if parts[0] == "M":
            self._set(name, u"male", country_values)
        elif parts[0] == "1M" or parts[0] == "?M":
            self._set(name, u"mostly_male", country_values)
        elif parts[0] == "F":
            self._set(name, u"female", country_values)
        elif parts[0] == "1F" or parts[0] == "?F":
            self._set(name, u"mostly_female", country_values)
        elif parts[0] == "?":
            self._set(name, self.unknown_value, country_values)
        else:
            # raising a bare string is invalid in Python 3; use an exception type
            raise ValueError("Not sure what to do with a sex of %s" % parts[0])
def _set(self, name, gender, country_values):
    """Sets gender and relevant country values for names dictionary of detector"""
    if '+' in name:
        for replacement in ['', ' ', '-']:
            self._set(name.replace('+', replacement), gender, country_values)
    else:
        if name not in self.names:
            self.names[name] = {}
        self.names[name][gender] = country_values
def _most_popular_gender(self, name, counter):
    """Finds the most popular gender for the given name counting by given counter"""
    if name not in self.names:
        return self.unknown_value

    max_count, max_tie = (0, 0)
    # dict.keys() is not indexable on Python 3; take an arbitrary key instead
    best = next(iter(self.names[name].keys()))
    for gender, country_values in self.names[name].items():
        count, tie = counter(country_values)
        if count > max_count or (count == max_count and tie > max_tie):
            max_count, max_tie, best = count, tie, gender

    return best if max_count > 0 else self.unknown_value
def get_gender(self, name, country=None):
    """Returns best gender for the given name and country pair"""
    if not self.case_sensitive:
        name = name.lower()

    if name not in self.names:
        return self.unknown_value
    elif not country:
        def counter(country_values):
            # materialize the map() so len() and sum() both work on Python 3
            country_values = list(map(ord, country_values.replace(" ", "")))
            return (len(country_values),
                    sum(map(lambda c: c > 64 and c - 55 or c - 48, country_values)))

        return self._most_popular_gender(name, counter)
    elif country in self.__class__.COUNTRIES:
        index = self.__class__.COUNTRIES.index(country)
        counter = lambda e: (ord(e[index]) - 32, 0)
        return self._most_popular_gender(name, counter)
    else:
        raise NoCountryError("No such country: %s" % country)
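A hedged usage sketch, assuming these methods belong to a sexmachine/gender_guesser-style Detector class (the class name, constructor, and country key are assumptions based on that family of libraries):

d = Detector(case_sensitive=False)
d.get_gender("bob")       # -> "male" (per the labels assigned in _eat_name_line)
d.get_gender("sally")     # -> "female"
d.get_gender("zzzz")      # unrecognized name -> d.unknown_value
d.get_gender("jamie", "great_britain")  # country-specific popularity lookup
d.get_gender("jamie", "atlantis")       # raises NoCountryError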
def output(self, msg, newline=True):
    """
    Writes the specified string to the output target of the report.

    :param msg: the message to output.
    :type msg: str
    :param newline:
        whether or not to append a newline to the end of the message
    :type newline: bool
    """
    click.echo(text_type(msg), nl=newline, file=self.output_file)
def execute_tools(config, path, progress=None):
    """
    Executes the suite of TidyPy tools upon the project and returns the
    issues that are found.

    :param config: the TidyPy configuration to use
    :type config: dict
    :param path: the path to the project to analyze
    :type path: str
    :param progress:
        the progress reporter object that will receive callbacks during the
        execution of the tool suite. If not specified, no progress
        notifications will occur.
    :type progress: tidypy.Progress
    :rtype: tidypy.Collector
    """
    progress = progress or QuietProgress()
    progress.on_start()

    manager = SyncManager()
    manager.start()

    num_tools = 0
    tools = manager.Queue()
    for name, cls in iteritems(get_tools()):
        if config[name]['use'] and cls.can_be_used():
            num_tools += 1
            tools.put({
                'name': name,
                'config': config[name],
            })

    collector = Collector(config)
    if not num_tools:
        progress.on_finish()
        return collector

    notifications = manager.Queue()
    environment = manager.dict({
        'finder': Finder(path, config),
    })

    workers = []
    for _ in range(config['workers']):
        worker = Worker(
            args=(
                tools,
                notifications,
                environment,
            ),
        )
        worker.start()
        workers.append(worker)

    while num_tools:
        try:
            notification = notifications.get(True, 0.25)
        except Empty:
            pass
        else:
            if notification['type'] == 'start':
                progress.on_tool_start(notification['tool'])
            elif notification['type'] == 'complete':
                collector.add_issues(notification['issues'])
                progress.on_tool_finish(notification['tool'])
                num_tools -= 1

    progress.on_finish()

    return collector
def execute_reports(
        config,
        path,
        collector,
        on_report_finish=None,
        output_file=None):
    """
    Executes the configured suite of issue reports.

    :param config: the TidyPy configuration to use
    :type config: dict
    :param path: the path to the project that was analyzed
    :type path: str
    :param collector: the issues to report
    :type collector: tidypy.Collector
    :param on_report_finish:
        a function invoked after each report is produced; receives that
        report's configuration dictionary
    :param output_file: the file object the reports are written to
    """
    reports = get_reports()
    for report in config.get('requested_reports', []):
        if report.get('type') and report['type'] in reports:
            cfg = config.get('report', {}).get(report['type'], {})
            cfg.update(report)
            reporter = reports[report['type']](
                cfg,
                path,
                output_file=output_file,
            )
            reporter.produce(collector)
            if on_report_finish:
                on_report_finish(report)
def is_excluded(self, path):
    """
    Determines whether or not the specified file is excluded by the
    project's configuration.

    :param path: the path to check
    :type path: pathlib.Path
    :rtype: bool
    """
    relpath = path.relative_to(self.base_path).as_posix()
    return matches_masks(relpath, self.excludes)
def is_excluded_dir(self, path):
    """
    Determines whether or not the specified directory is excluded by the
    project's configuration.

    :param path: the path to check
    :type path: pathlib.Path
    :rtype: bool
    """
    if self.is_excluded(path):
        return True
    return matches_masks(path.name, ALWAYS_EXCLUDED_DIRS)
def files(self, filters=None):
    """
    A generator that produces a sequence of paths to files in the project
    that match the specified filters.

    :param filters:
        the regular expressions to use when finding files in the project.
        If not specified, all files are returned.
    :type filters: list(str)
    """
    filters = compile_masks(filters or [r'.*'])

    for files in itervalues(self._found):
        for file_ in files:
            relpath = text_type(Path(file_).relative_to(self.base_path))
            if matches_masks(relpath, filters):
                yield file_
def directories(self, filters=None, containing=None):
    """
    A generator that produces a sequence of paths to directories in the
    project that match the specified filters.

    :param filters:
        the regular expressions to use when finding directories in the
        project. If not specified, all directories are returned.
    :type filters: list(str)
    :param containing:
        if a directory passes through the specified filters, it is checked
        for the presence of a file that matches one of the regular
        expressions in this parameter.
    :type containing: list(str)
    """
    filters = compile_masks(filters or [r'.*'])
    contains = compile_masks(containing)

    for dirname, files in iteritems(self._found):
        relpath = text_type(Path(dirname).relative_to(self.base_path))
        if matches_masks(relpath, filters):
            if not contains or self._contains(files, contains):
                yield dirname
def modules(self, filters=None):
    """
    A generator that produces a sequence of paths to files that look to
    be Python modules (e.g., ``*.py``).

    :param filters:
        the regular expressions to use when finding files in the project.
        If not specified, all files are returned.
    :type filters: list(str)
    """
    masks = compile_masks(r'\.py$')

    for file_ in self.files(filters=filters):
        if matches_masks(file_, masks):
            yield file_
def sys_paths(self, filters=None):
    """
    Produces a list of paths that would be suitable to use in ``sys.path``
    in order to access the Python modules/packages found in this project.

    :param filters:
        the regular expressions to use when finding files in the project.
        If not specified, all files are returned.
    :type filters: list(str)
    """
    paths = set()

    packages = list(self.packages(filters=filters))
    for module in self.modules(filters=filters):
        parent = text_type(Path(module).parent)
        if parent not in packages:
            paths.add(parent)

    paths.update(self.topmost_directories([
        text_type(Path(package).parent)
        for package in packages
    ]))

    return list(paths)
def add_issues(self, issues):
    """
    Adds one or more issues to the collection.

    :param issues: the issue(s) to add
    :type issues: tidypy.Issue or list(tidypy.Issue)
    """
    if not isinstance(issues, (list, tuple)):
        issues = [issues]
    with self._lock:
        self._all_issues.extend(issues)
        self._cleaned_issues = None
def issue_count(self, include_unclean=False):
    """
    Returns the number of issues in the collection.

    :param include_unclean:
        whether or not to include issues that are being ignored due to
        being a duplicate, excluded, etc.
    :type include_unclean: bool
    :rtype: int
    """
    if include_unclean:
        return len(self._all_issues)
    self._ensure_cleaned_issues()
    return len(self._cleaned_issues)
def get_issues(self, sortby=None):
    """
    Retrieves the issues in the collection.

    :param sortby: the properties to sort the issues by
    :type sortby: list(str)
    :rtype: list(tidypy.Issue)
    """
    self._ensure_cleaned_issues()
    return self._sort_issues(self._cleaned_issues, sortby)
def get_grouped_issues(self, keyfunc=None, sortby=None):
    """
    Retrieves the issues in the collection grouped into buckets according
    to the key generated by the keyfunc.

    :param keyfunc:
        a function that will be used to generate the key that identifies
        the group that an issue will be assigned to. This function receives
        a single tidypy.Issue argument and must return a string. If not
        specified, the filename of the issue will be used.
    :type keyfunc: func
    :param sortby: the properties to sort the issues by
    :type sortby: list(str)
    :rtype: OrderedDict
    """
    if not keyfunc:
        keyfunc = default_group
    if not sortby:
        sortby = self.DEFAULT_SORT
    self._ensure_cleaned_issues()
    return self._group_issues(self._cleaned_issues, keyfunc, sortby)
def parse(cls, content, is_pyproject=False):
    """
    A convenience method for parsing a TOML-serialized configuration.

    :param content: a TOML string containing a TidyPy configuration
    :type content: str
    :param is_pyproject:
        whether or not the content is (or resembles) a ``pyproject.toml``
        file, where the TidyPy configuration is located within a key named
        ``tool``.
    :type is_pyproject: bool
    :rtype: dict
    """
    parsed = pytoml.loads(content)
    if is_pyproject:
        parsed = parsed.get('tool', {})
    parsed = parsed.get('tidypy', {})
    return parsed
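A small sketch of the two parsing modes; the TOML snippets are hypothetical minimal configs, and the `[tool.tidypy]` table follows the pyproject convention described in the docstring:

# Plain TidyPy config: the table lives at the top level under "tidypy".
plain = 'tidypy = {workers = 2}'
# parse(plain) -> {'workers': 2}

# pyproject.toml style: the same table nests under [tool].
pyproject = '[tool.tidypy]\nworkers = 2'
# parse(pyproject, is_pyproject=True) -> {'workers': 2}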
def get_tools():
    """
    Retrieves the TidyPy tools that are available in the current Python
    environment. The returned dictionary has keys that are the tool names
    and values are the tool classes.

    :rtype: dict
    """
    # pylint: disable=protected-access
    if not hasattr(get_tools, '_CACHE'):
        get_tools._CACHE = dict()
        for entry in pkg_resources.iter_entry_points('tidypy.tools'):
            try:
                get_tools._CACHE[entry.name] = entry.load()
            except ImportError as exc:  # pragma: no cover
                output_error(
                    'Could not load tool "%s" defined by "%s": %s' % (
                        entry,
                        entry.dist,
                        exc,
                    ),
                )
    return get_tools._CACHE
def get_reports():
    """
    Retrieves the TidyPy issue reports that are available in the current
    Python environment. The returned dictionary has keys that are the report
    names and values are the report classes.

    :rtype: dict
    """
    # pylint: disable=protected-access
    if not hasattr(get_reports, '_CACHE'):
        get_reports._CACHE = dict()
        for entry in pkg_resources.iter_entry_points('tidypy.reports'):
            try:
                get_reports._CACHE[entry.name] = entry.load()
            except ImportError as exc:  # pragma: no cover
                output_error(
                    'Could not load report "%s" defined by "%s": %s' % (
                        entry,
                        entry.dist,
                        exc,
                    ),
                )
    return get_reports._CACHE
def get_extenders():
    """
    Retrieves the TidyPy configuration extenders that are available in the
    current Python environment. The returned dictionary has keys that are the
    extender names and values are the extender classes.

    :rtype: dict
    """
    # pylint: disable=protected-access
    if not hasattr(get_extenders, '_CACHE'):
        get_extenders._CACHE = dict()
        for entry in pkg_resources.iter_entry_points('tidypy.extenders'):
            try:
                get_extenders._CACHE[entry.name] = entry.load()
            except ImportError as exc:  # pragma: no cover
                output_error(
                    'Could not load extender "%s" defined by "%s": %s' % (
                        entry,
                        entry.dist,
                        exc,
                    ),
                )
    return get_extenders._CACHE
def purge_config_cache(location=None):
    """
    Clears out the cache of TidyPy configurations that were retrieved from
    outside the normal locations.

    :param location:
        when specified, only the cached configuration for that location is
        removed; otherwise, the entire cache directory is deleted
    :type location: str
    """
    cache_path = get_cache_path(location)

    if location:
        os.remove(cache_path)
    else:
        shutil.rmtree(cache_path)
def get_default_config():
    """
    Produces a stock/out-of-the-box TidyPy configuration.

    :rtype: dict
    """
    config = {}

    for name, cls in iteritems(get_tools()):
        config[name] = cls.get_default_config()

    try:
        workers = multiprocessing.cpu_count() - 1
    except NotImplementedError:  # pragma: no cover
        workers = 1
    workers = max(1, min(4, workers))

    config.update({
        'exclude': [],
        'merge-issues': True,
        'workers': workers,
        'requested_reports': [
            {
                'type': 'console',
            },
        ],
        'disabled': [],
        'noqa': True,
        'extends': [],
        'ignore-missing-extends': False,
    })

    return config
def get_user_config(project_path, use_cache=True):
    """
    Produces a TidyPy configuration that incorporates the configuration files
    stored in the current user's home directory.

    :param project_path: the path to the project that is going to be analyzed
    :type project_path: str
    :param use_cache:
        whether or not to use cached versions of any remote/referenced TidyPy
        configurations. If not specified, defaults to ``True``.
    :type use_cache: bool
    :rtype: dict
    """
    if sys.platform == 'win32':
        user_config = os.path.expanduser(r'~\tidypy')
    else:
        user_config = os.path.join(
            os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config'),
            'tidypy'
        )

    if os.path.exists(user_config):
        with open(user_config, 'r') as config_file:
            config = pytoml.load(config_file).get('tidypy', {})

        config = merge_dict(get_default_config(), config)
        config = process_extensions(config, project_path, use_cache=use_cache)
        return config

    return None
def get_local_config(project_path, use_cache=True):
    """
    Produces a TidyPy configuration using the ``pyproject.toml`` in the
    project's directory.

    :param project_path: the path to the project that is going to be analyzed
    :type project_path: str
    :param use_cache:
        whether or not to use cached versions of any remote/referenced TidyPy
        configurations. If not specified, defaults to ``True``.
    :type use_cache: bool
    :rtype: dict
    """
    pyproject_path = os.path.join(project_path, 'pyproject.toml')

    if os.path.exists(pyproject_path):
        with open(pyproject_path, 'r') as config_file:
            config = pytoml.load(config_file)

        config = config.get('tool', {}).get('tidypy', {})
        config = merge_dict(get_default_config(), config)
        config = process_extensions(config, project_path, use_cache=use_cache)
        return config

    return None
def get_project_config(project_path, use_cache=True):
    """
    Produces the TidyPy configuration to use for the specified project.

    If a ``pyproject.toml`` exists, the configuration will be based on that.
    If not, the TidyPy configuration in the user's home directory will be
    used. If one does not exist, the default configuration will be used.

    :param project_path: the path to the project that is going to be analyzed
    :type project_path: str
    :param use_cache:
        whether or not to use cached versions of any remote/referenced TidyPy
        configurations. If not specified, defaults to ``True``.
    :type use_cache: bool
    :rtype: dict
    """
    return get_local_config(project_path, use_cache=use_cache) \
        or get_user_config(project_path, use_cache=use_cache) \
        or get_default_config()
def merge_list(list1, list2):
    """
    Merges the contents of two lists into a new list.

    :param list1: the first list
    :type list1: list
    :param list2: the second list
    :type list2: list
    :returns: list
    """
    merged = list(list1)

    for value in list2:
        if value not in merged:
            merged.append(value)

    return merged
def merge_dict(dict1, dict2, merge_lists=False):
    """
    Recursively merges the contents of two dictionaries into a new dictionary.

    When both input dictionaries share a key, the value from ``dict2`` is kept.

    :param dict1: the first dictionary
    :type dict1: dict
    :param dict2: the second dictionary
    :type dict2: dict
    :param merge_lists:
        when this function encounters a key that contains lists in both input
        dictionaries, this parameter dictates whether or not those lists
        should be merged. If not specified, defaults to ``False``.
    :type merge_lists: bool
    :returns: dict
    """
    merged = dict(dict1)

    for key, value in iteritems(dict2):
        if isinstance(merged.get(key), dict):
            # propagate merge_lists so nested lists honor the caller's choice;
            # the original recursive call dropped the flag
            merged[key] = merge_dict(merged[key], value, merge_lists=merge_lists)
        elif merge_lists and isinstance(merged.get(key), list):
            merged[key] = merge_list(merged[key], value)
        else:
            merged[key] = value

    return merged
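A worked example of the merge semantics, with hypothetical config values:

base = {'workers': 2, 'report': {'type': 'console'}, 'exclude': ['a']}
override = {'workers': 4, 'report': {'file': 'out.txt'}, 'exclude': ['b']}

merge_dict(base, override)
# -> {'workers': 4, 'report': {'type': 'console', 'file': 'out.txt'}, 'exclude': ['b']}

merge_dict(base, override, merge_lists=True)
# -> same, except 'exclude' becomes ['a', 'b']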
def output_error(msg):
    """
    Prints the specified string to ``stderr``.

    :param msg: the message to print
    :type msg: str
    """
    click.echo(click.style(msg, fg='red'), err=True)
# the yield-based body implies this is wrapped with contextlib.contextmanager,
# per the docstring's description of it as a context manager
@contextmanager
def mod_sys_path(paths):
    """
    A context manager that will prepend the specified paths to Python's
    ``sys.path`` during the execution of the block.

    :param paths: the paths to prepend
    :type paths: list(str)
    """
    old_path = sys.path
    sys.path = paths + sys.path
    try:
        yield
    finally:
        sys.path = old_path
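Hypothetical usage, making a project's sources importable only inside the block (the path and package name are made up for illustration):

with mod_sys_path(['/tmp/myproject/src']):
    import mypackage  # hypothetical package living under /tmp/myproject/src
# sys.path is restored here, even if the import raised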
def compile_masks(masks):
    """
    Compiles a list of regular expressions.

    :param masks: the regular expressions to compile
    :type masks: list(str) or str
    :returns: list(regular expression object)
    """
    if not masks:
        masks = []
    elif not isinstance(masks, (list, tuple)):
        masks = [masks]

    return [
        re.compile(mask)
        for mask in masks
    ]
def matches_masks(target, masks):
    """
    Determines whether or not the target string matches any of the regular
    expressions specified.

    :param target: the string to check
    :type target: str
    :param masks: the regular expressions to check against
    :type masks: list(regular expression object)
    :returns: bool
    """
    for mask in masks:
        if mask.search(target):
            return True
    return False
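The two helpers compose naturally; note that `search()` means a mask matches anywhere in the target:

masks = compile_masks([r'\.py$', r'^tests/'])
assert matches_masks('pkg/module.py', masks)     # matches the first mask
assert matches_masks('tests/test_x.txt', masks)  # matches the second mask
assert not matches_masks('README.rst', masks)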
def read_file(filepath):
    """
    Retrieves the contents of the specified file.

    This function performs simple caching so that the same file isn't read
    more than once per process.

    :param filepath: the file to read
    :type filepath: str
    :returns: str
    """
    with _FILE_CACHE_LOCK:
        if filepath not in _FILE_CACHE:
            _FILE_CACHE[filepath] = _read_file(filepath)
        return _FILE_CACHE[filepath]
def parse_python_file(filepath):
    """
    Retrieves the AST of the specified file.

    This function performs simple caching so that the same file isn't read
    or parsed more than once per process.

    :param filepath: the file to parse
    :type filepath: str
    :returns: ast.AST
    """
    with _AST_CACHE_LOCK:
        if filepath not in _AST_CACHE:
            source = read_file(filepath)
            _AST_CACHE[filepath] = ast.parse(source, filename=filepath)
        return _AST_CACHE[filepath]
def on_tool_finish(self, tool):
    """
    Called when an individual tool completes execution.

    :param tool: the name of the tool that completed
    :type tool: str
    """
    with self._lock:
        if tool in self.current_tools:
            self.current_tools.remove(tool)
            self.completed_tools.append(tool)
def exec_command(self, cmdstr):
    """
    Execute an x3270 command

    `cmdstr` gets sent directly to the x3270 subprocess on its stdin.
    """
    if self.is_terminated:
        raise TerminatedError("this TerminalClient instance has been terminated")

    log.debug("sending command: %s", cmdstr)
    c = Command(self.app, cmdstr)
    start = time.time()
    c.execute()
    elapsed = time.time() - start
    log.debug("elapsed execution: {0}".format(elapsed))
    self.status = Status(c.status_line)

    return c
def terminate(self):
    """
    terminates the underlying x3270 subprocess. Once called, this
    Emulator instance must no longer be used.
    """
    if not self.is_terminated:
        log.debug("terminal client terminated")
        try:
            self.exec_command(b"Quit")
        except BrokenPipeError:  # noqa
            # x3270 was terminated, since we are just quitting anyway, ignore it.
            pass
        except socket.error as e:
            if e.errno != errno.ECONNRESET:
                raise
            # this can happen because wc3270 closes the socket before
            # the read() can happen, causing a socket error

        self.app.close()
        self.is_terminated = True
def is_connected(self):
    """
    Return bool indicating connection state
    """
    # need to wrap in try/except b/c of wc3270's socket connection dynamics
    try:
        # this is basically a no-op, but it results in the current status
        # getting updated
        self.exec_command(b"Query(ConnectionState)")

        # connected status is like 'C(192.168.1.1)', disconnected is 'N'
        return self.status.connection_state.startswith(b"C(")
    except NotConnectedException:
        return False
def connect(self, host):
    """
    Connect to a host
    """
    if not self.app.connect(host):
        command = "Connect({0})".format(host).encode("ascii")
        self.exec_command(command)
    self.last_host = host
def wait_for_field(self):
    """
    Wait until the screen is ready, the cursor has been positioned
    on a modifiable field, and the keyboard is unlocked.

    Sometimes the server will "unlock" the keyboard but the screen
    will not yet be ready. In that case, an attempt to read or write
    to the screen will result in a 'E' keyboard status because we
    tried to read from a screen that is not yet ready.

    Using this method tells the client to wait until a field is
    detected and the cursor has been positioned on it.
    """
    self.exec_command("Wait({0}, InputField)".format(self.timeout).encode("ascii"))
    if self.status.keyboard != b"U":
        raise KeyboardStateError(
            "keyboard not unlocked, state was: {0}".format(
                self.status.keyboard.decode("ascii")
            )
        )
def move_to(self, ypos, xpos):
    """
    move the cursor to the given co-ordinates. Co-ordinates are 1
    based, as listed in the status area of the terminal.
    """
    # the screen's co-ordinates are 1 based, but the command is 0 based
    xpos -= 1
    ypos -= 1
    self.exec_command("MoveCursor({0}, {1})".format(ypos, xpos).encode("ascii"))
def send_string(self, tosend, ypos=None, xpos=None):
    """
    Send a string to the screen at the current cursor location or at
    screen co-ordinates `ypos`/`xpos` if they are both given.

    Co-ordinates are 1 based, as listed in the status area of the
    terminal.
    """
    if xpos is not None and ypos is not None:
        self.move_to(ypos, xpos)

    # escape double quotes in the data to send; the original line replaced a
    # quote with itself (a no-op), almost certainly a mangled backslash escape
    tosend = tosend.replace('"', '\\"')
    self.exec_command('String("{0}")'.format(tosend).encode("ascii"))
def string_get(self, ypos, xpos, length):
    """
    Get a string of `length` at screen co-ordinates `ypos`/`xpos`

    Co-ordinates are 1 based, as listed in the status area of the
    terminal.
    """
    # the screen's co-ordinates are 1 based, but the command is 0 based
    xpos -= 1
    ypos -= 1
    cmd = self.exec_command(
        "Ascii({0},{1},{2})".format(ypos, xpos, length).encode("ascii")
    )
    # this usage of ascii should only return a single line of data
    assert len(cmd.data) == 1, cmd.data
    return cmd.data[0].decode("ascii")
def string_found(self, ypos, xpos, string):
    """
    Return True if `string` is found at screen co-ordinates
    `ypos`/`xpos`, False otherwise.

    Co-ordinates are 1 based, as listed in the status area of the
    terminal.
    """
    found = self.string_get(ypos, xpos, len(string))
    log.debug('string_found() saw "{0}"'.format(found))
    return found == string
def fill_field(self, ypos, xpos, tosend, length):
    """
    clears the field at the position given and inserts the string `tosend`

    tosend: the string to insert
    length: the length of the field

    Co-ordinates are 1 based, as listed in the status area of the
    terminal.

    raises: FieldTruncateError if `tosend` is longer than `length`.
    """
    if length < len(tosend):
        raise FieldTruncateError('length limit %d, but got "%s"' % (length, tosend))
    if xpos is not None and ypos is not None:
        self.move_to(ypos, xpos)
    self.delete_field()
    self.send_string(tosend)
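A hedged end-to-end sketch of the emulator methods above; the `Emulator` class name comes from the terminate() docstring, while the host and screen contents are hypothetical:

em = Emulator()
em.connect("mainframe.example.com")
em.wait_for_field()
em.fill_field(5, 20, "jdoe", length=8)     # clear the 8-char field at row 5, col 20
em.send_string("secret", ypos=6, xpos=20)
if em.string_found(1, 1, "WELCOME"):
    print(em.string_get(2, 1, 40))         # read 40 chars starting at row 2, col 1
em.terminate()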
def setEnvironmentalData(self, humidity, temperature):
    """
    Humidity is stored as an unsigned 16 bits in 1/512%RH. The default value
    is 50% = 0x64, 0x00. As an example 48.5% humidity would be 0x61, 0x00.

    Temperature is stored as an unsigned 16 bits integer in 1/512 degrees;
    there is an offset: 0 maps to -25C. The default value is 25C = 0x64, 0x00.
    As an example 23.5C temperature would be 0x61, 0x00.

    The internal algorithm uses these values (or default values if not set by
    the application) to compensate for changes in relative humidity and
    ambient temperature.
    """
    hum_perc = humidity << 1

    # math.fmod() takes two arguments and computes a remainder; math.modf()
    # is the call that splits a float into (fractional, integral) parts
    parts = math.modf(temperature)
    fractional = parts[0]
    temperature = parts[1]

    # both operands of << and & must be integers
    temp_high = (int(temperature) + 25) << 9
    temp_low = int(fractional / 0.001953125) & 0x1FF  # 0.001953125 = 1/512
    temp_conv = temp_high | temp_low

    buf = [hum_perc, 0x00, ((temp_conv >> 8) & 0xFF), (temp_conv & 0xFF)]

    self._device.writeList(CCS811_ENV_DATA, buf)
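A worked check of the encoding, using the docstring's own defaults of 25C and 50% RH:

# 25C with the +25 offset -> 50; 50 << 9 = 25600 = 0x6400, i.e. bytes 0x64, 0x00.
temp_conv = (25 + 25) << 9
assert [(temp_conv >> 8) & 0xFF, temp_conv & 0xFF] == [0x64, 0x00]

# 50 %RH shifted left once -> 100 = 0x64, matching the default 0x64, 0x00 pair.
assert (50 << 1) == 0x64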
def from_func(cls, func, variables, vartype, name=None):
    """Construct a constraint from a validation function.

    Args:
        func (function):
            Function that evaluates True when the variables satisfy the constraint.
        variables (iterable):
            Iterable of variable labels.
        vartype (:class:`~dimod.Vartype`/str/set):
            Variable type for the constraint. Accepted input values:

            * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
            * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``
        name (string, optional, default='Constraint'):
            Name for the constraint.

    Examples:
        This example creates a constraint that binary variables `a` and `b`
        are not equal.

        >>> import dwavebinarycsp
        >>> import operator
        >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ['a', 'b'], 'BINARY')
        >>> print(const.name)
        Constraint
        >>> (0, 1) in const.configurations
        True

        This example creates a constraint that :math:`out = NOT(x)`
        for spin variables.

        >>> import dwavebinarycsp
        >>> def not_(y, x):  # y=NOT(x) for spin variables
        ...     return (y == -x)
        ...
        >>> const = dwavebinarycsp.Constraint.from_func(
        ...     not_,
        ...     ['out', 'in'],
        ...     {1, -1},
        ...     name='not_spin')
        >>> print(const.name)
        not_spin
        >>> (1, -1) in const.configurations
        True
    """
    variables = tuple(variables)

    configurations = frozenset(config
                               for config in itertools.product(vartype.value, repeat=len(variables))
                               if func(*config))

    return cls(func, configurations, variables, vartype, name)
def from_configurations(cls, configurations, variables, vartype, name=None):
    """Construct a constraint from valid configurations.

    Args:
        configurations (iterable[tuple]):
            Valid configurations of the variables. Each configuration is a
            tuple of variable assignments ordered by :attr:`~Constraint.variables`.
        variables (iterable):
            Iterable of variable labels.
        vartype (:class:`~dimod.Vartype`/str/set):
            Variable type for the constraint. Accepted input values:

            * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
            * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``
        name (string, optional, default='Constraint'):
            Name for the constraint.

    Examples:
        This example creates a constraint that variables `a` and `b` are not equal.

        >>> import dwavebinarycsp
        >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)],
        ...                                                       ['a', 'b'], dwavebinarycsp.BINARY)
        >>> print(const.name)
        Constraint
        >>> (0, 0) in const.configurations   # Order matches variables: a,b
        False

        This example creates a constraint based on specified valid configurations
        that represents an OR gate for spin variables.

        >>> import dwavebinarycsp
        >>> const = dwavebinarycsp.Constraint.from_configurations(
        ...     [(-1, -1, -1), (1, -1, 1), (1, 1, -1), (1, 1, 1)],
        ...     ['y', 'x1', 'x2'],
        ...     dwavebinarycsp.SPIN, name='or_spin')
        >>> print(const.name)
        or_spin
        >>> (1, 1, -1) in const.configurations   # Order matches variables: y,x1,x2
        True
    """
    def func(*args):
        return args in configurations

    return cls(func, configurations, variables, vartype, name)
def check(self, solution):
    """Check that a solution satisfies the constraint.

    Args:
        solution (container):
            An assignment for the variables in the constraint.

    Returns:
        bool: True if the solution satisfies the constraint; otherwise False.

    Examples:
        This example creates a constraint that :math:`a \\ne b` on binary
        variables and tests it for two candidate solutions, with additional
        unconstrained variable c.

        >>> import dwavebinarycsp
        >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)],
        ...                                                       ['a', 'b'], dwavebinarycsp.BINARY)
        >>> solution = {'a': 1, 'b': 1, 'c': 0}
        >>> const.check(solution)
        False
        >>> solution = {'a': 1, 'b': 0, 'c': 0}
        >>> const.check(solution)
        True
    """
    return self.func(*(solution[v] for v in self.variables))
def fix_variable(self, v, value):
    """Fix the value of a variable and remove it from the constraint.

    Args:
        v (variable):
            Variable in the constraint to be set to a constant value.
        value (int):
            Value assigned to the variable. Values must match the
            :class:`.Vartype` of the constraint.

    Examples:
        This example creates a constraint that :math:`a \\ne b` on binary
        variables, fixes variable a to 0, and tests two candidate solutions.

        >>> import dwavebinarycsp
        >>> import operator
        >>> const = dwavebinarycsp.Constraint.from_func(operator.ne,
        ...                                             ['a', 'b'], dwavebinarycsp.BINARY)
        >>> const.fix_variable('a', 0)
        >>> const.check({'b': 1})
        True
        >>> const.check({'b': 0})
        False
    """
    variables = self.variables
    try:
        idx = variables.index(v)
    except ValueError:
        raise ValueError("given variable {} is not part of the constraint".format(v))

    if value not in self.vartype.value:
        raise ValueError("expected value to be in {}, received {} instead".format(self.vartype.value, value))

    configurations = frozenset(config[:idx] + config[idx + 1:]  # exclude the fixed var
                               for config in self.configurations
                               if config[idx] == value)

    if not configurations:
        raise UnsatError("fixing {} to {} makes this constraint unsatisfiable".format(v, value))

    variables = variables[:idx] + variables[idx + 1:]

    self.configurations = configurations
    self.variables = variables

    def func(*args):
        return args in configurations
    self.func = func

    self.name = '{} ({} fixed to {})'.format(self.name, v, value)
def flip_variable(self, v):
    """Flip a variable in the constraint.

    Args:
        v (variable):
            Variable in the constraint to take the complementary value of its
            construction value.

    Examples:
        This example creates a constraint that :math:`a = b` on binary
        variables and flips variable a.

        >>> import dwavebinarycsp
        >>> import operator
        >>> const = dwavebinarycsp.Constraint.from_func(operator.eq,
        ...                                             ['a', 'b'], dwavebinarycsp.BINARY)
        >>> const.check({'a': 0, 'b': 0})
        True
        >>> const.flip_variable('a')
        >>> const.check({'a': 1, 'b': 0})
        True
        >>> const.check({'a': 0, 'b': 0})
        False
    """
    try:
        idx = self.variables.index(v)
    except ValueError:
        raise ValueError("variable {} is not a variable in constraint {}".format(v, self.name))

    if self.vartype is dimod.BINARY:

        original_func = self.func

        def func(*args):
            new_args = list(args)
            new_args[idx] = 1 - new_args[idx]  # negate v
            return original_func(*new_args)

        self.func = func

        self.configurations = frozenset(config[:idx] + (1 - config[idx],) + config[idx + 1:]
                                        for config in self.configurations)

    else:  # SPIN

        original_func = self.func

        def func(*args):
            new_args = list(args)
            new_args[idx] = -new_args[idx]  # negate v
            return original_func(*new_args)

        self.func = func

        self.configurations = frozenset(config[:idx] + (-config[idx],) + config[idx + 1:]
                                        for config in self.configurations)

    self.name = '{} ({} flipped)'.format(self.name, v)
def copy(self):
    """Create a copy.

    Examples:
        This example copies constraint :math:`a \\ne b` and tests a solution
        on the copied constraint.

        >>> import dwavebinarycsp
        >>> import operator
        >>> const = dwavebinarycsp.Constraint.from_func(operator.ne,
        ...                                             ['a', 'b'], 'BINARY')
        >>> const2 = const.copy()
        >>> const2 is const
        False
        >>> const2.check({'a': 1, 'b': 1})
        False
    """
    # each object is itself immutable (except the function)
    return self.__class__(self.func, self.configurations, self.variables, self.vartype, name=self.name)
def projection(self, variables):
    """Create a new constraint that is the projection onto a subset of the variables.

    Args:
        variables (iterable):
            Subset of the constraint's variables.

    Returns:
        :obj:`.Constraint`: A new constraint over a subset of the variables.

    Examples:
        >>> import dwavebinarycsp
        ...
        >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 0), (0, 1)],
        ...                                                       ['a', 'b'],
        ...                                                       dwavebinarycsp.BINARY)
        >>> proj = const.projection(['a'])
        >>> proj.variables
        ('a',)
        >>> proj.configurations
        frozenset({(0,)})

    """
    # resolve iterables or mutability problems by casting the variables to a set
    variables = set(variables)

    if not variables.issubset(self.variables):
        raise ValueError("Cannot project to variables not in the constraint.")

    idxs = [i for i, v in enumerate(self.variables) if v in variables]

    configurations = frozenset(tuple(config[i] for i in idxs) for config in self.configurations)
    variables = tuple(self.variables[i] for i in idxs)

    return self.from_configurations(configurations, variables, self.vartype)
def multiplication_circuit(nbit, vartype=dimod.BINARY):
    """Multiplication circuit constraint satisfaction problem.

    A constraint satisfaction problem that represents the binary multiplication
    :math:`ab=p`, where each multiplicand is a binary number of `nbit` bits; for
    example, :math:`a = a_0 + 2a_1 + 4a_2 + \\dots + 2^{nbit-1}a_{nbit-1}`.

    The diagram below shows a graphic representation of the circuit::

        ________________________________________________________________________________
        |                                       and20         and10         and00     |
        |                                         |             |             |       |
        |                          and21        add11──and11  add01──and01    |       |
        |                            |β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜|β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜|             |       |
        |            and22         add12──and12  add02──and02  |             |       |
        |              |β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜|β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜|             |             |       |
        |            add13─────────add03          |             |             |       |
        |  β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜|            |            |             |             |       |
        | p5           p4           p3           p2            p1            p0      |
        --------------------------------------------------------------------------------

    Args:
        nbit (int):
            Number of bits in the multiplicands.

        vartype (Vartype, optional, default='BINARY'):
            Variable type. Accepted input values:

            * Vartype.SPIN, 'SPIN', {-1, 1}
            * Vartype.BINARY, 'BINARY', {0, 1}

    Returns:
        CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied when variables
        :math:`a, b, p` are assigned values that correctly solve binary multiplication
        :math:`ab=p`.

    Examples:
        This example creates a multiplication circuit CSP that multiplies two 3-bit numbers,
        which is then formulated as a binary quadratic model (BQM). It fixes the multiplicands
        as :math:`a=5, b=3` (:math:`101` and :math:`011`) and uses a simulated annealing
        sampler to find the product, :math:`p=15` (:math:`001111`).

        >>> import dwavebinarycsp
        >>> from dwavebinarycsp.factories.csp.circuits import multiplication_circuit
        >>> import neal
        >>> csp = multiplication_circuit(3)
        >>> bqm = dwavebinarycsp.stitch(csp)
        >>> bqm.fix_variable('a0', 1); bqm.fix_variable('a1', 0); bqm.fix_variable('a2', 1)
        >>> bqm.fix_variable('b0', 1); bqm.fix_variable('b1', 1); bqm.fix_variable('b2', 0)
        >>> sampler = neal.SimulatedAnnealingSampler()
        >>> response = sampler.sample(bqm)
        >>> p = next(response.samples(n=1, sorted_by='energy'))
        >>> print(p['p0'], p['p1'], p['p2'], p['p3'], p['p4'], p['p5'])    # doctest: +SKIP
        1 1 1 1 0 0

    """

    if nbit < 1:
        raise ValueError("nbit must be a positive integer")

    num_multiplier_bits = num_multiplicand_bits = nbit

    # also checks the vartype argument
    csp = ConstraintSatisfactionProblem(vartype)

    # throughout, we will use the following convention:
    #   i to refer to the bits of the multiplier
    #   j to refer to the bits of the multiplicand
    #   k to refer to the bits of the product

    # create the variables corresponding to the input and output wires for the circuit
    a = {i: 'a%d' % i for i in range(nbit)}
    b = {j: 'b%d' % j for j in range(nbit)}
    p = {k: 'p%d' % k for k in range(nbit + nbit)}

    # we will want to store the internal variables somewhere
    AND = defaultdict(dict)    # the output of the AND gate associated with ai, bj is stored in AND[i][j]
    SUM = defaultdict(dict)    # the sum of the ADDER gate associated with ai, bj is stored in SUM[i][j]
    CARRY = defaultdict(dict)  # the carry of the ADDER gate associated with ai, bj is stored in CARRY[i][j]

    # we follow a shift-adder construction
    for i in range(num_multiplier_bits):
        for j in range(num_multiplicand_bits):

            ai = a[i]
            bj = b[j]

            if i == 0 and j == 0:
                # in this case there are no inputs from lower bits, so our only input is the
                # AND gate. And since we only have one bit to add, we don't need an adder,
                # nor do we have a carry out
                andij = AND[i][j] = p[0]

                gate = and_gate([ai, bj, andij], vartype=vartype, name='AND(%s, %s) = %s' % (ai, bj, andij))
                csp.add_constraint(gate)

                continue

            # we always need an AND gate
            andij = AND[i][j] = 'and%s,%s' % (i, j)

            gate = and_gate([ai, bj, andij], vartype=vartype, name='AND(%s, %s) = %s' % (ai, bj, andij))
            csp.add_constraint(gate)

            # the number of inputs will determine the type of adder
            inputs = [andij]

            # determine if there is a carry in
            if i - 1 in CARRY and j in CARRY[i - 1]:
                inputs.append(CARRY[i - 1][j])

            # determine if there is a sum in
            if i - 1 in SUM and j + 1 in SUM[i - 1]:
                inputs.append(SUM[i - 1][j + 1])

            # ok, create adders if necessary
            if len(inputs) == 1:
                # we don't need an adder and we don't have a carry
                SUM[i][j] = andij
            elif len(inputs) == 2:
                # we need a HALFADDER so we have a sum and a carry

                if j == 0:
                    sumij = SUM[i][j] = p[i]
                else:
                    sumij = SUM[i][j] = 'sum%d,%d' % (i, j)

                carryij = CARRY[i][j] = 'carry%d,%d' % (i, j)

                name = 'HALFADDER(%s, %s) = %s, %s' % (inputs[0], inputs[1], sumij, carryij)
                gate = halfadder_gate([inputs[0], inputs[1], sumij, carryij], vartype=vartype, name=name)
                csp.add_constraint(gate)
            else:
                assert len(inputs) == 3, 'unexpected number of inputs'

                # we need a FULLADDER so we have a sum and a carry

                if j == 0:
                    sumij = SUM[i][j] = p[i]
                else:
                    sumij = SUM[i][j] = 'sum%d,%d' % (i, j)

                carryij = CARRY[i][j] = 'carry%d,%d' % (i, j)

                name = 'FULLADDER(%s, %s, %s) = %s, %s' % (inputs[0], inputs[1], inputs[2], sumij, carryij)
                gate = fulladder_gate([inputs[0], inputs[1], inputs[2], sumij, carryij], vartype=vartype, name=name)
                csp.add_constraint(gate)

    # now we have a final row of full adders
    for col in range(nbit - 1):
        inputs = [CARRY[nbit - 1][col], SUM[nbit - 1][col + 1]]

        if col == 0:
            sumout = p[nbit + col]
            carryout = CARRY[nbit][col] = 'carry%d,%d' % (nbit, col)

            name = 'HALFADDER(%s, %s) = %s, %s' % (inputs[0], inputs[1], sumout, carryout)
            gate = halfadder_gate([inputs[0], inputs[1], sumout, carryout], vartype=vartype, name=name)
            csp.add_constraint(gate)

            continue

        inputs.append(CARRY[nbit][col - 1])

        sumout = p[nbit + col]
        if col < nbit - 2:
            carryout = CARRY[nbit][col] = 'carry%d,%d' % (nbit, col)
        else:
            carryout = p[2 * nbit - 1]

        name = 'FULLADDER(%s, %s, %s) = %s, %s' % (inputs[0], inputs[1], inputs[2], sumout, carryout)
        gate = fulladder_gate([inputs[0], inputs[1], inputs[2], sumout, carryout], vartype=vartype, name=name)
        csp.add_constraint(gate)

    return csp
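# A minimal usage sketch (not part of the library) for the factory above: build
# the 3-bit multiplier CSP and inspect its size. It assumes the behavior shown
# elsewhere in this module: len(csp) counts constraints and csp.variables is
# keyed by variable name. The internal wire names ('and1,0', 'sum1,1',
# 'carry2,0', ...) follow the i/j/k conventions described in the comments of
# multiplication_circuit.
from dwavebinarycsp.factories.csp.circuits import multiplication_circuit

csp = multiplication_circuit(3)
print(len(csp))               # one constraint per gate in the circuit
print(sorted(csp.variables))  # a0..a2, b0..b2, p0..p5, plus internal wires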
def xor_fault(a, b, out, fault):
    """Returns True if (XOR(a, b) == out and fault == 0) or (XOR(a, b) != out and fault == 1)."""
    if (a != b) == out:
        return fault == 0
    else:
        return fault == 1
def and_fault(a, b, out, fault):
    """Returns True if (AND(a, b) == out and fault == 0) or (AND(a, b) != out and fault == 1)."""
    if (a and b) == out:
        return fault == 0
    else:
        return fault == 1
def or_fault(a, b, out, fault):
    """Returns True if (OR(a, b) == out and fault == 0) or (OR(a, b) != out and fault == 1)."""
    if (a or b) == out:
        return fault == 0
    else:
        return fault == 1
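# A minimal sketch (not library code, assuming the Constraint class documented
# above): wrapping xor_fault with Constraint.from_func yields a diagnostic
# constraint in which 'fault' is forced to 1 exactly when the observed output
# disagrees with XOR(a, b).
import dwavebinarycsp

const = dwavebinarycsp.Constraint.from_func(
    xor_fault, ['a', 'b', 'out', 'fault'], dwavebinarycsp.BINARY)

print(const.check({'a': 0, 'b': 1, 'out': 1, 'fault': 0}))  # True: gate healthy
print(const.check({'a': 0, 'b': 1, 'out': 0, 'fault': 1}))  # True: gate faulty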
def assert_penaltymodel_factory_available():
    """For `dwavebinarycsp` to be functional, at least one penalty model factory
    has to be installed. See discussion in setup.py for details.
    """
    from pkg_resources import iter_entry_points
    from penaltymodel.core import FACTORY_ENTRYPOINT
    from itertools import chain

    supported = ('maxgap', 'mip')
    factories = chain(*(iter_entry_points(FACTORY_ENTRYPOINT, name) for name in supported))

    try:
        next(factories)
    except StopIteration:
        raise AssertionError(
            "To use 'dwavebinarycsp', at least one penaltymodel factory must be installed. "
            "Try {}.".format(
                " or ".join("'pip install dwavebinarycsp[{}]'".format(name) for name in supported)
            ))
def add_constraint(self, constraint, variables=tuple()):
    """Add a constraint.

    Args:
        constraint (function/iterable/:obj:`.Constraint`):
            Constraint definition in one of the supported formats:

            1. Function, with input arguments matching the order and
               :attr:`~.ConstraintSatisfactionProblem.vartype` type of the `variables`
               argument, that evaluates True when the constraint is satisfied.
            2. List explicitly specifying each allowed configuration as a tuple.
            3. :obj:`.Constraint` object built either explicitly or by
               :mod:`dwavebinarycsp.factories`.

        variables (iterable):
            Variables associated with the constraint. Not required when `constraint`
            is a :obj:`.Constraint` object.

    Examples:
        This example defines a function that evaluates True when the constraint is
        satisfied. The function's input arguments match the order and type of the
        `variables` argument.

        >>> import dwavebinarycsp
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> def all_equal(a, b, c):  # works for both dwavebinarycsp.BINARY and dwavebinarycsp.SPIN
        ...     return (a == b) and (b == c)
        >>> csp.add_constraint(all_equal, ['a', 'b', 'c'])
        >>> csp.check({'a': 0, 'b': 0, 'c': 0})
        True
        >>> csp.check({'a': 0, 'b': 0, 'c': 1})
        False

        This example explicitly lists allowed configurations.

        >>> import dwavebinarycsp
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.SPIN)
        >>> eq_configurations = {(-1, -1), (1, 1)}
        >>> csp.add_constraint(eq_configurations, ['v0', 'v1'])
        >>> csp.check({'v0': -1, 'v1': +1})
        False
        >>> csp.check({'v0': -1, 'v1': -1})
        True

        This example uses a :obj:`.Constraint` object built by :mod:`dwavebinarycsp.factories`.

        >>> import dwavebinarycsp
        >>> import dwavebinarycsp.factories.constraint.gates as gates
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c']))  # add an AND gate
        >>> csp.add_constraint(gates.xor_gate(['a', 'c', 'd']))  # add an XOR gate
        >>> csp.check({'a': 1, 'b': 0, 'c': 0, 'd': 1})
        True

    """
    if isinstance(constraint, Constraint):
        if variables and (tuple(variables) != constraint.variables):
            raise ValueError("mismatched variables and Constraint")
    elif isinstance(constraint, Callable):
        constraint = Constraint.from_func(constraint, variables, self.vartype)
    elif isinstance(constraint, Iterable):
        constraint = Constraint.from_configurations(constraint, variables, self.vartype)
    else:
        raise TypeError("Unknown constraint type given")

    self.constraints.append(constraint)
    for v in constraint.variables:
        self.variables[v].append(constraint)
def stitch(csp, min_classical_gap=2.0, max_graph_size=8):
    """Build a binary quadratic model with minimal energy levels at solutions to the
    specified constraint satisfaction problem.

    Args:
        csp (:obj:`.ConstraintSatisfactionProblem`):
            Constraint satisfaction problem.

        min_classical_gap (float, optional, default=2.0):
            Minimum energy gap from ground. Each constraint violated by the solution increases
            the energy level of the binary quadratic model by at least this much relative to
            ground energy.

        max_graph_size (int, optional, default=8):
            Maximum number of variables in the binary quadratic model that can be used to
            represent a single constraint.

    Returns:
        :class:`~dimod.BinaryQuadraticModel`

    Notes:
        For a `min_classical_gap` > 2 or constraints with more than two variables, requires
        access to factories from the penaltymodel_ ecosystem to construct the binary quadratic
        model.

    .. _penaltymodel: https://github.com/dwavesystems/penaltymodel

    Examples:
        This example creates a binary-valued constraint satisfaction problem with two
        constraints, :math:`a = b` and :math:`b \\ne c`, and builds a binary quadratic model
        with a minimum energy level of -2 such that each constraint violation by a solution
        adds the default minimum energy gap.

        >>> import dwavebinarycsp
        >>> import operator
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(operator.eq, ['a', 'b'])  # a == b
        >>> csp.add_constraint(operator.ne, ['b', 'c'])  # b != c
        >>> bqm = dwavebinarycsp.stitch(csp)
        >>> bqm.energy({'a': 0, 'b': 0, 'c': 1})  # satisfies csp
        -2.0
        >>> bqm.energy({'a': 0, 'b': 0, 'c': 0})  # violates one constraint
        0.0
        >>> bqm.energy({'a': 1, 'b': 0, 'c': 0})  # violates two constraints
        2.0

        This example creates a binary-valued constraint satisfaction problem with two
        constraints, :math:`a = b` and :math:`b \\ne c`, and builds a binary quadratic model
        with a minimum energy gap of 4. Note that in this case the conversion to binary
        quadratic model adds two ancillary variables that must be minimized over when
        solving.

        >>> import dwavebinarycsp
        >>> import operator
        >>> import itertools
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(operator.eq, ['a', 'b'])  # a == b
        >>> csp.add_constraint(operator.ne, ['b', 'c'])  # b != c
        >>> bqm = dwavebinarycsp.stitch(csp, min_classical_gap=4.0)
        >>> list(bqm)    # doctest: +SKIP
        ['a', 'aux1', 'aux0', 'b', 'c']
        >>> min([bqm.energy({'a': 0, 'b': 0, 'c': 1, 'aux0': aux0, 'aux1': aux1}) for
        ...      aux0, aux1 in list(itertools.product([0, 1], repeat=2))])  # satisfies csp
        -6.0
        >>> min([bqm.energy({'a': 0, 'b': 0, 'c': 0, 'aux0': aux0, 'aux1': aux1}) for
        ...      aux0, aux1 in list(itertools.product([0, 1], repeat=2))])  # violates one constraint
        -2.0
        >>> min([bqm.energy({'a': 1, 'b': 0, 'c': 0, 'aux0': aux0, 'aux1': aux1}) for
        ...      aux0, aux1 in list(itertools.product([0, 1], repeat=2))])  # violates two constraints
        2.0

        This example finds the minimum `max_graph_size` needed for the previous example.

        >>> import dwavebinarycsp
        >>> import operator
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(operator.eq, ['a', 'b'])  # a == b
        >>> csp.add_constraint(operator.ne, ['b', 'c'])  # b != c
        >>> for n in range(8, 1, -1):
        ...     try:
        ...         bqm = dwavebinarycsp.stitch(csp, min_classical_gap=4.0, max_graph_size=n)
        ...     except dwavebinarycsp.exceptions.ImpossibleBQM:
        ...         print(n+1)
        ...
3 """ # ensure we have penaltymodel factory available try: dwavebinarycsp.assert_penaltymodel_factory_available() except AssertionError as e: raise RuntimeError(e) def aux_factory(): for i in count(): yield 'aux{}'.format(i) aux = aux_factory() bqm = dimod.BinaryQuadraticModel.empty(csp.vartype) # developer note: we could cache them and relabel, for now though let's do the simple thing # penalty_models = {} for const in csp.constraints: configurations = const.configurations if len(const.variables) > max_graph_size: msg = ("The given csp contains a constraint {const} with {num_var} variables. " "This cannot be mapped to a graph with {max_graph_size} nodes. " "Consider checking whether your constraint is irreducible." "").format(const=const, num_var=len(const.variables), max_graph_size=max_graph_size) raise ImpossibleBQM(msg) pmodel = None if len(const) == 0: # empty constraint continue if min_classical_gap <= 2.0: if len(const) == 1 and max_graph_size >= 1: bqm.update(_bqm_from_1sat(const)) continue elif len(const) == 2 and max_graph_size >= 2: bqm.update(_bqm_from_2sat(const)) continue # developer note: we could cache them and relabel, for now though let's do the simple thing # if configurations in penalty_models: # raise NotImplementedError for G in iter_complete_graphs(const.variables, max_graph_size + 1, aux): # construct a specification spec = pm.Specification( graph=G, decision_variables=const.variables, feasible_configurations=configurations, min_classical_gap=min_classical_gap, vartype=csp.vartype ) # try to use the penaltymodel ecosystem try: pmodel = pm.get_penalty_model(spec) except pm.ImpossiblePenaltyModel: # hopefully adding more variables will make it possible continue if pmodel.classical_gap >= min_classical_gap: break # developer note: we could cache them and relabel, for now though let's do the simple thing # penalty_models[configurations] = pmodel else: msg = ("No penalty model can be build for constraint {}".format(const)) raise ImpossibleBQM(msg) bqm.update(pmodel.model) return bqm
def _bqm_from_1sat(constraint):
    """Create a bqm for a constraint with only one variable.

    The bqm will have a classical gap of exactly 2.
    """
    configurations = constraint.configurations
    num_configurations = len(configurations)

    bqm = dimod.BinaryQuadraticModel.empty(constraint.vartype)

    if num_configurations == 1:
        val, = next(iter(configurations))
        v, = constraint.variables
        bqm.add_variable(v, -1 if val > 0 else +1, vartype=dimod.SPIN)
    else:
        bqm.add_variables_from((v, 0.0) for v in constraint.variables)

    return bqm
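# A quick sanity check (hypothetical, not library code) of the gap-2 property
# claimed above: for a one-variable constraint fixing v to 1, the infeasible
# assignment sits exactly 2 above the feasible one.
import dwavebinarycsp

const = dwavebinarycsp.Constraint.from_configurations(
    [(1,)], ['v'], dwavebinarycsp.BINARY)
bqm = _bqm_from_1sat(const)

print(bqm.energy({'v': 0}) - bqm.energy({'v': 1}))  # 2.0, the classical gap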
def _bqm_from_2sat(constraint):
    """Create a bqm for a constraint with two variables.

    The bqm will have a classical gap of exactly 2.
    """
    configurations = constraint.configurations
    vartype = constraint.vartype
    u, v = constraint.variables

    # if all configurations are present, then nothing is infeasible and the bqm is just all
    # 0.0s
    if len(configurations) == 4:
        return dimod.BinaryQuadraticModel.empty(constraint.vartype)

    # check if the constraint is reducible, and if so, build the bqm from its two
    # one-variable components
    components = irreducible_components(constraint)
    if len(components) > 1:
        const0 = Constraint.from_configurations(((config[0],) for config in configurations),
                                                (u,), vartype)
        const1 = Constraint.from_configurations(((config[1],) for config in configurations),
                                                (v,), vartype)

        bqm = _bqm_from_1sat(const0)
        bqm.update(_bqm_from_1sat(const1))

        return bqm

    assert len(configurations) > 1, "a single configuration is always reducible"

    # if the constraint is irreducible and there are infeasible configurations, then it is
    # time to start building a bqm
    bqm = dimod.BinaryQuadraticModel.empty(vartype)

    # if the constraint is irreducible and has two configurations, then it is either eq or ne
    if all(operator.eq(*config) for config in configurations):
        bqm.add_interaction(u, v, -1, vartype=dimod.SPIN)  # equality
    elif all(operator.ne(*config) for config in configurations):
        bqm.add_interaction(u, v, +1, vartype=dimod.SPIN)  # inequality
    elif (1, 1) not in configurations:
        bqm.add_interaction(u, v, 2, vartype=dimod.BINARY)  # penalize (1, 1)
    elif (-1, +1) not in configurations and (0, 1) not in configurations:
        bqm.add_interaction(u, v, -2, vartype=dimod.BINARY)
        bqm.add_variable(v, 2, vartype=dimod.BINARY)
    elif (+1, -1) not in configurations and (1, 0) not in configurations:
        bqm.add_interaction(u, v, -2, vartype=dimod.BINARY)
        bqm.add_variable(u, 2, vartype=dimod.BINARY)
    else:
        # (0, 0) not in configurations
        bqm.add_interaction(u, v, 2, vartype=dimod.BINARY)
        bqm.add_variable(u, -2, vartype=dimod.BINARY)
        bqm.add_variable(v, -2, vartype=dimod.BINARY)

    return bqm
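# A quick sanity check (hypothetical, not library code) of the branch above that
# penalizes (1, 1): a NAND-style constraint allowing every configuration except
# (1, 1) maps to a single +2 interaction.
import itertools
import dwavebinarycsp

const = dwavebinarycsp.Constraint.from_configurations(
    [(0, 0), (0, 1), (1, 0)], ['u', 'v'], dwavebinarycsp.BINARY)
bqm = _bqm_from_2sat(const)

for u, v in itertools.product((0, 1), repeat=2):
    print((u, v), bqm.energy({'u': u, 'v': v}))  # only (1, 1) has energy 2.0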
def iter_complete_graphs(start, stop, factory=None):
    """Iterate over complete graphs.

    Args:
        start (int/iterable):
            Define the size of the starting graph. If an int, the nodes will be
            index-labeled, otherwise should be an iterable of node labels.

        stop (int):
            Stops yielding graphs when the size equals stop.

        factory (iterator, optional):
            If provided, nodes added will be labeled according to the values returned
            by factory. Otherwise the extra nodes will be index-labeled.

    Yields:
        :class:`nx.Graph`

    """
    if isinstance(start, int):
        nodes = list(range(start))
    else:
        nodes = list(start)  # we'll be appending

    if factory is None:
        factory = count()

    while len(nodes) < stop:
        # we need to construct a new graph each time; this is actually faster than copying
        # the graph and adding the new edges in any case
        G = nx.complete_graph(nodes)
        yield G

        v = next(factory)
        while v in G:
            v = next(factory)

        nodes.append(v)
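# A minimal usage sketch (not library code) of the iterator above: starting from
# the labeled nodes ['a', 'b'] it yields K2 then K3 and stops before reaching
# size stop=4; the extra node label is drawn from the default integer factory.
for G in iter_complete_graphs(['a', 'b'], 4):
    print(sorted(G.nodes, key=str), G.number_of_edges())
# expected output (node order may vary):
#   ['a', 'b'] 1
#   [0, 'a', 'b'] 3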
def load_cnf(fp):
    """Load a constraint satisfaction problem from a .cnf file.

    Args:
        fp (file):
            A `.read()`-supporting `file object`_ (or any iterable of strings)
            containing a DIMACS CNF formatted_ file.

    Returns:
        :obj:`.ConstraintSatisfactionProblem`: A binary-valued SAT problem.

    Examples:
        >>> import dwavebinarycsp as dbcsp
        ...
        >>> with open('test.cnf', 'r') as fp:  # doctest: +SKIP
        ...     csp = dbcsp.cnf.load_cnf(fp)

    .. _file object: https://docs.python.org/3/glossary.html#term-file-object

    .. _formatted: http://www.satcompetition.org/2009/format-benchmarks2009.html

    """

    fp = iter(fp)  # handle lists/tuples/etc

    csp = ConstraintSatisfactionProblem(dimod.BINARY)

    # first look for the problem
    num_clauses = num_variables = 0
    problem_pattern = re.compile(_PROBLEM_REGEX)
    for line in fp:
        matches = problem_pattern.findall(line)
        if matches:
            if len(matches) > 1:
                raise ValueError
            nv, nc = matches[0]
            num_variables, num_clauses = int(nv), int(nc)
            break

    # now parse the clauses, picking up where we left off looking for the header
    clause_pattern = re.compile(_CLAUSE_REGEX)
    for line in fp:
        if clause_pattern.match(line) is not None:
            clause = [int(v) for v in line.split(' ')[:-1]]  # line ends with a trailing 0

            # -1 is the notation for NOT(1)
            variables = [abs(v) for v in clause]

            f = _cnf_or(clause)

            csp.add_constraint(f, variables)

    for v in range(1, num_variables+1):
        csp.add_variable(v)
    for v in csp.variables:
        if v > num_variables:
            msg = ("given .cnf file's header defines variables [1, {}] and {} clauses "
                   "but contains a reference to variable {}").format(num_variables, num_clauses, v)
            raise ValueError(msg)

    if len(csp) != num_clauses:
        msg = ("given .cnf file's header defines {} "
               "clauses but the file contains {}").format(num_clauses, len(csp))
        raise ValueError(msg)

    return csp
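# A minimal sketch (not library code): because load_cnf only iterates over fp,
# an in-memory DIMACS string works as well as a file on disk, assuming the
# clause lines below match the module's _CLAUSE_REGEX. The two clauses encode
# (v1 OR v2) AND (NOT v1 OR v2).
import io

cnf = io.StringIO("p cnf 2 2\n1 2 0\n-1 2 0\n")
csp = load_cnf(cnf)

print(csp.check({1: 0, 2: 1}))  # True
print(csp.check({1: 1, 2: 0}))  # False: the second clause is violated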
def and_gate(variables, vartype=dimod.BINARY, name='AND'):
    """AND gate.

    Args:
        variables (list):
            Variable labels for the AND gate as `[in1, in2, out]`, where `in1, in2` are
            inputs and `out` the gate's output.

        vartype (Vartype, optional, default='BINARY'):
            Variable type. Accepted input values:

            * Vartype.SPIN, 'SPIN', {-1, 1}
            * Vartype.BINARY, 'BINARY', {0, 1}

        name (str, optional, default='AND'):
            Name for the constraint.

    Returns:
        Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are
        assigned values that match the valid states of an AND gate.

    Examples:
        >>> import dwavebinarycsp
        >>> import dwavebinarycsp.factories.constraint.gates as gates
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c'], name='AND1'))
        >>> csp.check({'a': 1, 'b': 0, 'c': 0})
        True

    """
    variables = tuple(variables)

    if vartype is dimod.BINARY:
        configurations = frozenset([(0, 0, 0),
                                    (0, 1, 0),
                                    (1, 0, 0),
                                    (1, 1, 1)])

        def func(in1, in2, out):
            return (in1 and in2) == out

    else:
        # SPIN, vartype is checked by the decorator
        configurations = frozenset([(-1, -1, -1),
                                    (-1, +1, -1),
                                    (+1, -1, -1),
                                    (+1, +1, +1)])

        def func(in1, in2, out):
            return ((in1 > 0) and (in2 > 0)) == (out > 0)

    return Constraint(func, configurations, variables, vartype=vartype, name=name)
def xor_gate(variables, vartype=dimod.BINARY, name='XOR'):
    """XOR gate.

    Args:
        variables (list):
            Variable labels for the XOR gate as `[in1, in2, out]`, where `in1, in2` are
            inputs and `out` the gate's output.

        vartype (Vartype, optional, default='BINARY'):
            Variable type. Accepted input values:

            * Vartype.SPIN, 'SPIN', {-1, 1}
            * Vartype.BINARY, 'BINARY', {0, 1}

        name (str, optional, default='XOR'):
            Name for the constraint.

    Returns:
        Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are
        assigned values that match the valid states of an XOR gate.

    Examples:
        >>> import dwavebinarycsp
        >>> import dwavebinarycsp.factories.constraint.gates as gates
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(gates.xor_gate(['x', 'y', 'z'], name='XOR1'))
        >>> csp.check({'x': 1, 'y': 1, 'z': 1})
        False

    """
    variables = tuple(variables)

    if vartype is dimod.BINARY:
        configs = frozenset([(0, 0, 0),
                             (0, 1, 1),
                             (1, 0, 1),
                             (1, 1, 0)])

        def func(in1, in2, out):
            return (in1 != in2) == out

    else:
        # SPIN, vartype is checked by the decorator
        configs = frozenset([(-1, -1, -1),
                             (-1, +1, +1),
                             (+1, -1, +1),
                             (+1, +1, -1)])

        def func(in1, in2, out):
            return ((in1 > 0) != (in2 > 0)) == (out > 0)

    return Constraint(func, configs, variables, vartype=vartype, name=name)
def halfadder_gate(variables, vartype=dimod.BINARY, name='HALF_ADDER'):
    """Half adder.

    Args:
        variables (list):
            Variable labels for the half adder as `[in1, in2, sum, carry]`, where
            `in1, in2` are the inputs to be added and `sum` and `carry` the resultant
            outputs.

        vartype (Vartype, optional, default='BINARY'):
            Variable type. Accepted input values:

            * Vartype.SPIN, 'SPIN', {-1, 1}
            * Vartype.BINARY, 'BINARY', {0, 1}

        name (str, optional, default='HALF_ADDER'):
            Name for the constraint.

    Returns:
        Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are
        assigned values that match the valid states of a Boolean half adder.

    Examples:
        >>> import dwavebinarycsp
        >>> import dwavebinarycsp.factories.constraint.gates as gates
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(gates.halfadder_gate(['a', 'b', 'total', 'carry'], name='HA1'))
        >>> csp.check({'a': 1, 'b': 1, 'total': 0, 'carry': 1})
        True

    """
    variables = tuple(variables)

    if vartype is dimod.BINARY:
        configs = frozenset([(0, 0, 0, 0),
                             (0, 1, 1, 0),
                             (1, 0, 1, 0),
                             (1, 1, 0, 1)])
    else:
        # SPIN, vartype is checked by the decorator
        configs = frozenset([(-1, -1, -1, -1),
                             (-1, +1, +1, -1),
                             (+1, -1, +1, -1),
                             (+1, +1, -1, +1)])

    def func(augend, addend, sum_, carry):
        total = (augend > 0) + (addend > 0)
        if total == 0:
            return (sum_ <= 0) and (carry <= 0)
        elif total == 1:
            return (sum_ > 0) and (carry <= 0)
        elif total == 2:
            return (sum_ <= 0) and (carry > 0)
        else:
            raise ValueError("func received unexpected values")

    return Constraint(func, configs, variables, vartype=vartype, name=name)
def fulladder_gate(variables, vartype=dimod.BINARY, name='FULL_ADDER'):
    """Full adder.

    Args:
        variables (list):
            Variable labels for the full adder as `[in1, in2, in3, sum, carry]`, where
            `in1, in2, in3` are the inputs to be added and `sum` and `carry` the
            resultant outputs.

        vartype (Vartype, optional, default='BINARY'):
            Variable type. Accepted input values:

            * Vartype.SPIN, 'SPIN', {-1, 1}
            * Vartype.BINARY, 'BINARY', {0, 1}

        name (str, optional, default='FULL_ADDER'):
            Name for the constraint.

    Returns:
        Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are
        assigned values that match the valid states of a Boolean full adder.

    Examples:
        >>> import dwavebinarycsp
        >>> import dwavebinarycsp.factories.constraint.gates as gates
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(gates.fulladder_gate(['a', 'b', 'c_in', 'total', 'c_out'], name='FA1'))
        >>> csp.check({'a': 1, 'b': 0, 'c_in': 1, 'total': 0, 'c_out': 1})
        True

    """
    variables = tuple(variables)

    if vartype is dimod.BINARY:
        configs = frozenset([(0, 0, 0, 0, 0),
                             (0, 0, 1, 1, 0),
                             (0, 1, 0, 1, 0),
                             (0, 1, 1, 0, 1),
                             (1, 0, 0, 1, 0),
                             (1, 0, 1, 0, 1),
                             (1, 1, 0, 0, 1),
                             (1, 1, 1, 1, 1)])
    else:
        # SPIN, vartype is checked by the decorator
        configs = frozenset([(-1, -1, -1, -1, -1),
                             (-1, -1, +1, +1, -1),
                             (-1, +1, -1, +1, -1),
                             (-1, +1, +1, -1, +1),
                             (+1, -1, -1, +1, -1),
                             (+1, -1, +1, -1, +1),
                             (+1, +1, -1, -1, +1),
                             (+1, +1, +1, +1, +1)])

    def func(in1, in2, in3, sum_, carry):
        total = (in1 > 0) + (in2 > 0) + (in3 > 0)
        if total == 0:
            return (sum_ <= 0) and (carry <= 0)
        elif total == 1:
            return (sum_ > 0) and (carry <= 0)
        elif total == 2:
            return (sum_ <= 0) and (carry > 0)
        elif total == 3:
            return (sum_ > 0) and (carry > 0)
        else:
            raise ValueError("func received unexpected values")

    return Constraint(func, configs, variables, vartype=vartype, name=name)
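# A minimal sketch (not library code) chaining the adder gates above into a
# 2-bit ripple-carry adder: the half adder handles the low bits and feeds its
# carry into the full adder for the high bits. Here 1 + 3 = 4 (01 + 11 = 100).
import dwavebinarycsp

csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
csp.add_constraint(halfadder_gate(['a0', 'b0', 's0', 'c0']))
csp.add_constraint(fulladder_gate(['a1', 'b1', 'c0', 's1', 'c1']))

print(csp.check({'a0': 1, 'a1': 0,   # a = 01 (1)
                 'b0': 1, 'b1': 1,   # b = 11 (3)
                 'c0': 1,            # carry out of the low bits
                 's0': 0, 's1': 0,
                 'c1': 1}))          # sum = 100 (4), so this prints True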