column             dtype           values / lengths
partition          stringclasses   3 values
func_name          stringlengths   1 to 134
docstring          stringlengths   1 to 46.9k
path               stringlengths   4 to 223
original_string    stringlengths   75 to 104k
code               stringlengths   75 to 104k
docstring_tokens   listlengths     1 to 1.97k
repo               stringlengths   7 to 55
language           stringclasses   1 value
url                stringlengths   87 to 315
code_tokens        listlengths     19 to 28.4k
sha                stringlengths   40 to 40
valid
_load_class
Loads the class from the class_path string
search/utils.py
def _load_class(class_path, default):
    """ Loads the class from the class_path string """
    if class_path is None:
        return default

    component = class_path.rsplit('.', 1)
    result_processor = getattr(
        importlib.import_module(component[0]),
        component[1],
        default
    ) if len(component) > 1 else default

    return result_processor
[ "Loads", "the", "class", "from", "the", "class_path", "string" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/utils.py#L8-L20
[ "def", "_load_class", "(", "class_path", ",", "default", ")", ":", "if", "class_path", "is", "None", ":", "return", "default", "component", "=", "class_path", ".", "rsplit", "(", "'.'", ",", "1", ")", "result_processor", "=", "getattr", "(", "importlib", ".", "import_module", "(", "component", "[", "0", "]", ")", ",", "component", "[", "1", "]", ",", "default", ")", "if", "len", "(", "component", ")", ">", "1", "else", "default", "return", "result_processor" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
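For orientation, a minimal sketch of what the _load_class helper in the row above does with a dotted path. It is not part of the dataset; the path "json.dumps" is an arbitrary stdlib example chosen only for illustration.

import importlib

# Hypothetical dotted path used only for illustration.
class_path = "json.dumps"
module_name, attr_name = class_path.rsplit('.', 1)

# Same resolution strategy as _load_class: import the module, then look up the
# attribute, falling back to a default when the attribute is missing.
resolved = getattr(importlib.import_module(module_name), attr_name, None)
print(resolved)  # <function dumps at 0x...>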
valid
_is_iterable
Checks if an item is iterable (list, tuple, generator), but not string
search/utils.py
def _is_iterable(item):
    """ Checks if an item is iterable (list, tuple, generator), but not string """
    return isinstance(item, collections.Iterable) and not isinstance(item, six.string_types)
[ "Checks", "if", "an", "item", "is", "iterable", "(", "list", "tuple", "generator", ")", "but", "not", "string" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/utils.py#L23-L25
[ "def", "_is_iterable", "(", "item", ")", ":", "return", "isinstance", "(", "item", ",", "collections", ".", "Iterable", ")", "and", "not", "isinstance", "(", "item", ",", "six", ".", "string_types", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
_process_pagination_values
process pagination requests from request parameter
search/views.py
def _process_pagination_values(request):
    """ process pagination requests from request parameter """
    size = 20
    page = 0
    from_ = 0
    if "page_size" in request.POST:
        size = int(request.POST["page_size"])
        max_page_size = getattr(settings, "SEARCH_MAX_PAGE_SIZE", 100)
        # The parens below are superfluous, but make it much clearer to the reader what is going on
        if not (0 < size <= max_page_size):  # pylint: disable=superfluous-parens
            raise ValueError(_('Invalid page size of {page_size}').format(page_size=size))

        if "page_index" in request.POST:
            page = int(request.POST["page_index"])
            from_ = page * size
    return size, from_, page
[ "process", "pagination", "requests", "from", "request", "parameter" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/views.py#L21-L36
[ "def", "_process_pagination_values", "(", "request", ")", ":", "size", "=", "20", "page", "=", "0", "from_", "=", "0", "if", "\"page_size\"", "in", "request", ".", "POST", ":", "size", "=", "int", "(", "request", ".", "POST", "[", "\"page_size\"", "]", ")", "max_page_size", "=", "getattr", "(", "settings", ",", "\"SEARCH_MAX_PAGE_SIZE\"", ",", "100", ")", "# The parens below are superfluous, but make it much clearer to the reader what is going on", "if", "not", "(", "0", "<", "size", "<=", "max_page_size", ")", ":", "# pylint: disable=superfluous-parens", "raise", "ValueError", "(", "_", "(", "'Invalid page size of {page_size}'", ")", ".", "format", "(", "page_size", "=", "size", ")", ")", "if", "\"page_index\"", "in", "request", ".", "POST", ":", "page", "=", "int", "(", "request", ".", "POST", "[", "\"page_index\"", "]", ")", "from_", "=", "page", "*", "size", "return", "size", ",", "from_", ",", "page" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
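A small sketch of the pagination arithmetic in the row above, with made-up POST values: from_ is simply the zero-indexed page number multiplied by the page size.

# Hypothetical POST values, shown as plain ints for clarity.
page_size = 40    # must satisfy 0 < page_size <= SEARCH_MAX_PAGE_SIZE (default cap is 100)
page_index = 2    # zero-indexed page number

size = page_size
from_ = page_index * size       # offset handed to the search engine
print(size, from_, page_index)  # 40 80 2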
valid
_process_field_values
Create separate dictionary of supported filter values provided
search/views.py
def _process_field_values(request):
    """ Create separate dictionary of supported filter values provided """
    return {
        field_key: request.POST[field_key]
        for field_key in request.POST
        if field_key in course_discovery_filter_fields()
    }
[ "Create", "separate", "dictionary", "of", "supported", "filter", "values", "provided" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/views.py#L39-L45
[ "def", "_process_field_values", "(", "request", ")", ":", "return", "{", "field_key", ":", "request", ".", "POST", "[", "field_key", "]", "for", "field_key", "in", "request", ".", "POST", "if", "field_key", "in", "course_discovery_filter_fields", "(", ")", "}" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
do_search
Search view for http requests

Args:
    request (required) - django request object
    course_id (optional) - course_id within which to restrict search

Returns:
    http json response with the following fields
        "took" - how many seconds the operation took
        "total" - how many results were found
        "max_score" - maximum score from these results
        "results" - json array of result documents
      or
        "error" - displayable information about an error that occurred on the server

POST Params:
    "search_string" (required) - text upon which to search
    "page_size" (optional) - how many results to return per page (defaults to 20, with maximum cutoff at 100)
    "page_index" (optional) - for which page (zero-indexed) to include results (defaults to 0)
search/views.py
def do_search(request, course_id=None):
    """
    Search view for http requests

    Args:
        request (required) - django request object
        course_id (optional) - course_id within which to restrict search

    Returns:
        http json response with the following fields
            "took" - how many seconds the operation took
            "total" - how many results were found
            "max_score" - maximum score from these results
            "results" - json array of result documents
          or
            "error" - displayable information about an error that occurred on the server

    POST Params:
        "search_string" (required) - text upon which to search
        "page_size" (optional) - how many results to return per page (defaults to 20, with maximum cutoff at 100)
        "page_index" (optional) - for which page (zero-indexed) to include results (defaults to 0)
    """
    # Setup search environment
    SearchInitializer.set_search_enviroment(request=request, course_id=course_id)

    results = {
        "error": _("Nothing to search")
    }
    status_code = 500

    search_term = request.POST.get("search_string", None)

    try:
        if not search_term:
            raise ValueError(_('No search term provided for search'))

        size, from_, page = _process_pagination_values(request)

        # Analytics - log search request
        track.emit(
            'edx.course.search.initiated',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
            }
        )

        results = perform_search(
            search_term,
            user=request.user,
            size=size,
            from_=from_,
            course_id=course_id
        )

        status_code = 200

        # Analytics - log search results before sending to browser
        track.emit(
            'edx.course.search.results_displayed',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
                "results_count": results["total"],
            }
        )

    except ValueError as invalid_err:
        results = {
            "error": six.text_type(invalid_err)
        }
        log.debug(six.text_type(invalid_err))

    except QueryParseError:
        results = {
            "error": _('Your query seems malformed. Check for unmatched quotes.')
        }

    # Allow for broad exceptions here - this is an entry point from external reference
    except Exception as err:  # pylint: disable=broad-except
        results = {
            "error": _('An error occurred when searching for "{search_string}"').format(search_string=search_term)
        }
        log.exception(
            'Search view exception when searching for %s for user %s: %r',
            search_term,
            request.user.id,
            err
        )

    return JsonResponse(results, status=status_code)
[ "Search", "view", "for", "http", "requests" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/views.py#L49-L144
[ "def", "do_search", "(", "request", ",", "course_id", "=", "None", ")", ":", "# Setup search environment", "SearchInitializer", ".", "set_search_enviroment", "(", "request", "=", "request", ",", "course_id", "=", "course_id", ")", "results", "=", "{", "\"error\"", ":", "_", "(", "\"Nothing to search\"", ")", "}", "status_code", "=", "500", "search_term", "=", "request", ".", "POST", ".", "get", "(", "\"search_string\"", ",", "None", ")", "try", ":", "if", "not", "search_term", ":", "raise", "ValueError", "(", "_", "(", "'No search term provided for search'", ")", ")", "size", ",", "from_", ",", "page", "=", "_process_pagination_values", "(", "request", ")", "# Analytics - log search request", "track", ".", "emit", "(", "'edx.course.search.initiated'", ",", "{", "\"search_term\"", ":", "search_term", ",", "\"page_size\"", ":", "size", ",", "\"page_number\"", ":", "page", ",", "}", ")", "results", "=", "perform_search", "(", "search_term", ",", "user", "=", "request", ".", "user", ",", "size", "=", "size", ",", "from_", "=", "from_", ",", "course_id", "=", "course_id", ")", "status_code", "=", "200", "# Analytics - log search results before sending to browser", "track", ".", "emit", "(", "'edx.course.search.results_displayed'", ",", "{", "\"search_term\"", ":", "search_term", ",", "\"page_size\"", ":", "size", ",", "\"page_number\"", ":", "page", ",", "\"results_count\"", ":", "results", "[", "\"total\"", "]", ",", "}", ")", "except", "ValueError", "as", "invalid_err", ":", "results", "=", "{", "\"error\"", ":", "six", ".", "text_type", "(", "invalid_err", ")", "}", "log", ".", "debug", "(", "six", ".", "text_type", "(", "invalid_err", ")", ")", "except", "QueryParseError", ":", "results", "=", "{", "\"error\"", ":", "_", "(", "'Your query seems malformed. Check for unmatched quotes.'", ")", "}", "# Allow for broad exceptions here - this is an entry point from external reference", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "results", "=", "{", "\"error\"", ":", "_", "(", "'An error occurred when searching for \"{search_string}\"'", ")", ".", "format", "(", "search_string", "=", "search_term", ")", "}", "log", ".", "exception", "(", "'Search view exception when searching for %s for user %s: %r'", ",", "search_term", ",", "request", ".", "user", ".", "id", ",", "err", ")", "return", "JsonResponse", "(", "results", ",", "status", "=", "status_code", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
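As a rough illustration of the do_search contract documented above, and not an authoritative example: the POST parameters the view reads and the general shape of a successful JSON response. All concrete values below are invented placeholders.

post_params = {
    "search_string": "linear algebra",  # required
    "page_size": "20",                  # optional, capped by SEARCH_MAX_PAGE_SIZE (100 by default)
    "page_index": "0",                  # optional, zero-indexed
}

# Shape of a successful response body, following the docstring; values are placeholders.
response_shape = {
    "took": 12,
    "total": 2,
    "max_score": 1.234,
    "results": [
        {"score": 1.234, "data": {"id": "doc-1"}},
        {"score": 0.567, "data": {"id": "doc-2"}},
    ],
}
print(post_params, response_shape)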
valid
course_discovery
Search for courses

Args:
    request (required) - django request object

Returns:
    http json response with the following fields
        "took" - how many seconds the operation took
        "total" - how many results were found
        "max_score" - maximum score from these results
        "results" - json array of result documents
      or
        "error" - displayable information about an error that occurred on the server

POST Params:
    "search_string" (optional) - text with which to search for courses
    "page_size" (optional) - how many results to return per page (defaults to 20, with maximum cutoff at 100)
    "page_index" (optional) - for which page (zero-indexed) to include results (defaults to 0)
search/views.py
def course_discovery(request):
    """
    Search for courses

    Args:
        request (required) - django request object

    Returns:
        http json response with the following fields
            "took" - how many seconds the operation took
            "total" - how many results were found
            "max_score" - maximum score from these results
            "results" - json array of result documents
          or
            "error" - displayable information about an error that occurred on the server

    POST Params:
        "search_string" (optional) - text with which to search for courses
        "page_size" (optional) - how many results to return per page (defaults to 20, with maximum cutoff at 100)
        "page_index" (optional) - for which page (zero-indexed) to include results (defaults to 0)
    """
    results = {
        "error": _("Nothing to search")
    }
    status_code = 500

    search_term = request.POST.get("search_string", None)

    try:
        size, from_, page = _process_pagination_values(request)
        field_dictionary = _process_field_values(request)

        # Analytics - log search request
        track.emit(
            'edx.course_discovery.search.initiated',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
            }
        )

        results = course_discovery_search(
            search_term=search_term,
            size=size,
            from_=from_,
            field_dictionary=field_dictionary,
        )

        # Analytics - log search results before sending to browser
        track.emit(
            'edx.course_discovery.search.results_displayed',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
                "results_count": results["total"],
            }
        )

        status_code = 200

    except ValueError as invalid_err:
        results = {
            "error": six.text_type(invalid_err)
        }
        log.debug(six.text_type(invalid_err))

    except QueryParseError:
        results = {
            "error": _('Your query seems malformed. Check for unmatched quotes.')
        }

    # Allow for broad exceptions here - this is an entry point from external reference
    except Exception as err:  # pylint: disable=broad-except
        results = {
            "error": _('An error occurred when searching for "{search_string}"').format(search_string=search_term)
        }
        log.exception(
            'Search view exception when searching for %s for user %s: %r',
            search_term,
            request.user.id,
            err
        )

    return JsonResponse(results, status=status_code)
[ "Search", "for", "courses" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/views.py#L148-L235
[ "def", "course_discovery", "(", "request", ")", ":", "results", "=", "{", "\"error\"", ":", "_", "(", "\"Nothing to search\"", ")", "}", "status_code", "=", "500", "search_term", "=", "request", ".", "POST", ".", "get", "(", "\"search_string\"", ",", "None", ")", "try", ":", "size", ",", "from_", ",", "page", "=", "_process_pagination_values", "(", "request", ")", "field_dictionary", "=", "_process_field_values", "(", "request", ")", "# Analytics - log search request", "track", ".", "emit", "(", "'edx.course_discovery.search.initiated'", ",", "{", "\"search_term\"", ":", "search_term", ",", "\"page_size\"", ":", "size", ",", "\"page_number\"", ":", "page", ",", "}", ")", "results", "=", "course_discovery_search", "(", "search_term", "=", "search_term", ",", "size", "=", "size", ",", "from_", "=", "from_", ",", "field_dictionary", "=", "field_dictionary", ",", ")", "# Analytics - log search results before sending to browser", "track", ".", "emit", "(", "'edx.course_discovery.search.results_displayed'", ",", "{", "\"search_term\"", ":", "search_term", ",", "\"page_size\"", ":", "size", ",", "\"page_number\"", ":", "page", ",", "\"results_count\"", ":", "results", "[", "\"total\"", "]", ",", "}", ")", "status_code", "=", "200", "except", "ValueError", "as", "invalid_err", ":", "results", "=", "{", "\"error\"", ":", "six", ".", "text_type", "(", "invalid_err", ")", "}", "log", ".", "debug", "(", "six", ".", "text_type", "(", "invalid_err", ")", ")", "except", "QueryParseError", ":", "results", "=", "{", "\"error\"", ":", "_", "(", "'Your query seems malformed. Check for unmatched quotes.'", ")", "}", "# Allow for broad exceptions here - this is an entry point from external reference", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "results", "=", "{", "\"error\"", ":", "_", "(", "'An error occurred when searching for \"{search_string}\"'", ")", ".", "format", "(", "search_string", "=", "search_term", ")", "}", "log", ".", "exception", "(", "'Search view exception when searching for %s for user %s: %r'", ",", "search_term", ",", "request", ".", "user", ".", "id", ",", "err", ")", "return", "JsonResponse", "(", "results", ",", "status", "=", "status_code", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
_translate_hits
Provide resultset in our desired format from elasticsearch results
search/elastic.py
def _translate_hits(es_response):
    """ Provide resultset in our desired format from elasticsearch results """

    def translate_result(result):
        """ Any conversion from ES result syntax into our search engine syntax """
        translated_result = copy.copy(result)
        data = translated_result.pop("_source")

        translated_result.update({
            "data": data,
            "score": translated_result["_score"]
        })

        return translated_result

    def translate_facet(result):
        """ Any conversion from ES facet syntax into our search engine syntax """
        terms = {term["term"]: term["count"] for term in result["terms"]}
        return {
            "terms": terms,
            "total": result["total"],
            "other": result["other"],
        }

    results = [translate_result(hit) for hit in es_response["hits"]["hits"]]
    response = {
        "took": es_response["took"],
        "total": es_response["hits"]["total"],
        "max_score": es_response["hits"]["max_score"],
        "results": results,
    }

    if "facets" in es_response:
        response["facets"] = {facet: translate_facet(es_response["facets"][facet]) for facet in es_response["facets"]}

    return response
[ "Provide", "resultset", "in", "our", "desired", "format", "from", "elasticsearch", "results" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L26-L61
[ "def", "_translate_hits", "(", "es_response", ")", ":", "def", "translate_result", "(", "result", ")", ":", "\"\"\" Any conversion from ES result syntax into our search engine syntax \"\"\"", "translated_result", "=", "copy", ".", "copy", "(", "result", ")", "data", "=", "translated_result", ".", "pop", "(", "\"_source\"", ")", "translated_result", ".", "update", "(", "{", "\"data\"", ":", "data", ",", "\"score\"", ":", "translated_result", "[", "\"_score\"", "]", "}", ")", "return", "translated_result", "def", "translate_facet", "(", "result", ")", ":", "\"\"\" Any conversion from ES facet syntax into our search engine sytax \"\"\"", "terms", "=", "{", "term", "[", "\"term\"", "]", ":", "term", "[", "\"count\"", "]", "for", "term", "in", "result", "[", "\"terms\"", "]", "}", "return", "{", "\"terms\"", ":", "terms", ",", "\"total\"", ":", "result", "[", "\"total\"", "]", ",", "\"other\"", ":", "result", "[", "\"other\"", "]", ",", "}", "results", "=", "[", "translate_result", "(", "hit", ")", "for", "hit", "in", "es_response", "[", "\"hits\"", "]", "[", "\"hits\"", "]", "]", "response", "=", "{", "\"took\"", ":", "es_response", "[", "\"took\"", "]", ",", "\"total\"", ":", "es_response", "[", "\"hits\"", "]", "[", "\"total\"", "]", ",", "\"max_score\"", ":", "es_response", "[", "\"hits\"", "]", "[", "\"max_score\"", "]", ",", "\"results\"", ":", "results", ",", "}", "if", "\"facets\"", "in", "es_response", ":", "response", "[", "\"facets\"", "]", "=", "{", "facet", ":", "translate_facet", "(", "es_response", "[", "\"facets\"", "]", "[", "facet", "]", ")", "for", "facet", "in", "es_response", "[", "\"facets\"", "]", "}", "return", "response" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
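A minimal sketch of the translation performed by _translate_hits: a fake Elasticsearch response next to the structure the function would produce from it (everything except "_source" is kept, "data" and "score" are added). The document values are invented.

es_response = {
    "took": 5,
    "hits": {
        "total": 1,
        "max_score": 1.2,
        "hits": [{"_id": "doc-1", "_score": 1.2, "_source": {"display_name": "Week 1"}}],
    },
}

# Equivalent translated form: "_source" becomes "data", "_score" is copied into "score".
translated = {
    "took": 5,
    "total": 1,
    "max_score": 1.2,
    "results": [{"_id": "doc-1", "_score": 1.2, "score": 1.2, "data": {"display_name": "Week 1"}}],
}
print(translated)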
valid
_get_filter_field
Return field to apply into filter, if an array then use a range, otherwise look for a term match
search/elastic.py
def _get_filter_field(field_name, field_value):
    """ Return field to apply into filter, if an array then use a range, otherwise look for a term match """
    filter_field = None
    if isinstance(field_value, ValueRange):
        range_values = {}
        if field_value.lower:
            range_values.update({"gte": field_value.lower_string})
        if field_value.upper:
            range_values.update({"lte": field_value.upper_string})
        filter_field = {
            "range": {
                field_name: range_values
            }
        }
    elif _is_iterable(field_value):
        filter_field = {
            "terms": {
                field_name: field_value
            }
        }
    else:
        filter_field = {
            "term": {
                field_name: field_value
            }
        }
    return filter_field
[ "Return", "field", "to", "apply", "into", "filter", "if", "an", "array", "then", "use", "a", "range", "otherwise", "look", "for", "a", "term", "match" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L64-L90
[ "def", "_get_filter_field", "(", "field_name", ",", "field_value", ")", ":", "filter_field", "=", "None", "if", "isinstance", "(", "field_value", ",", "ValueRange", ")", ":", "range_values", "=", "{", "}", "if", "field_value", ".", "lower", ":", "range_values", ".", "update", "(", "{", "\"gte\"", ":", "field_value", ".", "lower_string", "}", ")", "if", "field_value", ".", "upper", ":", "range_values", ".", "update", "(", "{", "\"lte\"", ":", "field_value", ".", "upper_string", "}", ")", "filter_field", "=", "{", "\"range\"", ":", "{", "field_name", ":", "range_values", "}", "}", "elif", "_is_iterable", "(", "field_value", ")", ":", "filter_field", "=", "{", "\"terms\"", ":", "{", "field_name", ":", "field_value", "}", "}", "else", ":", "filter_field", "=", "{", "\"term\"", ":", "{", "field_name", ":", "field_value", "}", "}", "return", "filter_field" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
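The three clause shapes _get_filter_field can emit, written out literally with invented field names rather than by calling the function (which would require the ValueRange helper and Django settings).

# ValueRange input -> "range" clause (gte/lte only included when the bound is set).
range_clause = {"range": {"start_date": {"gte": "2016-01-01", "lte": "2016-12-31"}}}

# list/tuple/generator input -> "terms" clause.
terms_clause = {"terms": {"org": ["MITx", "HarvardX"]}}

# Any other scalar -> "term" clause.
term_clause = {"term": {"course": "course-v1:MITx+6.002x+2016"}}

print(range_clause, terms_clause, term_clause)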
valid
_process_field_queries
We have a field_dictionary - we want to match the values for an elasticsearch "match" query
This is only potentially useful when trying to tune certain search operations
search/elastic.py
def _process_field_queries(field_dictionary):
    """
    We have a field_dictionary - we want to match the values
    for an elasticsearch "match" query
    This is only potentially useful when trying to tune certain search operations
    """
    def field_item(field):
        """ format field match as "match" item for elasticsearch query """
        return {
            "match": {
                field: field_dictionary[field]
            }
        }

    return [field_item(field) for field in field_dictionary]
[ "We", "have", "a", "field_dictionary", "-", "we", "want", "to", "match", "the", "values", "for", "an", "elasticsearch", "match", "query", "This", "is", "only", "potentially", "useful", "when", "trying", "to", "tune", "certain", "search", "operations" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L93-L106
[ "def", "_process_field_queries", "(", "field_dictionary", ")", ":", "def", "field_item", "(", "field", ")", ":", "\"\"\" format field match as \"match\" item for elasticsearch query \"\"\"", "return", "{", "\"match\"", ":", "{", "field", ":", "field_dictionary", "[", "field", "]", "}", "}", "return", "[", "field_item", "(", "field", ")", "for", "field", "in", "field_dictionary", "]" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
_process_filters
We have a filter_dictionary - this means that if the field is included and matches, then we can include, OR if the field is undefined, then we assume it is safe to include
search/elastic.py
def _process_filters(filter_dictionary):
    """
    We have a filter_dictionary - this means that if the field is included
    and matches, then we can include, OR if the field is undefined, then we
    assume it is safe to include
    """
    def filter_item(field):
        """ format elasticsearch filter to pass if value matches OR field is not included """
        if filter_dictionary[field] is not None:
            return {
                "or": [
                    _get_filter_field(field, filter_dictionary[field]),
                    {
                        "missing": {
                            "field": field
                        }
                    }
                ]
            }

        return {
            "missing": {
                "field": field
            }
        }

    return [filter_item(field) for field in filter_dictionary]
[ "We", "have", "a", "filter_dictionary", "-", "this", "means", "that", "if", "the", "field", "is", "included", "and", "matches", "then", "we", "can", "include", "OR", "if", "the", "field", "is", "undefined", "then", "we", "assume", "it", "is", "safe", "to", "include" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L116-L142
[ "def", "_process_filters", "(", "filter_dictionary", ")", ":", "def", "filter_item", "(", "field", ")", ":", "\"\"\" format elasticsearch filter to pass if value matches OR field is not included \"\"\"", "if", "filter_dictionary", "[", "field", "]", "is", "not", "None", ":", "return", "{", "\"or\"", ":", "[", "_get_filter_field", "(", "field", ",", "filter_dictionary", "[", "field", "]", ")", ",", "{", "\"missing\"", ":", "{", "\"field\"", ":", "field", "}", "}", "]", "}", "return", "{", "\"missing\"", ":", "{", "\"field\"", ":", "field", "}", "}", "return", "[", "filter_item", "(", "field", ")", "for", "field", "in", "filter_dictionary", "]" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
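A sketch of the clause _process_filters builds for a single filterable field, using an invented "org" filter: either the value matches, or the document does not define the field at all.

# Filter value provided -> match it OR allow documents missing the field entirely.
filter_clause = {
    "or": [
        {"term": {"org": "MITx"}},
        {"missing": {"field": "org"}},
    ]
}

# Filter value of None -> only documents missing the field pass.
none_clause = {"missing": {"field": "org"}}
print(filter_clause, none_clause)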
valid
_process_exclude_dictionary
Based on values in the exclude_dictionary generate a list of term queries that will filter out unwanted results.
search/elastic.py
def _process_exclude_dictionary(exclude_dictionary):
    """
    Based on values in the exclude_dictionary generate a list of term queries that
    will filter out unwanted results.
    """
    # not_properties will hold the generated term queries.
    not_properties = []
    for exclude_property in exclude_dictionary:
        exclude_values = exclude_dictionary[exclude_property]
        if not isinstance(exclude_values, list):
            exclude_values = [exclude_values]
        not_properties.extend([{"term": {exclude_property: exclude_value}} for exclude_value in exclude_values])

    # Returning a query segment with an empty list freaks out ElasticSearch,
    # so just return an empty segment.
    if not not_properties:
        return {}

    return {
        "not": {
            "filter": {
                "or": not_properties
            }
        }
    }
[ "Based", "on", "values", "in", "the", "exclude_dictionary", "generate", "a", "list", "of", "term", "queries", "that", "will", "filter", "out", "unwanted", "results", "." ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L145-L169
[ "def", "_process_exclude_dictionary", "(", "exclude_dictionary", ")", ":", "# not_properties will hold the generated term queries.", "not_properties", "=", "[", "]", "for", "exclude_property", "in", "exclude_dictionary", ":", "exclude_values", "=", "exclude_dictionary", "[", "exclude_property", "]", "if", "not", "isinstance", "(", "exclude_values", ",", "list", ")", ":", "exclude_values", "=", "[", "exclude_values", "]", "not_properties", ".", "extend", "(", "[", "{", "\"term\"", ":", "{", "exclude_property", ":", "exclude_value", "}", "}", "for", "exclude_value", "in", "exclude_values", "]", ")", "# Returning a query segment with an empty list freaks out ElasticSearch,", "# so just return an empty segment.", "if", "not", "not_properties", ":", "return", "{", "}", "return", "{", "\"not\"", ":", "{", "\"filter\"", ":", "{", "\"or\"", ":", "not_properties", "}", "}", "}" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
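A sketch of the exclusion clause _process_exclude_dictionary produces for an invented exclude_dictionary of {"id": ["doc-1", "doc-2"]}.

exclude_clause = {
    "not": {
        "filter": {
            "or": [
                {"term": {"id": "doc-1"}},
                {"term": {"id": "doc-2"}},
            ]
        }
    }
}
# An empty exclude_dictionary returns {} instead, so no clause is added to the query.
print(exclude_clause)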
valid
_process_facet_terms
We have a list of terms with which we return facets
search/elastic.py
def _process_facet_terms(facet_terms):
    """ We have a list of terms with which we return facets """
    elastic_facets = {}
    for facet in facet_terms:
        facet_term = {"field": facet}
        if facet_terms[facet]:
            for facet_option in facet_terms[facet]:
                facet_term[facet_option] = facet_terms[facet][facet_option]

        elastic_facets[facet] = {
            "terms": facet_term
        }

    return elastic_facets
[ "We", "have", "a", "list", "of", "terms", "with", "which", "we", "return", "facets" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L172-L185
[ "def", "_process_facet_terms", "(", "facet_terms", ")", ":", "elastic_facets", "=", "{", "}", "for", "facet", "in", "facet_terms", ":", "facet_term", "=", "{", "\"field\"", ":", "facet", "}", "if", "facet_terms", "[", "facet", "]", ":", "for", "facet_option", "in", "facet_terms", "[", "facet", "]", ":", "facet_term", "[", "facet_option", "]", "=", "facet_terms", "[", "facet", "]", "[", "facet_option", "]", "elastic_facets", "[", "facet", "]", "=", "{", "\"terms\"", ":", "facet_term", "}", "return", "elastic_facets" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
ElasticSearchEngine.get_mappings
fetch mapped-items structure from cache
search/elastic.py
def get_mappings(cls, index_name, doc_type):
    """ fetch mapped-items structure from cache """
    return cache.get(cls.get_cache_item_name(index_name, doc_type), {})
[ "fetch", "mapped", "-", "items", "structure", "from", "cache" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L201-L203
[ "def", "get_mappings", "(", "cls", ",", "index_name", ",", "doc_type", ")", ":", "return", "cache", ".", "get", "(", "cls", ".", "get_cache_item_name", "(", "index_name", ",", "doc_type", ")", ",", "{", "}", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
ElasticSearchEngine.set_mappings
set new mapped-items structure into cache
search/elastic.py
def set_mappings(cls, index_name, doc_type, mappings):
    """ set new mapped-items structure into cache """
    cache.set(cls.get_cache_item_name(index_name, doc_type), mappings)
[ "set", "new", "mapped", "-", "items", "structure", "into", "cache" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L206-L208
[ "def", "set_mappings", "(", "cls", ",", "index_name", ",", "doc_type", ",", "mappings", ")", ":", "cache", ".", "set", "(", "cls", ".", "get_cache_item_name", "(", "index_name", ",", "doc_type", ")", ",", "mappings", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
ElasticSearchEngine.log_indexing_error
Logs indexing errors and raises a general ElasticSearch Exception
search/elastic.py
def log_indexing_error(cls, indexing_errors):
    """ Logs indexing errors and raises a general ElasticSearch Exception """
    indexing_errors_log = []
    for indexing_error in indexing_errors:
        indexing_errors_log.append(str(indexing_error))
    raise exceptions.ElasticsearchException(', '.join(indexing_errors_log))
[ "Logs", "indexing", "errors", "and", "raises", "a", "general", "ElasticSearch", "Exception" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L211-L216
[ "def", "log_indexing_error", "(", "cls", ",", "indexing_errors", ")", ":", "indexing_errors_log", "=", "[", "]", "for", "indexing_error", "in", "indexing_errors", ":", "indexing_errors_log", ".", "append", "(", "str", "(", "indexing_error", ")", ")", "raise", "exceptions", ".", "ElasticsearchException", "(", "', '", ".", "join", "(", "indexing_errors_log", ")", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
ElasticSearchEngine._get_mappings
Interfaces with the elasticsearch mappings for the index
prevents multiple loading of the same mappings from ES when called more than once

Mappings format in elasticsearch is as follows:
{
   "doc_type": {
      "properties": {
         "nested_property": {
            "properties": {
               "an_analysed_property": {
                  "type": "string"
               },
               "another_analysed_property": {
                  "type": "string"
               }
            }
         },
         "a_not_analysed_property": {
            "type": "string",
            "index": "not_analyzed"
         },
         "a_date_property": {
            "type": "date"
         }
      }
   }
}

We cache the properties of each doc_type, if they are not available, we'll load them again from Elasticsearch
search/elastic.py
def _get_mappings(self, doc_type):
    """
    Interfaces with the elasticsearch mappings for the index
    prevents multiple loading of the same mappings from ES when called more than once

    Mappings format in elasticsearch is as follows:
    {
       "doc_type": {
          "properties": {
             "nested_property": {
                "properties": {
                   "an_analysed_property": {
                      "type": "string"
                   },
                   "another_analysed_property": {
                      "type": "string"
                   }
                }
             },
             "a_not_analysed_property": {
                "type": "string",
                "index": "not_analyzed"
             },
             "a_date_property": {
                "type": "date"
             }
          }
       }
    }

    We cache the properties of each doc_type, if they are not available, we'll load them again from Elasticsearch
    """
    # Try loading the mapping from the cache.
    mapping = ElasticSearchEngine.get_mappings(self.index_name, doc_type)

    # Fall back to Elasticsearch
    if not mapping:
        mapping = self._es.indices.get_mapping(
            index=self.index_name,
            doc_type=doc_type,
        ).get(self.index_name, {}).get('mappings', {}).get(doc_type, {})

        # Cache the mapping, if one was retrieved
        if mapping:
            ElasticSearchEngine.set_mappings(
                self.index_name,
                doc_type,
                mapping
            )

    return mapping
[ "Interfaces", "with", "the", "elasticsearch", "mappings", "for", "the", "index", "prevents", "multiple", "loading", "of", "the", "same", "mappings", "from", "ES", "when", "called", "more", "than", "once" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L218-L268
[ "def", "_get_mappings", "(", "self", ",", "doc_type", ")", ":", "# Try loading the mapping from the cache.", "mapping", "=", "ElasticSearchEngine", ".", "get_mappings", "(", "self", ".", "index_name", ",", "doc_type", ")", "# Fall back to Elasticsearch", "if", "not", "mapping", ":", "mapping", "=", "self", ".", "_es", ".", "indices", ".", "get_mapping", "(", "index", "=", "self", ".", "index_name", ",", "doc_type", "=", "doc_type", ",", ")", ".", "get", "(", "self", ".", "index_name", ",", "{", "}", ")", ".", "get", "(", "'mappings'", ",", "{", "}", ")", ".", "get", "(", "doc_type", ",", "{", "}", ")", "# Cache the mapping, if one was retrieved", "if", "mapping", ":", "ElasticSearchEngine", ".", "set_mappings", "(", "self", ".", "index_name", ",", "doc_type", ",", "mapping", ")", "return", "mapping" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
ElasticSearchEngine._check_mappings
We desire to index content so that anything we want to be textually searchable (and therefore needing to be
analysed), but the other fields are designed to be filters, and only require an exact match. So, we want to
set up the mappings for these fields as "not_analyzed" - this will allow our filters to work faster because
they only have to work off exact matches
search/elastic.py
def _check_mappings(self, doc_type, body):
    """
    We desire to index content so that anything we want to be textually searchable (and therefore needing to be
    analysed), but the other fields are designed to be filters, and only require an exact match. So, we want to
    set up the mappings for these fields as "not_analyzed" - this will allow our filters to work faster because
    they only have to work off exact matches
    """

    # Make fields other than content be indexed as unanalyzed terms - content
    # contains fields that are to be analyzed
    exclude_fields = ["content"]
    field_properties = getattr(settings, "ELASTIC_FIELD_MAPPINGS", {})

    def field_property(field_name, field_value):
        """
        Prepares field as property syntax for providing correct mapping desired for field

        Mappings format in elasticsearch is as follows:
        {
           "doc_type": {
              "properties": {
                 "nested_property": {
                    "properties": {
                       "an_analysed_property": {
                          "type": "string"
                       },
                       "another_analysed_property": {
                          "type": "string"
                       }
                    }
                 },
                 "a_not_analysed_property": {
                    "type": "string",
                    "index": "not_analyzed"
                 },
                 "a_date_property": {
                    "type": "date"
                 }
              }
           }
        }

        We can only add new ones, but the format is the same
        """
        prop_val = None
        if field_name in field_properties:
            prop_val = field_properties[field_name]
        elif isinstance(field_value, dict):
            props = {fn: field_property(fn, field_value[fn]) for fn in field_value}
            prop_val = {"properties": props}
        else:
            prop_val = {
                "type": "string",
                "index": "not_analyzed",
            }

        return prop_val

    new_properties = {
        field: field_property(field, value)
        for field, value in body.items()
        if (field not in exclude_fields) and (field not in self._get_mappings(doc_type).get('properties', {}))
    }

    if new_properties:
        self._es.indices.put_mapping(
            index=self.index_name,
            doc_type=doc_type,
            body={
                doc_type: {
                    "properties": new_properties,
                }
            }
        )
        self._clear_mapping(doc_type)
[ "We", "desire", "to", "index", "content", "so", "that", "anything", "we", "want", "to", "be", "textually", "searchable", "(", "and", "therefore", "needing", "to", "be", "analysed", ")", "but", "the", "other", "fields", "are", "designed", "to", "be", "filters", "and", "only", "require", "an", "exact", "match", ".", "So", "we", "want", "to", "set", "up", "the", "mappings", "for", "these", "fields", "as", "not_analyzed", "-", "this", "will", "allow", "our", "filters", "to", "work", "faster", "because", "they", "only", "have", "to", "work", "off", "exact", "matches" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L281-L355
[ "def", "_check_mappings", "(", "self", ",", "doc_type", ",", "body", ")", ":", "# Make fields other than content be indexed as unanalyzed terms - content", "# contains fields that are to be analyzed", "exclude_fields", "=", "[", "\"content\"", "]", "field_properties", "=", "getattr", "(", "settings", ",", "\"ELASTIC_FIELD_MAPPINGS\"", ",", "{", "}", ")", "def", "field_property", "(", "field_name", ",", "field_value", ")", ":", "\"\"\"\n Prepares field as property syntax for providing correct mapping desired for field\n\n Mappings format in elasticsearch is as follows:\n {\n \"doc_type\": {\n \"properties\": {\n \"nested_property\": {\n \"properties\": {\n \"an_analysed_property\": {\n \"type\": \"string\"\n },\n \"another_analysed_property\": {\n \"type\": \"string\"\n }\n }\n },\n \"a_not_analysed_property\": {\n \"type\": \"string\",\n \"index\": \"not_analyzed\"\n },\n \"a_date_property\": {\n \"type\": \"date\"\n }\n }\n }\n }\n\n We can only add new ones, but the format is the same\n \"\"\"", "prop_val", "=", "None", "if", "field_name", "in", "field_properties", ":", "prop_val", "=", "field_properties", "[", "field_name", "]", "elif", "isinstance", "(", "field_value", ",", "dict", ")", ":", "props", "=", "{", "fn", ":", "field_property", "(", "fn", ",", "field_value", "[", "fn", "]", ")", "for", "fn", "in", "field_value", "}", "prop_val", "=", "{", "\"properties\"", ":", "props", "}", "else", ":", "prop_val", "=", "{", "\"type\"", ":", "\"string\"", ",", "\"index\"", ":", "\"not_analyzed\"", ",", "}", "return", "prop_val", "new_properties", "=", "{", "field", ":", "field_property", "(", "field", ",", "value", ")", "for", "field", ",", "value", "in", "body", ".", "items", "(", ")", "if", "(", "field", "not", "in", "exclude_fields", ")", "and", "(", "field", "not", "in", "self", ".", "_get_mappings", "(", "doc_type", ")", ".", "get", "(", "'properties'", ",", "{", "}", ")", ")", "}", "if", "new_properties", ":", "self", ".", "_es", ".", "indices", ".", "put_mapping", "(", "index", "=", "self", ".", "index_name", ",", "doc_type", "=", "doc_type", ",", "body", "=", "{", "doc_type", ":", "{", "\"properties\"", ":", "new_properties", ",", "}", "}", ")", "self", ".", "_clear_mapping", "(", "doc_type", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
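A sketch, under assumed names, of how _check_mappings decides new field mappings: a hypothetical ELASTIC_FIELD_MAPPINGS setting supplies explicit mappings, and any other new field outside "content" is mapped as a not_analyzed string. The doc_type and field names here are invented.

# Hypothetical Django setting; only "start_date" gets an explicit mapping here.
ELASTIC_FIELD_MAPPINGS = {
    "start_date": {"type": "date"},
}

# Body _check_mappings would pass to put_mapping for new fields "start_date" and "org".
put_mapping_body = {
    "course_info": {  # hypothetical doc_type
        "properties": {
            "start_date": {"type": "date"},                       # taken from the setting
            "org": {"type": "string", "index": "not_analyzed"},   # default for unmapped fields
        }
    }
}
print(put_mapping_body)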
valid
ElasticSearchEngine.index
Implements call to add documents to the ES index
Note the call to _check_mappings which will setup fields with the desired mappings
search/elastic.py
def index(self, doc_type, sources, **kwargs):
    """
    Implements call to add documents to the ES index
    Note the call to _check_mappings which will setup fields with the desired mappings
    """
    try:
        actions = []
        for source in sources:
            self._check_mappings(doc_type, source)
            id_ = source['id'] if 'id' in source else None
            log.debug("indexing %s object with id %s", doc_type, id_)
            action = {
                "_index": self.index_name,
                "_type": doc_type,
                "_id": id_,
                "_source": source
            }
            actions.append(action)
        # bulk() returns a tuple with summary information
        # number of successfully executed actions and number of errors if stats_only is set to True.
        _, indexing_errors = bulk(
            self._es,
            actions,
            **kwargs
        )
        if indexing_errors:
            ElasticSearchEngine.log_indexing_error(indexing_errors)
    # Broad exception handler to protect around bulk call
    except Exception as ex:
        # log information and re-raise
        log.exception("error while indexing - %s", str(ex))
        raise
[ "Implements", "call", "to", "add", "documents", "to", "the", "ES", "index", "Note", "the", "call", "to", "_check_mappings", "which", "will", "setup", "fields", "with", "the", "desired", "mappings" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L357-L389
[ "def", "index", "(", "self", ",", "doc_type", ",", "sources", ",", "*", "*", "kwargs", ")", ":", "try", ":", "actions", "=", "[", "]", "for", "source", "in", "sources", ":", "self", ".", "_check_mappings", "(", "doc_type", ",", "source", ")", "id_", "=", "source", "[", "'id'", "]", "if", "'id'", "in", "source", "else", "None", "log", ".", "debug", "(", "\"indexing %s object with id %s\"", ",", "doc_type", ",", "id_", ")", "action", "=", "{", "\"_index\"", ":", "self", ".", "index_name", ",", "\"_type\"", ":", "doc_type", ",", "\"_id\"", ":", "id_", ",", "\"_source\"", ":", "source", "}", "actions", ".", "append", "(", "action", ")", "# bulk() returns a tuple with summary information", "# number of successfully executed actions and number of errors if stats_only is set to True.", "_", ",", "indexing_errors", "=", "bulk", "(", "self", ".", "_es", ",", "actions", ",", "*", "*", "kwargs", ")", "if", "indexing_errors", ":", "ElasticSearchEngine", ".", "log_indexing_error", "(", "indexing_errors", ")", "# Broad exception handler to protect around bulk call", "except", "Exception", "as", "ex", ":", "# log information and re-raise", "log", ".", "exception", "(", "\"error while indexing - %s\"", ",", "str", "(", "ex", ")", ")", "raise" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
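A sketch of the bulk action documents that index() assembles before handing them to elasticsearch.helpers.bulk; the index and doc_type names here are invented placeholders, and the dict comprehension stands in for the loop shown above.

sources = [
    {"id": "doc-1", "content": {"display_name": "Week 1"}},
    {"id": "doc-2", "content": {"display_name": "Week 2"}},
]

# One action per source document; "_id" falls back to None when the source has no "id".
actions = [
    {"_index": "my_index", "_type": "my_doc_type", "_id": source.get("id"), "_source": source}
    for source in sources
]
print(actions)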
valid
ElasticSearchEngine.remove
Implements call to remove the documents from the index
search/elastic.py
def remove(self, doc_type, doc_ids, **kwargs):
    """ Implements call to remove the documents from the index """
    try:
        # ignore is flagged as an unexpected-keyword-arg; ES python client documents that it can be used
        # pylint: disable=unexpected-keyword-arg
        actions = []
        for doc_id in doc_ids:
            log.debug("Removing document of type %s and index %s", doc_type, doc_id)
            action = {
                '_op_type': 'delete',
                "_index": self.index_name,
                "_type": doc_type,
                "_id": doc_id
            }
            actions.append(action)
        bulk(self._es, actions, **kwargs)
    except BulkIndexError as ex:
        valid_errors = [error for error in ex.errors if error['delete']['status'] != 404]

        if valid_errors:
            log.exception("An error occurred while removing documents from the index.")
            raise
[ "Implements", "call", "to", "remove", "the", "documents", "from", "the", "index" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L391-L413
[ "def", "remove", "(", "self", ",", "doc_type", ",", "doc_ids", ",", "*", "*", "kwargs", ")", ":", "try", ":", "# ignore is flagged as an unexpected-keyword-arg; ES python client documents that it can be used", "# pylint: disable=unexpected-keyword-arg", "actions", "=", "[", "]", "for", "doc_id", "in", "doc_ids", ":", "log", ".", "debug", "(", "\"Removing document of type %s and index %s\"", ",", "doc_type", ",", "doc_id", ")", "action", "=", "{", "'_op_type'", ":", "'delete'", ",", "\"_index\"", ":", "self", ".", "index_name", ",", "\"_type\"", ":", "doc_type", ",", "\"_id\"", ":", "doc_id", "}", "actions", ".", "append", "(", "action", ")", "bulk", "(", "self", ".", "_es", ",", "actions", ",", "*", "*", "kwargs", ")", "except", "BulkIndexError", "as", "ex", ":", "valid_errors", "=", "[", "error", "for", "error", "in", "ex", ".", "errors", "if", "error", "[", "'delete'", "]", "[", "'status'", "]", "!=", "404", "]", "if", "valid_errors", ":", "log", ".", "exception", "(", "\"An error occurred while removing documents from the index.\"", ")", "raise" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
ElasticSearchEngine.search
Implements call to search the index for the desired content. Args: query_string (str): the string of values upon which to search within the content of the objects within the index field_dictionary (dict): dictionary of values which _must_ exist and _must_ match in order for the documents to be included in the results filter_dictionary (dict): dictionary of values which _must_ match if the field exists in order for the documents to be included in the results; documents for which the field does not exist may be included in the results if they are not otherwise filtered out exclude_dictionary(dict): dictionary of values all of which which must not match in order for the documents to be included in the results; documents which have any of these fields and for which the value matches one of the specified values shall be filtered out of the result set facet_terms (dict): dictionary of terms to include within search facets list - key is the term desired to facet upon, and the value is a dictionary of extended information to include. Supported right now is a size specification for a cap upon how many facet results to return (can be an empty dictionary to use default size for underlying engine): e.g. { "org": {"size": 10}, # only show top 10 organizations "modes": {} } use_field_match (bool): flag to indicate whether to use elastic filtering or elastic matching for field matches - this is nothing but a potential performance tune for certain queries (deprecated) exclude_ids (list): list of id values to exclude from the results - useful for finding maches that aren't "one of these" Returns: dict object with results in the desired format { "took": 3, "total": 4, "max_score": 2.0123, "results": [ { "score": 2.0123, "data": { ... } }, { "score": 0.0983, "data": { ... } } ], "facets": { "org": { "total": total_count, "other": 1, "terms": { "MITx": 25, "HarvardX": 18 } }, "modes": { "total": modes_count, "other": 15, "terms": { "honor": 58, "verified": 44, } } } } Raises: ElasticsearchException when there is a problem with the response from elasticsearch Example usage: .search( "find the words within this string", { "must_have_field": "mast_have_value for must_have_field" }, { } )
search/elastic.py
def search(self, query_string=None, field_dictionary=None, filter_dictionary=None, exclude_dictionary=None, facet_terms=None, exclude_ids=None, use_field_match=False, **kwargs): # pylint: disable=too-many-arguments, too-many-locals, too-many-branches, arguments-differ """ Implements call to search the index for the desired content. Args: query_string (str): the string of values upon which to search within the content of the objects within the index field_dictionary (dict): dictionary of values which _must_ exist and _must_ match in order for the documents to be included in the results filter_dictionary (dict): dictionary of values which _must_ match if the field exists in order for the documents to be included in the results; documents for which the field does not exist may be included in the results if they are not otherwise filtered out exclude_dictionary(dict): dictionary of values all of which which must not match in order for the documents to be included in the results; documents which have any of these fields and for which the value matches one of the specified values shall be filtered out of the result set facet_terms (dict): dictionary of terms to include within search facets list - key is the term desired to facet upon, and the value is a dictionary of extended information to include. Supported right now is a size specification for a cap upon how many facet results to return (can be an empty dictionary to use default size for underlying engine): e.g. { "org": {"size": 10}, # only show top 10 organizations "modes": {} } use_field_match (bool): flag to indicate whether to use elastic filtering or elastic matching for field matches - this is nothing but a potential performance tune for certain queries (deprecated) exclude_ids (list): list of id values to exclude from the results - useful for finding maches that aren't "one of these" Returns: dict object with results in the desired format { "took": 3, "total": 4, "max_score": 2.0123, "results": [ { "score": 2.0123, "data": { ... } }, { "score": 0.0983, "data": { ... } } ], "facets": { "org": { "total": total_count, "other": 1, "terms": { "MITx": 25, "HarvardX": 18 } }, "modes": { "total": modes_count, "other": 15, "terms": { "honor": 58, "verified": 44, } } } } Raises: ElasticsearchException when there is a problem with the response from elasticsearch Example usage: .search( "find the words within this string", { "must_have_field": "mast_have_value for must_have_field" }, { } ) """ log.debug("searching index with %s", query_string) elastic_queries = [] elastic_filters = [] # We have a query string, search all fields for matching text within the "content" node if query_string: if six.PY2: query_string = query_string.encode('utf-8').translate(None, RESERVED_CHARACTERS) else: query_string = query_string.translate(query_string.maketrans('', '', RESERVED_CHARACTERS)) elastic_queries.append({ "query_string": { "fields": ["content.*"], "query": query_string } }) if field_dictionary: if use_field_match: elastic_queries.extend(_process_field_queries(field_dictionary)) else: elastic_filters.extend(_process_field_filters(field_dictionary)) if filter_dictionary: elastic_filters.extend(_process_filters(filter_dictionary)) # Support deprecated argument of exclude_ids if exclude_ids: if not exclude_dictionary: exclude_dictionary = {} if "_id" not in exclude_dictionary: exclude_dictionary["_id"] = [] exclude_dictionary["_id"].extend(exclude_ids) if exclude_dictionary: elastic_filters.append(_process_exclude_dictionary(exclude_dictionary)) query_segment = { "match_all": {} } if elastic_queries: query_segment = { "bool": { "must": elastic_queries } } query = query_segment if elastic_filters: filter_segment = { "bool": { "must": elastic_filters } } query = { "filtered": { "query": query_segment, "filter": filter_segment, } } body = {"query": query} if facet_terms: facet_query = _process_facet_terms(facet_terms) if facet_query: body["facets"] = facet_query try: es_response = self._es.search( index=self.index_name, body=body, **kwargs ) except exceptions.ElasticsearchException as ex: message = six.text_type(ex) if 'QueryParsingException' in message: log.exception("Malformed search query: %s", message) raise QueryParseError('Malformed search query.') else: # log information and re-raise log.exception("error while searching index - %s", str(message)) raise return _translate_hits(es_response)
def search(self, query_string=None, field_dictionary=None, filter_dictionary=None, exclude_dictionary=None, facet_terms=None, exclude_ids=None, use_field_match=False, **kwargs): # pylint: disable=too-many-arguments, too-many-locals, too-many-branches, arguments-differ """ Implements call to search the index for the desired content. Args: query_string (str): the string of values upon which to search within the content of the objects within the index field_dictionary (dict): dictionary of values which _must_ exist and _must_ match in order for the documents to be included in the results filter_dictionary (dict): dictionary of values which _must_ match if the field exists in order for the documents to be included in the results; documents for which the field does not exist may be included in the results if they are not otherwise filtered out exclude_dictionary(dict): dictionary of values all of which which must not match in order for the documents to be included in the results; documents which have any of these fields and for which the value matches one of the specified values shall be filtered out of the result set facet_terms (dict): dictionary of terms to include within search facets list - key is the term desired to facet upon, and the value is a dictionary of extended information to include. Supported right now is a size specification for a cap upon how many facet results to return (can be an empty dictionary to use default size for underlying engine): e.g. { "org": {"size": 10}, # only show top 10 organizations "modes": {} } use_field_match (bool): flag to indicate whether to use elastic filtering or elastic matching for field matches - this is nothing but a potential performance tune for certain queries (deprecated) exclude_ids (list): list of id values to exclude from the results - useful for finding maches that aren't "one of these" Returns: dict object with results in the desired format { "took": 3, "total": 4, "max_score": 2.0123, "results": [ { "score": 2.0123, "data": { ... } }, { "score": 0.0983, "data": { ... } } ], "facets": { "org": { "total": total_count, "other": 1, "terms": { "MITx": 25, "HarvardX": 18 } }, "modes": { "total": modes_count, "other": 15, "terms": { "honor": 58, "verified": 44, } } } } Raises: ElasticsearchException when there is a problem with the response from elasticsearch Example usage: .search( "find the words within this string", { "must_have_field": "mast_have_value for must_have_field" }, { } ) """ log.debug("searching index with %s", query_string) elastic_queries = [] elastic_filters = [] # We have a query string, search all fields for matching text within the "content" node if query_string: if six.PY2: query_string = query_string.encode('utf-8').translate(None, RESERVED_CHARACTERS) else: query_string = query_string.translate(query_string.maketrans('', '', RESERVED_CHARACTERS)) elastic_queries.append({ "query_string": { "fields": ["content.*"], "query": query_string } }) if field_dictionary: if use_field_match: elastic_queries.extend(_process_field_queries(field_dictionary)) else: elastic_filters.extend(_process_field_filters(field_dictionary)) if filter_dictionary: elastic_filters.extend(_process_filters(filter_dictionary)) # Support deprecated argument of exclude_ids if exclude_ids: if not exclude_dictionary: exclude_dictionary = {} if "_id" not in exclude_dictionary: exclude_dictionary["_id"] = [] exclude_dictionary["_id"].extend(exclude_ids) if exclude_dictionary: elastic_filters.append(_process_exclude_dictionary(exclude_dictionary)) query_segment = { "match_all": {} } if elastic_queries: query_segment = { "bool": { "must": elastic_queries } } query = query_segment if elastic_filters: filter_segment = { "bool": { "must": elastic_filters } } query = { "filtered": { "query": query_segment, "filter": filter_segment, } } body = {"query": query} if facet_terms: facet_query = _process_facet_terms(facet_terms) if facet_query: body["facets"] = facet_query try: es_response = self._es.search( index=self.index_name, body=body, **kwargs ) except exceptions.ElasticsearchException as ex: message = six.text_type(ex) if 'QueryParsingException' in message: log.exception("Malformed search query: %s", message) raise QueryParseError('Malformed search query.') else: # log information and re-raise log.exception("error while searching index - %s", str(message)) raise return _translate_hits(es_response)
[ "Implements", "call", "to", "search", "the", "index", "for", "the", "desired", "content", "." ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/elastic.py#L431-L618
[ "def", "search", "(", "self", ",", "query_string", "=", "None", ",", "field_dictionary", "=", "None", ",", "filter_dictionary", "=", "None", ",", "exclude_dictionary", "=", "None", ",", "facet_terms", "=", "None", ",", "exclude_ids", "=", "None", ",", "use_field_match", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# pylint: disable=too-many-arguments, too-many-locals, too-many-branches, arguments-differ", "log", ".", "debug", "(", "\"searching index with %s\"", ",", "query_string", ")", "elastic_queries", "=", "[", "]", "elastic_filters", "=", "[", "]", "# We have a query string, search all fields for matching text within the \"content\" node", "if", "query_string", ":", "if", "six", ".", "PY2", ":", "query_string", "=", "query_string", ".", "encode", "(", "'utf-8'", ")", ".", "translate", "(", "None", ",", "RESERVED_CHARACTERS", ")", "else", ":", "query_string", "=", "query_string", ".", "translate", "(", "query_string", ".", "maketrans", "(", "''", ",", "''", ",", "RESERVED_CHARACTERS", ")", ")", "elastic_queries", ".", "append", "(", "{", "\"query_string\"", ":", "{", "\"fields\"", ":", "[", "\"content.*\"", "]", ",", "\"query\"", ":", "query_string", "}", "}", ")", "if", "field_dictionary", ":", "if", "use_field_match", ":", "elastic_queries", ".", "extend", "(", "_process_field_queries", "(", "field_dictionary", ")", ")", "else", ":", "elastic_filters", ".", "extend", "(", "_process_field_filters", "(", "field_dictionary", ")", ")", "if", "filter_dictionary", ":", "elastic_filters", ".", "extend", "(", "_process_filters", "(", "filter_dictionary", ")", ")", "# Support deprecated argument of exclude_ids", "if", "exclude_ids", ":", "if", "not", "exclude_dictionary", ":", "exclude_dictionary", "=", "{", "}", "if", "\"_id\"", "not", "in", "exclude_dictionary", ":", "exclude_dictionary", "[", "\"_id\"", "]", "=", "[", "]", "exclude_dictionary", "[", "\"_id\"", "]", ".", "extend", "(", "exclude_ids", ")", "if", "exclude_dictionary", ":", "elastic_filters", ".", "append", "(", "_process_exclude_dictionary", "(", "exclude_dictionary", ")", ")", "query_segment", "=", "{", "\"match_all\"", ":", "{", "}", "}", "if", "elastic_queries", ":", "query_segment", "=", "{", "\"bool\"", ":", "{", "\"must\"", ":", "elastic_queries", "}", "}", "query", "=", "query_segment", "if", "elastic_filters", ":", "filter_segment", "=", "{", "\"bool\"", ":", "{", "\"must\"", ":", "elastic_filters", "}", "}", "query", "=", "{", "\"filtered\"", ":", "{", "\"query\"", ":", "query_segment", ",", "\"filter\"", ":", "filter_segment", ",", "}", "}", "body", "=", "{", "\"query\"", ":", "query", "}", "if", "facet_terms", ":", "facet_query", "=", "_process_facet_terms", "(", "facet_terms", ")", "if", "facet_query", ":", "body", "[", "\"facets\"", "]", "=", "facet_query", "try", ":", "es_response", "=", "self", ".", "_es", ".", "search", "(", "index", "=", "self", ".", "index_name", ",", "body", "=", "body", ",", "*", "*", "kwargs", ")", "except", "exceptions", ".", "ElasticsearchException", "as", "ex", ":", "message", "=", "six", ".", "text_type", "(", "ex", ")", "if", "'QueryParsingException'", "in", "message", ":", "log", ".", "exception", "(", "\"Malformed search query: %s\"", ",", "message", ")", "raise", "QueryParseError", "(", "'Malformed search query.'", ")", "else", ":", "# log information and re-raise", "log", ".", "exception", "(", "\"error while searching index - %s\"", ",", "str", "(", "message", ")", ")", "raise", "return", "_translate_hits", "(", "es_response", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
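Note on the ElasticSearchEngine.search record above: its docstring spells out the argument semantics and the response shape, so a short caller sketch may help. The sketch below is not taken from the repository; the index name, course id, and field values are invented for illustration, and only the documented arguments are used.

    from search.search_engine_base import SearchEngine

    # Obtain whichever engine SEARCH_ENGINE points at (assumed to be ElasticSearchEngine here).
    engine = SearchEngine.get_search_engine(index="courseware_index")
    if engine:
        response = engine.search(
            query_string="machine learning",                           # free-text match over content.* fields
            field_dictionary={"course": "course-v1:edX+DemoX+Demo"},   # must exist and must match
            filter_dictionary={"language": "en"},                      # must match only if the field exists
            exclude_dictionary={"_id": ["doc-to-hide"]},               # filtered out of the result set
            facet_terms={"org": {"size": 10}, "modes": {}},            # facet counts, as documented above
        )
        print(response["total"], response.get("facets", {}))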
valid
SearchEngine.get_search_engine
Returns the desired implementor (defined in settings)
search/search_engine_base.py
def get_search_engine(index=None): """ Returns the desired implementor (defined in settings) """ search_engine_class = _load_class(getattr(settings, "SEARCH_ENGINE", None), None) return search_engine_class(index=index) if search_engine_class else None
def get_search_engine(index=None): """ Returns the desired implementor (defined in settings) """ search_engine_class = _load_class(getattr(settings, "SEARCH_ENGINE", None), None) return search_engine_class(index=index) if search_engine_class else None
[ "Returns", "the", "desired", "implementor", "(", "defined", "in", "settings", ")" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/search_engine_base.py#L46-L51
[ "def", "get_search_engine", "(", "index", "=", "None", ")", ":", "search_engine_class", "=", "_load_class", "(", "getattr", "(", "settings", ",", "\"SEARCH_ENGINE\"", ",", "None", ")", ",", "None", ")", "return", "search_engine_class", "(", "index", "=", "index", ")", "if", "search_engine_class", "else", "None" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
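The get_search_engine record above is the factory the other records rely on. A minimal wiring sketch, assuming Django settings are configured; the index name is an arbitrary example and the engine class path is only the one this repository ships.

    # settings.py (assumed value)
    # SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"

    from search.search_engine_base import SearchEngine

    searcher = SearchEngine.get_search_engine(index="my_index")
    if searcher is None:
        # _load_class fell back to its default (None) because SEARCH_ENGINE was not set
        raise RuntimeError("No search engine configured; set SEARCH_ENGINE in settings")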
valid
perform_search
Call the search engine with the appropriate parameters
search/api.py
def perform_search( search_term, user=None, size=10, from_=0, course_id=None): """ Call the search engine with the appropriate parameters """ # field_, filter_ and exclude_dictionary(s) can be overridden by calling application # field_dictionary includes course if course_id provided (field_dictionary, filter_dictionary, exclude_dictionary) = SearchFilterGenerator.generate_field_filters( user=user, course_id=course_id ) searcher = SearchEngine.get_search_engine(getattr(settings, "COURSEWARE_INDEX_NAME", "courseware_index")) if not searcher: raise NoSearchEngineError("No search engine specified in settings.SEARCH_ENGINE") results = searcher.search_string( search_term, field_dictionary=field_dictionary, filter_dictionary=filter_dictionary, exclude_dictionary=exclude_dictionary, size=size, from_=from_, doc_type="courseware_content", ) # post-process the result for result in results["results"]: result["data"] = SearchResultProcessor.process_result(result["data"], search_term, user) results["access_denied_count"] = len([r for r in results["results"] if r["data"] is None]) results["results"] = [r for r in results["results"] if r["data"] is not None] return results
def perform_search( search_term, user=None, size=10, from_=0, course_id=None): """ Call the search engine with the appropriate parameters """ # field_, filter_ and exclude_dictionary(s) can be overridden by calling application # field_dictionary includes course if course_id provided (field_dictionary, filter_dictionary, exclude_dictionary) = SearchFilterGenerator.generate_field_filters( user=user, course_id=course_id ) searcher = SearchEngine.get_search_engine(getattr(settings, "COURSEWARE_INDEX_NAME", "courseware_index")) if not searcher: raise NoSearchEngineError("No search engine specified in settings.SEARCH_ENGINE") results = searcher.search_string( search_term, field_dictionary=field_dictionary, filter_dictionary=filter_dictionary, exclude_dictionary=exclude_dictionary, size=size, from_=from_, doc_type="courseware_content", ) # post-process the result for result in results["results"]: result["data"] = SearchResultProcessor.process_result(result["data"], search_term, user) results["access_denied_count"] = len([r for r in results["results"] if r["data"] is None]) results["results"] = [r for r in results["results"] if r["data"] is not None] return results
[ "Call", "the", "search", "engine", "with", "the", "appropriate", "parameters" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/api.py#L42-L77
[ "def", "perform_search", "(", "search_term", ",", "user", "=", "None", ",", "size", "=", "10", ",", "from_", "=", "0", ",", "course_id", "=", "None", ")", ":", "# field_, filter_ and exclude_dictionary(s) can be overridden by calling application", "# field_dictionary includes course if course_id provided", "(", "field_dictionary", ",", "filter_dictionary", ",", "exclude_dictionary", ")", "=", "SearchFilterGenerator", ".", "generate_field_filters", "(", "user", "=", "user", ",", "course_id", "=", "course_id", ")", "searcher", "=", "SearchEngine", ".", "get_search_engine", "(", "getattr", "(", "settings", ",", "\"COURSEWARE_INDEX_NAME\"", ",", "\"courseware_index\"", ")", ")", "if", "not", "searcher", ":", "raise", "NoSearchEngineError", "(", "\"No search engine specified in settings.SEARCH_ENGINE\"", ")", "results", "=", "searcher", ".", "search_string", "(", "search_term", ",", "field_dictionary", "=", "field_dictionary", ",", "filter_dictionary", "=", "filter_dictionary", ",", "exclude_dictionary", "=", "exclude_dictionary", ",", "size", "=", "size", ",", "from_", "=", "from_", ",", "doc_type", "=", "\"courseware_content\"", ",", ")", "# post-process the result", "for", "result", "in", "results", "[", "\"results\"", "]", ":", "result", "[", "\"data\"", "]", "=", "SearchResultProcessor", ".", "process_result", "(", "result", "[", "\"data\"", "]", ",", "search_term", ",", "user", ")", "results", "[", "\"access_denied_count\"", "]", "=", "len", "(", "[", "r", "for", "r", "in", "results", "[", "\"results\"", "]", "if", "r", "[", "\"data\"", "]", "is", "None", "]", ")", "results", "[", "\"results\"", "]", "=", "[", "r", "for", "r", "in", "results", "[", "\"results\"", "]", "if", "r", "[", "\"data\"", "]", "is", "not", "None", "]", "return", "results" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
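perform_search, per the record above, wraps filter generation, the engine call, and result post-processing. A hedged caller sketch follows; the helper name, course id, page arithmetic, and the assumption that NoSearchEngineError is importable from search.api are illustrative, not repository code.

    from search.api import perform_search, NoSearchEngineError  # import path assumed

    def paged_search(user, term, page=0, page_size=20):
        """Run a courseware search for one page of results (hypothetical helper)."""
        try:
            results = perform_search(
                term,
                user=user,
                size=page_size,
                from_=page * page_size,   # offset follows the page_index * page_size convention
                course_id="course-v1:edX+DemoX+Demo",
            )
        except NoSearchEngineError:
            return None
        # Access-denied hits are already stripped; their count is in results["access_denied_count"].
        return results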
valid
course_discovery_search
Course Discovery activities against the search engine index of course details
search/api.py
def course_discovery_search(search_term=None, size=20, from_=0, field_dictionary=None): """ Course Discovery activities against the search engine index of course details """ # We'll ignore the course-enrollemnt informaiton in field and filter # dictionary, and use our own logic upon enrollment dates for these use_search_fields = ["org"] (search_fields, _, exclude_dictionary) = SearchFilterGenerator.generate_field_filters() use_field_dictionary = {} use_field_dictionary.update({field: search_fields[field] for field in search_fields if field in use_search_fields}) if field_dictionary: use_field_dictionary.update(field_dictionary) if not getattr(settings, "SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING", False): use_field_dictionary["enrollment_start"] = DateRange(None, datetime.utcnow()) searcher = SearchEngine.get_search_engine(getattr(settings, "COURSEWARE_INDEX_NAME", "courseware_index")) if not searcher: raise NoSearchEngineError("No search engine specified in settings.SEARCH_ENGINE") results = searcher.search( query_string=search_term, doc_type="course_info", size=size, from_=from_, # only show when enrollment start IS provided and is before now field_dictionary=use_field_dictionary, # show if no enrollment end is provided and has not yet been reached filter_dictionary={"enrollment_end": DateRange(datetime.utcnow(), None)}, exclude_dictionary=exclude_dictionary, facet_terms=course_discovery_facets(), ) return results
def course_discovery_search(search_term=None, size=20, from_=0, field_dictionary=None): """ Course Discovery activities against the search engine index of course details """ # We'll ignore the course-enrollemnt informaiton in field and filter # dictionary, and use our own logic upon enrollment dates for these use_search_fields = ["org"] (search_fields, _, exclude_dictionary) = SearchFilterGenerator.generate_field_filters() use_field_dictionary = {} use_field_dictionary.update({field: search_fields[field] for field in search_fields if field in use_search_fields}) if field_dictionary: use_field_dictionary.update(field_dictionary) if not getattr(settings, "SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING", False): use_field_dictionary["enrollment_start"] = DateRange(None, datetime.utcnow()) searcher = SearchEngine.get_search_engine(getattr(settings, "COURSEWARE_INDEX_NAME", "courseware_index")) if not searcher: raise NoSearchEngineError("No search engine specified in settings.SEARCH_ENGINE") results = searcher.search( query_string=search_term, doc_type="course_info", size=size, from_=from_, # only show when enrollment start IS provided and is before now field_dictionary=use_field_dictionary, # show if no enrollment end is provided and has not yet been reached filter_dictionary={"enrollment_end": DateRange(datetime.utcnow(), None)}, exclude_dictionary=exclude_dictionary, facet_terms=course_discovery_facets(), ) return results
[ "Course", "Discovery", "activities", "against", "the", "search", "engine", "index", "of", "course", "details" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/api.py#L80-L112
[ "def", "course_discovery_search", "(", "search_term", "=", "None", ",", "size", "=", "20", ",", "from_", "=", "0", ",", "field_dictionary", "=", "None", ")", ":", "# We'll ignore the course-enrollemnt informaiton in field and filter", "# dictionary, and use our own logic upon enrollment dates for these", "use_search_fields", "=", "[", "\"org\"", "]", "(", "search_fields", ",", "_", ",", "exclude_dictionary", ")", "=", "SearchFilterGenerator", ".", "generate_field_filters", "(", ")", "use_field_dictionary", "=", "{", "}", "use_field_dictionary", ".", "update", "(", "{", "field", ":", "search_fields", "[", "field", "]", "for", "field", "in", "search_fields", "if", "field", "in", "use_search_fields", "}", ")", "if", "field_dictionary", ":", "use_field_dictionary", ".", "update", "(", "field_dictionary", ")", "if", "not", "getattr", "(", "settings", ",", "\"SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING\"", ",", "False", ")", ":", "use_field_dictionary", "[", "\"enrollment_start\"", "]", "=", "DateRange", "(", "None", ",", "datetime", ".", "utcnow", "(", ")", ")", "searcher", "=", "SearchEngine", ".", "get_search_engine", "(", "getattr", "(", "settings", ",", "\"COURSEWARE_INDEX_NAME\"", ",", "\"courseware_index\"", ")", ")", "if", "not", "searcher", ":", "raise", "NoSearchEngineError", "(", "\"No search engine specified in settings.SEARCH_ENGINE\"", ")", "results", "=", "searcher", ".", "search", "(", "query_string", "=", "search_term", ",", "doc_type", "=", "\"course_info\"", ",", "size", "=", "size", ",", "from_", "=", "from_", ",", "# only show when enrollment start IS provided and is before now", "field_dictionary", "=", "use_field_dictionary", ",", "# show if no enrollment end is provided and has not yet been reached", "filter_dictionary", "=", "{", "\"enrollment_end\"", ":", "DateRange", "(", "datetime", ".", "utcnow", "(", ")", ",", "None", ")", "}", ",", "exclude_dictionary", "=", "exclude_dictionary", ",", "facet_terms", "=", "course_discovery_facets", "(", ")", ",", ")", "return", "results" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
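A sketch of calling course_discovery_search with an extra caller-supplied filter, as permitted by the field_dictionary argument documented above; the language filter and the facet printing are illustrative assumptions.

    from search.api import course_discovery_search

    results = course_discovery_search(
        search_term="data science",
        size=20,
        from_=0,
        field_dictionary={"language": "en"},   # merged on top of the generated "org" filter
    )
    # Facet counts come back under results["facets"] in the engine's documented shape.
    for facet_name, facet_data in results.get("facets", {}).items():
        print(facet_name, facet_data.get("terms"))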
valid
SearchResultProcessor.strings_in_dictionary
Used by default implementation for finding excerpt
search/result_processor.py
def strings_in_dictionary(dictionary): """ Used by default implementation for finding excerpt """ strings = [value for value in six.itervalues(dictionary) if not isinstance(value, dict)] for child_dict in [dv for dv in six.itervalues(dictionary) if isinstance(dv, dict)]: strings.extend(SearchResultProcessor.strings_in_dictionary(child_dict)) return strings
def strings_in_dictionary(dictionary): """ Used by default implementation for finding excerpt """ strings = [value for value in six.itervalues(dictionary) if not isinstance(value, dict)] for child_dict in [dv for dv in six.itervalues(dictionary) if isinstance(dv, dict)]: strings.extend(SearchResultProcessor.strings_in_dictionary(child_dict)) return strings
[ "Used", "by", "default", "implementation", "for", "finding", "excerpt" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/result_processor.py#L46-L51
[ "def", "strings_in_dictionary", "(", "dictionary", ")", ":", "strings", "=", "[", "value", "for", "value", "in", "six", ".", "itervalues", "(", "dictionary", ")", "if", "not", "isinstance", "(", "value", ",", "dict", ")", "]", "for", "child_dict", "in", "[", "dv", "for", "dv", "in", "six", ".", "itervalues", "(", "dictionary", ")", "if", "isinstance", "(", "dv", ",", "dict", ")", "]", ":", "strings", ".", "extend", "(", "SearchResultProcessor", ".", "strings_in_dictionary", "(", "child_dict", ")", ")", "return", "strings" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
SearchResultProcessor.find_matches
Used by default property excerpt
search/result_processor.py
def find_matches(strings, words, length_hoped): """ Used by default property excerpt """ lower_words = [w.lower() for w in words] def has_match(string): """ Do any of the words match within the string """ lower_string = string.lower() for test_word in lower_words: if test_word in lower_string: return True return False shortened_strings = [textwrap.wrap(s) for s in strings] short_string_list = list(chain.from_iterable(shortened_strings)) matches = [ms for ms in short_string_list if has_match(ms)] cumulative_len = 0 break_at = None for idx, match in enumerate(matches): cumulative_len += len(match) if cumulative_len >= length_hoped: break_at = idx break return matches[0:break_at]
def find_matches(strings, words, length_hoped): """ Used by default property excerpt """ lower_words = [w.lower() for w in words] def has_match(string): """ Do any of the words match within the string """ lower_string = string.lower() for test_word in lower_words: if test_word in lower_string: return True return False shortened_strings = [textwrap.wrap(s) for s in strings] short_string_list = list(chain.from_iterable(shortened_strings)) matches = [ms for ms in short_string_list if has_match(ms)] cumulative_len = 0 break_at = None for idx, match in enumerate(matches): cumulative_len += len(match) if cumulative_len >= length_hoped: break_at = idx break return matches[0:break_at]
[ "Used", "by", "default", "property", "excerpt" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/result_processor.py#L54-L78
[ "def", "find_matches", "(", "strings", ",", "words", ",", "length_hoped", ")", ":", "lower_words", "=", "[", "w", ".", "lower", "(", ")", "for", "w", "in", "words", "]", "def", "has_match", "(", "string", ")", ":", "\"\"\" Do any of the words match within the string \"\"\"", "lower_string", "=", "string", ".", "lower", "(", ")", "for", "test_word", "in", "lower_words", ":", "if", "test_word", "in", "lower_string", ":", "return", "True", "return", "False", "shortened_strings", "=", "[", "textwrap", ".", "wrap", "(", "s", ")", "for", "s", "in", "strings", "]", "short_string_list", "=", "list", "(", "chain", ".", "from_iterable", "(", "shortened_strings", ")", ")", "matches", "=", "[", "ms", "for", "ms", "in", "short_string_list", "if", "has_match", "(", "ms", ")", "]", "cumulative_len", "=", "0", "break_at", "=", "None", "for", "idx", ",", "match", "in", "enumerate", "(", "matches", ")", ":", "cumulative_len", "+=", "len", "(", "match", ")", "if", "cumulative_len", ">=", "length_hoped", ":", "break_at", "=", "idx", "break", "return", "matches", "[", "0", ":", "break_at", "]" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
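A toy illustration of the cumulative-length cutoff in find_matches above; the strings are invented, and the call assumes the method is exposed as a staticmethod as in the repository.

    from search.result_processor import SearchResultProcessor

    fragments = [
        "Gravity is introduced in the second week of the course.",
        "This unit covers administrative details only.",
        "Newtonian gravity is later contrasted with relativity.",
    ]
    # Fragments are textwrap-wrapped, non-matching ones dropped, and the survivors
    # accumulated until their combined length reaches length_hoped.
    print(SearchResultProcessor.find_matches(fragments, ["gravity"], length_hoped=60))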
valid
SearchResultProcessor.decorate_matches
decorate the matches within the excerpt
search/result_processor.py
def decorate_matches(match_in, match_word): """ decorate the matches within the excerpt """ matches = re.finditer(match_word, match_in, re.IGNORECASE) for matched_string in set([match.group() for match in matches]): match_in = match_in.replace( matched_string, getattr(settings, "SEARCH_MATCH_DECORATION", u"<b>{}</b>").format(matched_string) ) return match_in
def decorate_matches(match_in, match_word): """ decorate the matches within the excerpt """ matches = re.finditer(match_word, match_in, re.IGNORECASE) for matched_string in set([match.group() for match in matches]): match_in = match_in.replace( matched_string, getattr(settings, "SEARCH_MATCH_DECORATION", u"<b>{}</b>").format(matched_string) ) return match_in
[ "decorate", "the", "matches", "within", "the", "excerpt" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/result_processor.py#L81-L89
[ "def", "decorate_matches", "(", "match_in", ",", "match_word", ")", ":", "matches", "=", "re", ".", "finditer", "(", "match_word", ",", "match_in", ",", "re", ".", "IGNORECASE", ")", "for", "matched_string", "in", "set", "(", "[", "match", ".", "group", "(", ")", "for", "match", "in", "matches", "]", ")", ":", "match_in", "=", "match_in", ".", "replace", "(", "matched_string", ",", "getattr", "(", "settings", ",", "\"SEARCH_MATCH_DECORATION\"", ",", "u\"<b>{}</b>\"", ")", ".", "format", "(", "matched_string", ")", ")", "return", "match_in" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
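decorate_matches wraps every case variant of the matched word with the SEARCH_MATCH_DECORATION template. A small sketch, assuming Django settings are available (the default template visible in the record is <b>{}</b>) and that the method is a staticmethod as in the repository.

    from search.result_processor import SearchResultProcessor

    excerpt = "Gravity pulls objects together; gravity acts at a distance."
    decorated = SearchResultProcessor.decorate_matches(excerpt, "gravity")
    # Both "Gravity" and "gravity" end up wrapped, e.g. "<b>Gravity</b> pulls ..."
    print(decorated)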
valid
SearchResultProcessor.add_properties
Called during post processing of result Any properties defined in your subclass will get exposed as members of the result json from the search
search/result_processor.py
def add_properties(self): """ Called during post processing of result Any properties defined in your subclass will get exposed as members of the result json from the search """ for property_name in [p[0] for p in inspect.getmembers(self.__class__) if isinstance(p[1], property)]: self._results_fields[property_name] = getattr(self, property_name, None)
def add_properties(self): """ Called during post processing of result Any properties defined in your subclass will get exposed as members of the result json from the search """ for property_name in [p[0] for p in inspect.getmembers(self.__class__) if isinstance(p[1], property)]: self._results_fields[property_name] = getattr(self, property_name, None)
[ "Called", "during", "post", "processing", "of", "result", "Any", "properties", "defined", "in", "your", "subclass", "will", "get", "exposed", "as", "members", "of", "the", "result", "json", "from", "the", "search" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/result_processor.py#L99-L105
[ "def", "add_properties", "(", "self", ")", ":", "for", "property_name", "in", "[", "p", "[", "0", "]", "for", "p", "in", "inspect", ".", "getmembers", "(", "self", ".", "__class__", ")", "if", "isinstance", "(", "p", "[", "1", "]", ",", "property", ")", "]", ":", "self", ".", "_results_fields", "[", "property_name", "]", "=", "getattr", "(", "self", ",", "property_name", ",", "None", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
SearchResultProcessor.process_result
Called from within search handler. Finds desired subclass and decides if the result should be removed and adds properties derived from the result information
search/result_processor.py
def process_result(cls, dictionary, match_phrase, user): """ Called from within search handler. Finds desired subclass and decides if the result should be removed and adds properties derived from the result information """ result_processor = _load_class(getattr(settings, "SEARCH_RESULT_PROCESSOR", None), cls) srp = result_processor(dictionary, match_phrase) if srp.should_remove(user): return None try: srp.add_properties() # protect around any problems introduced by subclasses within their properties except Exception as ex: # pylint: disable=broad-except log.exception("error processing properties for %s - %s: will remove from results", json.dumps(dictionary, cls=DjangoJSONEncoder), str(ex)) return None return dictionary
def process_result(cls, dictionary, match_phrase, user): """ Called from within search handler. Finds desired subclass and decides if the result should be removed and adds properties derived from the result information """ result_processor = _load_class(getattr(settings, "SEARCH_RESULT_PROCESSOR", None), cls) srp = result_processor(dictionary, match_phrase) if srp.should_remove(user): return None try: srp.add_properties() # protect around any problems introduced by subclasses within their properties except Exception as ex: # pylint: disable=broad-except log.exception("error processing properties for %s - %s: will remove from results", json.dumps(dictionary, cls=DjangoJSONEncoder), str(ex)) return None return dictionary
[ "Called", "from", "within", "search", "handler", ".", "Finds", "desired", "subclass", "and", "decides", "if", "the", "result", "should", "be", "removed", "and", "adds", "properties", "derived", "from", "the", "result", "information" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/result_processor.py#L108-L124
[ "def", "process_result", "(", "cls", ",", "dictionary", ",", "match_phrase", ",", "user", ")", ":", "result_processor", "=", "_load_class", "(", "getattr", "(", "settings", ",", "\"SEARCH_RESULT_PROCESSOR\"", ",", "None", ")", ",", "cls", ")", "srp", "=", "result_processor", "(", "dictionary", ",", "match_phrase", ")", "if", "srp", ".", "should_remove", "(", "user", ")", ":", "return", "None", "try", ":", "srp", ".", "add_properties", "(", ")", "# protect around any problems introduced by subclasses within their properties", "except", "Exception", "as", "ex", ":", "# pylint: disable=broad-except", "log", ".", "exception", "(", "\"error processing properties for %s - %s: will remove from results\"", ",", "json", ".", "dumps", "(", "dictionary", ",", "cls", "=", "DjangoJSONEncoder", ")", ",", "str", "(", "ex", ")", ")", "return", "None", "return", "dictionary" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
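process_result loads whatever class SEARCH_RESULT_PROCESSOR names and lets that subclass veto or enrich each hit. A hypothetical subclass sketch; the class name, the display_title property, and the settings path are invented for illustration.

    from search.result_processor import SearchResultProcessor

    class MyResultProcessor(SearchResultProcessor):
        @property
        def display_title(self):
            # add_properties() copies every subclass property into the returned result dict
            return self._results_fields.get("content", {}).get("display_name")

        def should_remove(self, user):
            # Return True to drop a hit from the results for this user; keep everything here.
            return False

    # settings.py (assumed):
    # SEARCH_RESULT_PROCESSOR = "myapp.search_helpers.MyResultProcessor"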
valid
SearchResultProcessor.excerpt
Property to display a useful excerpt representing the matches within the results
search/result_processor.py
def excerpt(self): """ Property to display a useful excerpt representing the matches within the results """ if "content" not in self._results_fields: return None match_phrases = [self._match_phrase] if six.PY2: separate_phrases = [ phrase.decode('utf-8') for phrase in shlex.split(self._match_phrase.encode('utf-8')) ] else: separate_phrases = [ phrase for phrase in shlex.split(self._match_phrase) ] if len(separate_phrases) > 1: match_phrases.extend(separate_phrases) else: match_phrases = separate_phrases matches = SearchResultProcessor.find_matches( SearchResultProcessor.strings_in_dictionary(self._results_fields["content"]), match_phrases, DESIRED_EXCERPT_LENGTH ) excerpt_text = ELLIPSIS.join(matches) for match_word in match_phrases: excerpt_text = SearchResultProcessor.decorate_matches(excerpt_text, match_word) return excerpt_text
def excerpt(self): """ Property to display a useful excerpt representing the matches within the results """ if "content" not in self._results_fields: return None match_phrases = [self._match_phrase] if six.PY2: separate_phrases = [ phrase.decode('utf-8') for phrase in shlex.split(self._match_phrase.encode('utf-8')) ] else: separate_phrases = [ phrase for phrase in shlex.split(self._match_phrase) ] if len(separate_phrases) > 1: match_phrases.extend(separate_phrases) else: match_phrases = separate_phrases matches = SearchResultProcessor.find_matches( SearchResultProcessor.strings_in_dictionary(self._results_fields["content"]), match_phrases, DESIRED_EXCERPT_LENGTH ) excerpt_text = ELLIPSIS.join(matches) for match_word in match_phrases: excerpt_text = SearchResultProcessor.decorate_matches(excerpt_text, match_word) return excerpt_text
[ "Property", "to", "display", "a", "useful", "excerpt", "representing", "the", "matches", "within", "the", "results" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/result_processor.py#L127-L160
[ "def", "excerpt", "(", "self", ")", ":", "if", "\"content\"", "not", "in", "self", ".", "_results_fields", ":", "return", "None", "match_phrases", "=", "[", "self", ".", "_match_phrase", "]", "if", "six", ".", "PY2", ":", "separate_phrases", "=", "[", "phrase", ".", "decode", "(", "'utf-8'", ")", "for", "phrase", "in", "shlex", ".", "split", "(", "self", ".", "_match_phrase", ".", "encode", "(", "'utf-8'", ")", ")", "]", "else", ":", "separate_phrases", "=", "[", "phrase", "for", "phrase", "in", "shlex", ".", "split", "(", "self", ".", "_match_phrase", ")", "]", "if", "len", "(", "separate_phrases", ")", ">", "1", ":", "match_phrases", ".", "extend", "(", "separate_phrases", ")", "else", ":", "match_phrases", "=", "separate_phrases", "matches", "=", "SearchResultProcessor", ".", "find_matches", "(", "SearchResultProcessor", ".", "strings_in_dictionary", "(", "self", ".", "_results_fields", "[", "\"content\"", "]", ")", ",", "match_phrases", ",", "DESIRED_EXCERPT_LENGTH", ")", "excerpt_text", "=", "ELLIPSIS", ".", "join", "(", "matches", ")", "for", "match_word", "in", "match_phrases", ":", "excerpt_text", "=", "SearchResultProcessor", ".", "decorate_matches", "(", "excerpt_text", ",", "match_word", ")", "return", "excerpt_text" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
SearchFilterGenerator.generate_field_filters
Called from within search handler Finds desired subclass and adds filter information based upon user information
search/filter_generator.py
def generate_field_filters(cls, **kwargs): """ Called from within search handler Finds desired subclass and adds filter information based upon user information """ generator = _load_class(getattr(settings, "SEARCH_FILTER_GENERATOR", None), cls)() return ( generator.field_dictionary(**kwargs), generator.filter_dictionary(**kwargs), generator.exclude_dictionary(**kwargs), )
def generate_field_filters(cls, **kwargs): """ Called from within search handler Finds desired subclass and adds filter information based upon user information """ generator = _load_class(getattr(settings, "SEARCH_FILTER_GENERATOR", None), cls)() return ( generator.field_dictionary(**kwargs), generator.filter_dictionary(**kwargs), generator.exclude_dictionary(**kwargs), )
[ "Called", "from", "within", "search", "handler", "Finds", "desired", "subclass", "and", "adds", "filter", "information", "based", "upon", "user", "information" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/filter_generator.py#L36-L46
[ "def", "generate_field_filters", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "generator", "=", "_load_class", "(", "getattr", "(", "settings", ",", "\"SEARCH_FILTER_GENERATOR\"", ",", "None", ")", ",", "cls", ")", "(", ")", "return", "(", "generator", ".", "field_dictionary", "(", "*", "*", "kwargs", ")", ",", "generator", ".", "filter_dictionary", "(", "*", "*", "kwargs", ")", ",", "generator", ".", "exclude_dictionary", "(", "*", "*", "kwargs", ")", ",", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
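generate_field_filters likewise defers to the class named by SEARCH_FILTER_GENERATOR. A hypothetical subclass that pins every search to one organization; the org value and the settings path are assumptions.

    from search.filter_generator import SearchFilterGenerator

    class SiteFilterGenerator(SearchFilterGenerator):
        def field_dictionary(self, **kwargs):
            # Start from the defaults (e.g. the course filter when course_id is passed), then restrict by org.
            fields = super(SiteFilterGenerator, self).field_dictionary(**kwargs)
            fields["org"] = "MyOrgX"
            return fields

    # settings.py (assumed):
    # SEARCH_FILTER_GENERATOR = "myapp.search_helpers.SiteFilterGenerator"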
valid
SearchInitializer.set_search_enviroment
Called from within search handler Finds desired subclass and calls initialize method
search/initializer.py
def set_search_enviroment(cls, **kwargs): """ Called from within search handler Finds desired subclass and calls initialize method """ initializer = _load_class(getattr(settings, "SEARCH_INITIALIZER", None), cls)() return initializer.initialize(**kwargs)
def set_search_enviroment(cls, **kwargs): """ Called from within search handler Finds desired subclass and calls initialize method """ initializer = _load_class(getattr(settings, "SEARCH_INITIALIZER", None), cls)() return initializer.initialize(**kwargs)
[ "Called", "from", "within", "search", "handler", "Finds", "desired", "subclass", "and", "calls", "initialize", "method" ]
edx/edx-search
python
https://github.com/edx/edx-search/blob/476cf02b71ceba34ae7d8b798f36d60692317c55/search/initializer.py#L23-L29
[ "def", "set_search_enviroment", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "initializer", "=", "_load_class", "(", "getattr", "(", "settings", ",", "\"SEARCH_INITIALIZER\"", ",", "None", ")", ",", "cls", ")", "(", ")", "return", "initializer", ".", "initialize", "(", "*", "*", "kwargs", ")" ]
476cf02b71ceba34ae7d8b798f36d60692317c55
valid
Detector._parse
Opens data file and for each line, calls _eat_name_line
sexmachine/detector.py
def _parse(self, filename): """Opens data file and for each line, calls _eat_name_line""" self.names = {} with codecs.open(filename, encoding="iso8859-1") as f: for line in f: if any(map(lambda c: 128 < ord(c) < 160, line)): line = line.encode("iso8859-1").decode("windows-1252") self._eat_name_line(line.strip())
def _parse(self, filename): """Opens data file and for each line, calls _eat_name_line""" self.names = {} with codecs.open(filename, encoding="iso8859-1") as f: for line in f: if any(map(lambda c: 128 < ord(c) < 160, line)): line = line.encode("iso8859-1").decode("windows-1252") self._eat_name_line(line.strip())
[ "Opens", "data", "file", "and", "for", "each", "line", "calls", "_eat_name_line" ]
ferhatelmas/sexmachine
python
https://github.com/ferhatelmas/sexmachine/blob/85d33bb47ccc017676e69788750f116e391f52db/sexmachine/detector.py#L33-L40
[ "def", "_parse", "(", "self", ",", "filename", ")", ":", "self", ".", "names", "=", "{", "}", "with", "codecs", ".", "open", "(", "filename", ",", "encoding", "=", "\"iso8859-1\"", ")", "as", "f", ":", "for", "line", "in", "f", ":", "if", "any", "(", "map", "(", "lambda", "c", ":", "128", "<", "ord", "(", "c", ")", "<", "160", ",", "line", ")", ")", ":", "line", "=", "line", ".", "encode", "(", "\"iso8859-1\"", ")", ".", "decode", "(", "\"windows-1252\"", ")", "self", ".", "_eat_name_line", "(", "line", ".", "strip", "(", ")", ")" ]
85d33bb47ccc017676e69788750f116e391f52db
valid
Detector._eat_name_line
Parses one line of data file
sexmachine/detector.py
def _eat_name_line(self, line): """Parses one line of data file""" if line[0] not in "#=": parts = line.split() country_values = line[30:-1] name = map_name(parts[1]) if not self.case_sensitive: name = name.lower() if parts[0] == "M": self._set(name, u"male", country_values) elif parts[0] == "1M" or parts[0] == "?M": self._set(name, u"mostly_male", country_values) elif parts[0] == "F": self._set(name, u"female", country_values) elif parts[0] == "1F" or parts[0] == "?F": self._set(name, u"mostly_female", country_values) elif parts[0] == "?": self._set(name, self.unknown_value, country_values) else: raise "Not sure what to do with a sex of %s" % parts[0]
def _eat_name_line(self, line): """Parses one line of data file""" if line[0] not in "#=": parts = line.split() country_values = line[30:-1] name = map_name(parts[1]) if not self.case_sensitive: name = name.lower() if parts[0] == "M": self._set(name, u"male", country_values) elif parts[0] == "1M" or parts[0] == "?M": self._set(name, u"mostly_male", country_values) elif parts[0] == "F": self._set(name, u"female", country_values) elif parts[0] == "1F" or parts[0] == "?F": self._set(name, u"mostly_female", country_values) elif parts[0] == "?": self._set(name, self.unknown_value, country_values) else: raise "Not sure what to do with a sex of %s" % parts[0]
[ "Parses", "one", "line", "of", "data", "file" ]
ferhatelmas/sexmachine
python
https://github.com/ferhatelmas/sexmachine/blob/85d33bb47ccc017676e69788750f116e391f52db/sexmachine/detector.py#L42-L62
[ "def", "_eat_name_line", "(", "self", ",", "line", ")", ":", "if", "line", "[", "0", "]", "not", "in", "\"#=\"", ":", "parts", "=", "line", ".", "split", "(", ")", "country_values", "=", "line", "[", "30", ":", "-", "1", "]", "name", "=", "map_name", "(", "parts", "[", "1", "]", ")", "if", "not", "self", ".", "case_sensitive", ":", "name", "=", "name", ".", "lower", "(", ")", "if", "parts", "[", "0", "]", "==", "\"M\"", ":", "self", ".", "_set", "(", "name", ",", "u\"male\"", ",", "country_values", ")", "elif", "parts", "[", "0", "]", "==", "\"1M\"", "or", "parts", "[", "0", "]", "==", "\"?M\"", ":", "self", ".", "_set", "(", "name", ",", "u\"mostly_male\"", ",", "country_values", ")", "elif", "parts", "[", "0", "]", "==", "\"F\"", ":", "self", ".", "_set", "(", "name", ",", "u\"female\"", ",", "country_values", ")", "elif", "parts", "[", "0", "]", "==", "\"1F\"", "or", "parts", "[", "0", "]", "==", "\"?F\"", ":", "self", ".", "_set", "(", "name", ",", "u\"mostly_female\"", ",", "country_values", ")", "elif", "parts", "[", "0", "]", "==", "\"?\"", ":", "self", ".", "_set", "(", "name", ",", "self", ".", "unknown_value", ",", "country_values", ")", "else", ":", "raise", "\"Not sure what to do with a sex of %s\"", "%", "parts", "[", "0", "]" ]
85d33bb47ccc017676e69788750f116e391f52db
valid
Detector._set
Sets gender and relevant country values for names dictionary of detector
sexmachine/detector.py
def _set(self, name, gender, country_values): """Sets gender and relevant country values for names dictionary of detector""" if '+' in name: for replacement in ['', ' ', '-']: self._set(name.replace('+', replacement), gender, country_values) else: if name not in self.names: self.names[name] = {} self.names[name][gender] = country_values
def _set(self, name, gender, country_values): """Sets gender and relevant country values for names dictionary of detector""" if '+' in name: for replacement in ['', ' ', '-']: self._set(name.replace('+', replacement), gender, country_values) else: if name not in self.names: self.names[name] = {} self.names[name][gender] = country_values
[ "Sets", "gender", "and", "relevant", "country", "values", "for", "names", "dictionary", "of", "detector" ]
ferhatelmas/sexmachine
python
https://github.com/ferhatelmas/sexmachine/blob/85d33bb47ccc017676e69788750f116e391f52db/sexmachine/detector.py#L64-L72
[ "def", "_set", "(", "self", ",", "name", ",", "gender", ",", "country_values", ")", ":", "if", "'+'", "in", "name", ":", "for", "replacement", "in", "[", "''", ",", "' '", ",", "'-'", "]", ":", "self", ".", "_set", "(", "name", ".", "replace", "(", "'+'", ",", "replacement", ")", ",", "gender", ",", "country_values", ")", "else", ":", "if", "name", "not", "in", "self", ".", "names", ":", "self", ".", "names", "[", "name", "]", "=", "{", "}", "self", ".", "names", "[", "name", "]", "[", "gender", "]", "=", "country_values" ]
85d33bb47ccc017676e69788750f116e391f52db
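A tiny illustration of the '+' expansion performed by Detector._set above: one compound entry ends up stored under three spellings. Pure toy code, independent of the data file.

    # "Jean+Pierre" is registered as a solid, a spaced, and a hyphenated name.
    for replacement in ['', ' ', '-']:
        print("Jean+Pierre".replace('+', replacement))
    # -> JeanPierre, Jean Pierre, Jean-Pierre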
valid
Detector._most_popular_gender
Finds the most popular gender for the given name counting by given counter
sexmachine/detector.py
def _most_popular_gender(self, name, counter): """Finds the most popular gender for the given name counting by given counter""" if name not in self.names: return self.unknown_value max_count, max_tie = (0, 0) best = self.names[name].keys()[0] for gender, country_values in self.names[name].items(): count, tie = counter(country_values) if count > max_count or (count == max_count and tie > max_tie): max_count, max_tie, best = count, tie, gender return best if max_count > 0 else self.unknown_value
def _most_popular_gender(self, name, counter): """Finds the most popular gender for the given name counting by given counter""" if name not in self.names: return self.unknown_value max_count, max_tie = (0, 0) best = self.names[name].keys()[0] for gender, country_values in self.names[name].items(): count, tie = counter(country_values) if count > max_count or (count == max_count and tie > max_tie): max_count, max_tie, best = count, tie, gender return best if max_count > 0 else self.unknown_value
[ "Finds", "the", "most", "popular", "gender", "for", "the", "given", "name", "counting", "by", "given", "counter" ]
ferhatelmas/sexmachine
python
https://github.com/ferhatelmas/sexmachine/blob/85d33bb47ccc017676e69788750f116e391f52db/sexmachine/detector.py#L74-L86
[ "def", "_most_popular_gender", "(", "self", ",", "name", ",", "counter", ")", ":", "if", "name", "not", "in", "self", ".", "names", ":", "return", "self", ".", "unknown_value", "max_count", ",", "max_tie", "=", "(", "0", ",", "0", ")", "best", "=", "self", ".", "names", "[", "name", "]", ".", "keys", "(", ")", "[", "0", "]", "for", "gender", ",", "country_values", "in", "self", ".", "names", "[", "name", "]", ".", "items", "(", ")", ":", "count", ",", "tie", "=", "counter", "(", "country_values", ")", "if", "count", ">", "max_count", "or", "(", "count", "==", "max_count", "and", "tie", ">", "max_tie", ")", ":", "max_count", ",", "max_tie", ",", "best", "=", "count", ",", "tie", ",", "gender", "return", "best", "if", "max_count", ">", "0", "else", "self", ".", "unknown_value" ]
85d33bb47ccc017676e69788750f116e391f52db
valid
Detector.get_gender
Returns best gender for the given name and country pair
sexmachine/detector.py
def get_gender(self, name, country=None): """Returns best gender for the given name and country pair""" if not self.case_sensitive: name = name.lower() if name not in self.names: return self.unknown_value elif not country: def counter(country_values): country_values = map(ord, country_values.replace(" ", "")) return (len(country_values), sum(map(lambda c: c > 64 and c-55 or c-48, country_values))) return self._most_popular_gender(name, counter) elif country in self.__class__.COUNTRIES: index = self.__class__.COUNTRIES.index(country) counter = lambda e: (ord(e[index])-32, 0) return self._most_popular_gender(name, counter) else: raise NoCountryError("No such country: %s" % country)
def get_gender(self, name, country=None): """Returns best gender for the given name and country pair""" if not self.case_sensitive: name = name.lower() if name not in self.names: return self.unknown_value elif not country: def counter(country_values): country_values = map(ord, country_values.replace(" ", "")) return (len(country_values), sum(map(lambda c: c > 64 and c-55 or c-48, country_values))) return self._most_popular_gender(name, counter) elif country in self.__class__.COUNTRIES: index = self.__class__.COUNTRIES.index(country) counter = lambda e: (ord(e[index])-32, 0) return self._most_popular_gender(name, counter) else: raise NoCountryError("No such country: %s" % country)
[ "Returns", "best", "gender", "for", "the", "given", "name", "and", "country", "pair" ]
ferhatelmas/sexmachine
python
https://github.com/ferhatelmas/sexmachine/blob/85d33bb47ccc017676e69788750f116e391f52db/sexmachine/detector.py#L88-L106
[ "def", "get_gender", "(", "self", ",", "name", ",", "country", "=", "None", ")", ":", "if", "not", "self", ".", "case_sensitive", ":", "name", "=", "name", ".", "lower", "(", ")", "if", "name", "not", "in", "self", ".", "names", ":", "return", "self", ".", "unknown_value", "elif", "not", "country", ":", "def", "counter", "(", "country_values", ")", ":", "country_values", "=", "map", "(", "ord", ",", "country_values", ".", "replace", "(", "\" \"", ",", "\"\"", ")", ")", "return", "(", "len", "(", "country_values", ")", ",", "sum", "(", "map", "(", "lambda", "c", ":", "c", ">", "64", "and", "c", "-", "55", "or", "c", "-", "48", ",", "country_values", ")", ")", ")", "return", "self", ".", "_most_popular_gender", "(", "name", ",", "counter", ")", "elif", "country", "in", "self", ".", "__class__", ".", "COUNTRIES", ":", "index", "=", "self", ".", "__class__", ".", "COUNTRIES", ".", "index", "(", "country", ")", "counter", "=", "lambda", "e", ":", "(", "ord", "(", "e", "[", "index", "]", ")", "-", "32", ",", "0", ")", "return", "self", ".", "_most_popular_gender", "(", "name", ",", "counter", ")", "else", ":", "raise", "NoCountryError", "(", "\"No such country: %s\"", "%", "country", ")" ]
85d33bb47ccc017676e69788750f116e391f52db
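End-to-end use of the detector documented above, as a hedged sketch: the constructor flag and country key follow the library's conventions as best understood here, and the genders in the comments depend on the bundled name data, so treat them as indicative only.

    import sexmachine.detector as gender

    d = gender.Detector(case_sensitive=False)
    print(d.get_gender(u"Bob"))                        # typically "male"
    print(d.get_gender(u"Sally"))                      # typically "female"
    print(d.get_gender(u"Jamie"))                      # ambiguous names fall back to the unknown value
    print(d.get_gender(u"Jamie", u"great_britain"))    # count only the given country's column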
valid
Report.output
Writes the specified string to the output target of the report. :param msg: the message to output. :type msg: str :param newline: whether or not to append a newline to the end of the message :type newline: str
src/tidypy/reports/base.py
def output(self, msg, newline=True): """ Writes the specified string to the output target of the report. :param msg: the message to output. :type msg: str :param newline: whether or not to append a newline to the end of the message :type newline: str """ click.echo(text_type(msg), nl=newline, file=self.output_file)
def output(self, msg, newline=True): """ Writes the specified string to the output target of the report. :param msg: the message to output. :type msg: str :param newline: whether or not to append a newline to the end of the message :type newline: str """ click.echo(text_type(msg), nl=newline, file=self.output_file)
[ "Writes", "the", "specified", "string", "to", "the", "output", "target", "of", "the", "report", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/reports/base.py#L64-L75
[ "def", "output", "(", "self", ",", "msg", ",", "newline", "=", "True", ")", ":", "click", ".", "echo", "(", "text_type", "(", "msg", ")", ",", "nl", "=", "newline", ",", "file", "=", "self", ".", "output_file", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
execute_tools
Executes the suite of TidyPy tools upon the project and returns the issues that are found. :param config: the TidyPy configuration to use :type config: dict :param path: that path to the project to analyze :type path: str :param progress: the progress reporter object that will receive callbacks during the execution of the tool suite. If not specified, not progress notifications will occur. :type progress: tidypy.Progress :rtype: tidypy.Collector
src/tidypy/core.py
def execute_tools(config, path, progress=None): """ Executes the suite of TidyPy tools upon the project and returns the issues that are found. :param config: the TidyPy configuration to use :type config: dict :param path: that path to the project to analyze :type path: str :param progress: the progress reporter object that will receive callbacks during the execution of the tool suite. If not specified, not progress notifications will occur. :type progress: tidypy.Progress :rtype: tidypy.Collector """ progress = progress or QuietProgress() progress.on_start() manager = SyncManager() manager.start() num_tools = 0 tools = manager.Queue() for name, cls in iteritems(get_tools()): if config[name]['use'] and cls.can_be_used(): num_tools += 1 tools.put({ 'name': name, 'config': config[name], }) collector = Collector(config) if not num_tools: progress.on_finish() return collector notifications = manager.Queue() environment = manager.dict({ 'finder': Finder(path, config), }) workers = [] for _ in range(config['workers']): worker = Worker( args=( tools, notifications, environment, ), ) worker.start() workers.append(worker) while num_tools: try: notification = notifications.get(True, 0.25) except Empty: pass else: if notification['type'] == 'start': progress.on_tool_start(notification['tool']) elif notification['type'] == 'complete': collector.add_issues(notification['issues']) progress.on_tool_finish(notification['tool']) num_tools -= 1 progress.on_finish() return collector
def execute_tools(config, path, progress=None): """ Executes the suite of TidyPy tools upon the project and returns the issues that are found. :param config: the TidyPy configuration to use :type config: dict :param path: that path to the project to analyze :type path: str :param progress: the progress reporter object that will receive callbacks during the execution of the tool suite. If not specified, not progress notifications will occur. :type progress: tidypy.Progress :rtype: tidypy.Collector """ progress = progress or QuietProgress() progress.on_start() manager = SyncManager() manager.start() num_tools = 0 tools = manager.Queue() for name, cls in iteritems(get_tools()): if config[name]['use'] and cls.can_be_used(): num_tools += 1 tools.put({ 'name': name, 'config': config[name], }) collector = Collector(config) if not num_tools: progress.on_finish() return collector notifications = manager.Queue() environment = manager.dict({ 'finder': Finder(path, config), }) workers = [] for _ in range(config['workers']): worker = Worker( args=( tools, notifications, environment, ), ) worker.start() workers.append(worker) while num_tools: try: notification = notifications.get(True, 0.25) except Empty: pass else: if notification['type'] == 'start': progress.on_tool_start(notification['tool']) elif notification['type'] == 'complete': collector.add_issues(notification['issues']) progress.on_tool_finish(notification['tool']) num_tools -= 1 progress.on_finish() return collector
[ "Executes", "the", "suite", "of", "TidyPy", "tools", "upon", "the", "project", "and", "returns", "the", "issues", "that", "are", "found", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/core.py#L85-L155
[ "def", "execute_tools", "(", "config", ",", "path", ",", "progress", "=", "None", ")", ":", "progress", "=", "progress", "or", "QuietProgress", "(", ")", "progress", ".", "on_start", "(", ")", "manager", "=", "SyncManager", "(", ")", "manager", ".", "start", "(", ")", "num_tools", "=", "0", "tools", "=", "manager", ".", "Queue", "(", ")", "for", "name", ",", "cls", "in", "iteritems", "(", "get_tools", "(", ")", ")", ":", "if", "config", "[", "name", "]", "[", "'use'", "]", "and", "cls", ".", "can_be_used", "(", ")", ":", "num_tools", "+=", "1", "tools", ".", "put", "(", "{", "'name'", ":", "name", ",", "'config'", ":", "config", "[", "name", "]", ",", "}", ")", "collector", "=", "Collector", "(", "config", ")", "if", "not", "num_tools", ":", "progress", ".", "on_finish", "(", ")", "return", "collector", "notifications", "=", "manager", ".", "Queue", "(", ")", "environment", "=", "manager", ".", "dict", "(", "{", "'finder'", ":", "Finder", "(", "path", ",", "config", ")", ",", "}", ")", "workers", "=", "[", "]", "for", "_", "in", "range", "(", "config", "[", "'workers'", "]", ")", ":", "worker", "=", "Worker", "(", "args", "=", "(", "tools", ",", "notifications", ",", "environment", ",", ")", ",", ")", "worker", ".", "start", "(", ")", "workers", ".", "append", "(", "worker", ")", "while", "num_tools", ":", "try", ":", "notification", "=", "notifications", ".", "get", "(", "True", ",", "0.25", ")", "except", "Empty", ":", "pass", "else", ":", "if", "notification", "[", "'type'", "]", "==", "'start'", ":", "progress", ".", "on_tool_start", "(", "notification", "[", "'tool'", "]", ")", "elif", "notification", "[", "'type'", "]", "==", "'complete'", ":", "collector", ".", "add_issues", "(", "notification", "[", "'issues'", "]", ")", "progress", ".", "on_tool_finish", "(", "notification", "[", "'tool'", "]", ")", "num_tools", "-=", "1", "progress", ".", "on_finish", "(", ")", "return", "collector" ]
3c3497ca377fbbe937103b77b02b326c860c748f
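A minimal usage sketch for the execute_tools entry point shown in the record above. It assumes the src/tidypy layout installs as the tidypy package (so tidypy.core and tidypy.config are importable per the path fields), uses a hypothetical project path, and the progress object implements only the four callbacks that execute_tools is shown invoking.

    from tidypy.config import get_project_config
    from tidypy.core import execute_tools

    class PrintingProgress(object):
        # Only the callbacks that execute_tools invokes are implemented here.
        def on_start(self):
            print('analysis started')
        def on_tool_start(self, tool):
            print('running', tool)
        def on_tool_finish(self, tool):
            print('finished', tool)
        def on_finish(self):
            print('analysis complete')

    project_path = '/path/to/project'  # hypothetical path
    config = get_project_config(project_path)
    collector = execute_tools(config, project_path, progress=PrintingProgress())
    print('%d issues found' % collector.issue_count())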
valid
execute_reports
Executes the configured suite of issue reports. :param config: the TidyPy configuration to use :type config: dict :param path: that path to the project that was analyzed :type path: str :param collector: the issues to report :type collector: tidypy.Collector
src/tidypy/core.py
def execute_reports( config, path, collector, on_report_finish=None, output_file=None): """ Executes the configured suite of issue reports. :param config: the TidyPy configuration to use :type config: dict :param path: that path to the project that was analyzed :type path: str :param collector: the issues to report :type collector: tidypy.Collector """ reports = get_reports() for report in config.get('requested_reports', []): if report.get('type') and report['type'] in reports: cfg = config.get('report', {}).get(report['type'], {}) cfg.update(report) reporter = reports[report['type']]( cfg, path, output_file=output_file, ) reporter.produce(collector) if on_report_finish: on_report_finish(report)
def execute_reports( config, path, collector, on_report_finish=None, output_file=None): """ Executes the configured suite of issue reports. :param config: the TidyPy configuration to use :type config: dict :param path: that path to the project that was analyzed :type path: str :param collector: the issues to report :type collector: tidypy.Collector """ reports = get_reports() for report in config.get('requested_reports', []): if report.get('type') and report['type'] in reports: cfg = config.get('report', {}).get(report['type'], {}) cfg.update(report) reporter = reports[report['type']]( cfg, path, output_file=output_file, ) reporter.produce(collector) if on_report_finish: on_report_finish(report)
[ "Executes", "the", "configured", "suite", "of", "issue", "reports", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/core.py#L158-L187
[ "def", "execute_reports", "(", "config", ",", "path", ",", "collector", ",", "on_report_finish", "=", "None", ",", "output_file", "=", "None", ")", ":", "reports", "=", "get_reports", "(", ")", "for", "report", "in", "config", ".", "get", "(", "'requested_reports'", ",", "[", "]", ")", ":", "if", "report", ".", "get", "(", "'type'", ")", "and", "report", "[", "'type'", "]", "in", "reports", ":", "cfg", "=", "config", ".", "get", "(", "'report'", ",", "{", "}", ")", ".", "get", "(", "report", "[", "'type'", "]", ",", "{", "}", ")", "cfg", ".", "update", "(", "report", ")", "reporter", "=", "reports", "[", "report", "[", "'type'", "]", "]", "(", "cfg", ",", "path", ",", "output_file", "=", "output_file", ",", ")", "reporter", ".", "produce", "(", "collector", ")", "if", "on_report_finish", ":", "on_report_finish", "(", "report", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
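Continuing the sketch above, execute_reports renders whatever was collected using the reports requested in the configuration (the stock configuration requests a console report). The project path is again hypothetical, and the callback simply echoes which report type just finished.

    from tidypy.config import get_project_config
    from tidypy.core import execute_reports, execute_tools

    project_path = '/path/to/project'  # hypothetical path
    config = get_project_config(project_path)
    collector = execute_tools(config, project_path)

    # Each requested report receives the collected issues in turn.
    execute_reports(
        config,
        project_path,
        collector,
        on_report_finish=lambda report: print('rendered %s report' % report['type']),
    )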
valid
Finder.is_excluded
Determines whether or not the specified file is excluded by the project's configuration. :param path: the path to check :type path: pathlib.Path :rtype: bool
src/tidypy/finder.py
def is_excluded(self, path): """ Determines whether or not the specified file is excluded by the project's configuration. :param path: the path to check :type path: pathlib.Path :rtype: bool """ relpath = path.relative_to(self.base_path).as_posix() return matches_masks(relpath, self.excludes)
def is_excluded(self, path): """ Determines whether or not the specified file is excluded by the project's configuration. :param path: the path to check :type path: pathlib.Path :rtype: bool """ relpath = path.relative_to(self.base_path).as_posix() return matches_masks(relpath, self.excludes)
[ "Determines", "whether", "or", "not", "the", "specified", "file", "is", "excluded", "by", "the", "project", "s", "configuration", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/finder.py#L76-L87
[ "def", "is_excluded", "(", "self", ",", "path", ")", ":", "relpath", "=", "path", ".", "relative_to", "(", "self", ".", "base_path", ")", ".", "as_posix", "(", ")", "return", "matches_masks", "(", "relpath", ",", "self", ".", "excludes", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
Finder.is_excluded_dir
Determines whether or not the specified directory is excluded by the project's configuration. :param path: the path to check :type path: pathlib.Path :rtype: bool
src/tidypy/finder.py
def is_excluded_dir(self, path): """ Determines whether or not the specified directory is excluded by the project's configuration. :param path: the path to check :type path: pathlib.Path :rtype: bool """ if self.is_excluded(path): return True return matches_masks(path.name, ALWAYS_EXCLUDED_DIRS)
def is_excluded_dir(self, path): """ Determines whether or not the specified directory is excluded by the project's configuration. :param path: the path to check :type path: pathlib.Path :rtype: bool """ if self.is_excluded(path): return True return matches_masks(path.name, ALWAYS_EXCLUDED_DIRS)
[ "Determines", "whether", "or", "not", "the", "specified", "directory", "is", "excluded", "by", "the", "project", "s", "configuration", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/finder.py#L89-L101
[ "def", "is_excluded_dir", "(", "self", ",", "path", ")", ":", "if", "self", ".", "is_excluded", "(", "path", ")", ":", "return", "True", "return", "matches_masks", "(", "path", ".", "name", ",", "ALWAYS_EXCLUDED_DIRS", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
Finder.files
A generator that produces a sequence of paths to files in the project that matches the specified filters. :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str)
src/tidypy/finder.py
def files(self, filters=None): """ A generator that produces a sequence of paths to files in the project that matches the specified filters. :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str) """ filters = compile_masks(filters or [r'.*']) for files in itervalues(self._found): for file_ in files: relpath = text_type(Path(file_).relative_to(self.base_path)) if matches_masks(relpath, filters): yield file_
def files(self, filters=None): """ A generator that produces a sequence of paths to files in the project that matches the specified filters. :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str) """ filters = compile_masks(filters or [r'.*']) for files in itervalues(self._found): for file_ in files: relpath = text_type(Path(file_).relative_to(self.base_path)) if matches_masks(relpath, filters): yield file_
[ "A", "generator", "that", "produces", "a", "sequence", "of", "paths", "to", "files", "in", "the", "project", "that", "matches", "the", "specified", "filters", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/finder.py#L103-L120
[ "def", "files", "(", "self", ",", "filters", "=", "None", ")", ":", "filters", "=", "compile_masks", "(", "filters", "or", "[", "r'.*'", "]", ")", "for", "files", "in", "itervalues", "(", "self", ".", "_found", ")", ":", "for", "file_", "in", "files", ":", "relpath", "=", "text_type", "(", "Path", "(", "file_", ")", ".", "relative_to", "(", "self", ".", "base_path", ")", ")", "if", "matches_masks", "(", "relpath", ",", "filters", ")", ":", "yield", "file_" ]
3c3497ca377fbbe937103b77b02b326c860c748f
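A short sketch of using Finder.files directly, assuming Finder is importable from tidypy.finder (per the path field) and is fed the stock configuration so the exclude settings exist; the project path is hypothetical and the filters are ordinary regular expressions, as the docstring states.

    from tidypy.config import get_default_config
    from tidypy.finder import Finder

    finder = Finder('/path/to/project', get_default_config())  # hypothetical path

    # Yield only reStructuredText and INI-style files found in the project.
    for filepath in finder.files(filters=[r'\.rst$', r'\.cfg$']):
        print(filepath)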
valid
Finder.directories
A generator that produces a sequence of paths to directories in the project that matches the specified filters. :param filters: the regular expressions to use when finding directories in the project. If not specified, all directories are returned. :type filters: list(str) :param containing: if a directory passes through the specified filters, it is checked for the presence of a file that matches one of the regular expressions in this parameter. :type containing: list(str)
src/tidypy/finder.py
def directories(self, filters=None, containing=None): """ A generator that produces a sequence of paths to directories in the project that matches the specified filters. :param filters: the regular expressions to use when finding directories in the project. If not specified, all directories are returned. :type filters: list(str) :param containing: if a directory passes through the specified filters, it is checked for the presence of a file that matches one of the regular expressions in this parameter. :type containing: list(str) """ filters = compile_masks(filters or [r'.*']) contains = compile_masks(containing) for dirname, files in iteritems(self._found): relpath = text_type(Path(dirname).relative_to(self.base_path)) if matches_masks(relpath, filters): if not contains or self._contains(files, contains): yield dirname
def directories(self, filters=None, containing=None): """ A generator that produces a sequence of paths to directories in the project that matches the specified filters. :param filters: the regular expressions to use when finding directories in the project. If not specified, all directories are returned. :type filters: list(str) :param containing: if a directory passes through the specified filters, it is checked for the presence of a file that matches one of the regular expressions in this parameter. :type containing: list(str) """ filters = compile_masks(filters or [r'.*']) contains = compile_masks(containing) for dirname, files in iteritems(self._found): relpath = text_type(Path(dirname).relative_to(self.base_path)) if matches_masks(relpath, filters): if not contains or self._contains(files, contains): yield dirname
[ "A", "generator", "that", "produces", "a", "sequence", "of", "paths", "to", "directories", "in", "the", "project", "that", "matches", "the", "specified", "filters", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/finder.py#L128-L151
[ "def", "directories", "(", "self", ",", "filters", "=", "None", ",", "containing", "=", "None", ")", ":", "filters", "=", "compile_masks", "(", "filters", "or", "[", "r'.*'", "]", ")", "contains", "=", "compile_masks", "(", "containing", ")", "for", "dirname", ",", "files", "in", "iteritems", "(", "self", ".", "_found", ")", ":", "relpath", "=", "text_type", "(", "Path", "(", "dirname", ")", ".", "relative_to", "(", "self", ".", "base_path", ")", ")", "if", "matches_masks", "(", "relpath", ",", "filters", ")", ":", "if", "not", "contains", "or", "self", ".", "_contains", "(", "files", ",", "contains", ")", ":", "yield", "dirname" ]
3c3497ca377fbbe937103b77b02b326c860c748f
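Finder.directories layers a "containing" check on top of the same mechanism: a directory only passes if at least one of its files matches one of those expressions. A sketch under the same assumptions as the previous example (hypothetical project path, stock configuration):

    from tidypy.config import get_default_config
    from tidypy.finder import Finder

    finder = Finder('/path/to/project', get_default_config())  # hypothetical path

    # Directories named "tests" that actually contain at least one Python file.
    for dirname in finder.directories(filters=[r'tests$'], containing=[r'\.py$']):
        print(dirname)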
valid
Finder.modules
A generator that produces a sequence of paths to files that look to be Python modules (e.g., ``*.py``). :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str)
src/tidypy/finder.py
def modules(self, filters=None): """ A generator that produces a sequence of paths to files that look to be Python modules (e.g., ``*.py``). :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str) """ masks = compile_masks(r'\.py$') for file_ in self.files(filters=filters): if matches_masks(file_, masks): yield file_
def modules(self, filters=None): """ A generator that produces a sequence of paths to files that look to be Python modules (e.g., ``*.py``). :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str) """ masks = compile_masks(r'\.py$') for file_ in self.files(filters=filters): if matches_masks(file_, masks): yield file_
[ "A", "generator", "that", "produces", "a", "sequence", "of", "paths", "to", "files", "that", "look", "to", "be", "Python", "modules", "(", "e", ".", "g", ".", "*", ".", "py", ")", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/finder.py#L169-L183
[ "def", "modules", "(", "self", ",", "filters", "=", "None", ")", ":", "masks", "=", "compile_masks", "(", "r'\\.py$'", ")", "for", "file_", "in", "self", ".", "files", "(", "filters", "=", "filters", ")", ":", "if", "matches_masks", "(", "file_", ",", "masks", ")", ":", "yield", "file_" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
Finder.sys_paths
Produces a list of paths that would be suitable to use in ``sys.path`` in order to access the Python modules/packages found in this project. :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str)
src/tidypy/finder.py
def sys_paths(self, filters=None): """ Produces a list of paths that would be suitable to use in ``sys.path`` in order to access the Python modules/packages found in this project. :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str) """ paths = set() packages = list(self.packages(filters=filters)) for module in self.modules(filters=filters): parent = text_type(Path(module).parent) if parent not in packages: paths.add(parent) paths.update(self.topmost_directories([ text_type(Path(package).parent) for package in packages ])) return list(paths)
def sys_paths(self, filters=None): """ Produces a list of paths that would be suitable to use in ``sys.path`` in order to access the Python modules/packages found in this project. :param filters: the regular expressions to use when finding files in the project. If not specified, all files are returned. :type filters: list(str) """ paths = set() packages = list(self.packages(filters=filters)) for module in self.modules(filters=filters): parent = text_type(Path(module).parent) if parent not in packages: paths.add(parent) paths.update(self.topmost_directories([ text_type(Path(package).parent) for package in packages ])) return list(paths)
[ "Produces", "a", "list", "of", "paths", "that", "would", "be", "suitable", "to", "use", "in", "sys", ".", "path", "in", "order", "to", "access", "the", "Python", "modules", "/", "packages", "found", "in", "this", "project", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/finder.py#L185-L210
[ "def", "sys_paths", "(", "self", ",", "filters", "=", "None", ")", ":", "paths", "=", "set", "(", ")", "packages", "=", "list", "(", "self", ".", "packages", "(", "filters", "=", "filters", ")", ")", "for", "module", "in", "self", ".", "modules", "(", "filters", "=", "filters", ")", ":", "parent", "=", "text_type", "(", "Path", "(", "module", ")", ".", "parent", ")", "if", "parent", "not", "in", "packages", ":", "paths", ".", "add", "(", "parent", ")", "paths", ".", "update", "(", "self", ".", "topmost_directories", "(", "[", "text_type", "(", "Path", "(", "package", ")", ".", "parent", ")", "for", "package", "in", "packages", "]", ")", ")", "return", "list", "(", "paths", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
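sys_paths is naturally paired with the mod_sys_path context manager from tidypy.util (shown later in this listing) so that a tool can import the analyzed project's own modules. A sketch under the same assumptions as the Finder examples above; the imported module name is hypothetical.

    import importlib

    from tidypy.config import get_default_config
    from tidypy.finder import Finder
    from tidypy.util import mod_sys_path

    finder = Finder('/path/to/project', get_default_config())  # hypothetical path

    with mod_sys_path(finder.sys_paths()):
        # Inside the block the project's package roots are importable;
        # "mypackage" is a hypothetical module name.
        module = importlib.import_module('mypackage')
    # sys.path is restored once the block exits.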
valid
Collector.add_issues
Adds an issue to the collection. :param issues: the issue(s) to add :type issues: tidypy.Issue or list(tidypy.Issue)
src/tidypy/collector.py
def add_issues(self, issues): """ Adds an issue to the collection. :param issues: the issue(s) to add :type issues: tidypy.Issue or list(tidypy.Issue) """ if not isinstance(issues, (list, tuple)): issues = [issues] with self._lock: self._all_issues.extend(issues) self._cleaned_issues = None
def add_issues(self, issues): """ Adds an issue to the collection. :param issues: the issue(s) to add :type issues: tidypy.Issue or list(tidypy.Issue) """ if not isinstance(issues, (list, tuple)): issues = [issues] with self._lock: self._all_issues.extend(issues) self._cleaned_issues = None
[ "Adds", "an", "issue", "to", "the", "collection", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/collector.py#L46-L58
[ "def", "add_issues", "(", "self", ",", "issues", ")", ":", "if", "not", "isinstance", "(", "issues", ",", "(", "list", ",", "tuple", ")", ")", ":", "issues", "=", "[", "issues", "]", "with", "self", ".", "_lock", ":", "self", ".", "_all_issues", ".", "extend", "(", "issues", ")", "self", ".", "_cleaned_issues", "=", "None" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
Collector.issue_count
Returns the number of issues in the collection. :param include_unclean: whether or not to include issues that are being ignored due to being a duplicate, excluded, etc. :type include_unclean: bool :rtype: int
src/tidypy/collector.py
def issue_count(self, include_unclean=False): """ Returns the number of issues in the collection. :param include_unclean: whether or not to include issues that are being ignored due to being a duplicate, excluded, etc. :type include_unclean: bool :rtype: int """ if include_unclean: return len(self._all_issues) self._ensure_cleaned_issues() return len(self._cleaned_issues)
def issue_count(self, include_unclean=False): """ Returns the number of issues in the collection. :param include_unclean: whether or not to include issues that are being ignored due to being a duplicate, excluded, etc. :type include_unclean: bool :rtype: int """ if include_unclean: return len(self._all_issues) self._ensure_cleaned_issues() return len(self._cleaned_issues)
[ "Returns", "the", "number", "of", "issues", "in", "the", "collection", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/collector.py#L60-L74
[ "def", "issue_count", "(", "self", ",", "include_unclean", "=", "False", ")", ":", "if", "include_unclean", ":", "return", "len", "(", "self", ".", "_all_issues", ")", "self", ".", "_ensure_cleaned_issues", "(", ")", "return", "len", "(", "self", ".", "_cleaned_issues", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
Collector.get_issues
Retrieves the issues in the collection. :param sortby: the properties to sort the issues by :type sortby: list(str) :rtype: list(tidypy.Issue)
src/tidypy/collector.py
def get_issues(self, sortby=None): """ Retrieves the issues in the collection. :param sortby: the properties to sort the issues by :type sortby: list(str) :rtype: list(tidypy.Issue) """ self._ensure_cleaned_issues() return self._sort_issues(self._cleaned_issues, sortby)
def get_issues(self, sortby=None): """ Retrieves the issues in the collection. :param sortby: the properties to sort the issues by :type sortby: list(str) :rtype: list(tidypy.Issue) """ self._ensure_cleaned_issues() return self._sort_issues(self._cleaned_issues, sortby)
[ "Retrieves", "the", "issues", "in", "the", "collection", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/collector.py#L76-L86
[ "def", "get_issues", "(", "self", ",", "sortby", "=", "None", ")", ":", "self", ".", "_ensure_cleaned_issues", "(", ")", "return", "self", ".", "_sort_issues", "(", "self", ".", "_cleaned_issues", ",", "sortby", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
Collector.get_grouped_issues
Retrieves the issues in the collection grouped into buckets according to the key generated by the keyfunc. :param keyfunc: a function that will be used to generate the key that identifies the group that an issue will be assigned to. This function receives a single tidypy.Issue argument and must return a string. If not specified, the filename of the issue will be used. :type keyfunc: func :param sortby: the properties to sort the issues by :type sortby: list(str) :rtype: OrderedDict
src/tidypy/collector.py
def get_grouped_issues(self, keyfunc=None, sortby=None): """ Retrieves the issues in the collection grouped into buckets according to the key generated by the keyfunc. :param keyfunc: a function that will be used to generate the key that identifies the group that an issue will be assigned to. This function receives a single tidypy.Issue argument and must return a string. If not specified, the filename of the issue will be used. :type keyfunc: func :param sortby: the properties to sort the issues by :type sortby: list(str) :rtype: OrderedDict """ if not keyfunc: keyfunc = default_group if not sortby: sortby = self.DEFAULT_SORT self._ensure_cleaned_issues() return self._group_issues(self._cleaned_issues, keyfunc, sortby)
def get_grouped_issues(self, keyfunc=None, sortby=None): """ Retrieves the issues in the collection grouped into buckets according to the key generated by the keyfunc. :param keyfunc: a function that will be used to generate the key that identifies the group that an issue will be assigned to. This function receives a single tidypy.Issue argument and must return a string. If not specified, the filename of the issue will be used. :type keyfunc: func :param sortby: the properties to sort the issues by :type sortby: list(str) :rtype: OrderedDict """ if not keyfunc: keyfunc = default_group if not sortby: sortby = self.DEFAULT_SORT self._ensure_cleaned_issues() return self._group_issues(self._cleaned_issues, keyfunc, sortby)
[ "Retrieves", "the", "issues", "in", "the", "collection", "grouped", "into", "buckets", "according", "to", "the", "key", "generated", "by", "the", "keyfunc", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/collector.py#L88-L109
[ "def", "get_grouped_issues", "(", "self", ",", "keyfunc", "=", "None", ",", "sortby", "=", "None", ")", ":", "if", "not", "keyfunc", ":", "keyfunc", "=", "default_group", "if", "not", "sortby", ":", "sortby", "=", "self", ".", "DEFAULT_SORT", "self", ".", "_ensure_cleaned_issues", "(", ")", "return", "self", ".", "_group_issues", "(", "self", ".", "_cleaned_issues", ",", "keyfunc", ",", "sortby", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
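A sketch of consuming a Collector after a run. The sortby property names and the issue attribute used in the keyfunc ('filename') are assumptions inferred from the docstrings above, not confirmed signatures.

    import os

    from tidypy.config import get_project_config
    from tidypy.core import execute_tools

    project_path = '/path/to/project'  # hypothetical path
    config = get_project_config(project_path)
    collector = execute_tools(config, project_path)

    # Group issues by the directory of the file they were reported against.
    grouped = collector.get_grouped_issues(
        keyfunc=lambda issue: os.path.dirname(issue.filename),  # attribute name assumed
        sortby=['filename', 'line'],  # property names assumed
    )
    for group, issues in grouped.items():
        print(group, len(issues))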
valid
Extender.parse
A convenience method for parsing a TOML-serialized configuration. :param content: a TOML string containing a TidyPy configuration :type content: str :param is_pyproject: whether or not the content is (or resembles) a ``pyproject.toml`` file, where the TidyPy configuration is located within a key named ``tool``. :type is_pyproject: bool :rtype: dict
src/tidypy/extenders/base.py
def parse(cls, content, is_pyproject=False): """ A convenience method for parsing a TOML-serialized configuration. :param content: a TOML string containing a TidyPy configuration :type content: str :param is_pyproject: whether or not the content is (or resembles) a ``pyproject.toml`` file, where the TidyPy configuration is located within a key named ``tool``. :type is_pyproject: bool :rtype: dict """ parsed = pytoml.loads(content) if is_pyproject: parsed = parsed.get('tool', {}) parsed = parsed.get('tidypy', {}) return parsed
def parse(cls, content, is_pyproject=False): """ A convenience method for parsing a TOML-serialized configuration. :param content: a TOML string containing a TidyPy configuration :type content: str :param is_pyproject: whether or not the content is (or resembles) a ``pyproject.toml`` file, where the TidyPy configuration is located within a key named ``tool``. :type is_pyproject: bool :rtype: dict """ parsed = pytoml.loads(content) if is_pyproject: parsed = parsed.get('tool', {}) parsed = parsed.get('tidypy', {}) return parsed
[ "A", "convenience", "method", "for", "parsing", "a", "TOML", "-", "serialized", "configuration", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/extenders/base.py#L40-L60
[ "def", "parse", "(", "cls", ",", "content", ",", "is_pyproject", "=", "False", ")", ":", "parsed", "=", "pytoml", ".", "loads", "(", "content", ")", "if", "is_pyproject", ":", "parsed", "=", "parsed", ".", "get", "(", "'tool'", ",", "{", "}", ")", "parsed", "=", "parsed", ".", "get", "(", "'tidypy'", ",", "{", "}", ")", "return", "parsed" ]
3c3497ca377fbbe937103b77b02b326c860c748f
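Because Extender.parse is a classmethod that only deserializes TOML, it can be exercised on its own. The snippet below assumes the base class is importable from tidypy.extenders.base (per the path field) and feeds it a pyproject-style document.

    from tidypy.extenders.base import Extender

    # A pyproject-style document: the TidyPy settings live under [tool.tidypy].
    content = '[tool.tidypy]\nworkers = 2\nexclude = ["docs/"]\n'

    config = Extender.parse(content, is_pyproject=True)
    print(config)  # {'workers': 2, 'exclude': ['docs/']}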
valid
get_tools
Retrieves the TidyPy tools that are available in the current Python environment. The returned dictionary has keys that are the tool names and values are the tool classes. :rtype: dict
src/tidypy/config.py
def get_tools(): """ Retrieves the TidyPy tools that are available in the current Python environment. The returned dictionary has keys that are the tool names and values are the tool classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_tools, '_CACHE'): get_tools._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.tools'): try: get_tools._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load tool "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_tools._CACHE
def get_tools(): """ Retrieves the TidyPy tools that are available in the current Python environment. The returned dictionary has keys that are the tool names and values are the tool classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_tools, '_CACHE'): get_tools._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.tools'): try: get_tools._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load tool "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_tools._CACHE
[ "Retrieves", "the", "TidyPy", "tools", "that", "are", "available", "in", "the", "current", "Python", "environment", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/config.py#L18-L44
[ "def", "get_tools", "(", ")", ":", "# pylint: disable=protected-access", "if", "not", "hasattr", "(", "get_tools", ",", "'_CACHE'", ")", ":", "get_tools", ".", "_CACHE", "=", "dict", "(", ")", "for", "entry", "in", "pkg_resources", ".", "iter_entry_points", "(", "'tidypy.tools'", ")", ":", "try", ":", "get_tools", ".", "_CACHE", "[", "entry", ".", "name", "]", "=", "entry", ".", "load", "(", ")", "except", "ImportError", "as", "exc", ":", "# pragma: no cover", "output_error", "(", "'Could not load tool \"%s\" defined by \"%s\": %s'", "%", "(", "entry", ",", "entry", ".", "dist", ",", "exc", ",", ")", ",", ")", "return", "get_tools", ".", "_CACHE" ]
3c3497ca377fbbe937103b77b02b326c860c748f
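A sketch that enumerates the installed tools via the entry-point registry described above; every tool class is expected to expose the can_be_used and get_default_config classmethods that the other records in this listing rely on.

    from tidypy.config import get_tools

    for name, tool_cls in sorted(get_tools().items()):
        # can_be_used() reports whether the tool can run in the current
        # environment (e.g., its dependencies are importable).
        print(name, tool_cls.can_be_used())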
valid
get_reports
Retrieves the TidyPy issue reports that are available in the current Python environment. The returned dictionary has keys are the report names and values are the report classes. :rtype: dict
src/tidypy/config.py
def get_reports(): """ Retrieves the TidyPy issue reports that are available in the current Python environment. The returned dictionary has keys are the report names and values are the report classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_reports, '_CACHE'): get_reports._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.reports'): try: get_reports._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load report "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_reports._CACHE
def get_reports(): """ Retrieves the TidyPy issue reports that are available in the current Python environment. The returned dictionary has keys are the report names and values are the report classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_reports, '_CACHE'): get_reports._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.reports'): try: get_reports._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load report "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_reports._CACHE
[ "Retrieves", "the", "TidyPy", "issue", "reports", "that", "are", "available", "in", "the", "current", "Python", "environment", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/config.py#L47-L73
[ "def", "get_reports", "(", ")", ":", "# pylint: disable=protected-access", "if", "not", "hasattr", "(", "get_reports", ",", "'_CACHE'", ")", ":", "get_reports", ".", "_CACHE", "=", "dict", "(", ")", "for", "entry", "in", "pkg_resources", ".", "iter_entry_points", "(", "'tidypy.reports'", ")", ":", "try", ":", "get_reports", ".", "_CACHE", "[", "entry", ".", "name", "]", "=", "entry", ".", "load", "(", ")", "except", "ImportError", "as", "exc", ":", "# pragma: no cover", "output_error", "(", "'Could not load report \"%s\" defined by \"%s\": %s'", "%", "(", "entry", ",", "entry", ".", "dist", ",", "exc", ",", ")", ",", ")", "return", "get_reports", ".", "_CACHE" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
get_extenders
Retrieves the TidyPy configuration extenders that are available in the current Python environment. The returned dictionary has keys are the extender names and values are the extender classes. :rtype: dict
src/tidypy/config.py
def get_extenders(): """ Retrieves the TidyPy configuration extenders that are available in the current Python environment. The returned dictionary has keys are the extender names and values are the extender classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_extenders, '_CACHE'): get_extenders._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.extenders'): try: get_extenders._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load extender "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_extenders._CACHE
def get_extenders(): """ Retrieves the TidyPy configuration extenders that are available in the current Python environment. The returned dictionary has keys are the extender names and values are the extender classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_extenders, '_CACHE'): get_extenders._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.extenders'): try: get_extenders._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load extender "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_extenders._CACHE
[ "Retrieves", "the", "TidyPy", "configuration", "extenders", "that", "are", "available", "in", "the", "current", "Python", "environment", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/config.py#L76-L102
[ "def", "get_extenders", "(", ")", ":", "# pylint: disable=protected-access", "if", "not", "hasattr", "(", "get_extenders", ",", "'_CACHE'", ")", ":", "get_extenders", ".", "_CACHE", "=", "dict", "(", ")", "for", "entry", "in", "pkg_resources", ".", "iter_entry_points", "(", "'tidypy.extenders'", ")", ":", "try", ":", "get_extenders", ".", "_CACHE", "[", "entry", ".", "name", "]", "=", "entry", ".", "load", "(", ")", "except", "ImportError", "as", "exc", ":", "# pragma: no cover", "output_error", "(", "'Could not load extender \"%s\" defined by \"%s\": %s'", "%", "(", "entry", ",", "entry", ".", "dist", ",", "exc", ",", ")", ",", ")", "return", "get_extenders", ".", "_CACHE" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
purge_config_cache
Clears out the cache of TidyPy configurations that were retrieved from outside the normal locations.
src/tidypy/config.py
def purge_config_cache(location=None): """ Clears out the cache of TidyPy configurations that were retrieved from outside the normal locations. """ cache_path = get_cache_path(location) if location: os.remove(cache_path) else: shutil.rmtree(cache_path)
def purge_config_cache(location=None): """ Clears out the cache of TidyPy configurations that were retrieved from outside the normal locations. """ cache_path = get_cache_path(location) if location: os.remove(cache_path) else: shutil.rmtree(cache_path)
[ "Clears", "out", "the", "cache", "of", "TidyPy", "configurations", "that", "were", "retrieved", "from", "outside", "the", "normal", "locations", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/config.py#L124-L135
[ "def", "purge_config_cache", "(", "location", "=", "None", ")", ":", "cache_path", "=", "get_cache_path", "(", "location", ")", "if", "location", ":", "os", ".", "remove", "(", "cache_path", ")", "else", ":", "shutil", ".", "rmtree", "(", "cache_path", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
get_default_config
Produces a stock/out-of-the-box TidyPy configuration. :rtype: dict
src/tidypy/config.py
def get_default_config(): """ Produces a stock/out-of-the-box TidyPy configuration. :rtype: dict """ config = {} for name, cls in iteritems(get_tools()): config[name] = cls.get_default_config() try: workers = multiprocessing.cpu_count() - 1 except NotImplementedError: # pragma: no cover workers = 1 workers = max(1, min(4, workers)) config.update({ 'exclude': [], 'merge-issues': True, 'workers': workers, 'requested_reports': [ { 'type': 'console', }, ], 'disabled': [], 'noqa': True, 'extends': [], 'ignore-missing-extends': False, }) return config
def get_default_config(): """ Produces a stock/out-of-the-box TidyPy configuration. :rtype: dict """ config = {} for name, cls in iteritems(get_tools()): config[name] = cls.get_default_config() try: workers = multiprocessing.cpu_count() - 1 except NotImplementedError: # pragma: no cover workers = 1 workers = max(1, min(4, workers)) config.update({ 'exclude': [], 'merge-issues': True, 'workers': workers, 'requested_reports': [ { 'type': 'console', }, ], 'disabled': [], 'noqa': True, 'extends': [], 'ignore-missing-extends': False, }) return config
[ "Produces", "a", "stock", "/", "out", "-", "of", "-", "the", "-", "box", "TidyPy", "configuration", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/config.py#L201-L234
[ "def", "get_default_config", "(", ")", ":", "config", "=", "{", "}", "for", "name", ",", "cls", "in", "iteritems", "(", "get_tools", "(", ")", ")", ":", "config", "[", "name", "]", "=", "cls", ".", "get_default_config", "(", ")", "try", ":", "workers", "=", "multiprocessing", ".", "cpu_count", "(", ")", "-", "1", "except", "NotImplementedError", ":", "# pragma: no cover", "workers", "=", "1", "workers", "=", "max", "(", "1", ",", "min", "(", "4", ",", "workers", ")", ")", "config", ".", "update", "(", "{", "'exclude'", ":", "[", "]", ",", "'merge-issues'", ":", "True", ",", "'workers'", ":", "workers", ",", "'requested_reports'", ":", "[", "{", "'type'", ":", "'console'", ",", "}", ",", "]", ",", "'disabled'", ":", "[", "]", ",", "'noqa'", ":", "True", ",", "'extends'", ":", "[", "]", ",", "'ignore-missing-extends'", ":", "False", ",", "}", ")", "return", "config" ]
3c3497ca377fbbe937103b77b02b326c860c748f
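The default configuration is a plain dict, so project-specific overrides are usually layered on with merge_dict from tidypy.util (shown later in this listing). A small sketch:

    from tidypy.config import get_default_config
    from tidypy.util import merge_dict

    config = get_default_config()
    print(config['workers'], config['merge-issues'])

    # Layer a couple of overrides on top of the stock settings.
    config = merge_dict(config, {'workers': 1, 'exclude': [r'^docs/']})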
valid
get_user_config
Produces a TidyPy configuration that incorporates the configuration files stored in the current user's home directory. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict
src/tidypy/config.py
def get_user_config(project_path, use_cache=True): """ Produces a TidyPy configuration that incorporates the configuration files stored in the current user's home directory. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict """ if sys.platform == 'win32': user_config = os.path.expanduser(r'~\\tidypy') else: user_config = os.path.join( os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config'), 'tidypy' ) if os.path.exists(user_config): with open(user_config, 'r') as config_file: config = pytoml.load(config_file).get('tidypy', {}) config = merge_dict(get_default_config(), config) config = process_extensions(config, project_path, use_cache=use_cache) return config return None
def get_user_config(project_path, use_cache=True): """ Produces a TidyPy configuration that incorporates the configuration files stored in the current user's home directory. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict """ if sys.platform == 'win32': user_config = os.path.expanduser(r'~\\tidypy') else: user_config = os.path.join( os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config'), 'tidypy' ) if os.path.exists(user_config): with open(user_config, 'r') as config_file: config = pytoml.load(config_file).get('tidypy', {}) config = merge_dict(get_default_config(), config) config = process_extensions(config, project_path, use_cache=use_cache) return config return None
[ "Produces", "a", "TidyPy", "configuration", "that", "incorporates", "the", "configuration", "files", "stored", "in", "the", "current", "user", "s", "home", "directory", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/config.py#L237-L267
[ "def", "get_user_config", "(", "project_path", ",", "use_cache", "=", "True", ")", ":", "if", "sys", ".", "platform", "==", "'win32'", ":", "user_config", "=", "os", ".", "path", ".", "expanduser", "(", "r'~\\\\tidypy'", ")", "else", ":", "user_config", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getenv", "(", "'XDG_CONFIG_HOME'", ")", "or", "os", ".", "path", ".", "expanduser", "(", "'~/.config'", ")", ",", "'tidypy'", ")", "if", "os", ".", "path", ".", "exists", "(", "user_config", ")", ":", "with", "open", "(", "user_config", ",", "'r'", ")", "as", "config_file", ":", "config", "=", "pytoml", ".", "load", "(", "config_file", ")", ".", "get", "(", "'tidypy'", ",", "{", "}", ")", "config", "=", "merge_dict", "(", "get_default_config", "(", ")", ",", "config", ")", "config", "=", "process_extensions", "(", "config", ",", "project_path", ",", "use_cache", "=", "use_cache", ")", "return", "config", "return", "None" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
get_local_config
Produces a TidyPy configuration using the ``pyproject.toml`` in the project's directory. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict
src/tidypy/config.py
def get_local_config(project_path, use_cache=True): """ Produces a TidyPy configuration using the ``pyproject.toml`` in the project's directory. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict """ pyproject_path = os.path.join(project_path, 'pyproject.toml') if os.path.exists(pyproject_path): with open(pyproject_path, 'r') as config_file: config = pytoml.load(config_file) config = config.get('tool', {}).get('tidypy', {}) config = merge_dict(get_default_config(), config) config = process_extensions(config, project_path, use_cache=use_cache) return config return None
def get_local_config(project_path, use_cache=True): """ Produces a TidyPy configuration using the ``pyproject.toml`` in the project's directory. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict """ pyproject_path = os.path.join(project_path, 'pyproject.toml') if os.path.exists(pyproject_path): with open(pyproject_path, 'r') as config_file: config = pytoml.load(config_file) config = config.get('tool', {}).get('tidypy', {}) config = merge_dict(get_default_config(), config) config = process_extensions(config, project_path, use_cache=use_cache) return config return None
[ "Produces", "a", "TidyPy", "configuration", "using", "the", "pyproject", ".", "toml", "in", "the", "project", "s", "directory", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/config.py#L270-L295
[ "def", "get_local_config", "(", "project_path", ",", "use_cache", "=", "True", ")", ":", "pyproject_path", "=", "os", ".", "path", ".", "join", "(", "project_path", ",", "'pyproject.toml'", ")", "if", "os", ".", "path", ".", "exists", "(", "pyproject_path", ")", ":", "with", "open", "(", "pyproject_path", ",", "'r'", ")", "as", "config_file", ":", "config", "=", "pytoml", ".", "load", "(", "config_file", ")", "config", "=", "config", ".", "get", "(", "'tool'", ",", "{", "}", ")", ".", "get", "(", "'tidypy'", ",", "{", "}", ")", "config", "=", "merge_dict", "(", "get_default_config", "(", ")", ",", "config", ")", "config", "=", "process_extensions", "(", "config", ",", "project_path", ",", "use_cache", "=", "use_cache", ")", "return", "config", "return", "None" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
get_project_config
Produces the Tidypy configuration to use for the specified project. If a ``pyproject.toml`` exists, the configuration will be based on that. If not, the TidyPy configuration in the user's home directory will be used. If one does not exist, the default configuration will be used. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict
src/tidypy/config.py
def get_project_config(project_path, use_cache=True): """ Produces the Tidypy configuration to use for the specified project. If a ``pyproject.toml`` exists, the configuration will be based on that. If not, the TidyPy configuration in the user's home directory will be used. If one does not exist, the default configuration will be used. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict """ return get_local_config(project_path, use_cache=use_cache) \ or get_user_config(project_path, use_cache=use_cache) \ or get_default_config()
def get_project_config(project_path, use_cache=True): """ Produces the Tidypy configuration to use for the specified project. If a ``pyproject.toml`` exists, the configuration will be based on that. If not, the TidyPy configuration in the user's home directory will be used. If one does not exist, the default configuration will be used. :param project_path: the path to the project that is going to be analyzed :type project_path: str :param use_cache: whether or not to use cached versions of any remote/referenced TidyPy configurations. If not specified, defaults to ``True``. :type use_cache: bool :rtype: dict """ return get_local_config(project_path, use_cache=use_cache) \ or get_user_config(project_path, use_cache=use_cache) \ or get_default_config()
[ "Produces", "the", "Tidypy", "configuration", "to", "use", "for", "the", "specified", "project", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/config.py#L298-L317
[ "def", "get_project_config", "(", "project_path", ",", "use_cache", "=", "True", ")", ":", "return", "get_local_config", "(", "project_path", ",", "use_cache", "=", "use_cache", ")", "or", "get_user_config", "(", "project_path", ",", "use_cache", "=", "use_cache", ")", "or", "get_default_config", "(", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
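A sketch of the configuration fallback chain described above: pyproject.toml first, then the per-user file, then the built-in defaults. The path is hypothetical; use_cache=False forces any extended configurations to be re-fetched instead of read from the cache.

    from tidypy.config import get_project_config

    # Resolves pyproject.toml -> user config -> built-in defaults, in that order.
    config = get_project_config('/path/to/project', use_cache=False)  # hypothetical path
    print(config['workers'], config['requested_reports'])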
valid
merge_list
Merges the contents of two lists into a new list. :param list1: the first list :type list1: list :param list2: the second list :type list2: list :returns: list
src/tidypy/util.py
def merge_list(list1, list2): """ Merges the contents of two lists into a new list. :param list1: the first list :type list1: list :param list2: the second list :type list2: list :returns: list """ merged = list(list1) for value in list2: if value not in merged: merged.append(value) return merged
def merge_list(list1, list2): """ Merges the contents of two lists into a new list. :param list1: the first list :type list1: list :param list2: the second list :type list2: list :returns: list """ merged = list(list1) for value in list2: if value not in merged: merged.append(value) return merged
[ "Merges", "the", "contents", "of", "two", "lists", "into", "a", "new", "list", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/util.py#L23-L40
[ "def", "merge_list", "(", "list1", ",", "list2", ")", ":", "merged", "=", "list", "(", "list1", ")", "for", "value", "in", "list2", ":", "if", "value", "not", "in", "merged", ":", "merged", ".", "append", "(", "value", ")", "return", "merged" ]
3c3497ca377fbbe937103b77b02b326c860c748f
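merge_list is an order-preserving union; a quick illustration:

    from tidypy.util import merge_list

    print(merge_list([1, 2, 3], [3, 4]))  # [1, 2, 3, 4] -- duplicates are skipped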
valid
merge_dict
Recursively merges the contents of two dictionaries into a new dictionary. When both input dictionaries share a key, the value from ``dict2`` is kept. :param dict1: the first dictionary :type dict1: dict :param dict2: the second dictionary :type dict2: dict :param merge_lists: when this function encounters a key that contains lists in both input dictionaries, this parameter dictates whether or not those lists should be merged. If not specified, defaults to ``False``. :type merge_lists: bool :returns: dict
src/tidypy/util.py
def merge_dict(dict1, dict2, merge_lists=False): """ Recursively merges the contents of two dictionaries into a new dictionary. When both input dictionaries share a key, the value from ``dict2`` is kept. :param dict1: the first dictionary :type dict1: dict :param dict2: the second dictionary :type dict2: dict :param merge_lists: when this function encounters a key that contains lists in both input dictionaries, this parameter dictates whether or not those lists should be merged. If not specified, defaults to ``False``. :type merge_lists: bool :returns: dict """ merged = dict(dict1) for key, value in iteritems(dict2): if isinstance(merged.get(key), dict): merged[key] = merge_dict(merged[key], value) elif merge_lists and isinstance(merged.get(key), list): merged[key] = merge_list(merged[key], value) else: merged[key] = value return merged
def merge_dict(dict1, dict2, merge_lists=False): """ Recursively merges the contents of two dictionaries into a new dictionary. When both input dictionaries share a key, the value from ``dict2`` is kept. :param dict1: the first dictionary :type dict1: dict :param dict2: the second dictionary :type dict2: dict :param merge_lists: when this function encounters a key that contains lists in both input dictionaries, this parameter dictates whether or not those lists should be merged. If not specified, defaults to ``False``. :type merge_lists: bool :returns: dict """ merged = dict(dict1) for key, value in iteritems(dict2): if isinstance(merged.get(key), dict): merged[key] = merge_dict(merged[key], value) elif merge_lists and isinstance(merged.get(key), list): merged[key] = merge_list(merged[key], value) else: merged[key] = value return merged
[ "Recursively", "merges", "the", "contents", "of", "two", "dictionaries", "into", "a", "new", "dictionary", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/util.py#L43-L72
[ "def", "merge_dict", "(", "dict1", ",", "dict2", ",", "merge_lists", "=", "False", ")", ":", "merged", "=", "dict", "(", "dict1", ")", "for", "key", ",", "value", "in", "iteritems", "(", "dict2", ")", ":", "if", "isinstance", "(", "merged", ".", "get", "(", "key", ")", ",", "dict", ")", ":", "merged", "[", "key", "]", "=", "merge_dict", "(", "merged", "[", "key", "]", ",", "value", ")", "elif", "merge_lists", "and", "isinstance", "(", "merged", ".", "get", "(", "key", ")", ",", "list", ")", ":", "merged", "[", "key", "]", "=", "merge_list", "(", "merged", "[", "key", "]", ",", "value", ")", "else", ":", "merged", "[", "key", "]", "=", "value", "return", "merged" ]
3c3497ca377fbbe937103b77b02b326c860c748f
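merge_dict recurses into nested dicts and only merges lists when asked; a quick illustration of both behaviours:

    from tidypy.util import merge_dict

    base = {'report': {'console': True}, 'exclude': ['docs/']}
    override = {'report': {'json': True}, 'exclude': ['build/']}

    # Lists are replaced by default...
    print(merge_dict(base, override)['exclude'])                    # ['build/']
    # ...and unioned when merge_lists=True.
    print(merge_dict(base, override, merge_lists=True)['exclude'])  # ['docs/', 'build/']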
valid
output_error
Prints the specified string to ``stderr``. :param msg: the message to print :type msg: str
src/tidypy/util.py
def output_error(msg): """ Prints the specified string to ``stderr``. :param msg: the message to print :type msg: str """ click.echo(click.style(msg, fg='red'), err=True)
def output_error(msg): """ Prints the specified string to ``stderr``. :param msg: the message to print :type msg: str """ click.echo(click.style(msg, fg='red'), err=True)
[ "Prints", "the", "specified", "string", "to", "stderr", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/util.py#L75-L83
[ "def", "output_error", "(", "msg", ")", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "msg", ",", "fg", "=", "'red'", ")", ",", "err", "=", "True", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
mod_sys_path
A context manager that will append the specified paths to Python's ``sys.path`` during the execution of the block. :param paths: the paths to append :type paths: list(str)
src/tidypy/util.py
def mod_sys_path(paths): """ A context manager that will append the specified paths to Python's ``sys.path`` during the execution of the block. :param paths: the paths to append :type paths: list(str) """ old_path = sys.path sys.path = paths + sys.path try: yield finally: sys.path = old_path
def mod_sys_path(paths): """ A context manager that will append the specified paths to Python's ``sys.path`` during the execution of the block. :param paths: the paths to append :type paths: list(str) """ old_path = sys.path sys.path = paths + sys.path try: yield finally: sys.path = old_path
[ "A", "context", "manager", "that", "will", "append", "the", "specified", "paths", "to", "Python", "s", "sys", ".", "path", "during", "the", "execution", "of", "the", "block", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/util.py#L87-L101
[ "def", "mod_sys_path", "(", "paths", ")", ":", "old_path", "=", "sys", ".", "path", "sys", ".", "path", "=", "paths", "+", "sys", ".", "path", "try", ":", "yield", "finally", ":", "sys", ".", "path", "=", "old_path" ]
3c3497ca377fbbe937103b77b02b326c860c748f
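A small sketch of the mod_sys_path context manager; the prepended directory is hypothetical, and sys.path is restored even if the body raises.

    import sys

    from tidypy.util import mod_sys_path

    extra = '/path/to/project/src'  # hypothetical directory

    with mod_sys_path([extra]):
        assert sys.path[0] == extra  # prepended for the duration of the block
    # Once the block exits, the original sys.path is restored.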
valid
compile_masks
Compiles a list of regular expressions. :param masks: the regular expressions to compile :type masks: list(str) or str :returns: list(regular expression object)
src/tidypy/util.py
def compile_masks(masks): """ Compiles a list of regular expressions. :param masks: the regular expressions to compile :type masks: list(str) or str :returns: list(regular expression object) """ if not masks: masks = [] elif not isinstance(masks, (list, tuple)): masks = [masks] return [ re.compile(mask) for mask in masks ]
def compile_masks(masks): """ Compiles a list of regular expressions. :param masks: the regular expressions to compile :type masks: list(str) or str :returns: list(regular expression object) """ if not masks: masks = [] elif not isinstance(masks, (list, tuple)): masks = [masks] return [ re.compile(mask) for mask in masks ]
[ "Compiles", "a", "list", "of", "regular", "expressions", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/util.py#L145-L162
[ "def", "compile_masks", "(", "masks", ")", ":", "if", "not", "masks", ":", "masks", "=", "[", "]", "elif", "not", "isinstance", "(", "masks", ",", "(", "list", ",", "tuple", ")", ")", ":", "masks", "=", "[", "masks", "]", "return", "[", "re", ".", "compile", "(", "mask", ")", "for", "mask", "in", "masks", "]" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
matches_masks
Determines whether or not the target string matches any of the regular expressions specified. :param target: the string to check :type target: str :param masks: the regular expressions to check against :type masks: list(regular expression object) :returns: bool
src/tidypy/util.py
def matches_masks(target, masks): """ Determines whether or not the target string matches any of the regular expressions specified. :param target: the string to check :type target: str :param masks: the regular expressions to check against :type masks: list(regular expression object) :returns: bool """ for mask in masks: if mask.search(target): return True return False
def matches_masks(target, masks): """ Determines whether or not the target string matches any of the regular expressions specified. :param target: the string to check :type target: str :param masks: the regular expressions to check against :type masks: list(regular expression object) :returns: bool """ for mask in masks: if mask.search(target): return True return False
[ "Determines", "whether", "or", "not", "the", "target", "string", "matches", "any", "of", "the", "regular", "expressions", "specified", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/util.py#L165-L180
[ "def", "matches_masks", "(", "target", ",", "masks", ")", ":", "for", "mask", "in", "masks", ":", "if", "mask", ".", "search", "(", "target", ")", ":", "return", "True", "return", "False" ]
3c3497ca377fbbe937103b77b02b326c860c748f
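compile_masks and matches_masks in the two records above are used as a pair: the first builds a list of compiled patterns, the second tests a target string against them. A minimal sketch, assuming both functions are importable from tidypy.util:

from tidypy.util import compile_masks, matches_masks

masks = compile_masks([r'\.pyc$', r'^tests/'])
print(matches_masks('pkg/module.pyc', masks))   # True  -- matches the first pattern
print(matches_masks('pkg/module.py', masks))    # False -- matches neither pattern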
valid
read_file
Retrieves the contents of the specified file. This function performs simple caching so that the same file isn't read more than once per process. :param filepath: the file to read :type filepath: str :returns: str
src/tidypy/util.py
def read_file(filepath): """ Retrieves the contents of the specified file. This function performs simple caching so that the same file isn't read more than once per process. :param filepath: the file to read :type filepath: str :returns: str """ with _FILE_CACHE_LOCK: if filepath not in _FILE_CACHE: _FILE_CACHE[filepath] = _read_file(filepath) return _FILE_CACHE[filepath]
def read_file(filepath): """ Retrieves the contents of the specified file. This function performs simple caching so that the same file isn't read more than once per process. :param filepath: the file to read :type filepath: str :returns: str """ with _FILE_CACHE_LOCK: if filepath not in _FILE_CACHE: _FILE_CACHE[filepath] = _read_file(filepath) return _FILE_CACHE[filepath]
[ "Retrieves", "the", "contents", "of", "the", "specified", "file", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/util.py#L221-L236
[ "def", "read_file", "(", "filepath", ")", ":", "with", "_FILE_CACHE_LOCK", ":", "if", "filepath", "not", "in", "_FILE_CACHE", ":", "_FILE_CACHE", "[", "filepath", "]", "=", "_read_file", "(", "filepath", ")", "return", "_FILE_CACHE", "[", "filepath", "]" ]
3c3497ca377fbbe937103b77b02b326c860c748f
valid
parse_python_file
Retrieves the AST of the specified file. This function performs simple caching so that the same file isn't read or parsed more than once per process. :param filepath: the file to parse :type filepath: str :returns: ast.AST
src/tidypy/util.py
def parse_python_file(filepath): """ Retrieves the AST of the specified file. This function performs simple caching so that the same file isn't read or parsed more than once per process. :param filepath: the file to parse :type filepath: str :returns: ast.AST """ with _AST_CACHE_LOCK: if filepath not in _AST_CACHE: source = read_file(filepath) _AST_CACHE[filepath] = ast.parse(source, filename=filepath) return _AST_CACHE[filepath]
def parse_python_file(filepath): """ Retrieves the AST of the specified file. This function performs simple caching so that the same file isn't read or parsed more than once per process. :param filepath: the file to parse :type filepath: str :returns: ast.AST """ with _AST_CACHE_LOCK: if filepath not in _AST_CACHE: source = read_file(filepath) _AST_CACHE[filepath] = ast.parse(source, filename=filepath) return _AST_CACHE[filepath]
[ "Retrieves", "the", "AST", "of", "the", "specified", "file", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/util.py#L243-L259
[ "def", "parse_python_file", "(", "filepath", ")", ":", "with", "_AST_CACHE_LOCK", ":", "if", "filepath", "not", "in", "_AST_CACHE", ":", "source", "=", "read_file", "(", "filepath", ")", "_AST_CACHE", "[", "filepath", "]", "=", "ast", ".", "parse", "(", "source", ",", "filename", "=", "filepath", ")", "return", "_AST_CACHE", "[", "filepath", "]" ]
3c3497ca377fbbe937103b77b02b326c860c748f
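Both read_file and parse_python_file above memoise their results for the life of the process behind module-level locks; the caches (_FILE_CACHE, _AST_CACHE) and the _read_file helper are not shown in these records. A small sketch of the observable effect, assuming the functions are importable from tidypy.util and that a setup.py exists in the working directory:

from tidypy.util import parse_python_file, read_file

source_a = read_file('setup.py')
source_b = read_file('setup.py')
print(source_a is source_b)   # True: the file is read once per process and cached

tree_a = parse_python_file('setup.py')
tree_b = parse_python_file('setup.py')
print(tree_a is tree_b)       # True: the AST is parsed once per process and cached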
valid
Progress.on_tool_finish
Called when an individual tool completes execution. :param tool: the name of the tool that completed :type tool: str
src/tidypy/progress.py
def on_tool_finish(self, tool): """ Called when an individual tool completes execution. :param tool: the name of the tool that completed :type tool: str """ with self._lock: if tool in self.current_tools: self.current_tools.remove(tool) self.completed_tools.append(tool)
def on_tool_finish(self, tool): """ Called when an individual tool completes execution. :param tool: the name of the tool that completed :type tool: str """ with self._lock: if tool in self.current_tools: self.current_tools.remove(tool) self.completed_tools.append(tool)
[ "Called", "when", "an", "individual", "tool", "completes", "execution", "." ]
jayclassless/tidypy
python
https://github.com/jayclassless/tidypy/blob/3c3497ca377fbbe937103b77b02b326c860c748f/src/tidypy/progress.py#L41-L52
[ "def", "on_tool_finish", "(", "self", ",", "tool", ")", ":", "with", "self", ".", "_lock", ":", "if", "tool", "in", "self", ".", "current_tools", ":", "self", ".", "current_tools", ".", "remove", "(", "tool", ")", "self", ".", "completed_tools", ".", "append", "(", "tool", ")" ]
3c3497ca377fbbe937103b77b02b326c860c748f
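The Progress attributes used by on_tool_finish above (_lock, current_tools, completed_tools) are initialised elsewhere in the class and are not shown in this record. Below is a self-contained stand-in that illustrates the same thread-safe bookkeeping; the class name and the on_tool_start method are hypothetical:

import threading

class MinimalProgress(object):
    def __init__(self):
        self._lock = threading.Lock()
        self.current_tools = []
        self.completed_tools = []

    def on_tool_start(self, tool):
        # record that a tool has begun executing
        with self._lock:
            self.current_tools.append(tool)

    def on_tool_finish(self, tool):
        # mirror of the record above: move the tool from "current" to "completed"
        with self._lock:
            if tool in self.current_tools:
                self.current_tools.remove(tool)
            self.completed_tools.append(tool)

progress = MinimalProgress()
progress.on_tool_start('pylint')
progress.on_tool_finish('pylint')
print(progress.current_tools, progress.completed_tools)   # [] ['pylint']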
valid
Emulator.exec_command
Execute an x3270 command. `cmdstr` gets sent directly to the x3270 subprocess on its stdin.
py3270/__init__.py
def exec_command(self, cmdstr): """ Execute an x3270 command `cmdstr` gets sent directly to the x3270 subprocess on it's stdin. """ if self.is_terminated: raise TerminatedError("this TerminalClient instance has been terminated") log.debug("sending command: %s", cmdstr) c = Command(self.app, cmdstr) start = time.time() c.execute() elapsed = time.time() - start log.debug("elapsed execution: {0}".format(elapsed)) self.status = Status(c.status_line) return c
def exec_command(self, cmdstr): """ Execute an x3270 command `cmdstr` gets sent directly to the x3270 subprocess on it's stdin. """ if self.is_terminated: raise TerminatedError("this TerminalClient instance has been terminated") log.debug("sending command: %s", cmdstr) c = Command(self.app, cmdstr) start = time.time() c.execute() elapsed = time.time() - start log.debug("elapsed execution: {0}".format(elapsed)) self.status = Status(c.status_line) return c
[ "Execute", "an", "x3270", "command" ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L296-L313
[ "def", "exec_command", "(", "self", ",", "cmdstr", ")", ":", "if", "self", ".", "is_terminated", ":", "raise", "TerminatedError", "(", "\"this TerminalClient instance has been terminated\"", ")", "log", ".", "debug", "(", "\"sending command: %s\"", ",", "cmdstr", ")", "c", "=", "Command", "(", "self", ".", "app", ",", "cmdstr", ")", "start", "=", "time", ".", "time", "(", ")", "c", ".", "execute", "(", ")", "elapsed", "=", "time", ".", "time", "(", ")", "-", "start", "log", ".", "debug", "(", "\"elapsed execution: {0}\"", ".", "format", "(", "elapsed", ")", ")", "self", ".", "status", "=", "Status", "(", "c", ".", "status_line", ")", "return", "c" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
valid
Emulator.terminate
terminates the underlying x3270 subprocess. Once called, this Emulator instance must no longer be used.
py3270/__init__.py
def terminate(self): """ terminates the underlying x3270 subprocess. Once called, this Emulator instance must no longer be used. """ if not self.is_terminated: log.debug("terminal client terminated") try: self.exec_command(b"Quit") except BrokenPipeError: # noqa # x3270 was terminated, since we are just quitting anyway, ignore it. pass except socket.error as e: if e.errno != errno.ECONNRESET: raise # this can happen because wc3270 closes the socket before # the read() can happen, causing a socket error self.app.close() self.is_terminated = True
def terminate(self): """ terminates the underlying x3270 subprocess. Once called, this Emulator instance must no longer be used. """ if not self.is_terminated: log.debug("terminal client terminated") try: self.exec_command(b"Quit") except BrokenPipeError: # noqa # x3270 was terminated, since we are just quitting anyway, ignore it. pass except socket.error as e: if e.errno != errno.ECONNRESET: raise # this can happen because wc3270 closes the socket before # the read() can happen, causing a socket error self.app.close() self.is_terminated = True
[ "terminates", "the", "underlying", "x3270", "subprocess", ".", "Once", "called", "this", "Emulator", "instance", "must", "no", "longer", "be", "used", "." ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L315-L335
[ "def", "terminate", "(", "self", ")", ":", "if", "not", "self", ".", "is_terminated", ":", "log", ".", "debug", "(", "\"terminal client terminated\"", ")", "try", ":", "self", ".", "exec_command", "(", "b\"Quit\"", ")", "except", "BrokenPipeError", ":", "# noqa", "# x3270 was terminated, since we are just quitting anyway, ignore it.", "pass", "except", "socket", ".", "error", "as", "e", ":", "if", "e", ".", "errno", "!=", "errno", ".", "ECONNRESET", ":", "raise", "# this can happen because wc3270 closes the socket before", "# the read() can happen, causing a socket error", "self", ".", "app", ".", "close", "(", ")", "self", ".", "is_terminated", "=", "True" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
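exec_command above takes the raw x3270 action as bytes, refreshes the status line, and returns the executed Command object; terminate must be the final call on the instance. A small sketch, assuming em is a py3270 Emulator that is already connected to a host:

cmd = em.exec_command(b'Query(ConnectionState)')   # raw action string, as in the records above
print(em.status.keyboard)                          # b'U' when the keyboard is unlocked
print(cmd.data)                                    # any data lines returned by the action
em.terminate()                                     # the Emulator must not be used after this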
valid
Emulator.is_connected
Return bool indicating connection state
py3270/__init__.py
def is_connected(self): """ Return bool indicating connection state """ # need to wrap in try/except b/c of wc3270's socket connection dynamics try: # this is basically a no-op, but it results in the the current status # getting updated self.exec_command(b"Query(ConnectionState)") # connected status is like 'C(192.168.1.1)', disconnected is 'N' return self.status.connection_state.startswith(b"C(") except NotConnectedException: return False
def is_connected(self): """ Return bool indicating connection state """ # need to wrap in try/except b/c of wc3270's socket connection dynamics try: # this is basically a no-op, but it results in the the current status # getting updated self.exec_command(b"Query(ConnectionState)") # connected status is like 'C(192.168.1.1)', disconnected is 'N' return self.status.connection_state.startswith(b"C(") except NotConnectedException: return False
[ "Return", "bool", "indicating", "connection", "state" ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L337-L350
[ "def", "is_connected", "(", "self", ")", ":", "# need to wrap in try/except b/c of wc3270's socket connection dynamics", "try", ":", "# this is basically a no-op, but it results in the the current status", "# getting updated", "self", ".", "exec_command", "(", "b\"Query(ConnectionState)\"", ")", "# connected status is like 'C(192.168.1.1)', disconnected is 'N'", "return", "self", ".", "status", ".", "connection_state", ".", "startswith", "(", "b\"C(\"", ")", "except", "NotConnectedException", ":", "return", "False" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
valid
Emulator.connect
Connect to a host
py3270/__init__.py
def connect(self, host): """ Connect to a host """ if not self.app.connect(host): command = "Connect({0})".format(host).encode("ascii") self.exec_command(command) self.last_host = host
def connect(self, host): """ Connect to a host """ if not self.app.connect(host): command = "Connect({0})".format(host).encode("ascii") self.exec_command(command) self.last_host = host
[ "Connect", "to", "a", "host" ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L352-L359
[ "def", "connect", "(", "self", ",", "host", ")", ":", "if", "not", "self", ".", "app", ".", "connect", "(", "host", ")", ":", "command", "=", "\"Connect({0})\"", ".", "format", "(", "host", ")", ".", "encode", "(", "\"ascii\"", ")", "self", ".", "exec_command", "(", "command", ")", "self", ".", "last_host", "=", "host" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
valid
Emulator.wait_for_field
Wait until the screen is ready, the cursor has been positioned on a modifiable field, and the keyboard is unlocked. Sometimes the server will "unlock" the keyboard but the screen will not yet be ready. In that case, an attempt to read or write to the screen will result in a 'E' keyboard status because we tried to read from a screen that is not yet ready. Using this method tells the client to wait until a field is detected and the cursor has been positioned on it.
py3270/__init__.py
def wait_for_field(self): """ Wait until the screen is ready, the cursor has been positioned on a modifiable field, and the keyboard is unlocked. Sometimes the server will "unlock" the keyboard but the screen will not yet be ready. In that case, an attempt to read or write to the screen will result in a 'E' keyboard status because we tried to read from a screen that is not yet ready. Using this method tells the client to wait until a field is detected and the cursor has been positioned on it. """ self.exec_command("Wait({0}, InputField)".format(self.timeout).encode("ascii")) if self.status.keyboard != b"U": raise KeyboardStateError( "keyboard not unlocked, state was: {0}".format( self.status.keyboard.decode("ascii") ) )
def wait_for_field(self): """ Wait until the screen is ready, the cursor has been positioned on a modifiable field, and the keyboard is unlocked. Sometimes the server will "unlock" the keyboard but the screen will not yet be ready. In that case, an attempt to read or write to the screen will result in a 'E' keyboard status because we tried to read from a screen that is not yet ready. Using this method tells the client to wait until a field is detected and the cursor has been positioned on it. """ self.exec_command("Wait({0}, InputField)".format(self.timeout).encode("ascii")) if self.status.keyboard != b"U": raise KeyboardStateError( "keyboard not unlocked, state was: {0}".format( self.status.keyboard.decode("ascii") ) )
[ "Wait", "until", "the", "screen", "is", "ready", "the", "cursor", "has", "been", "positioned", "on", "a", "modifiable", "field", "and", "the", "keyboard", "is", "unlocked", "." ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L368-L387
[ "def", "wait_for_field", "(", "self", ")", ":", "self", ".", "exec_command", "(", "\"Wait({0}, InputField)\"", ".", "format", "(", "self", ".", "timeout", ")", ".", "encode", "(", "\"ascii\"", ")", ")", "if", "self", ".", "status", ".", "keyboard", "!=", "b\"U\"", ":", "raise", "KeyboardStateError", "(", "\"keyboard not unlocked, state was: {0}\"", ".", "format", "(", "self", ".", "status", ".", "keyboard", ".", "decode", "(", "\"ascii\"", ")", ")", ")" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
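A typical connection sequence built from the is_connected, connect and wait_for_field records above; the host name is a placeholder and the default Emulator construction is assumed:

from py3270 import Emulator

em = Emulator()                      # drives an x3270/s3270 subprocess
em.connect('host.example.com')       # placeholder host
if em.is_connected():
    em.wait_for_field()              # raises KeyboardStateError if the keyboard stays locked
em.terminate()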
valid
Emulator.move_to
move the cursor to the given co-ordinates. Co-ordinates are 1 based, as listed in the status area of the terminal.
py3270/__init__.py
def move_to(self, ypos, xpos): """ move the cursor to the given co-ordinates. Co-ordinates are 1 based, as listed in the status area of the terminal. """ # the screen's co-ordinates are 1 based, but the command is 0 based xpos -= 1 ypos -= 1 self.exec_command("MoveCursor({0}, {1})".format(ypos, xpos).encode("ascii"))
def move_to(self, ypos, xpos): """ move the cursor to the given co-ordinates. Co-ordinates are 1 based, as listed in the status area of the terminal. """ # the screen's co-ordinates are 1 based, but the command is 0 based xpos -= 1 ypos -= 1 self.exec_command("MoveCursor({0}, {1})".format(ypos, xpos).encode("ascii"))
[ "move", "the", "cursor", "to", "the", "given", "co", "-", "ordinates", ".", "Co", "-", "ordinates", "are", "1", "based", "as", "listed", "in", "the", "status", "area", "of", "the", "terminal", "." ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L389-L397
[ "def", "move_to", "(", "self", ",", "ypos", ",", "xpos", ")", ":", "# the screen's co-ordinates are 1 based, but the command is 0 based", "xpos", "-=", "1", "ypos", "-=", "1", "self", ".", "exec_command", "(", "\"MoveCursor({0}, {1})\"", ".", "format", "(", "ypos", ",", "xpos", ")", ".", "encode", "(", "\"ascii\"", ")", ")" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
valid
Emulator.send_string
Send a string to the screen at the current cursor location or at screen co-ordinates `ypos`/`xpos` if they are both given. Co-ordinates are 1 based, as listed in the status area of the terminal.
py3270/__init__.py
def send_string(self, tosend, ypos=None, xpos=None): """ Send a string to the screen at the current cursor location or at screen co-ordinates `ypos`/`xpos` if they are both given. Co-ordinates are 1 based, as listed in the status area of the terminal. """ if xpos is not None and ypos is not None: self.move_to(ypos, xpos) # escape double quotes in the data to send tosend = tosend.replace('"', '"') self.exec_command('String("{0}")'.format(tosend).encode("ascii"))
def send_string(self, tosend, ypos=None, xpos=None): """ Send a string to the screen at the current cursor location or at screen co-ordinates `ypos`/`xpos` if they are both given. Co-ordinates are 1 based, as listed in the status area of the terminal. """ if xpos is not None and ypos is not None: self.move_to(ypos, xpos) # escape double quotes in the data to send tosend = tosend.replace('"', '"') self.exec_command('String("{0}")'.format(tosend).encode("ascii"))
[ "Send", "a", "string", "to", "the", "screen", "at", "the", "current", "cursor", "location", "or", "at", "screen", "co", "-", "ordinates", "ypos", "/", "xpos", "if", "they", "are", "both", "given", "." ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L399-L413
[ "def", "send_string", "(", "self", ",", "tosend", ",", "ypos", "=", "None", ",", "xpos", "=", "None", ")", ":", "if", "xpos", "is", "not", "None", "and", "ypos", "is", "not", "None", ":", "self", ".", "move_to", "(", "ypos", ",", "xpos", ")", "# escape double quotes in the data to send", "tosend", "=", "tosend", ".", "replace", "(", "'\"'", ",", "'\"'", ")", "self", ".", "exec_command", "(", "'String(\"{0}\")'", ".", "format", "(", "tosend", ")", ".", "encode", "(", "\"ascii\"", ")", ")" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
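move_to and send_string above both use 1-based screen coordinates, matching the terminal's status area; send_string positions the cursor itself when ypos and xpos are given. A brief sketch, assuming em is a connected Emulator and the coordinates are arbitrary examples:

em.send_string('LOGON USER1', ypos=1, xpos=2)   # moves to row 1, column 2, then types
em.move_to(5, 10)                               # row 5, column 10
em.send_string('he said "hi"')                  # double quotes are escaped before sending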
valid
Emulator.string_get
Get a string of `length` at screen co-ordinates `ypos`/`xpos` Co-ordinates are 1 based, as listed in the status area of the terminal.
py3270/__init__.py
def string_get(self, ypos, xpos, length): """ Get a string of `length` at screen co-ordinates `ypos`/`xpos` Co-ordinates are 1 based, as listed in the status area of the terminal. """ # the screen's co-ordinates are 1 based, but the command is 0 based xpos -= 1 ypos -= 1 cmd = self.exec_command( "Ascii({0},{1},{2})".format(ypos, xpos, length).encode("ascii") ) # this usage of ascii should only return a single line of data assert len(cmd.data) == 1, cmd.data return cmd.data[0].decode("ascii")
def string_get(self, ypos, xpos, length): """ Get a string of `length` at screen co-ordinates `ypos`/`xpos` Co-ordinates are 1 based, as listed in the status area of the terminal. """ # the screen's co-ordinates are 1 based, but the command is 0 based xpos -= 1 ypos -= 1 cmd = self.exec_command( "Ascii({0},{1},{2})".format(ypos, xpos, length).encode("ascii") ) # this usage of ascii should only return a single line of data assert len(cmd.data) == 1, cmd.data return cmd.data[0].decode("ascii")
[ "Get", "a", "string", "of", "length", "at", "screen", "co", "-", "ordinates", "ypos", "/", "xpos" ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L440-L455
[ "def", "string_get", "(", "self", ",", "ypos", ",", "xpos", ",", "length", ")", ":", "# the screen's co-ordinates are 1 based, but the command is 0 based", "xpos", "-=", "1", "ypos", "-=", "1", "cmd", "=", "self", ".", "exec_command", "(", "\"Ascii({0},{1},{2})\"", ".", "format", "(", "ypos", ",", "xpos", ",", "length", ")", ".", "encode", "(", "\"ascii\"", ")", ")", "# this usage of ascii should only return a single line of data", "assert", "len", "(", "cmd", ".", "data", ")", "==", "1", ",", "cmd", ".", "data", "return", "cmd", ".", "data", "[", "0", "]", ".", "decode", "(", "\"ascii\"", ")" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
valid
Emulator.string_found
Return True if `string` is found at screen co-ordinates `ypos`/`xpos`, False otherwise. Co-ordinates are 1 based, as listed in the status area of the terminal.
py3270/__init__.py
def string_found(self, ypos, xpos, string): """ Return True if `string` is found at screen co-ordinates `ypos`/`xpos`, False otherwise. Co-ordinates are 1 based, as listed in the status area of the terminal. """ found = self.string_get(ypos, xpos, len(string)) log.debug('string_found() saw "{0}"'.format(found)) return found == string
def string_found(self, ypos, xpos, string): """ Return True if `string` is found at screen co-ordinates `ypos`/`xpos`, False otherwise. Co-ordinates are 1 based, as listed in the status area of the terminal. """ found = self.string_get(ypos, xpos, len(string)) log.debug('string_found() saw "{0}"'.format(found)) return found == string
[ "Return", "True", "if", "string", "is", "found", "at", "screen", "co", "-", "ordinates", "ypos", "/", "xpos", "False", "otherwise", "." ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L457-L467
[ "def", "string_found", "(", "self", ",", "ypos", ",", "xpos", ",", "string", ")", ":", "found", "=", "self", ".", "string_get", "(", "ypos", ",", "xpos", ",", "len", "(", "string", ")", ")", "log", ".", "debug", "(", "'string_found() saw \"{0}\"'", ".", "format", "(", "found", ")", ")", "return", "found", "==", "string" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
valid
Emulator.fill_field
clears the field at the position given and inserts the string `tosend` tosend: the string to insert length: the length of the field Co-ordinates are 1 based, as listed in the status area of the terminal. raises: FieldTruncateError if `tosend` is longer than `length`.
py3270/__init__.py
def fill_field(self, ypos, xpos, tosend, length): """ clears the field at the position given and inserts the string `tosend` tosend: the string to insert length: the length of the field Co-ordinates are 1 based, as listed in the status area of the terminal. raises: FieldTruncateError if `tosend` is longer than `length`. """ if length < len(tosend): raise FieldTruncateError('length limit %d, but got "%s"' % (length, tosend)) if xpos is not None and ypos is not None: self.move_to(ypos, xpos) self.delete_field() self.send_string(tosend)
def fill_field(self, ypos, xpos, tosend, length): """ clears the field at the position given and inserts the string `tosend` tosend: the string to insert length: the length of the field Co-ordinates are 1 based, as listed in the status area of the terminal. raises: FieldTruncateError if `tosend` is longer than `length`. """ if length < len(tosend): raise FieldTruncateError('length limit %d, but got "%s"' % (length, tosend)) if xpos is not None and ypos is not None: self.move_to(ypos, xpos) self.delete_field() self.send_string(tosend)
[ "clears", "the", "field", "at", "the", "position", "given", "and", "inserts", "the", "string", "tosend" ]
py3270/py3270
python
https://github.com/py3270/py3270/blob/c3e91b519f3a18b4be4799a00a96341957a8831f/py3270/__init__.py#L476-L495
[ "def", "fill_field", "(", "self", ",", "ypos", ",", "xpos", ",", "tosend", ",", "length", ")", ":", "if", "length", "<", "len", "(", "tosend", ")", ":", "raise", "FieldTruncateError", "(", "'length limit %d, but got \"%s\"'", "%", "(", "length", ",", "tosend", ")", ")", "if", "xpos", "is", "not", "None", "and", "ypos", "is", "not", "None", ":", "self", ".", "move_to", "(", "ypos", ",", "xpos", ")", "self", ".", "delete_field", "(", ")", "self", ".", "send_string", "(", "tosend", ")" ]
c3e91b519f3a18b4be4799a00a96341957a8831f
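string_get, string_found and fill_field above combine into a simple screen-scraping and form-filling flow; coordinates are 1-based. A sketch assuming em is a connected Emulator and the screen layout is hypothetical:

banner = em.string_get(1, 1, 20)                 # read 20 characters from row 1, column 1
print(banner)
if em.string_found(1, 1, 'WELCOME'):
    em.fill_field(5, 20, 'USER1', 8)             # clear the 8-character field, then type USER1
    # fill_field raises FieldTruncateError if the value is longer than the field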
valid
Adafruit_CCS811.setEnvironmentalData
Humidity is stored as an unsigned 16-bit value in 1/512 %RH increments. The default value is 50% = 0x64, 0x00. As an example, 48.5% humidity would be 0x61, 0x00.
Adafruit_CCS811/Adafruit_CCS811.py
def setEnvironmentalData(self, humidity, temperature): ''' Humidity is stored as an unsigned 16 bits in 1/512%RH. The default value is 50% = 0x64, 0x00. As an example 48.5% humidity would be 0x61, 0x00.''' ''' Temperature is stored as an unsigned 16 bits integer in 1/512 degrees there is an offset: 0 maps to -25C. The default value is 25C = 0x64, 0x00. As an example 23.5% temperature would be 0x61, 0x00. The internal algorithm uses these values (or default values if not set by the application) to compensate for changes in relative humidity and ambient temperature.''' hum_perc = humidity << 1 parts = math.fmod(temperature) fractional = parts[0] temperature = parts[1] temp_high = ((temperature + 25) << 9) temp_low = ((fractional / 0.001953125) & 0x1FF) temp_conv = (temp_high | temp_low) buf = [hum_perc, 0x00,((temp_conv >> 8) & 0xFF), (temp_conv & 0xFF)] self._device.writeList(CCS811_ENV_DATA, buf)
def setEnvironmentalData(self, humidity, temperature): ''' Humidity is stored as an unsigned 16 bits in 1/512%RH. The default value is 50% = 0x64, 0x00. As an example 48.5% humidity would be 0x61, 0x00.''' ''' Temperature is stored as an unsigned 16 bits integer in 1/512 degrees there is an offset: 0 maps to -25C. The default value is 25C = 0x64, 0x00. As an example 23.5% temperature would be 0x61, 0x00. The internal algorithm uses these values (or default values if not set by the application) to compensate for changes in relative humidity and ambient temperature.''' hum_perc = humidity << 1 parts = math.fmod(temperature) fractional = parts[0] temperature = parts[1] temp_high = ((temperature + 25) << 9) temp_low = ((fractional / 0.001953125) & 0x1FF) temp_conv = (temp_high | temp_low) buf = [hum_perc, 0x00,((temp_conv >> 8) & 0xFF), (temp_conv & 0xFF)] self._device.writeList(CCS811_ENV_DATA, buf)
[ "Humidity", "is", "stored", "as", "an", "unsigned", "16", "bits", "in", "1", "/", "512%RH", ".", "The", "default", "value", "is", "50%", "=", "0x64", "0x00", ".", "As", "an", "example", "48", ".", "5%", "humidity", "would", "be", "0x61", "0x00", "." ]
adafruit/Adafruit_CCS811_python
python
https://github.com/adafruit/Adafruit_CCS811_python/blob/b75196d2f887247a316a631b81c4a909da395f3d/Adafruit_CCS811/Adafruit_CCS811.py#L147-L174
[ "def", "setEnvironmentalData", "(", "self", ",", "humidity", ",", "temperature", ")", ":", "''' Temperature is stored as an unsigned 16 bits integer in 1/512\n\t\tdegrees there is an offset: 0 maps to -25C. The default value is\n\t\t25C = 0x64, 0x00. As an example 23.5% temperature would be\n\t\t0x61, 0x00.\n\t\tThe internal algorithm uses these values (or default values if\n\t\tnot set by the application) to compensate for changes in\n\t\trelative humidity and ambient temperature.'''", "hum_perc", "=", "humidity", "<<", "1", "parts", "=", "math", ".", "fmod", "(", "temperature", ")", "fractional", "=", "parts", "[", "0", "]", "temperature", "=", "parts", "[", "1", "]", "temp_high", "=", "(", "(", "temperature", "+", "25", ")", "<<", "9", ")", "temp_low", "=", "(", "(", "fractional", "/", "0.001953125", ")", "&", "0x1FF", ")", "temp_conv", "=", "(", "temp_high", "|", "temp_low", ")", "buf", "=", "[", "hum_perc", ",", "0x00", ",", "(", "(", "temp_conv", ">>", "8", ")", "&", "0xFF", ")", ",", "(", "temp_conv", "&", "0xFF", ")", "]", "self", ".", "_device", ".", "writeList", "(", "CCS811_ENV_DATA", ",", "buf", ")" ]
b75196d2f887247a316a631b81c4a909da395f3d
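The setEnvironmentalData record above calls math.fmod with a single argument and applies shift and bitwise operations to floats, which would fail at runtime; math.modf (which returns the fractional and integral parts) and explicit int() casts are presumably what was intended. Below is a standalone sketch of the packing arithmetic only; pack_env_data is a hypothetical helper, and the real method writes the resulting four bytes to the CCS811_ENV_DATA register via self._device.writeList:

import math

def pack_env_data(humidity, temperature):
    # humidity: %RH written into the high byte, 0.5 %RH per LSB (as in the record above)
    hum_perc = int(humidity) << 1

    # temperature: 1/512 degree steps with a -25 C offset
    fractional, integral = math.modf(temperature)
    temp_high = (int(integral) + 25) << 9
    temp_low = int(fractional / 0.001953125) & 0x1FF   # 0.001953125 == 1/512
    temp_conv = temp_high | temp_low

    return [hum_perc, 0x00, (temp_conv >> 8) & 0xFF, temp_conv & 0xFF]

print(pack_env_data(48, 23.5))   # [0x60, 0x00, 0x61, 0x00] -- 0x61 matches the 23.5 degree example in the docstring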
valid
Constraint.from_func
Construct a constraint from a validation function. Args: func (function): Function that evaluates True when the variables satisfy the constraint. variables (iterable): Iterable of variable labels. vartype (:class:`~dimod.Vartype`/str/set): Variable type for the constraint. Accepted input values: * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}`` * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}`` name (string, optional, default='Constraint'): Name for the constraint. Examples: This example creates a constraint that binary variables `a` and `b` are not equal. >>> import dwavebinarycsp >>> import operator >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ['a', 'b'], 'BINARY') >>> print(const.name) Constraint >>> (0, 1) in const.configurations True This example creates a constraint that :math:`out = NOT(x)` for spin variables. >>> import dwavebinarycsp >>> def not_(y, x): # y=NOT(x) for spin variables ... return (y == -x) ... >>> const = dwavebinarycsp.Constraint.from_func( ... not_, ... ['out', 'in'], ... {1, -1}, ... name='not_spin') >>> print(const.name) not_spin >>> (1, -1) in const.configurations True
dwavebinarycsp/core/constraint.py
def from_func(cls, func, variables, vartype, name=None): """Construct a constraint from a validation function. Args: func (function): Function that evaluates True when the variables satisfy the constraint. variables (iterable): Iterable of variable labels. vartype (:class:`~dimod.Vartype`/str/set): Variable type for the constraint. Accepted input values: * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}`` * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}`` name (string, optional, default='Constraint'): Name for the constraint. Examples: This example creates a constraint that binary variables `a` and `b` are not equal. >>> import dwavebinarycsp >>> import operator >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ['a', 'b'], 'BINARY') >>> print(const.name) Constraint >>> (0, 1) in const.configurations True This example creates a constraint that :math:`out = NOT(x)` for spin variables. >>> import dwavebinarycsp >>> def not_(y, x): # y=NOT(x) for spin variables ... return (y == -x) ... >>> const = dwavebinarycsp.Constraint.from_func( ... not_, ... ['out', 'in'], ... {1, -1}, ... name='not_spin') >>> print(const.name) not_spin >>> (1, -1) in const.configurations True """ variables = tuple(variables) configurations = frozenset(config for config in itertools.product(vartype.value, repeat=len(variables)) if func(*config)) return cls(func, configurations, variables, vartype, name)
def from_func(cls, func, variables, vartype, name=None): """Construct a constraint from a validation function. Args: func (function): Function that evaluates True when the variables satisfy the constraint. variables (iterable): Iterable of variable labels. vartype (:class:`~dimod.Vartype`/str/set): Variable type for the constraint. Accepted input values: * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}`` * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}`` name (string, optional, default='Constraint'): Name for the constraint. Examples: This example creates a constraint that binary variables `a` and `b` are not equal. >>> import dwavebinarycsp >>> import operator >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ['a', 'b'], 'BINARY') >>> print(const.name) Constraint >>> (0, 1) in const.configurations True This example creates a constraint that :math:`out = NOT(x)` for spin variables. >>> import dwavebinarycsp >>> def not_(y, x): # y=NOT(x) for spin variables ... return (y == -x) ... >>> const = dwavebinarycsp.Constraint.from_func( ... not_, ... ['out', 'in'], ... {1, -1}, ... name='not_spin') >>> print(const.name) not_spin >>> (1, -1) in const.configurations True """ variables = tuple(variables) configurations = frozenset(config for config in itertools.product(vartype.value, repeat=len(variables)) if func(*config)) return cls(func, configurations, variables, vartype, name)
[ "Construct", "a", "constraint", "from", "a", "validation", "function", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/core/constraint.py#L133-L188
[ "def", "from_func", "(", "cls", ",", "func", ",", "variables", ",", "vartype", ",", "name", "=", "None", ")", ":", "variables", "=", "tuple", "(", "variables", ")", "configurations", "=", "frozenset", "(", "config", "for", "config", "in", "itertools", ".", "product", "(", "vartype", ".", "value", ",", "repeat", "=", "len", "(", "variables", ")", ")", "if", "func", "(", "*", "config", ")", ")", "return", "cls", "(", "func", ",", "configurations", ",", "variables", ",", "vartype", ",", "name", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
valid
Constraint.from_configurations
Construct a constraint from valid configurations. Args: configurations (iterable[tuple]): Valid configurations of the variables. Each configuration is a tuple of variable assignments ordered by :attr:`~Constraint.variables`. variables (iterable): Iterable of variable labels. vartype (:class:`~dimod.Vartype`/str/set): Variable type for the constraint. Accepted input values: * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}`` * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}`` name (string, optional, default='Constraint'): Name for the constraint. Examples: This example creates a constraint that variables `a` and `b` are not equal. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)], ... ['a', 'b'], dwavebinarycsp.BINARY) >>> print(const.name) Constraint >>> (0, 0) in const.configurations # Order matches variables: a,b False This example creates a constraint based on specified valid configurations that represents an OR gate for spin variables. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations( ... [(-1, -1, -1), (1, -1, 1), (1, 1, -1), (1, 1, 1)], ... ['y', 'x1', 'x2'], ... dwavebinarycsp.SPIN, name='or_spin') >>> print(const.name) or_spin >>> (1, 1, -1) in const.configurations # Order matches variables: y,x1,x2 True
dwavebinarycsp/core/constraint.py
def from_configurations(cls, configurations, variables, vartype, name=None): """Construct a constraint from valid configurations. Args: configurations (iterable[tuple]): Valid configurations of the variables. Each configuration is a tuple of variable assignments ordered by :attr:`~Constraint.variables`. variables (iterable): Iterable of variable labels. vartype (:class:`~dimod.Vartype`/str/set): Variable type for the constraint. Accepted input values: * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}`` * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}`` name (string, optional, default='Constraint'): Name for the constraint. Examples: This example creates a constraint that variables `a` and `b` are not equal. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)], ... ['a', 'b'], dwavebinarycsp.BINARY) >>> print(const.name) Constraint >>> (0, 0) in const.configurations # Order matches variables: a,b False This example creates a constraint based on specified valid configurations that represents an OR gate for spin variables. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations( ... [(-1, -1, -1), (1, -1, 1), (1, 1, -1), (1, 1, 1)], ... ['y', 'x1', 'x2'], ... dwavebinarycsp.SPIN, name='or_spin') >>> print(const.name) or_spin >>> (1, 1, -1) in const.configurations # Order matches variables: y,x1,x2 True """ def func(*args): return args in configurations return cls(func, configurations, variables, vartype, name)
def from_configurations(cls, configurations, variables, vartype, name=None): """Construct a constraint from valid configurations. Args: configurations (iterable[tuple]): Valid configurations of the variables. Each configuration is a tuple of variable assignments ordered by :attr:`~Constraint.variables`. variables (iterable): Iterable of variable labels. vartype (:class:`~dimod.Vartype`/str/set): Variable type for the constraint. Accepted input values: * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}`` * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}`` name (string, optional, default='Constraint'): Name for the constraint. Examples: This example creates a constraint that variables `a` and `b` are not equal. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)], ... ['a', 'b'], dwavebinarycsp.BINARY) >>> print(const.name) Constraint >>> (0, 0) in const.configurations # Order matches variables: a,b False This example creates a constraint based on specified valid configurations that represents an OR gate for spin variables. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations( ... [(-1, -1, -1), (1, -1, 1), (1, 1, -1), (1, 1, 1)], ... ['y', 'x1', 'x2'], ... dwavebinarycsp.SPIN, name='or_spin') >>> print(const.name) or_spin >>> (1, 1, -1) in const.configurations # Order matches variables: y,x1,x2 True """ def func(*args): return args in configurations return cls(func, configurations, variables, vartype, name)
[ "Construct", "a", "constraint", "from", "valid", "configurations", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/core/constraint.py#L191-L239
[ "def", "from_configurations", "(", "cls", ",", "configurations", ",", "variables", ",", "vartype", ",", "name", "=", "None", ")", ":", "def", "func", "(", "*", "args", ")", ":", "return", "args", "in", "configurations", "return", "cls", "(", "func", ",", "configurations", ",", "variables", ",", "vartype", ",", "name", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
valid
Constraint.check
Check that a solution satisfies the constraint. Args: solution (container): An assignment for the variables in the constraint. Returns: bool: True if the solution satisfies the constraint; otherwise False. Examples: This example creates a constraint that :math:`a \\ne b` on binary variables and tests it for two candidate solutions, with additional unconstrained variable c. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)], ... ['a', 'b'], dwavebinarycsp.BINARY) >>> solution = {'a': 1, 'b': 1, 'c': 0} >>> const.check(solution) False >>> solution = {'a': 1, 'b': 0, 'c': 0} >>> const.check(solution) True
dwavebinarycsp/core/constraint.py
def check(self, solution): """Check that a solution satisfies the constraint. Args: solution (container): An assignment for the variables in the constraint. Returns: bool: True if the solution satisfies the constraint; otherwise False. Examples: This example creates a constraint that :math:`a \\ne b` on binary variables and tests it for two candidate solutions, with additional unconstrained variable c. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)], ... ['a', 'b'], dwavebinarycsp.BINARY) >>> solution = {'a': 1, 'b': 1, 'c': 0} >>> const.check(solution) False >>> solution = {'a': 1, 'b': 0, 'c': 0} >>> const.check(solution) True """ return self.func(*(solution[v] for v in self.variables))
def check(self, solution): """Check that a solution satisfies the constraint. Args: solution (container): An assignment for the variables in the constraint. Returns: bool: True if the solution satisfies the constraint; otherwise False. Examples: This example creates a constraint that :math:`a \\ne b` on binary variables and tests it for two candidate solutions, with additional unconstrained variable c. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)], ... ['a', 'b'], dwavebinarycsp.BINARY) >>> solution = {'a': 1, 'b': 1, 'c': 0} >>> const.check(solution) False >>> solution = {'a': 1, 'b': 0, 'c': 0} >>> const.check(solution) True """ return self.func(*(solution[v] for v in self.variables))
[ "Check", "that", "a", "solution", "satisfies", "the", "constraint", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/core/constraint.py#L330-L356
[ "def", "check", "(", "self", ",", "solution", ")", ":", "return", "self", ".", "func", "(", "*", "(", "solution", "[", "v", "]", "for", "v", "in", "self", ".", "variables", ")", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
valid
Constraint.fix_variable
Fix the value of a variable and remove it from the constraint. Args: v (variable): Variable in the constraint to be set to a constant value. val (int): Value assigned to the variable. Values must match the :class:`.Vartype` of the constraint. Examples: This example creates a constraint that :math:`a \\ne b` on binary variables, fixes variable a to 0, and tests two candidate solutions. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ... ['a', 'b'], dwavebinarycsp.BINARY) >>> const.fix_variable('a', 0) >>> const.check({'b': 1}) True >>> const.check({'b': 0}) False
dwavebinarycsp/core/constraint.py
def fix_variable(self, v, value): """Fix the value of a variable and remove it from the constraint. Args: v (variable): Variable in the constraint to be set to a constant value. val (int): Value assigned to the variable. Values must match the :class:`.Vartype` of the constraint. Examples: This example creates a constraint that :math:`a \\ne b` on binary variables, fixes variable a to 0, and tests two candidate solutions. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ... ['a', 'b'], dwavebinarycsp.BINARY) >>> const.fix_variable('a', 0) >>> const.check({'b': 1}) True >>> const.check({'b': 0}) False """ variables = self.variables try: idx = variables.index(v) except ValueError: raise ValueError("given variable {} is not part of the constraint".format(v)) if value not in self.vartype.value: raise ValueError("expected value to be in {}, received {} instead".format(self.vartype.value, value)) configurations = frozenset(config[:idx] + config[idx + 1:] # exclude the fixed var for config in self.configurations if config[idx] == value) if not configurations: raise UnsatError("fixing {} to {} makes this constraint unsatisfiable".format(v, value)) variables = variables[:idx] + variables[idx + 1:] self.configurations = configurations self.variables = variables def func(*args): return args in configurations self.func = func self.name = '{} ({} fixed to {})'.format(self.name, v, value)
def fix_variable(self, v, value): """Fix the value of a variable and remove it from the constraint. Args: v (variable): Variable in the constraint to be set to a constant value. val (int): Value assigned to the variable. Values must match the :class:`.Vartype` of the constraint. Examples: This example creates a constraint that :math:`a \\ne b` on binary variables, fixes variable a to 0, and tests two candidate solutions. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ... ['a', 'b'], dwavebinarycsp.BINARY) >>> const.fix_variable('a', 0) >>> const.check({'b': 1}) True >>> const.check({'b': 0}) False """ variables = self.variables try: idx = variables.index(v) except ValueError: raise ValueError("given variable {} is not part of the constraint".format(v)) if value not in self.vartype.value: raise ValueError("expected value to be in {}, received {} instead".format(self.vartype.value, value)) configurations = frozenset(config[:idx] + config[idx + 1:] # exclude the fixed var for config in self.configurations if config[idx] == value) if not configurations: raise UnsatError("fixing {} to {} makes this constraint unsatisfiable".format(v, value)) variables = variables[:idx] + variables[idx + 1:] self.configurations = configurations self.variables = variables def func(*args): return args in configurations self.func = func self.name = '{} ({} fixed to {})'.format(self.name, v, value)
[ "Fix", "the", "value", "of", "a", "variable", "and", "remove", "it", "from", "the", "constraint", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/core/constraint.py#L362-L411
[ "def", "fix_variable", "(", "self", ",", "v", ",", "value", ")", ":", "variables", "=", "self", ".", "variables", "try", ":", "idx", "=", "variables", ".", "index", "(", "v", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"given variable {} is not part of the constraint\"", ".", "format", "(", "v", ")", ")", "if", "value", "not", "in", "self", ".", "vartype", ".", "value", ":", "raise", "ValueError", "(", "\"expected value to be in {}, received {} instead\"", ".", "format", "(", "self", ".", "vartype", ".", "value", ",", "value", ")", ")", "configurations", "=", "frozenset", "(", "config", "[", ":", "idx", "]", "+", "config", "[", "idx", "+", "1", ":", "]", "# exclude the fixed var", "for", "config", "in", "self", ".", "configurations", "if", "config", "[", "idx", "]", "==", "value", ")", "if", "not", "configurations", ":", "raise", "UnsatError", "(", "\"fixing {} to {} makes this constraint unsatisfiable\"", ".", "format", "(", "v", ",", "value", ")", ")", "variables", "=", "variables", "[", ":", "idx", "]", "+", "variables", "[", "idx", "+", "1", ":", "]", "self", ".", "configurations", "=", "configurations", "self", ".", "variables", "=", "variables", "def", "func", "(", "*", "args", ")", ":", "return", "args", "in", "configurations", "self", ".", "func", "=", "func", "self", ".", "name", "=", "'{} ({} fixed to {})'", ".", "format", "(", "self", ".", "name", ",", "v", ",", "value", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
valid
Constraint.flip_variable
Flip a variable in the constraint. Args: v (variable): Variable in the constraint to take the complementary value of its construction value. Examples: This example creates a constraint that :math:`a = b` on binary variables and flips variable a. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_func(operator.eq, ... ['a', 'b'], dwavebinarycsp.BINARY) >>> const.check({'a': 0, 'b': 0}) True >>> const.flip_variable('a') >>> const.check({'a': 1, 'b': 0}) True >>> const.check({'a': 0, 'b': 0}) False
dwavebinarycsp/core/constraint.py
def flip_variable(self, v): """Flip a variable in the constraint. Args: v (variable): Variable in the constraint to take the complementary value of its construction value. Examples: This example creates a constraint that :math:`a = b` on binary variables and flips variable a. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_func(operator.eq, ... ['a', 'b'], dwavebinarycsp.BINARY) >>> const.check({'a': 0, 'b': 0}) True >>> const.flip_variable('a') >>> const.check({'a': 1, 'b': 0}) True >>> const.check({'a': 0, 'b': 0}) False """ try: idx = self.variables.index(v) except ValueError: raise ValueError("variable {} is not a variable in constraint {}".format(v, self.name)) if self.vartype is dimod.BINARY: original_func = self.func def func(*args): new_args = list(args) new_args[idx] = 1 - new_args[idx] # negate v return original_func(*new_args) self.func = func self.configurations = frozenset(config[:idx] + (1 - config[idx],) + config[idx + 1:] for config in self.configurations) else: # SPIN original_func = self.func def func(*args): new_args = list(args) new_args[idx] = -new_args[idx] # negate v return original_func(*new_args) self.func = func self.configurations = frozenset(config[:idx] + (-config[idx],) + config[idx + 1:] for config in self.configurations) self.name = '{} ({} flipped)'.format(self.name, v)
def flip_variable(self, v): """Flip a variable in the constraint. Args: v (variable): Variable in the constraint to take the complementary value of its construction value. Examples: This example creates a constraint that :math:`a = b` on binary variables and flips variable a. >>> import dwavebinarycsp >>> const = dwavebinarycsp.Constraint.from_func(operator.eq, ... ['a', 'b'], dwavebinarycsp.BINARY) >>> const.check({'a': 0, 'b': 0}) True >>> const.flip_variable('a') >>> const.check({'a': 1, 'b': 0}) True >>> const.check({'a': 0, 'b': 0}) False """ try: idx = self.variables.index(v) except ValueError: raise ValueError("variable {} is not a variable in constraint {}".format(v, self.name)) if self.vartype is dimod.BINARY: original_func = self.func def func(*args): new_args = list(args) new_args[idx] = 1 - new_args[idx] # negate v return original_func(*new_args) self.func = func self.configurations = frozenset(config[:idx] + (1 - config[idx],) + config[idx + 1:] for config in self.configurations) else: # SPIN original_func = self.func def func(*args): new_args = list(args) new_args[idx] = -new_args[idx] # negate v return original_func(*new_args) self.func = func self.configurations = frozenset(config[:idx] + (-config[idx],) + config[idx + 1:] for config in self.configurations) self.name = '{} ({} flipped)'.format(self.name, v)
[ "Flip", "a", "variable", "in", "the", "constraint", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/core/constraint.py#L413-L470
[ "def", "flip_variable", "(", "self", ",", "v", ")", ":", "try", ":", "idx", "=", "self", ".", "variables", ".", "index", "(", "v", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"variable {} is not a variable in constraint {}\"", ".", "format", "(", "v", ",", "self", ".", "name", ")", ")", "if", "self", ".", "vartype", "is", "dimod", ".", "BINARY", ":", "original_func", "=", "self", ".", "func", "def", "func", "(", "*", "args", ")", ":", "new_args", "=", "list", "(", "args", ")", "new_args", "[", "idx", "]", "=", "1", "-", "new_args", "[", "idx", "]", "# negate v", "return", "original_func", "(", "*", "new_args", ")", "self", ".", "func", "=", "func", "self", ".", "configurations", "=", "frozenset", "(", "config", "[", ":", "idx", "]", "+", "(", "1", "-", "config", "[", "idx", "]", ",", ")", "+", "config", "[", "idx", "+", "1", ":", "]", "for", "config", "in", "self", ".", "configurations", ")", "else", ":", "# SPIN", "original_func", "=", "self", ".", "func", "def", "func", "(", "*", "args", ")", ":", "new_args", "=", "list", "(", "args", ")", "new_args", "[", "idx", "]", "=", "-", "new_args", "[", "idx", "]", "# negate v", "return", "original_func", "(", "*", "new_args", ")", "self", ".", "func", "=", "func", "self", ".", "configurations", "=", "frozenset", "(", "config", "[", ":", "idx", "]", "+", "(", "-", "config", "[", "idx", "]", ",", ")", "+", "config", "[", "idx", "+", "1", ":", "]", "for", "config", "in", "self", ".", "configurations", ")", "self", ".", "name", "=", "'{} ({} flipped)'", ".", "format", "(", "self", ".", "name", ",", "v", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
valid
Constraint.copy
Create a copy. Examples: This example copies constraint :math:`a \\ne b` and tests a solution on the copied constraint. >>> import dwavebinarycsp >>> import operator >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ... ['a', 'b'], 'BINARY') >>> const2 = const.copy() >>> const2 is const False >>> const2.check({'a': 1, 'b': 1}) False
dwavebinarycsp/core/constraint.py
def copy(self): """Create a copy. Examples: This example copies constraint :math:`a \\ne b` and tests a solution on the copied constraint. >>> import dwavebinarycsp >>> import operator >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ... ['a', 'b'], 'BINARY') >>> const2 = const.copy() >>> const2 is const False >>> const2.check({'a': 1, 'b': 1}) False """ # each object is itself immutable (except the function) return self.__class__(self.func, self.configurations, self.variables, self.vartype, name=self.name)
def copy(self): """Create a copy. Examples: This example copies constraint :math:`a \\ne b` and tests a solution on the copied constraint. >>> import dwavebinarycsp >>> import operator >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ... ['a', 'b'], 'BINARY') >>> const2 = const.copy() >>> const2 is const False >>> const2.check({'a': 1, 'b': 1}) False """ # each object is itself immutable (except the function) return self.__class__(self.func, self.configurations, self.variables, self.vartype, name=self.name)
[ "Create", "a", "copy", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/core/constraint.py#L476-L495
[ "def", "copy", "(", "self", ")", ":", "# each object is itself immutable (except the function)", "return", "self", ".", "__class__", "(", "self", ".", "func", ",", "self", ".", "configurations", ",", "self", ".", "variables", ",", "self", ".", "vartype", ",", "name", "=", "self", ".", "name", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
valid
Constraint.projection
Create a new constraint that is the projection onto a subset of the variables. Args: variables (iterable): Subset of the constraint's variables. Returns: :obj:`.Constraint`: A new constraint over a subset of the variables. Examples: >>> import dwavebinarycsp ... >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 0), (0, 1)], ... ['a', 'b'], ... dwavebinarycsp.BINARY) >>> proj = const.projection(['a']) >>> proj.variables ['a'] >>> proj.configurations {(0,)}
dwavebinarycsp/core/constraint.py
def projection(self, variables): """Create a new constraint that is the projection onto a subset of the variables. Args: variables (iterable): Subset of the constraint's variables. Returns: :obj:`.Constraint`: A new constraint over a subset of the variables. Examples: >>> import dwavebinarycsp ... >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 0), (0, 1)], ... ['a', 'b'], ... dwavebinarycsp.BINARY) >>> proj = const.projection(['a']) >>> proj.variables ['a'] >>> proj.configurations {(0,)} """ # resolve iterables or mutability problems by casting the variables to a set variables = set(variables) if not variables.issubset(self.variables): raise ValueError("Cannot project to variables not in the constraint.") idxs = [i for i, v in enumerate(self.variables) if v in variables] configurations = frozenset(tuple(config[i] for i in idxs) for config in self.configurations) variables = tuple(self.variables[i] for i in idxs) return self.from_configurations(configurations, variables, self.vartype)
def projection(self, variables): """Create a new constraint that is the projection onto a subset of the variables. Args: variables (iterable): Subset of the constraint's variables. Returns: :obj:`.Constraint`: A new constraint over a subset of the variables. Examples: >>> import dwavebinarycsp ... >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 0), (0, 1)], ... ['a', 'b'], ... dwavebinarycsp.BINARY) >>> proj = const.projection(['a']) >>> proj.variables ['a'] >>> proj.configurations {(0,)} """ # resolve iterables or mutability problems by casting the variables to a set variables = set(variables) if not variables.issubset(self.variables): raise ValueError("Cannot project to variables not in the constraint.") idxs = [i for i, v in enumerate(self.variables) if v in variables] configurations = frozenset(tuple(config[i] for i in idxs) for config in self.configurations) variables = tuple(self.variables[i] for i in idxs) return self.from_configurations(configurations, variables, self.vartype)
[ "Create", "a", "new", "constraint", "that", "is", "the", "projection", "onto", "a", "subset", "of", "the", "variables", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/core/constraint.py#L497-L532
[ "def", "projection", "(", "self", ",", "variables", ")", ":", "# resolve iterables or mutability problems by casting the variables to a set", "variables", "=", "set", "(", "variables", ")", "if", "not", "variables", ".", "issubset", "(", "self", ".", "variables", ")", ":", "raise", "ValueError", "(", "\"Cannot project to variables not in the constraint.\"", ")", "idxs", "=", "[", "i", "for", "i", ",", "v", "in", "enumerate", "(", "self", ".", "variables", ")", "if", "v", "in", "variables", "]", "configurations", "=", "frozenset", "(", "tuple", "(", "config", "[", "i", "]", "for", "i", "in", "idxs", ")", "for", "config", "in", "self", ".", "configurations", ")", "variables", "=", "tuple", "(", "self", ".", "variables", "[", "i", "]", "for", "i", "in", "idxs", ")", "return", "self", ".", "from_configurations", "(", "configurations", ",", "variables", ",", "self", ".", "vartype", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
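The Constraint records above (from_func, from_configurations, check, fix_variable, flip_variable, copy, projection) already carry individual doctests; the sketch below simply chains several of them on one constraint. It assumes dwavebinarycsp is installed; the XOR-style configurations are an arbitrary example:

import dwavebinarycsp
from dwavebinarycsp import Constraint

xor = Constraint.from_configurations(
    [(0, 0, 0), (0, 1, 1), (1, 0, 1), (1, 1, 0)],   # z = x XOR y
    ['x', 'y', 'z'], dwavebinarycsp.BINARY, name='xor')

print(xor.check({'x': 1, 'y': 0, 'z': 1}))   # True

fixed = xor.copy()
fixed.fix_variable('x', 1)                   # the copy now requires y != z
print(fixed.check({'y': 0, 'z': 1}))         # True

proj = xor.projection(['x', 'y'])
print(sorted(proj.configurations))           # [(0, 0), (0, 1), (1, 0), (1, 1)]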
valid
multiplication_circuit
Multiplication circuit constraint satisfaction problem. A constraint satisfaction problem that represents the binary multiplication :math:`ab=p`, where the multiplicands are binary variables of length `nbit`; for example, :math:`a_0 + 2a_1 + 4a_2 +... +2^ma_{nbit}`. The square below shows a graphic representation of the circuit:: ________________________________________________________________________________ | and20 and10 and00 | | | | | | | and21 add11──and11 add01──and01 | | | |┌───────────┘|┌───────────┘| | | | and22 add12──and12 add02──and02 | | | | |┌───────────┘|┌───────────┘| | | | | add13─────────add03 | | | | | ┌───────────┘| | | | | | | p5 p4 p3 p2 p1 p0 | -------------------------------------------------------------------------------- Args: nbit (int): Number of bits in the multiplicands. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} Returns: CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied when variables :math:`a,b,p` are assigned values that correctly solve binary multiplication :math:`ab=p`. Examples: This example creates a multiplication circuit CSP that multiplies two 3-bit numbers, which is then formulated as a binary quadratic model (BQM). It fixes the multiplacands as :math:`a=5, b=6` (:math:`101` and :math:`110`) and uses a simulated annealing sampler to find the product, :math:`p=30` (:math:`111100`). >>> import dwavebinarycsp >>> from dwavebinarycsp.factories.csp.circuits import multiplication_circuit >>> import neal >>> csp = multiplication_circuit(3) >>> bqm = dwavebinarycsp.stitch(csp) >>> bqm.fix_variable('a0', 1); bqm.fix_variable('a1', 0); bqm.fix_variable('a2', 1) >>> bqm.fix_variable('b0', 1); bqm.fix_variable('b1', 1); bqm.fix_variable('b2', 0) >>> sampler = neal.SimulatedAnnealingSampler() >>> response = sampler.sample(bqm) >>> p = next(response.samples(n=1, sorted_by='energy')) >>> print(p['p0'], p['p1'], p['p2'], p['p3'], p['p4'], p['p5']) # doctest: +SKIP 1 1 1 1 0 0
dwavebinarycsp/factories/csp/circuits.py
def multiplication_circuit(nbit, vartype=dimod.BINARY): """Multiplication circuit constraint satisfaction problem. A constraint satisfaction problem that represents the binary multiplication :math:`ab=p`, where the multiplicands are binary variables of length `nbit`; for example, :math:`a_0 + 2a_1 + 4a_2 +... +2^ma_{nbit}`. The square below shows a graphic representation of the circuit:: ________________________________________________________________________________ | and20 and10 and00 | | | | | | | and21 add11──and11 add01──and01 | | | |┌───────────┘|┌───────────┘| | | | and22 add12──and12 add02──and02 | | | | |┌───────────┘|┌───────────┘| | | | | add13─────────add03 | | | | | ┌───────────┘| | | | | | | p5 p4 p3 p2 p1 p0 | -------------------------------------------------------------------------------- Args: nbit (int): Number of bits in the multiplicands. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} Returns: CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied when variables :math:`a,b,p` are assigned values that correctly solve binary multiplication :math:`ab=p`. Examples: This example creates a multiplication circuit CSP that multiplies two 3-bit numbers, which is then formulated as a binary quadratic model (BQM). It fixes the multiplacands as :math:`a=5, b=6` (:math:`101` and :math:`110`) and uses a simulated annealing sampler to find the product, :math:`p=30` (:math:`111100`). >>> import dwavebinarycsp >>> from dwavebinarycsp.factories.csp.circuits import multiplication_circuit >>> import neal >>> csp = multiplication_circuit(3) >>> bqm = dwavebinarycsp.stitch(csp) >>> bqm.fix_variable('a0', 1); bqm.fix_variable('a1', 0); bqm.fix_variable('a2', 1) >>> bqm.fix_variable('b0', 1); bqm.fix_variable('b1', 1); bqm.fix_variable('b2', 0) >>> sampler = neal.SimulatedAnnealingSampler() >>> response = sampler.sample(bqm) >>> p = next(response.samples(n=1, sorted_by='energy')) >>> print(p['p0'], p['p1'], p['p2'], p['p3'], p['p4'], p['p5']) # doctest: +SKIP 1 1 1 1 0 0 """ if nbit < 1: raise ValueError("num_multiplier_bits, num_multiplicand_bits must be positive integers") num_multiplier_bits = num_multiplicand_bits = nbit # also checks the vartype argument csp = ConstraintSatisfactionProblem(vartype) # throughout, we will use the following convention: # i to refer to the bits of the multiplier # j to refer to the bits of the multiplicand # k to refer to the bits of the product # create the variables corresponding to the input and output wires for the circuit a = {i: 'a%d' % i for i in range(nbit)} b = {j: 'b%d' % j for j in range(nbit)} p = {k: 'p%d' % k for k in range(nbit + nbit)} # we will want to store the internal variables somewhere AND = defaultdict(dict) # the output of the AND gate associated with ai, bj is stored in AND[i][j] SUM = defaultdict(dict) # the sum of the ADDER gate associated with ai, bj is stored in SUM[i][j] CARRY = defaultdict(dict) # the carry of the ADDER gate associated with ai, bj is stored in CARRY[i][j] # we follow a shift adder for i in range(num_multiplier_bits): for j in range(num_multiplicand_bits): ai = a[i] bj = b[j] if i == 0 and j == 0: # in this case there are no inputs from lower bits, so our only input is the AND # gate. 
And since we only have one bit to add, we don't need an adder, no have a # carry out andij = AND[i][j] = p[0] gate = and_gate([ai, bj, andij], vartype=vartype, name='AND(%s, %s) = %s' % (ai, bj, andij)) csp.add_constraint(gate) continue # we always need an AND gate andij = AND[i][j] = 'and%s,%s' % (i, j) gate = and_gate([ai, bj, andij], vartype=vartype, name='AND(%s, %s) = %s' % (ai, bj, andij)) csp.add_constraint(gate) # the number of inputs will determine the type of adder inputs = [andij] # determine if there is a carry in if i - 1 in CARRY and j in CARRY[i - 1]: inputs.append(CARRY[i - 1][j]) # determine if there is a sum in if i - 1 in SUM and j + 1 in SUM[i - 1]: inputs.append(SUM[i - 1][j + 1]) # ok, add create adders if necessary if len(inputs) == 1: # we don't need an adder and we don't have a carry SUM[i][j] = andij elif len(inputs) == 2: # we need a HALFADDER so we have a sum and a carry if j == 0: sumij = SUM[i][j] = p[i] else: sumij = SUM[i][j] = 'sum%d,%d' % (i, j) carryij = CARRY[i][j] = 'carry%d,%d' % (i, j) name = 'HALFADDER(%s, %s) = %s, %s' % (inputs[0], inputs[1], sumij, carryij) gate = halfadder_gate([inputs[0], inputs[1], sumij, carryij], vartype=vartype, name=name) csp.add_constraint(gate) else: assert len(inputs) == 3, 'unexpected number of inputs' # we need a FULLADDER so we have a sum and a carry if j == 0: sumij = SUM[i][j] = p[i] else: sumij = SUM[i][j] = 'sum%d,%d' % (i, j) carryij = CARRY[i][j] = 'carry%d,%d' % (i, j) name = 'FULLADDER(%s, %s, %s) = %s, %s' % (inputs[0], inputs[1], inputs[2], sumij, carryij) gate = fulladder_gate([inputs[0], inputs[1], inputs[2], sumij, carryij], vartype=vartype, name=name) csp.add_constraint(gate) # now we have a final row of full adders for col in range(nbit - 1): inputs = [CARRY[nbit - 1][col], SUM[nbit - 1][col + 1]] if col == 0: sumout = p[nbit + col] carryout = CARRY[nbit][col] = 'carry%d,%d' % (nbit, col) name = 'HALFADDER(%s, %s) = %s, %s' % (inputs[0], inputs[1], sumout, carryout) gate = halfadder_gate([inputs[0], inputs[1], sumout, carryout], vartype=vartype, name=name) csp.add_constraint(gate) continue inputs.append(CARRY[nbit][col - 1]) sumout = p[nbit + col] if col < nbit - 2: carryout = CARRY[nbit][col] = 'carry%d,%d' % (nbit, col) else: carryout = p[2 * nbit - 1] name = 'FULLADDER(%s, %s, %s) = %s, %s' % (inputs[0], inputs[1], inputs[2], sumout, carryout) gate = fulladder_gate([inputs[0], inputs[1], inputs[2], sumout, carryout], vartype=vartype, name=name) csp.add_constraint(gate) return csp
def multiplication_circuit(nbit, vartype=dimod.BINARY): """Multiplication circuit constraint satisfaction problem. A constraint satisfaction problem that represents the binary multiplication :math:`ab=p`, where the multiplicands are binary variables of length `nbit`; for example, :math:`a_0 + 2a_1 + 4a_2 +... +2^ma_{nbit}`. The square below shows a graphic representation of the circuit:: ________________________________________________________________________________ | and20 and10 and00 | | | | | | | and21 add11──and11 add01──and01 | | | |┌───────────┘|┌───────────┘| | | | and22 add12──and12 add02──and02 | | | | |┌───────────┘|┌───────────┘| | | | | add13─────────add03 | | | | | ┌───────────┘| | | | | | | p5 p4 p3 p2 p1 p0 | -------------------------------------------------------------------------------- Args: nbit (int): Number of bits in the multiplicands. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} Returns: CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied when variables :math:`a,b,p` are assigned values that correctly solve binary multiplication :math:`ab=p`. Examples: This example creates a multiplication circuit CSP that multiplies two 3-bit numbers, which is then formulated as a binary quadratic model (BQM). It fixes the multiplacands as :math:`a=5, b=6` (:math:`101` and :math:`110`) and uses a simulated annealing sampler to find the product, :math:`p=30` (:math:`111100`). >>> import dwavebinarycsp >>> from dwavebinarycsp.factories.csp.circuits import multiplication_circuit >>> import neal >>> csp = multiplication_circuit(3) >>> bqm = dwavebinarycsp.stitch(csp) >>> bqm.fix_variable('a0', 1); bqm.fix_variable('a1', 0); bqm.fix_variable('a2', 1) >>> bqm.fix_variable('b0', 1); bqm.fix_variable('b1', 1); bqm.fix_variable('b2', 0) >>> sampler = neal.SimulatedAnnealingSampler() >>> response = sampler.sample(bqm) >>> p = next(response.samples(n=1, sorted_by='energy')) >>> print(p['p0'], p['p1'], p['p2'], p['p3'], p['p4'], p['p5']) # doctest: +SKIP 1 1 1 1 0 0 """ if nbit < 1: raise ValueError("num_multiplier_bits, num_multiplicand_bits must be positive integers") num_multiplier_bits = num_multiplicand_bits = nbit # also checks the vartype argument csp = ConstraintSatisfactionProblem(vartype) # throughout, we will use the following convention: # i to refer to the bits of the multiplier # j to refer to the bits of the multiplicand # k to refer to the bits of the product # create the variables corresponding to the input and output wires for the circuit a = {i: 'a%d' % i for i in range(nbit)} b = {j: 'b%d' % j for j in range(nbit)} p = {k: 'p%d' % k for k in range(nbit + nbit)} # we will want to store the internal variables somewhere AND = defaultdict(dict) # the output of the AND gate associated with ai, bj is stored in AND[i][j] SUM = defaultdict(dict) # the sum of the ADDER gate associated with ai, bj is stored in SUM[i][j] CARRY = defaultdict(dict) # the carry of the ADDER gate associated with ai, bj is stored in CARRY[i][j] # we follow a shift adder for i in range(num_multiplier_bits): for j in range(num_multiplicand_bits): ai = a[i] bj = b[j] if i == 0 and j == 0: # in this case there are no inputs from lower bits, so our only input is the AND # gate. 
And since we only have one bit to add, we don't need an adder, no have a # carry out andij = AND[i][j] = p[0] gate = and_gate([ai, bj, andij], vartype=vartype, name='AND(%s, %s) = %s' % (ai, bj, andij)) csp.add_constraint(gate) continue # we always need an AND gate andij = AND[i][j] = 'and%s,%s' % (i, j) gate = and_gate([ai, bj, andij], vartype=vartype, name='AND(%s, %s) = %s' % (ai, bj, andij)) csp.add_constraint(gate) # the number of inputs will determine the type of adder inputs = [andij] # determine if there is a carry in if i - 1 in CARRY and j in CARRY[i - 1]: inputs.append(CARRY[i - 1][j]) # determine if there is a sum in if i - 1 in SUM and j + 1 in SUM[i - 1]: inputs.append(SUM[i - 1][j + 1]) # ok, add create adders if necessary if len(inputs) == 1: # we don't need an adder and we don't have a carry SUM[i][j] = andij elif len(inputs) == 2: # we need a HALFADDER so we have a sum and a carry if j == 0: sumij = SUM[i][j] = p[i] else: sumij = SUM[i][j] = 'sum%d,%d' % (i, j) carryij = CARRY[i][j] = 'carry%d,%d' % (i, j) name = 'HALFADDER(%s, %s) = %s, %s' % (inputs[0], inputs[1], sumij, carryij) gate = halfadder_gate([inputs[0], inputs[1], sumij, carryij], vartype=vartype, name=name) csp.add_constraint(gate) else: assert len(inputs) == 3, 'unexpected number of inputs' # we need a FULLADDER so we have a sum and a carry if j == 0: sumij = SUM[i][j] = p[i] else: sumij = SUM[i][j] = 'sum%d,%d' % (i, j) carryij = CARRY[i][j] = 'carry%d,%d' % (i, j) name = 'FULLADDER(%s, %s, %s) = %s, %s' % (inputs[0], inputs[1], inputs[2], sumij, carryij) gate = fulladder_gate([inputs[0], inputs[1], inputs[2], sumij, carryij], vartype=vartype, name=name) csp.add_constraint(gate) # now we have a final row of full adders for col in range(nbit - 1): inputs = [CARRY[nbit - 1][col], SUM[nbit - 1][col + 1]] if col == 0: sumout = p[nbit + col] carryout = CARRY[nbit][col] = 'carry%d,%d' % (nbit, col) name = 'HALFADDER(%s, %s) = %s, %s' % (inputs[0], inputs[1], sumout, carryout) gate = halfadder_gate([inputs[0], inputs[1], sumout, carryout], vartype=vartype, name=name) csp.add_constraint(gate) continue inputs.append(CARRY[nbit][col - 1]) sumout = p[nbit + col] if col < nbit - 2: carryout = CARRY[nbit][col] = 'carry%d,%d' % (nbit, col) else: carryout = p[2 * nbit - 1] name = 'FULLADDER(%s, %s, %s) = %s, %s' % (inputs[0], inputs[1], inputs[2], sumout, carryout) gate = fulladder_gate([inputs[0], inputs[1], inputs[2], sumout, carryout], vartype=vartype, name=name) csp.add_constraint(gate) return csp
[ "Multiplication", "circuit", "constraint", "satisfaction", "problem", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/factories/csp/circuits.py#L28-L199
[ "def", "multiplication_circuit", "(", "nbit", ",", "vartype", "=", "dimod", ".", "BINARY", ")", ":", "if", "nbit", "<", "1", ":", "raise", "ValueError", "(", "\"num_multiplier_bits, num_multiplicand_bits must be positive integers\"", ")", "num_multiplier_bits", "=", "num_multiplicand_bits", "=", "nbit", "# also checks the vartype argument", "csp", "=", "ConstraintSatisfactionProblem", "(", "vartype", ")", "# throughout, we will use the following convention:", "# i to refer to the bits of the multiplier", "# j to refer to the bits of the multiplicand", "# k to refer to the bits of the product", "# create the variables corresponding to the input and output wires for the circuit", "a", "=", "{", "i", ":", "'a%d'", "%", "i", "for", "i", "in", "range", "(", "nbit", ")", "}", "b", "=", "{", "j", ":", "'b%d'", "%", "j", "for", "j", "in", "range", "(", "nbit", ")", "}", "p", "=", "{", "k", ":", "'p%d'", "%", "k", "for", "k", "in", "range", "(", "nbit", "+", "nbit", ")", "}", "# we will want to store the internal variables somewhere", "AND", "=", "defaultdict", "(", "dict", ")", "# the output of the AND gate associated with ai, bj is stored in AND[i][j]", "SUM", "=", "defaultdict", "(", "dict", ")", "# the sum of the ADDER gate associated with ai, bj is stored in SUM[i][j]", "CARRY", "=", "defaultdict", "(", "dict", ")", "# the carry of the ADDER gate associated with ai, bj is stored in CARRY[i][j]", "# we follow a shift adder", "for", "i", "in", "range", "(", "num_multiplier_bits", ")", ":", "for", "j", "in", "range", "(", "num_multiplicand_bits", ")", ":", "ai", "=", "a", "[", "i", "]", "bj", "=", "b", "[", "j", "]", "if", "i", "==", "0", "and", "j", "==", "0", ":", "# in this case there are no inputs from lower bits, so our only input is the AND", "# gate. 
And since we only have one bit to add, we don't need an adder, no have a", "# carry out", "andij", "=", "AND", "[", "i", "]", "[", "j", "]", "=", "p", "[", "0", "]", "gate", "=", "and_gate", "(", "[", "ai", ",", "bj", ",", "andij", "]", ",", "vartype", "=", "vartype", ",", "name", "=", "'AND(%s, %s) = %s'", "%", "(", "ai", ",", "bj", ",", "andij", ")", ")", "csp", ".", "add_constraint", "(", "gate", ")", "continue", "# we always need an AND gate", "andij", "=", "AND", "[", "i", "]", "[", "j", "]", "=", "'and%s,%s'", "%", "(", "i", ",", "j", ")", "gate", "=", "and_gate", "(", "[", "ai", ",", "bj", ",", "andij", "]", ",", "vartype", "=", "vartype", ",", "name", "=", "'AND(%s, %s) = %s'", "%", "(", "ai", ",", "bj", ",", "andij", ")", ")", "csp", ".", "add_constraint", "(", "gate", ")", "# the number of inputs will determine the type of adder", "inputs", "=", "[", "andij", "]", "# determine if there is a carry in", "if", "i", "-", "1", "in", "CARRY", "and", "j", "in", "CARRY", "[", "i", "-", "1", "]", ":", "inputs", ".", "append", "(", "CARRY", "[", "i", "-", "1", "]", "[", "j", "]", ")", "# determine if there is a sum in", "if", "i", "-", "1", "in", "SUM", "and", "j", "+", "1", "in", "SUM", "[", "i", "-", "1", "]", ":", "inputs", ".", "append", "(", "SUM", "[", "i", "-", "1", "]", "[", "j", "+", "1", "]", ")", "# ok, add create adders if necessary", "if", "len", "(", "inputs", ")", "==", "1", ":", "# we don't need an adder and we don't have a carry", "SUM", "[", "i", "]", "[", "j", "]", "=", "andij", "elif", "len", "(", "inputs", ")", "==", "2", ":", "# we need a HALFADDER so we have a sum and a carry", "if", "j", "==", "0", ":", "sumij", "=", "SUM", "[", "i", "]", "[", "j", "]", "=", "p", "[", "i", "]", "else", ":", "sumij", "=", "SUM", "[", "i", "]", "[", "j", "]", "=", "'sum%d,%d'", "%", "(", "i", ",", "j", ")", "carryij", "=", "CARRY", "[", "i", "]", "[", "j", "]", "=", "'carry%d,%d'", "%", "(", "i", ",", "j", ")", "name", "=", "'HALFADDER(%s, %s) = %s, %s'", "%", "(", "inputs", "[", "0", "]", ",", "inputs", "[", "1", "]", ",", "sumij", ",", "carryij", ")", "gate", "=", "halfadder_gate", "(", "[", "inputs", "[", "0", "]", ",", "inputs", "[", "1", "]", ",", "sumij", ",", "carryij", "]", ",", "vartype", "=", "vartype", ",", "name", "=", "name", ")", "csp", ".", "add_constraint", "(", "gate", ")", "else", ":", "assert", "len", "(", "inputs", ")", "==", "3", ",", "'unexpected number of inputs'", "# we need a FULLADDER so we have a sum and a carry", "if", "j", "==", "0", ":", "sumij", "=", "SUM", "[", "i", "]", "[", "j", "]", "=", "p", "[", "i", "]", "else", ":", "sumij", "=", "SUM", "[", "i", "]", "[", "j", "]", "=", "'sum%d,%d'", "%", "(", "i", ",", "j", ")", "carryij", "=", "CARRY", "[", "i", "]", "[", "j", "]", "=", "'carry%d,%d'", "%", "(", "i", ",", "j", ")", "name", "=", "'FULLADDER(%s, %s, %s) = %s, %s'", "%", "(", "inputs", "[", "0", "]", ",", "inputs", "[", "1", "]", ",", "inputs", "[", "2", "]", ",", "sumij", ",", "carryij", ")", "gate", "=", "fulladder_gate", "(", "[", "inputs", "[", "0", "]", ",", "inputs", "[", "1", "]", ",", "inputs", "[", "2", "]", ",", "sumij", ",", "carryij", "]", ",", "vartype", "=", "vartype", ",", "name", "=", "name", ")", "csp", ".", "add_constraint", "(", "gate", ")", "# now we have a final row of full adders", "for", "col", "in", "range", "(", "nbit", "-", "1", ")", ":", "inputs", "=", "[", "CARRY", "[", "nbit", "-", "1", "]", "[", "col", "]", ",", "SUM", "[", "nbit", "-", "1", "]", "[", "col", "+", "1", "]", "]", "if", "col", "==", "0", ":", "sumout", 
"=", "p", "[", "nbit", "+", "col", "]", "carryout", "=", "CARRY", "[", "nbit", "]", "[", "col", "]", "=", "'carry%d,%d'", "%", "(", "nbit", ",", "col", ")", "name", "=", "'HALFADDER(%s, %s) = %s, %s'", "%", "(", "inputs", "[", "0", "]", ",", "inputs", "[", "1", "]", ",", "sumout", ",", "carryout", ")", "gate", "=", "halfadder_gate", "(", "[", "inputs", "[", "0", "]", ",", "inputs", "[", "1", "]", ",", "sumout", ",", "carryout", "]", ",", "vartype", "=", "vartype", ",", "name", "=", "name", ")", "csp", ".", "add_constraint", "(", "gate", ")", "continue", "inputs", ".", "append", "(", "CARRY", "[", "nbit", "]", "[", "col", "-", "1", "]", ")", "sumout", "=", "p", "[", "nbit", "+", "col", "]", "if", "col", "<", "nbit", "-", "2", ":", "carryout", "=", "CARRY", "[", "nbit", "]", "[", "col", "]", "=", "'carry%d,%d'", "%", "(", "nbit", ",", "col", ")", "else", ":", "carryout", "=", "p", "[", "2", "*", "nbit", "-", "1", "]", "name", "=", "'FULLADDER(%s, %s, %s) = %s, %s'", "%", "(", "inputs", "[", "0", "]", ",", "inputs", "[", "1", "]", ",", "inputs", "[", "2", "]", ",", "sumout", ",", "carryout", ")", "gate", "=", "fulladder_gate", "(", "[", "inputs", "[", "0", "]", ",", "inputs", "[", "1", "]", ",", "inputs", "[", "2", "]", ",", "sumout", ",", "carryout", "]", ",", "vartype", "=", "vartype", ",", "name", "=", "name", ")", "csp", ".", "add_constraint", "(", "gate", ")", "return", "csp" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
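A short sketch of building the multiplication-circuit CSP from the record above for a small bit width, assuming `dwavebinarycsp` is installed; the printed counts depend on the construction and are not asserted here:

import dwavebinarycsp
from dwavebinarycsp.factories.csp.circuits import multiplication_circuit

# 2-bit multiplier: a1 a0 * b1 b0 = p3 p2 p1 p0
csp = multiplication_circuit(2)

# besides the a*, b* and p* wires, the circuit adds internal and/sum/carry wires
print(len(csp.constraints))   # one constraint per AND gate, half adder or full adder
print(sorted(v for v in csp.variables if str(v).startswith('p')))   # product wires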
valid
xor_fault
Returns True if XOR(a, b) == out and fault == 0 or XOR(a, b) != out and fault == 1.
examples/explicit_circuit_fault_diagnosis.py
def xor_fault(a, b, out, fault): """Returns True if XOR(a, b) == out and fault == 0 or XOR(a, b) != out and fault == 1.""" if (a != b) == out: return fault == 0 else: return fault == 1
def xor_fault(a, b, out, fault): """Returns True if XOR(a, b) == out and fault == 0 or XOR(a, b) != out and fault == 1.""" if (a != b) == out: return fault == 0 else: return fault == 1
[ "Returns", "True", "if", "XOR", "(", "a", "b", ")", "==", "out", "and", "fault", "==", "0", "or", "XOR", "(", "a", "b", ")", "!", "=", "out", "and", "fault", "==", "1", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/examples/explicit_circuit_fault_diagnosis.py#L6-L11
[ "def", "xor_fault", "(", "a", ",", "b", ",", "out", ",", "fault", ")", ":", "if", "(", "a", "!=", "b", ")", "==", "out", ":", "return", "fault", "==", "0", "else", ":", "return", "fault", "==", "1" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
valid
and_fault
Returns True if AND(a, b) == out and fault == 0 or AND(a, b) != out and fault == 1.
examples/explicit_circuit_fault_diagnosis.py
def and_fault(a, b, out, fault): """Returns True if AND(a, b) == out and fault == 0 or AND(a, b) != out and fault == 1.""" if (a and b) == out: return fault == 0 else: return fault == 1
def and_fault(a, b, out, fault): """Returns True if AND(a, b) == out and fault == 0 or AND(a, b) != out and fault == 1.""" if (a and b) == out: return fault == 0 else: return fault == 1
[ "Returns", "True", "if", "AND", "(", "a", "b", ")", "==", "out", "and", "fault", "==", "0", "or", "AND", "(", "a", "b", ")", "!", "=", "out", "and", "fault", "==", "1", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/examples/explicit_circuit_fault_diagnosis.py#L14-L19
[ "def", "and_fault", "(", "a", ",", "b", ",", "out", ",", "fault", ")", ":", "if", "(", "a", "and", "b", ")", "==", "out", ":", "return", "fault", "==", "0", "else", ":", "return", "fault", "==", "1" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
valid
or_fault
Returns True if OR(a, b) == out and fault == 0 or OR(a, b) != out and fault == 1.
examples/explicit_circuit_fault_diagnosis.py
def or_fault(a, b, out, fault): """Returns True if OR(a, b) == out and fault == 0 or OR(a, b) != out and fault == 1.""" if (a or b) == out: return fault == 0 else: return fault == 1
def or_fault(a, b, out, fault): """Returns True if OR(a, b) == out and fault == 0 or OR(a, b) != out and fault == 1.""" if (a or b) == out: return fault == 0 else: return fault == 1
[ "Returns", "True", "if", "OR", "(", "a", "b", ")", "==", "out", "and", "fault", "==", "0", "or", "OR", "(", "a", "b", ")", "!", "=", "out", "and", "fault", "==", "1", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/examples/explicit_circuit_fault_diagnosis.py#L22-L27
[ "def", "or_fault", "(", "a", ",", "b", ",", "out", ",", "fault", ")", ":", "if", "(", "a", "or", "b", ")", "==", "out", ":", "return", "fault", "==", "0", "else", ":", "return", "fault", "==", "1" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
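The three `*_fault` predicates above come from the `explicit_circuit_fault_diagnosis` example; the sketch below shows how one of them can be turned into a constraint, assuming `dwavebinarycsp` is installed, with observed wire values chosen for illustration rather than taken from that example file:

import dwavebinarycsp

def xor_fault(a, b, out, fault):
    # same predicate as the xor_fault record above
    if (a != b) == out:
        return fault == 0
    else:
        return fault == 1

csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
csp.add_constraint(xor_fault, ['a', 'b', 'out', 'fault'])

# observed a=1, b=1, out=1: a healthy XOR would output 0, so only fault=1 is consistent
print(csp.check({'a': 1, 'b': 1, 'out': 1, 'fault': 0}))   # False
print(csp.check({'a': 1, 'b': 1, 'out': 1, 'fault': 1}))   # True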
valid
assert_penaltymodel_factory_available
For `dwavebinarycsp` to be functional, at least one penalty model factory has to be installed. See discussion in setup.py for details.
dwavebinarycsp/__init__.py
def assert_penaltymodel_factory_available(): """For `dwavebinarycsp` to be functional, at least one penalty model factory has to be installed. See discussion in setup.py for details. """ from pkg_resources import iter_entry_points from penaltymodel.core import FACTORY_ENTRYPOINT from itertools import chain supported = ('maxgap', 'mip') factories = chain(*(iter_entry_points(FACTORY_ENTRYPOINT, name) for name in supported)) try: next(factories) except StopIteration: raise AssertionError( "To use 'dwavebinarycsp', at least one penaltymodel factory must be installed. " "Try {}.".format( " or ".join("'pip install dwavebinarycsp[{}]'".format(name) for name in supported) ))
def assert_penaltymodel_factory_available(): """For `dwavebinarycsp` to be functional, at least one penalty model factory has to be installed. See discussion in setup.py for details. """ from pkg_resources import iter_entry_points from penaltymodel.core import FACTORY_ENTRYPOINT from itertools import chain supported = ('maxgap', 'mip') factories = chain(*(iter_entry_points(FACTORY_ENTRYPOINT, name) for name in supported)) try: next(factories) except StopIteration: raise AssertionError( "To use 'dwavebinarycsp', at least one penaltymodel factory must be installed. " "Try {}.".format( " or ".join("'pip install dwavebinarycsp[{}]'".format(name) for name in supported) ))
[ "For", "dwavebinarycsp", "to", "be", "functional", "at", "least", "one", "penalty", "model", "factory", "has", "to", "be", "installed", ".", "See", "discussion", "in", "setup", ".", "py", "for", "details", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/__init__.py#L41-L60
[ "def", "assert_penaltymodel_factory_available", "(", ")", ":", "from", "pkg_resources", "import", "iter_entry_points", "from", "penaltymodel", ".", "core", "import", "FACTORY_ENTRYPOINT", "from", "itertools", "import", "chain", "supported", "=", "(", "'maxgap'", ",", "'mip'", ")", "factories", "=", "chain", "(", "*", "(", "iter_entry_points", "(", "FACTORY_ENTRYPOINT", ",", "name", ")", "for", "name", "in", "supported", ")", ")", "try", ":", "next", "(", "factories", ")", "except", "StopIteration", ":", "raise", "AssertionError", "(", "\"To use 'dwavebinarycsp', at least one penaltymodel factory must be installed. \"", "\"Try {}.\"", ".", "format", "(", "\" or \"", ".", "join", "(", "\"'pip install dwavebinarycsp[{}]'\"", ".", "format", "(", "name", ")", "for", "name", "in", "supported", ")", ")", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
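A small sketch of calling the availability check documented above, assuming `dwavebinarycsp` is importable; the install hints in the error message come from the record's own source:

import dwavebinarycsp

try:
    dwavebinarycsp.assert_penaltymodel_factory_available()
    print("at least one penaltymodel factory (maxgap or mip) is installed")
except AssertionError as exc:
    # suggests 'pip install dwavebinarycsp[maxgap]' or 'pip install dwavebinarycsp[mip]'
    print(exc)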
valid
add_constraint
Add a constraint. Args: constraint (function/iterable/:obj:`.Constraint`): Constraint definition in one of the supported formats: 1. Function, with input arguments matching the order and :attr:`~.ConstraintSatisfactionProblem.vartype` type of the `variables` argument, that evaluates True when the constraint is satisfied. 2. List explicitly specifying each allowed configuration as a tuple. 3. :obj:`.Constraint` object built either explicitly or by :mod:`dwavebinarycsp.factories`. variables(iterable): Variables associated with the constraint. Not required when `constraint` is a :obj:`.Constraint` object. Examples: This example defines a function that evaluates True when the constraint is satisfied. The function's input arguments match the order and type of the `variables` argument. >>> import dwavebinarycsp >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> def all_equal(a, b, c): # works for both dwavebinarycsp.BINARY and dwavebinarycsp.SPIN ... return (a == b) and (b == c) >>> csp.add_constraint(all_equal, ['a', 'b', 'c']) >>> csp.check({'a': 0, 'b': 0, 'c': 0}) True >>> csp.check({'a': 0, 'b': 0, 'c': 1}) False This example explicitly lists allowed configurations. >>> import dwavebinarycsp >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.SPIN) >>> eq_configurations = {(-1, -1), (1, 1)} >>> csp.add_constraint(eq_configurations, ['v0', 'v1']) >>> csp.check({'v0': -1, 'v1': +1}) False >>> csp.check({'v0': -1, 'v1': -1}) True This example uses a :obj:`.Constraint` object built by :mod:`dwavebinarycsp.factories`. >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c'])) # add an AND gate >>> csp.add_constraint(gates.xor_gate(['a', 'c', 'd'])) # add an XOR gate >>> csp.check({'a': 1, 'b': 0, 'c': 0, 'd': 1}) True
dwavebinarycsp/core/csp.py
def add_constraint(self, constraint, variables=tuple()): """Add a constraint. Args: constraint (function/iterable/:obj:`.Constraint`): Constraint definition in one of the supported formats: 1. Function, with input arguments matching the order and :attr:`~.ConstraintSatisfactionProblem.vartype` type of the `variables` argument, that evaluates True when the constraint is satisfied. 2. List explicitly specifying each allowed configuration as a tuple. 3. :obj:`.Constraint` object built either explicitly or by :mod:`dwavebinarycsp.factories`. variables(iterable): Variables associated with the constraint. Not required when `constraint` is a :obj:`.Constraint` object. Examples: This example defines a function that evaluates True when the constraint is satisfied. The function's input arguments match the order and type of the `variables` argument. >>> import dwavebinarycsp >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> def all_equal(a, b, c): # works for both dwavebinarycsp.BINARY and dwavebinarycsp.SPIN ... return (a == b) and (b == c) >>> csp.add_constraint(all_equal, ['a', 'b', 'c']) >>> csp.check({'a': 0, 'b': 0, 'c': 0}) True >>> csp.check({'a': 0, 'b': 0, 'c': 1}) False This example explicitly lists allowed configurations. >>> import dwavebinarycsp >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.SPIN) >>> eq_configurations = {(-1, -1), (1, 1)} >>> csp.add_constraint(eq_configurations, ['v0', 'v1']) >>> csp.check({'v0': -1, 'v1': +1}) False >>> csp.check({'v0': -1, 'v1': -1}) True This example uses a :obj:`.Constraint` object built by :mod:`dwavebinarycsp.factories`. >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c'])) # add an AND gate >>> csp.add_constraint(gates.xor_gate(['a', 'c', 'd'])) # add an XOR gate >>> csp.check({'a': 1, 'b': 0, 'c': 0, 'd': 1}) True """ if isinstance(constraint, Constraint): if variables and (tuple(variables) != constraint.variables): raise ValueError("mismatched variables and Constraint") elif isinstance(constraint, Callable): constraint = Constraint.from_func(constraint, variables, self.vartype) elif isinstance(constraint, Iterable): constraint = Constraint.from_configurations(constraint, variables, self.vartype) else: raise TypeError("Unknown constraint type given") self.constraints.append(constraint) for v in constraint.variables: self.variables[v].append(constraint)
def add_constraint(self, constraint, variables=tuple()): """Add a constraint. Args: constraint (function/iterable/:obj:`.Constraint`): Constraint definition in one of the supported formats: 1. Function, with input arguments matching the order and :attr:`~.ConstraintSatisfactionProblem.vartype` type of the `variables` argument, that evaluates True when the constraint is satisfied. 2. List explicitly specifying each allowed configuration as a tuple. 3. :obj:`.Constraint` object built either explicitly or by :mod:`dwavebinarycsp.factories`. variables(iterable): Variables associated with the constraint. Not required when `constraint` is a :obj:`.Constraint` object. Examples: This example defines a function that evaluates True when the constraint is satisfied. The function's input arguments match the order and type of the `variables` argument. >>> import dwavebinarycsp >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> def all_equal(a, b, c): # works for both dwavebinarycsp.BINARY and dwavebinarycsp.SPIN ... return (a == b) and (b == c) >>> csp.add_constraint(all_equal, ['a', 'b', 'c']) >>> csp.check({'a': 0, 'b': 0, 'c': 0}) True >>> csp.check({'a': 0, 'b': 0, 'c': 1}) False This example explicitly lists allowed configurations. >>> import dwavebinarycsp >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.SPIN) >>> eq_configurations = {(-1, -1), (1, 1)} >>> csp.add_constraint(eq_configurations, ['v0', 'v1']) >>> csp.check({'v0': -1, 'v1': +1}) False >>> csp.check({'v0': -1, 'v1': -1}) True This example uses a :obj:`.Constraint` object built by :mod:`dwavebinarycsp.factories`. >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c'])) # add an AND gate >>> csp.add_constraint(gates.xor_gate(['a', 'c', 'd'])) # add an XOR gate >>> csp.check({'a': 1, 'b': 0, 'c': 0, 'd': 1}) True """ if isinstance(constraint, Constraint): if variables and (tuple(variables) != constraint.variables): raise ValueError("mismatched variables and Constraint") elif isinstance(constraint, Callable): constraint = Constraint.from_func(constraint, variables, self.vartype) elif isinstance(constraint, Iterable): constraint = Constraint.from_configurations(constraint, variables, self.vartype) else: raise TypeError("Unknown constraint type given") self.constraints.append(constraint) for v in constraint.variables: self.variables[v].append(constraint)
[ "Add", "a", "constraint", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/core/csp.py#L76-L141
[ "def", "add_constraint", "(", "self", ",", "constraint", ",", "variables", "=", "tuple", "(", ")", ")", ":", "if", "isinstance", "(", "constraint", ",", "Constraint", ")", ":", "if", "variables", "and", "(", "tuple", "(", "variables", ")", "!=", "constraint", ".", "variables", ")", ":", "raise", "ValueError", "(", "\"mismatched variables and Constraint\"", ")", "elif", "isinstance", "(", "constraint", ",", "Callable", ")", ":", "constraint", "=", "Constraint", ".", "from_func", "(", "constraint", ",", "variables", ",", "self", ".", "vartype", ")", "elif", "isinstance", "(", "constraint", ",", "Iterable", ")", ":", "constraint", "=", "Constraint", ".", "from_configurations", "(", "constraint", ",", "variables", ",", "self", ".", "vartype", ")", "else", ":", "raise", "TypeError", "(", "\"Unknown constraint type given\"", ")", "self", ".", "constraints", ".", "append", "(", "constraint", ")", "for", "v", "in", "constraint", ".", "variables", ":", "self", ".", "variables", "[", "v", "]", ".", "append", "(", "constraint", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
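A brief sketch of the Constraint-object branch of `add_constraint` above, including the mismatched-variables error path, assuming `dwavebinarycsp` is installed; the gate factory call follows the record's docstring:

import dwavebinarycsp
import dwavebinarycsp.factories.constraint.gates as gates

csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
const = gates.and_gate(['x1', 'x2', 'y'])

csp.add_constraint(const)                        # Constraint object: variables come from the object
try:
    csp.add_constraint(const, ['a', 'b', 'c'])   # variables that disagree with the Constraint
except ValueError as exc:
    print(exc)                                   # "mismatched variables and Constraint"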
valid
stitch
Build a binary quadratic model with minimal energy levels at solutions to the specified constraint satisfaction problem. Args: csp (:obj:`.ConstraintSatisfactionProblem`): Constraint satisfaction problem. min_classical_gap (float, optional, default=2.0): Minimum energy gap from ground. Each constraint violated by the solution increases the energy level of the binary quadratic model by at least this much relative to ground energy. max_graph_size (int, optional, default=8): Maximum number of variables in the binary quadratic model that can be used to represent a single constraint. Returns: :class:`~dimod.BinaryQuadraticModel` Notes: For a `min_classical_gap` > 2 or constraints with more than two variables, requires access to factories from the penaltymodel_ ecosystem to construct the binary quadratic model. .. _penaltymodel: https://github.com/dwavesystems/penaltymodel Examples: This example creates a binary-valued constraint satisfaction problem with two constraints, :math:`a = b` and :math:`b \\ne c`, and builds a binary quadratic model with a minimum energy level of -2 such that each constraint violation by a solution adds the default minimum energy gap. >>> import dwavebinarycsp >>> import operator >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> bqm = dwavebinarycsp.stitch(csp) >>> bqm.energy({'a': 0, 'b': 0, 'c': 1}) # satisfies csp -2.0 >>> bqm.energy({'a': 0, 'b': 0, 'c': 0}) # violates one constraint 0.0 >>> bqm.energy({'a': 1, 'b': 0, 'c': 0}) # violates two constraints 2.0 This example creates a binary-valued constraint satisfaction problem with two constraints, :math:`a = b` and :math:`b \\ne c`, and builds a binary quadratic model with a minimum energy gap of 4. Note that in this case the conversion to binary quadratic model adds two ancillary variables that must be minimized over when solving. >>> import dwavebinarycsp >>> import operator >>> import itertools >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> bqm = dwavebinarycsp.stitch(csp, min_classical_gap=4.0) >>> list(bqm) # # doctest: +SKIP ['a', 'aux1', 'aux0', 'b', 'c'] >>> min([bqm.energy({'a': 0, 'b': 0, 'c': 1, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # satisfies csp -6.0 >>> min([bqm.energy({'a': 0, 'b': 0, 'c': 0, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # violates one constraint -2.0 >>> min([bqm.energy({'a': 1, 'b': 0, 'c': 0, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # violates two constraints 2.0 This example finds for the previous example the minimum graph size. >>> import dwavebinarycsp >>> import operator >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> for n in range(8, 1, -1): ... try: ... bqm = dwavebinarycsp.stitch(csp, min_classical_gap=4.0, max_graph_size=n) ... except dwavebinarycsp.exceptions.ImpossibleBQM: ... print(n+1) ... 3
dwavebinarycsp/compilers/stitcher.py
def stitch(csp, min_classical_gap=2.0, max_graph_size=8): """Build a binary quadratic model with minimal energy levels at solutions to the specified constraint satisfaction problem. Args: csp (:obj:`.ConstraintSatisfactionProblem`): Constraint satisfaction problem. min_classical_gap (float, optional, default=2.0): Minimum energy gap from ground. Each constraint violated by the solution increases the energy level of the binary quadratic model by at least this much relative to ground energy. max_graph_size (int, optional, default=8): Maximum number of variables in the binary quadratic model that can be used to represent a single constraint. Returns: :class:`~dimod.BinaryQuadraticModel` Notes: For a `min_classical_gap` > 2 or constraints with more than two variables, requires access to factories from the penaltymodel_ ecosystem to construct the binary quadratic model. .. _penaltymodel: https://github.com/dwavesystems/penaltymodel Examples: This example creates a binary-valued constraint satisfaction problem with two constraints, :math:`a = b` and :math:`b \\ne c`, and builds a binary quadratic model with a minimum energy level of -2 such that each constraint violation by a solution adds the default minimum energy gap. >>> import dwavebinarycsp >>> import operator >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> bqm = dwavebinarycsp.stitch(csp) >>> bqm.energy({'a': 0, 'b': 0, 'c': 1}) # satisfies csp -2.0 >>> bqm.energy({'a': 0, 'b': 0, 'c': 0}) # violates one constraint 0.0 >>> bqm.energy({'a': 1, 'b': 0, 'c': 0}) # violates two constraints 2.0 This example creates a binary-valued constraint satisfaction problem with two constraints, :math:`a = b` and :math:`b \\ne c`, and builds a binary quadratic model with a minimum energy gap of 4. Note that in this case the conversion to binary quadratic model adds two ancillary variables that must be minimized over when solving. >>> import dwavebinarycsp >>> import operator >>> import itertools >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> bqm = dwavebinarycsp.stitch(csp, min_classical_gap=4.0) >>> list(bqm) # # doctest: +SKIP ['a', 'aux1', 'aux0', 'b', 'c'] >>> min([bqm.energy({'a': 0, 'b': 0, 'c': 1, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # satisfies csp -6.0 >>> min([bqm.energy({'a': 0, 'b': 0, 'c': 0, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # violates one constraint -2.0 >>> min([bqm.energy({'a': 1, 'b': 0, 'c': 0, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # violates two constraints 2.0 This example finds for the previous example the minimum graph size. >>> import dwavebinarycsp >>> import operator >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> for n in range(8, 1, -1): ... try: ... bqm = dwavebinarycsp.stitch(csp, min_classical_gap=4.0, max_graph_size=n) ... except dwavebinarycsp.exceptions.ImpossibleBQM: ... print(n+1) ... 
3 """ # ensure we have penaltymodel factory available try: dwavebinarycsp.assert_penaltymodel_factory_available() except AssertionError as e: raise RuntimeError(e) def aux_factory(): for i in count(): yield 'aux{}'.format(i) aux = aux_factory() bqm = dimod.BinaryQuadraticModel.empty(csp.vartype) # developer note: we could cache them and relabel, for now though let's do the simple thing # penalty_models = {} for const in csp.constraints: configurations = const.configurations if len(const.variables) > max_graph_size: msg = ("The given csp contains a constraint {const} with {num_var} variables. " "This cannot be mapped to a graph with {max_graph_size} nodes. " "Consider checking whether your constraint is irreducible." "").format(const=const, num_var=len(const.variables), max_graph_size=max_graph_size) raise ImpossibleBQM(msg) pmodel = None if len(const) == 0: # empty constraint continue if min_classical_gap <= 2.0: if len(const) == 1 and max_graph_size >= 1: bqm.update(_bqm_from_1sat(const)) continue elif len(const) == 2 and max_graph_size >= 2: bqm.update(_bqm_from_2sat(const)) continue # developer note: we could cache them and relabel, for now though let's do the simple thing # if configurations in penalty_models: # raise NotImplementedError for G in iter_complete_graphs(const.variables, max_graph_size + 1, aux): # construct a specification spec = pm.Specification( graph=G, decision_variables=const.variables, feasible_configurations=configurations, min_classical_gap=min_classical_gap, vartype=csp.vartype ) # try to use the penaltymodel ecosystem try: pmodel = pm.get_penalty_model(spec) except pm.ImpossiblePenaltyModel: # hopefully adding more variables will make it possible continue if pmodel.classical_gap >= min_classical_gap: break # developer note: we could cache them and relabel, for now though let's do the simple thing # penalty_models[configurations] = pmodel else: msg = ("No penalty model can be build for constraint {}".format(const)) raise ImpossibleBQM(msg) bqm.update(pmodel.model) return bqm
def stitch(csp, min_classical_gap=2.0, max_graph_size=8): """Build a binary quadratic model with minimal energy levels at solutions to the specified constraint satisfaction problem. Args: csp (:obj:`.ConstraintSatisfactionProblem`): Constraint satisfaction problem. min_classical_gap (float, optional, default=2.0): Minimum energy gap from ground. Each constraint violated by the solution increases the energy level of the binary quadratic model by at least this much relative to ground energy. max_graph_size (int, optional, default=8): Maximum number of variables in the binary quadratic model that can be used to represent a single constraint. Returns: :class:`~dimod.BinaryQuadraticModel` Notes: For a `min_classical_gap` > 2 or constraints with more than two variables, requires access to factories from the penaltymodel_ ecosystem to construct the binary quadratic model. .. _penaltymodel: https://github.com/dwavesystems/penaltymodel Examples: This example creates a binary-valued constraint satisfaction problem with two constraints, :math:`a = b` and :math:`b \\ne c`, and builds a binary quadratic model with a minimum energy level of -2 such that each constraint violation by a solution adds the default minimum energy gap. >>> import dwavebinarycsp >>> import operator >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> bqm = dwavebinarycsp.stitch(csp) >>> bqm.energy({'a': 0, 'b': 0, 'c': 1}) # satisfies csp -2.0 >>> bqm.energy({'a': 0, 'b': 0, 'c': 0}) # violates one constraint 0.0 >>> bqm.energy({'a': 1, 'b': 0, 'c': 0}) # violates two constraints 2.0 This example creates a binary-valued constraint satisfaction problem with two constraints, :math:`a = b` and :math:`b \\ne c`, and builds a binary quadratic model with a minimum energy gap of 4. Note that in this case the conversion to binary quadratic model adds two ancillary variables that must be minimized over when solving. >>> import dwavebinarycsp >>> import operator >>> import itertools >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> bqm = dwavebinarycsp.stitch(csp, min_classical_gap=4.0) >>> list(bqm) # # doctest: +SKIP ['a', 'aux1', 'aux0', 'b', 'c'] >>> min([bqm.energy({'a': 0, 'b': 0, 'c': 1, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # satisfies csp -6.0 >>> min([bqm.energy({'a': 0, 'b': 0, 'c': 0, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # violates one constraint -2.0 >>> min([bqm.energy({'a': 1, 'b': 0, 'c': 0, 'aux0': aux0, 'aux1': aux1}) for ... aux0, aux1 in list(itertools.product([0, 1], repeat=2))]) # violates two constraints 2.0 This example finds for the previous example the minimum graph size. >>> import dwavebinarycsp >>> import operator >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(operator.eq, ['a', 'b']) # a == b >>> csp.add_constraint(operator.ne, ['b', 'c']) # b != c >>> for n in range(8, 1, -1): ... try: ... bqm = dwavebinarycsp.stitch(csp, min_classical_gap=4.0, max_graph_size=n) ... except dwavebinarycsp.exceptions.ImpossibleBQM: ... print(n+1) ... 
3 """ # ensure we have penaltymodel factory available try: dwavebinarycsp.assert_penaltymodel_factory_available() except AssertionError as e: raise RuntimeError(e) def aux_factory(): for i in count(): yield 'aux{}'.format(i) aux = aux_factory() bqm = dimod.BinaryQuadraticModel.empty(csp.vartype) # developer note: we could cache them and relabel, for now though let's do the simple thing # penalty_models = {} for const in csp.constraints: configurations = const.configurations if len(const.variables) > max_graph_size: msg = ("The given csp contains a constraint {const} with {num_var} variables. " "This cannot be mapped to a graph with {max_graph_size} nodes. " "Consider checking whether your constraint is irreducible." "").format(const=const, num_var=len(const.variables), max_graph_size=max_graph_size) raise ImpossibleBQM(msg) pmodel = None if len(const) == 0: # empty constraint continue if min_classical_gap <= 2.0: if len(const) == 1 and max_graph_size >= 1: bqm.update(_bqm_from_1sat(const)) continue elif len(const) == 2 and max_graph_size >= 2: bqm.update(_bqm_from_2sat(const)) continue # developer note: we could cache them and relabel, for now though let's do the simple thing # if configurations in penalty_models: # raise NotImplementedError for G in iter_complete_graphs(const.variables, max_graph_size + 1, aux): # construct a specification spec = pm.Specification( graph=G, decision_variables=const.variables, feasible_configurations=configurations, min_classical_gap=min_classical_gap, vartype=csp.vartype ) # try to use the penaltymodel ecosystem try: pmodel = pm.get_penalty_model(spec) except pm.ImpossiblePenaltyModel: # hopefully adding more variables will make it possible continue if pmodel.classical_gap >= min_classical_gap: break # developer note: we could cache them and relabel, for now though let's do the simple thing # penalty_models[configurations] = pmodel else: msg = ("No penalty model can be build for constraint {}".format(const)) raise ImpossibleBQM(msg) bqm.update(pmodel.model) return bqm
[ "Build", "a", "binary", "quadratic", "model", "with", "minimal", "energy", "levels", "at", "solutions", "to", "the", "specified", "constraint", "satisfaction", "problem", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/compilers/stitcher.py#L34-L196
[ "def", "stitch", "(", "csp", ",", "min_classical_gap", "=", "2.0", ",", "max_graph_size", "=", "8", ")", ":", "# ensure we have penaltymodel factory available", "try", ":", "dwavebinarycsp", ".", "assert_penaltymodel_factory_available", "(", ")", "except", "AssertionError", "as", "e", ":", "raise", "RuntimeError", "(", "e", ")", "def", "aux_factory", "(", ")", ":", "for", "i", "in", "count", "(", ")", ":", "yield", "'aux{}'", ".", "format", "(", "i", ")", "aux", "=", "aux_factory", "(", ")", "bqm", "=", "dimod", ".", "BinaryQuadraticModel", ".", "empty", "(", "csp", ".", "vartype", ")", "# developer note: we could cache them and relabel, for now though let's do the simple thing", "# penalty_models = {}", "for", "const", "in", "csp", ".", "constraints", ":", "configurations", "=", "const", ".", "configurations", "if", "len", "(", "const", ".", "variables", ")", ">", "max_graph_size", ":", "msg", "=", "(", "\"The given csp contains a constraint {const} with {num_var} variables. \"", "\"This cannot be mapped to a graph with {max_graph_size} nodes. \"", "\"Consider checking whether your constraint is irreducible.\"", "\"\"", ")", ".", "format", "(", "const", "=", "const", ",", "num_var", "=", "len", "(", "const", ".", "variables", ")", ",", "max_graph_size", "=", "max_graph_size", ")", "raise", "ImpossibleBQM", "(", "msg", ")", "pmodel", "=", "None", "if", "len", "(", "const", ")", "==", "0", ":", "# empty constraint", "continue", "if", "min_classical_gap", "<=", "2.0", ":", "if", "len", "(", "const", ")", "==", "1", "and", "max_graph_size", ">=", "1", ":", "bqm", ".", "update", "(", "_bqm_from_1sat", "(", "const", ")", ")", "continue", "elif", "len", "(", "const", ")", "==", "2", "and", "max_graph_size", ">=", "2", ":", "bqm", ".", "update", "(", "_bqm_from_2sat", "(", "const", ")", ")", "continue", "# developer note: we could cache them and relabel, for now though let's do the simple thing", "# if configurations in penalty_models:", "# raise NotImplementedError", "for", "G", "in", "iter_complete_graphs", "(", "const", ".", "variables", ",", "max_graph_size", "+", "1", ",", "aux", ")", ":", "# construct a specification", "spec", "=", "pm", ".", "Specification", "(", "graph", "=", "G", ",", "decision_variables", "=", "const", ".", "variables", ",", "feasible_configurations", "=", "configurations", ",", "min_classical_gap", "=", "min_classical_gap", ",", "vartype", "=", "csp", ".", "vartype", ")", "# try to use the penaltymodel ecosystem", "try", ":", "pmodel", "=", "pm", ".", "get_penalty_model", "(", "spec", ")", "except", "pm", ".", "ImpossiblePenaltyModel", ":", "# hopefully adding more variables will make it possible", "continue", "if", "pmodel", ".", "classical_gap", ">=", "min_classical_gap", ":", "break", "# developer note: we could cache them and relabel, for now though let's do the simple thing", "# penalty_models[configurations] = pmodel", "else", ":", "msg", "=", "(", "\"No penalty model can be build for constraint {}\"", ".", "format", "(", "const", ")", ")", "raise", "ImpossibleBQM", "(", "msg", ")", "bqm", ".", "update", "(", "pmodel", ".", "model", ")", "return", "bqm" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
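A sketch that stitches the two-constraint CSP from the record's docstring and brute-forces the result with dimod, assuming `dwavebinarycsp` (with at least one penaltymodel factory) and `dimod` are installed:

import operator
import dimod
import dwavebinarycsp

csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
csp.add_constraint(operator.eq, ['a', 'b'])   # a == b
csp.add_constraint(operator.ne, ['b', 'c'])   # b != c

bqm = dwavebinarycsp.stitch(csp)

# enumerate all assignments and confirm the ground state satisfies the CSP
best = dimod.ExactSolver().sample(bqm).first
print(best.energy)             # -2.0 with the default min_classical_gap, per the record's docstring
print(csp.check(best.sample))  # True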
valid
_bqm_from_1sat
create a bqm for a constraint with only one variable bqm will have exactly classical gap 2.
dwavebinarycsp/compilers/stitcher.py
def _bqm_from_1sat(constraint): """create a bqm for a constraint with only one variable bqm will have exactly classical gap 2. """ configurations = constraint.configurations num_configurations = len(configurations) bqm = dimod.BinaryQuadraticModel.empty(constraint.vartype) if num_configurations == 1: val, = next(iter(configurations)) v, = constraint.variables bqm.add_variable(v, -1 if val > 0 else +1, vartype=dimod.SPIN) else: bqm.add_variables_from((v, 0.0) for v in constraint.variables) return bqm
def _bqm_from_1sat(constraint): """create a bqm for a constraint with only one variable bqm will have exactly classical gap 2. """ configurations = constraint.configurations num_configurations = len(configurations) bqm = dimod.BinaryQuadraticModel.empty(constraint.vartype) if num_configurations == 1: val, = next(iter(configurations)) v, = constraint.variables bqm.add_variable(v, -1 if val > 0 else +1, vartype=dimod.SPIN) else: bqm.add_variables_from((v, 0.0) for v in constraint.variables) return bqm
[ "create", "a", "bqm", "for", "a", "constraint", "with", "only", "one", "variable" ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/compilers/stitcher.py#L199-L216
[ "def", "_bqm_from_1sat", "(", "constraint", ")", ":", "configurations", "=", "constraint", ".", "configurations", "num_configurations", "=", "len", "(", "configurations", ")", "bqm", "=", "dimod", ".", "BinaryQuadraticModel", ".", "empty", "(", "constraint", ".", "vartype", ")", "if", "num_configurations", "==", "1", ":", "val", ",", "=", "next", "(", "iter", "(", "configurations", ")", ")", "v", ",", "=", "constraint", ".", "variables", "bqm", ".", "add_variable", "(", "v", ",", "-", "1", "if", "val", ">", "0", "else", "+", "1", ",", "vartype", "=", "dimod", ".", "SPIN", ")", "else", ":", "bqm", ".", "add_variables_from", "(", "(", "v", ",", "0.0", ")", "for", "v", "in", "constraint", ".", "variables", ")", "return", "bqm" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
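A sketch checking the "classical gap 2" claim in the `_bqm_from_1sat` record above by enumerating both assignments of a one-variable constraint; this imports a private helper from the module named in the record's path and assumes `dwavebinarycsp` is installed:

import dwavebinarycsp
from dwavebinarycsp.compilers.stitcher import _bqm_from_1sat

# single-variable constraint that fixes v to 1
const = dwavebinarycsp.Constraint.from_configurations([(1,)], ['v'], dwavebinarycsp.BINARY)
bqm = _bqm_from_1sat(const)

# the infeasible assignment lies 2.0 above the feasible one
print(bqm.energy({'v': 1}))   # expected -1.0
print(bqm.energy({'v': 0}))   # expected +1.0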
valid
_bqm_from_2sat
create a bqm for a constraint with two variables. bqm will have exactly classical gap 2.
dwavebinarycsp/compilers/stitcher.py
def _bqm_from_2sat(constraint): """create a bqm for a constraint with two variables. bqm will have exactly classical gap 2. """ configurations = constraint.configurations variables = constraint.variables vartype = constraint.vartype u, v = constraint.variables # if all configurations are present, then nothing is infeasible and the bqm is just all # 0.0s if len(configurations) == 4: return dimod.BinaryQuadraticModel.empty(constraint.vartype) # check if the constraint is irreducible, and if so, build the bqm for its two # components components = irreducible_components(constraint) if len(components) > 1: const0 = Constraint.from_configurations(((config[0],) for config in configurations), (u,), vartype) const1 = Constraint.from_configurations(((config[1],) for config in configurations), (v,), vartype) bqm = _bqm_from_1sat(const0) bqm.update(_bqm_from_1sat(const1)) return bqm assert len(configurations) > 1, "single configurations should be irreducible" # if it is not irreducible, and there are infeasible configurations, then it is time to # start building a bqm bqm = dimod.BinaryQuadraticModel.empty(vartype) # if the constraint is not irreducible and has two configurations, then it is either eq or ne if all(operator.eq(*config) for config in configurations): bqm.add_interaction(u, v, -1, vartype=dimod.SPIN) # equality elif all(operator.ne(*config) for config in configurations): bqm.add_interaction(u, v, +1, vartype=dimod.SPIN) # inequality elif (1, 1) not in configurations: bqm.add_interaction(u, v, 2, vartype=dimod.BINARY) # penalize (1, 1) elif (-1, +1) not in configurations and (0, 1) not in configurations: bqm.add_interaction(u, v, -2, vartype=dimod.BINARY) bqm.add_variable(v, 2, vartype=dimod.BINARY) elif (+1, -1) not in configurations and (1, 0) not in configurations: bqm.add_interaction(u, v, -2, vartype=dimod.BINARY) bqm.add_variable(u, 2, vartype=dimod.BINARY) else: # (0, 0) not in configurations bqm.add_interaction(u, v, 2, vartype=dimod.BINARY) bqm.add_variable(u, -2, vartype=dimod.BINARY) bqm.add_variable(v, -2, vartype=dimod.BINARY) return bqm
def _bqm_from_2sat(constraint): """create a bqm for a constraint with two variables. bqm will have exactly classical gap 2. """ configurations = constraint.configurations variables = constraint.variables vartype = constraint.vartype u, v = constraint.variables # if all configurations are present, then nothing is infeasible and the bqm is just all # 0.0s if len(configurations) == 4: return dimod.BinaryQuadraticModel.empty(constraint.vartype) # check if the constraint is irreducible, and if so, build the bqm for its two # components components = irreducible_components(constraint) if len(components) > 1: const0 = Constraint.from_configurations(((config[0],) for config in configurations), (u,), vartype) const1 = Constraint.from_configurations(((config[1],) for config in configurations), (v,), vartype) bqm = _bqm_from_1sat(const0) bqm.update(_bqm_from_1sat(const1)) return bqm assert len(configurations) > 1, "single configurations should be irreducible" # if it is not irreducible, and there are infeasible configurations, then it is time to # start building a bqm bqm = dimod.BinaryQuadraticModel.empty(vartype) # if the constraint is not irreducible and has two configurations, then it is either eq or ne if all(operator.eq(*config) for config in configurations): bqm.add_interaction(u, v, -1, vartype=dimod.SPIN) # equality elif all(operator.ne(*config) for config in configurations): bqm.add_interaction(u, v, +1, vartype=dimod.SPIN) # inequality elif (1, 1) not in configurations: bqm.add_interaction(u, v, 2, vartype=dimod.BINARY) # penalize (1, 1) elif (-1, +1) not in configurations and (0, 1) not in configurations: bqm.add_interaction(u, v, -2, vartype=dimod.BINARY) bqm.add_variable(v, 2, vartype=dimod.BINARY) elif (+1, -1) not in configurations and (1, 0) not in configurations: bqm.add_interaction(u, v, -2, vartype=dimod.BINARY) bqm.add_variable(u, 2, vartype=dimod.BINARY) else: # (0, 0) not in configurations bqm.add_interaction(u, v, 2, vartype=dimod.BINARY) bqm.add_variable(u, -2, vartype=dimod.BINARY) bqm.add_variable(v, -2, vartype=dimod.BINARY) return bqm
[ "create", "a", "bqm", "for", "a", "constraint", "with", "two", "variables", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/compilers/stitcher.py#L219-L271
[ "def", "_bqm_from_2sat", "(", "constraint", ")", ":", "configurations", "=", "constraint", ".", "configurations", "variables", "=", "constraint", ".", "variables", "vartype", "=", "constraint", ".", "vartype", "u", ",", "v", "=", "constraint", ".", "variables", "# if all configurations are present, then nothing is infeasible and the bqm is just all", "# 0.0s", "if", "len", "(", "configurations", ")", "==", "4", ":", "return", "dimod", ".", "BinaryQuadraticModel", ".", "empty", "(", "constraint", ".", "vartype", ")", "# check if the constraint is irreducible, and if so, build the bqm for its two", "# components", "components", "=", "irreducible_components", "(", "constraint", ")", "if", "len", "(", "components", ")", ">", "1", ":", "const0", "=", "Constraint", ".", "from_configurations", "(", "(", "(", "config", "[", "0", "]", ",", ")", "for", "config", "in", "configurations", ")", ",", "(", "u", ",", ")", ",", "vartype", ")", "const1", "=", "Constraint", ".", "from_configurations", "(", "(", "(", "config", "[", "1", "]", ",", ")", "for", "config", "in", "configurations", ")", ",", "(", "v", ",", ")", ",", "vartype", ")", "bqm", "=", "_bqm_from_1sat", "(", "const0", ")", "bqm", ".", "update", "(", "_bqm_from_1sat", "(", "const1", ")", ")", "return", "bqm", "assert", "len", "(", "configurations", ")", ">", "1", ",", "\"single configurations should be irreducible\"", "# if it is not irreducible, and there are infeasible configurations, then it is time to", "# start building a bqm", "bqm", "=", "dimod", ".", "BinaryQuadraticModel", ".", "empty", "(", "vartype", ")", "# if the constraint is not irreducible and has two configurations, then it is either eq or ne", "if", "all", "(", "operator", ".", "eq", "(", "*", "config", ")", "for", "config", "in", "configurations", ")", ":", "bqm", ".", "add_interaction", "(", "u", ",", "v", ",", "-", "1", ",", "vartype", "=", "dimod", ".", "SPIN", ")", "# equality", "elif", "all", "(", "operator", ".", "ne", "(", "*", "config", ")", "for", "config", "in", "configurations", ")", ":", "bqm", ".", "add_interaction", "(", "u", ",", "v", ",", "+", "1", ",", "vartype", "=", "dimod", ".", "SPIN", ")", "# inequality", "elif", "(", "1", ",", "1", ")", "not", "in", "configurations", ":", "bqm", ".", "add_interaction", "(", "u", ",", "v", ",", "2", ",", "vartype", "=", "dimod", ".", "BINARY", ")", "# penalize (1, 1)", "elif", "(", "-", "1", ",", "+", "1", ")", "not", "in", "configurations", "and", "(", "0", ",", "1", ")", "not", "in", "configurations", ":", "bqm", ".", "add_interaction", "(", "u", ",", "v", ",", "-", "2", ",", "vartype", "=", "dimod", ".", "BINARY", ")", "bqm", ".", "add_variable", "(", "v", ",", "2", ",", "vartype", "=", "dimod", ".", "BINARY", ")", "elif", "(", "+", "1", ",", "-", "1", ")", "not", "in", "configurations", "and", "(", "1", ",", "0", ")", "not", "in", "configurations", ":", "bqm", ".", "add_interaction", "(", "u", ",", "v", ",", "-", "2", ",", "vartype", "=", "dimod", ".", "BINARY", ")", "bqm", ".", "add_variable", "(", "u", ",", "2", ",", "vartype", "=", "dimod", ".", "BINARY", ")", "else", ":", "# (0, 0) not in configurations", "bqm", ".", "add_interaction", "(", "u", ",", "v", ",", "2", ",", "vartype", "=", "dimod", ".", "BINARY", ")", "bqm", ".", "add_variable", "(", "u", ",", "-", "2", ",", "vartype", "=", "dimod", ".", "BINARY", ")", "bqm", ".", "add_variable", "(", "v", ",", "-", "2", ",", "vartype", "=", "dimod", ".", "BINARY", ")", "return", "bqm" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
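The helper in the record above guarantees a classical gap of exactly 2 for two-variable constraints. A minimal, editor-added sketch (not part of the dataset record) checks that property through the library's public `stitch` entry point rather than the private helper; it assumes `dwavebinarycsp` is installed and all variable names are illustrative.

# Illustrative only: forbid the (1, 1) configuration of two binary variables and
# verify that the stitched penalty puts the single infeasible assignment at least
# 2.0 above the feasible ground energy.
import itertools

import dwavebinarycsp

csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
csp.add_constraint(lambda a, b: not (a and b), ['a', 'b'])  # a NAND-style 2-SAT constraint

bqm = dwavebinarycsp.stitch(csp)

feasible, infeasible = [], []
for a, b in itertools.product((0, 1), repeat=2):
    sample = {'a': a, 'b': b}
    (feasible if csp.check(sample) else infeasible).append(bqm.energy(sample))

assert min(infeasible) - min(feasible) >= 2.0  # the gap promised by the record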
valid
iter_complete_graphs
Iterate over complete graphs. Args: start (int/iterable): Define the size of the starting graph. If an int, the nodes will be index-labeled, otherwise should be an iterable of node labels. stop (int): Stops yielding graphs when the size equals stop. factory (iterator, optional): If provided, nodes added will be labeled according to the values returned by factory. Otherwise the extra nodes will be index-labeled. Yields: :class:`nx.Graph`
dwavebinarycsp/compilers/stitcher.py
def iter_complete_graphs(start, stop, factory=None): """Iterate over complete graphs. Args: start (int/iterable): Define the size of the starting graph. If an int, the nodes will be index-labeled, otherwise should be an iterable of node labels. stop (int): Stops yielding graphs when the size equals stop. factory (iterator, optional): If provided, nodes added will be labeled according to the values returned by factory. Otherwise the extra nodes will be index-labeled. Yields: :class:`nx.Graph` """ _, nodes = start nodes = list(nodes) # we'll be appending if factory is None: factory = count() while len(nodes) < stop: # we need to construct a new graph each time, this is actually faster than copy and add # the new edges in any case G = nx.complete_graph(nodes) yield G v = next(factory) while v in G: v = next(factory) nodes.append(v)
def iter_complete_graphs(start, stop, factory=None): """Iterate over complete graphs. Args: start (int/iterable): Define the size of the starting graph. If an int, the nodes will be index-labeled, otherwise should be an iterable of node labels. stop (int): Stops yielding graphs when the size equals stop. factory (iterator, optional): If provided, nodes added will be labeled according to the values returned by factory. Otherwise the extra nodes will be index-labeled. Yields: :class:`nx.Graph` """ _, nodes = start nodes = list(nodes) # we'll be appending if factory is None: factory = count() while len(nodes) < stop: # we need to construct a new graph each time, this is actually faster than copy and add # the new edges in any case G = nx.complete_graph(nodes) yield G v = next(factory) while v in G: v = next(factory) nodes.append(v)
[ "Iterate", "over", "complete", "graphs", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/compilers/stitcher.py#L275-L311
[ "def", "iter_complete_graphs", "(", "start", ",", "stop", ",", "factory", "=", "None", ")", ":", "_", ",", "nodes", "=", "start", "nodes", "=", "list", "(", "nodes", ")", "# we'll be appending", "if", "factory", "is", "None", ":", "factory", "=", "count", "(", ")", "while", "len", "(", "nodes", ")", "<", "stop", ":", "# we need to construct a new graph each time, this is actually faster than copy and add", "# the new edges in any case", "G", "=", "nx", ".", "complete_graph", "(", "nodes", ")", "yield", "G", "v", "=", "next", "(", "factory", ")", "while", "v", "in", "G", ":", "v", "=", "next", "(", "factory", ")", "nodes", ".", "append", "(", "v", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
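An editor-added sketch of the same growth pattern. The recorded body unpacks `start` into a `(number, nodes)` pair, which suggests an upstream argument-normalizing wrapper not captured in this record, so the sketch re-implements the loop as a standalone generator; `growing_complete_graphs` is a hypothetical name used only for illustration.

# Illustrative only: yield complete graphs of growing size, labelling the extra
# nodes from a factory iterator, and rebuilding the graph each iteration as the
# record's comment recommends.
from itertools import count

import networkx as nx

def growing_complete_graphs(nodes, stop, factory=None):
    nodes = list(nodes)                 # we'll be appending
    factory = count() if factory is None else factory
    while len(nodes) < stop:
        G = nx.complete_graph(nodes)    # rebuilt from scratch each time
        yield G
        v = next(factory)
        while v in G:                   # skip labels already present
            v = next(factory)
        nodes.append(v)

print([len(G) for G in growing_complete_graphs(['a', 'b', 'c'], stop=6)])  # [3, 4, 5]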
valid
load_cnf
Load a constraint satisfaction problem from a .cnf file.

Args:
    fp (file):
        `.read()`-supporting `file object`_ of a DIMACS CNF formatted_ file.

Returns:
    :obj:`.ConstraintSatisfactionProblem`: a binary-valued SAT problem.

Examples:
    >>> import dwavebinarycsp as dbcsp
    ...
    >>> with open('test.cnf', 'r') as fp: # doctest: +SKIP
    ...     csp = dbcsp.cnf.load_cnf(fp)

.. _file object: https://docs.python.org/3/glossary.html#term-file-object

.. _formatted: http://www.satcompetition.org/2009/format-benchmarks2009.html
dwavebinarycsp/io/cnf.py
def load_cnf(fp): """Load a constraint satisfaction problem from a .cnf file. Args: fp (file, optional): `.write()`-supporting `file object`_ DIMACS CNF formatted_ file. Returns: :obj:`.ConstraintSatisfactionProblem` a binary-valued SAT problem. Examples: >>> import dwavebinarycsp as dbcsp ... >>> with open('test.cnf', 'r') as fp: # doctest: +SKIP ... csp = dbcsp.cnf.load_cnf(fp) .. _file object: https://docs.python.org/3/glossary.html#term-file-object .. _formatted: http://www.satcompetition.org/2009/format-benchmarks2009.html """ fp = iter(fp) # handle lists/tuples/etc csp = ConstraintSatisfactionProblem(dimod.BINARY) # first look for the problem num_clauses = num_variables = 0 problem_pattern = re.compile(_PROBLEM_REGEX) for line in fp: matches = problem_pattern.findall(line) if matches: if len(matches) > 1: raise ValueError nv, nc = matches[0] num_variables, num_clauses = int(nv), int(nc) break # now parse the clauses, picking up where we left off looking for the header clause_pattern = re.compile(_CLAUSE_REGEX) for line in fp: if clause_pattern.match(line) is not None: clause = [int(v) for v in line.split(' ')[:-1]] # line ends with a trailing 0 # -1 is the notation for NOT(1) variables = [abs(v) for v in clause] f = _cnf_or(clause) csp.add_constraint(f, variables) for v in range(1, num_variables+1): csp.add_variable(v) for v in csp.variables: if v > num_variables: msg = ("given .cnf file's header defines variables [1, {}] and {} clauses " "but constraints a reference to variable {}").format(num_variables, num_clauses, v) raise ValueError(msg) if len(csp) != num_clauses: msg = ("given .cnf file's header defines {} " "clauses but the file contains {}").format(num_clauses, len(csp)) raise ValueError(msg) return csp
def load_cnf(fp): """Load a constraint satisfaction problem from a .cnf file. Args: fp (file, optional): `.write()`-supporting `file object`_ DIMACS CNF formatted_ file. Returns: :obj:`.ConstraintSatisfactionProblem` a binary-valued SAT problem. Examples: >>> import dwavebinarycsp as dbcsp ... >>> with open('test.cnf', 'r') as fp: # doctest: +SKIP ... csp = dbcsp.cnf.load_cnf(fp) .. _file object: https://docs.python.org/3/glossary.html#term-file-object .. _formatted: http://www.satcompetition.org/2009/format-benchmarks2009.html """ fp = iter(fp) # handle lists/tuples/etc csp = ConstraintSatisfactionProblem(dimod.BINARY) # first look for the problem num_clauses = num_variables = 0 problem_pattern = re.compile(_PROBLEM_REGEX) for line in fp: matches = problem_pattern.findall(line) if matches: if len(matches) > 1: raise ValueError nv, nc = matches[0] num_variables, num_clauses = int(nv), int(nc) break # now parse the clauses, picking up where we left off looking for the header clause_pattern = re.compile(_CLAUSE_REGEX) for line in fp: if clause_pattern.match(line) is not None: clause = [int(v) for v in line.split(' ')[:-1]] # line ends with a trailing 0 # -1 is the notation for NOT(1) variables = [abs(v) for v in clause] f = _cnf_or(clause) csp.add_constraint(f, variables) for v in range(1, num_variables+1): csp.add_variable(v) for v in csp.variables: if v > num_variables: msg = ("given .cnf file's header defines variables [1, {}] and {} clauses " "but constraints a reference to variable {}").format(num_variables, num_clauses, v) raise ValueError(msg) if len(csp) != num_clauses: msg = ("given .cnf file's header defines {} " "clauses but the file contains {}").format(num_clauses, len(csp)) raise ValueError(msg) return csp
[ "Load", "a", "constraint", "satisfaction", "problem", "from", "a", ".", "cnf", "file", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/io/cnf.py#L29-L95
[ "def", "load_cnf", "(", "fp", ")", ":", "fp", "=", "iter", "(", "fp", ")", "# handle lists/tuples/etc", "csp", "=", "ConstraintSatisfactionProblem", "(", "dimod", ".", "BINARY", ")", "# first look for the problem", "num_clauses", "=", "num_variables", "=", "0", "problem_pattern", "=", "re", ".", "compile", "(", "_PROBLEM_REGEX", ")", "for", "line", "in", "fp", ":", "matches", "=", "problem_pattern", ".", "findall", "(", "line", ")", "if", "matches", ":", "if", "len", "(", "matches", ")", ">", "1", ":", "raise", "ValueError", "nv", ",", "nc", "=", "matches", "[", "0", "]", "num_variables", ",", "num_clauses", "=", "int", "(", "nv", ")", ",", "int", "(", "nc", ")", "break", "# now parse the clauses, picking up where we left off looking for the header", "clause_pattern", "=", "re", ".", "compile", "(", "_CLAUSE_REGEX", ")", "for", "line", "in", "fp", ":", "if", "clause_pattern", ".", "match", "(", "line", ")", "is", "not", "None", ":", "clause", "=", "[", "int", "(", "v", ")", "for", "v", "in", "line", ".", "split", "(", "' '", ")", "[", ":", "-", "1", "]", "]", "# line ends with a trailing 0", "# -1 is the notation for NOT(1)", "variables", "=", "[", "abs", "(", "v", ")", "for", "v", "in", "clause", "]", "f", "=", "_cnf_or", "(", "clause", ")", "csp", ".", "add_constraint", "(", "f", ",", "variables", ")", "for", "v", "in", "range", "(", "1", ",", "num_variables", "+", "1", ")", ":", "csp", ".", "add_variable", "(", "v", ")", "for", "v", "in", "csp", ".", "variables", ":", "if", "v", ">", "num_variables", ":", "msg", "=", "(", "\"given .cnf file's header defines variables [1, {}] and {} clauses \"", "\"but constraints a reference to variable {}\"", ")", ".", "format", "(", "num_variables", ",", "num_clauses", ",", "v", ")", "raise", "ValueError", "(", "msg", ")", "if", "len", "(", "csp", ")", "!=", "num_clauses", ":", "msg", "=", "(", "\"given .cnf file's header defines {} \"", "\"clauses but the file contains {}\"", ")", ".", "format", "(", "num_clauses", ",", "len", "(", "csp", ")", ")", "raise", "ValueError", "(", "msg", ")", "return", "csp" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
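An editor-added usage sketch for the loader above, following the import path shown in the record's own docstring (`dwavebinarycsp.cnf.load_cnf`); the CNF text and the checked assignment are illustrative only.

# Illustrative only: a 3-variable, 2-clause DIMACS CNF loaded from an in-memory
# "file" object. The header `p cnf 3 2` must agree with the clauses that follow,
# because the loader cross-checks the declared variable and clause counts.
import io

import dwavebinarycsp

# clauses: (x1 OR NOT x2) AND (x2 OR x3)
cnf_text = """c toy instance
p cnf 3 2
1 -2 0
2 3 0
"""

csp = dwavebinarycsp.cnf.load_cnf(io.StringIO(cnf_text))

print(len(csp))                       # 2 constraints, one per clause
print(csp.check({1: 1, 2: 0, 3: 1}))  # True: both clauses are satisfied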
valid
and_gate
AND gate. Args: variables (list): Variable labels for the and gate as `[in1, in2, out]`, where `in1, in2` are inputs and `out` the gate's output. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='AND'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of an AND gate. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c'], name='AND1')) >>> csp.check({'a': 1, 'b': 0, 'c': 0}) True
dwavebinarycsp/factories/constraint/gates.py
def and_gate(variables, vartype=dimod.BINARY, name='AND'): """AND gate. Args: variables (list): Variable labels for the and gate as `[in1, in2, out]`, where `in1, in2` are inputs and `out` the gate's output. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='AND'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of an AND gate. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c'], name='AND1')) >>> csp.check({'a': 1, 'b': 0, 'c': 0}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configurations = frozenset([(0, 0, 0), (0, 1, 0), (1, 0, 0), (1, 1, 1)]) def func(in1, in2, out): return (in1 and in2) == out else: # SPIN, vartype is checked by the decorator configurations = frozenset([(-1, -1, -1), (-1, +1, -1), (+1, -1, -1), (+1, +1, +1)]) def func(in1, in2, out): return ((in1 > 0) and (in2 > 0)) == (out > 0) return Constraint(func, configurations, variables, vartype=vartype, name=name)
def and_gate(variables, vartype=dimod.BINARY, name='AND'): """AND gate. Args: variables (list): Variable labels for the and gate as `[in1, in2, out]`, where `in1, in2` are inputs and `out` the gate's output. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='AND'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of an AND gate. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c'], name='AND1')) >>> csp.check({'a': 1, 'b': 0, 'c': 0}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configurations = frozenset([(0, 0, 0), (0, 1, 0), (1, 0, 0), (1, 1, 1)]) def func(in1, in2, out): return (in1 and in2) == out else: # SPIN, vartype is checked by the decorator configurations = frozenset([(-1, -1, -1), (-1, +1, -1), (+1, -1, -1), (+1, +1, +1)]) def func(in1, in2, out): return ((in1 > 0) and (in2 > 0)) == (out > 0) return Constraint(func, configurations, variables, vartype=vartype, name=name)
[ "AND", "gate", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/factories/constraint/gates.py#L29-L74
[ "def", "and_gate", "(", "variables", ",", "vartype", "=", "dimod", ".", "BINARY", ",", "name", "=", "'AND'", ")", ":", "variables", "=", "tuple", "(", "variables", ")", "if", "vartype", "is", "dimod", ".", "BINARY", ":", "configurations", "=", "frozenset", "(", "[", "(", "0", ",", "0", ",", "0", ")", ",", "(", "0", ",", "1", ",", "0", ")", ",", "(", "1", ",", "0", ",", "0", ")", ",", "(", "1", ",", "1", ",", "1", ")", "]", ")", "def", "func", "(", "in1", ",", "in2", ",", "out", ")", ":", "return", "(", "in1", "and", "in2", ")", "==", "out", "else", ":", "# SPIN, vartype is checked by the decorator", "configurations", "=", "frozenset", "(", "[", "(", "-", "1", ",", "-", "1", ",", "-", "1", ")", ",", "(", "-", "1", ",", "+", "1", ",", "-", "1", ")", ",", "(", "+", "1", ",", "-", "1", ",", "-", "1", ")", ",", "(", "+", "1", ",", "+", "1", ",", "+", "1", ")", "]", ")", "def", "func", "(", "in1", ",", "in2", ",", "out", ")", ":", "return", "(", "(", "in1", ">", "0", ")", "and", "(", "in2", ">", "0", ")", ")", "==", "(", "out", ">", "0", ")", "return", "Constraint", "(", "func", ",", "configurations", ",", "variables", ",", "vartype", "=", "vartype", ",", "name", "=", "name", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
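An editor-added sketch that goes one step past the record's docstring example: stitch the AND constraint into a binary quadratic model and brute-force it with `dimod.ExactSolver`. It assumes a penalty-model backend for `stitch` is installed; the lowest-energy samples should be exactly the four valid AND rows.

# Illustrative only: the stitched BQM's ground states coincide with the gate's
# feasible configurations.
import dimod
import dwavebinarycsp
from dwavebinarycsp.factories.constraint.gates import and_gate

csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
csp.add_constraint(and_gate(['in1', 'in2', 'out']))

bqm = dwavebinarycsp.stitch(csp)
sampleset = dimod.ExactSolver().sample(bqm)

ground = min(energy for _, energy in sampleset.data(['sample', 'energy']))
for sample, energy in sampleset.data(['sample', 'energy']):
    if energy < ground + 1e-6:
        print(sample, csp.check(sample))  # each printed ground state satisfies the gate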
valid
xor_gate
XOR gate.

Args:
    variables (list): Variable labels for the XOR gate as `[in1, in2, out]`,
        where `in1, in2` are inputs and `out` the gate's output.
    vartype (Vartype, optional, default='BINARY'): Variable type. Accepted
        input values:

        * Vartype.SPIN, 'SPIN', {-1, 1}
        * Vartype.BINARY, 'BINARY', {0, 1}
    name (str, optional, default='XOR'): Name for the constraint.

Returns:
    Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are
    assigned values that match the valid states of an XOR gate.

Examples:
    >>> import dwavebinarycsp
    >>> import dwavebinarycsp.factories.constraint.gates as gates
    >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
    >>> csp.add_constraint(gates.xor_gate(['x', 'y', 'z'], name='XOR1'))
    >>> csp.check({'x': 1, 'y': 1, 'z': 1})
    False
dwavebinarycsp/factories/constraint/gates.py
def xor_gate(variables, vartype=dimod.BINARY, name='XOR'): """XOR gate. Args: variables (list): Variable labels for the and gate as `[in1, in2, out]`, where `in1, in2` are inputs and `out` the gate's output. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='XOR'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of an XOR gate. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.xor_gate(['x', 'y', 'z'], name='XOR1')) >>> csp.check({'x': 1, 'y': 1, 'z': 1}) False """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0), (0, 1, 1), (1, 0, 1), (1, 1, 0)]) def func(in1, in2, out): return (in1 != in2) == out else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1), (-1, +1, +1), (+1, -1, +1), (+1, +1, -1)]) def func(in1, in2, out): return ((in1 > 0) != (in2 > 0)) == (out > 0) return Constraint(func, configs, variables, vartype=vartype, name=name)
def xor_gate(variables, vartype=dimod.BINARY, name='XOR'): """XOR gate. Args: variables (list): Variable labels for the and gate as `[in1, in2, out]`, where `in1, in2` are inputs and `out` the gate's output. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='XOR'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of an XOR gate. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.xor_gate(['x', 'y', 'z'], name='XOR1')) >>> csp.check({'x': 1, 'y': 1, 'z': 1}) False """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0), (0, 1, 1), (1, 0, 1), (1, 1, 0)]) def func(in1, in2, out): return (in1 != in2) == out else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1), (-1, +1, +1), (+1, -1, +1), (+1, +1, -1)]) def func(in1, in2, out): return ((in1 > 0) != (in2 > 0)) == (out > 0) return Constraint(func, configs, variables, vartype=vartype, name=name)
[ "XOR", "gate", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/factories/constraint/gates.py#L127-L171
[ "def", "xor_gate", "(", "variables", ",", "vartype", "=", "dimod", ".", "BINARY", ",", "name", "=", "'XOR'", ")", ":", "variables", "=", "tuple", "(", "variables", ")", "if", "vartype", "is", "dimod", ".", "BINARY", ":", "configs", "=", "frozenset", "(", "[", "(", "0", ",", "0", ",", "0", ")", ",", "(", "0", ",", "1", ",", "1", ")", ",", "(", "1", ",", "0", ",", "1", ")", ",", "(", "1", ",", "1", ",", "0", ")", "]", ")", "def", "func", "(", "in1", ",", "in2", ",", "out", ")", ":", "return", "(", "in1", "!=", "in2", ")", "==", "out", "else", ":", "# SPIN, vartype is checked by the decorator", "configs", "=", "frozenset", "(", "[", "(", "-", "1", ",", "-", "1", ",", "-", "1", ")", ",", "(", "-", "1", ",", "+", "1", ",", "+", "1", ")", ",", "(", "+", "1", ",", "-", "1", ",", "+", "1", ")", ",", "(", "+", "1", ",", "+", "1", ",", "-", "1", ")", "]", ")", "def", "func", "(", "in1", ",", "in2", ",", "out", ")", ":", "return", "(", "(", "in1", ">", "0", ")", "!=", "(", "in2", ">", "0", ")", ")", "==", "(", "out", ">", "0", ")", "return", "Constraint", "(", "func", ",", "configs", ",", "variables", ",", "vartype", "=", "vartype", ",", "name", "=", "name", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
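An editor-added observation in code form: parity cannot be penalized exactly over only its three variables, so stitching the XOR constraint is expected to introduce at least one auxiliary variable. A hedged sketch, again assuming a penalty-model backend for `stitch` is available.

# Illustrative only: the stitched BQM for XOR should contain more than the three
# gate variables; the extra label(s) are auxiliary variables added during stitching.
import dwavebinarycsp
from dwavebinarycsp.factories.constraint.gates import xor_gate

csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
csp.add_constraint(xor_gate(['in1', 'in2', 'out']))

bqm = dwavebinarycsp.stitch(csp)
print(len(bqm))                        # > 3: auxiliary variable(s) were added
print(sorted(bqm.variables, key=str))  # 'in1', 'in2', 'out' plus auxiliaries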
valid
halfadder_gate
Half adder.

Args:
    variables (list): Variable labels for the half adder as `[in1, in2, sum, carry]`,
        where `in1, in2` are inputs to be added and `sum` and `carry` the
        resultant outputs.
    vartype (Vartype, optional, default='BINARY'): Variable type. Accepted
        input values:

        * Vartype.SPIN, 'SPIN', {-1, 1}
        * Vartype.BINARY, 'BINARY', {0, 1}
    name (str, optional, default='HALF_ADDER'): Name for the constraint.

Returns:
    Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are
    assigned values that match the valid states of a Boolean half adder.

Examples:
    >>> import dwavebinarycsp
    >>> import dwavebinarycsp.factories.constraint.gates as gates
    >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
    >>> csp.add_constraint(gates.halfadder_gate(['a', 'b', 'total', 'carry'], name='HA1'))
    >>> csp.check({'a': 1, 'b': 1, 'total': 0, 'carry': 1})
    True
dwavebinarycsp/factories/constraint/gates.py
def halfadder_gate(variables, vartype=dimod.BINARY, name='HALF_ADDER'): """Half adder. Args: variables (list): Variable labels for the and gate as `[in1, in2, sum, carry]`, where `in1, in2` are inputs to be added and `sum` and 'carry' the resultant outputs. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='HALF_ADDER'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of a Boolean half adder. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.halfadder_gate(['a', 'b', 'total', 'carry'], name='HA1')) >>> csp.check({'a': 1, 'b': 1, 'total': 0, 'carry': 1}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0, 0), (0, 1, 1, 0), (1, 0, 1, 0), (1, 1, 0, 1)]) else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1, -1), (-1, +1, +1, -1), (+1, -1, +1, -1), (+1, +1, -1, +1)]) def func(augend, addend, sum_, carry): total = (augend > 0) + (addend > 0) if total == 0: return (sum_ <= 0) and (carry <= 0) elif total == 1: return (sum_ > 0) and (carry <= 0) elif total == 2: return (sum_ <= 0) and (carry > 0) else: raise ValueError("func recieved unexpected values") return Constraint(func, configs, variables, vartype=vartype, name=name)
def halfadder_gate(variables, vartype=dimod.BINARY, name='HALF_ADDER'): """Half adder. Args: variables (list): Variable labels for the and gate as `[in1, in2, sum, carry]`, where `in1, in2` are inputs to be added and `sum` and 'carry' the resultant outputs. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='HALF_ADDER'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of a Boolean half adder. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.halfadder_gate(['a', 'b', 'total', 'carry'], name='HA1')) >>> csp.check({'a': 1, 'b': 1, 'total': 0, 'carry': 1}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0, 0), (0, 1, 1, 0), (1, 0, 1, 0), (1, 1, 0, 1)]) else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1, -1), (-1, +1, +1, -1), (+1, -1, +1, -1), (+1, +1, -1, +1)]) def func(augend, addend, sum_, carry): total = (augend > 0) + (addend > 0) if total == 0: return (sum_ <= 0) and (carry <= 0) elif total == 1: return (sum_ > 0) and (carry <= 0) elif total == 2: return (sum_ <= 0) and (carry > 0) else: raise ValueError("func recieved unexpected values") return Constraint(func, configs, variables, vartype=vartype, name=name)
[ "Half", "adder", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/factories/constraint/gates.py#L175-L229
[ "def", "halfadder_gate", "(", "variables", ",", "vartype", "=", "dimod", ".", "BINARY", ",", "name", "=", "'HALF_ADDER'", ")", ":", "variables", "=", "tuple", "(", "variables", ")", "if", "vartype", "is", "dimod", ".", "BINARY", ":", "configs", "=", "frozenset", "(", "[", "(", "0", ",", "0", ",", "0", ",", "0", ")", ",", "(", "0", ",", "1", ",", "1", ",", "0", ")", ",", "(", "1", ",", "0", ",", "1", ",", "0", ")", ",", "(", "1", ",", "1", ",", "0", ",", "1", ")", "]", ")", "else", ":", "# SPIN, vartype is checked by the decorator", "configs", "=", "frozenset", "(", "[", "(", "-", "1", ",", "-", "1", ",", "-", "1", ",", "-", "1", ")", ",", "(", "-", "1", ",", "+", "1", ",", "+", "1", ",", "-", "1", ")", ",", "(", "+", "1", ",", "-", "1", ",", "+", "1", ",", "-", "1", ")", ",", "(", "+", "1", ",", "+", "1", ",", "-", "1", ",", "+", "1", ")", "]", ")", "def", "func", "(", "augend", ",", "addend", ",", "sum_", ",", "carry", ")", ":", "total", "=", "(", "augend", ">", "0", ")", "+", "(", "addend", ">", "0", ")", "if", "total", "==", "0", ":", "return", "(", "sum_", "<=", "0", ")", "and", "(", "carry", "<=", "0", ")", "elif", "total", "==", "1", ":", "return", "(", "sum_", ">", "0", ")", "and", "(", "carry", "<=", "0", ")", "elif", "total", "==", "2", ":", "return", "(", "sum_", "<=", "0", ")", "and", "(", "carry", ">", "0", ")", "else", ":", "raise", "ValueError", "(", "\"func recieved unexpected values\"", ")", "return", "Constraint", "(", "func", ",", "configs", ",", "variables", ",", "vartype", "=", "vartype", ",", "name", "=", "name", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
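An editor-added sketch that checks the half-adder truth table directly against the constraint's `check` method; everything except the imported factory is illustrative.

# Illustrative only: for every input pair the constraint accepts exactly the
# (sum, carry) pair that encodes in1 + in2 in binary.
import itertools

from dwavebinarycsp.factories.constraint.gates import halfadder_gate

ha = halfadder_gate(['in1', 'in2', 'sum', 'carry'])

for in1, in2 in itertools.product((0, 1), repeat=2):
    total = in1 + in2
    assert ha.check({'in1': in1, 'in2': in2, 'sum': total % 2, 'carry': total // 2})
print("half-adder table verified")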
valid
fulladder_gate
Full adder.

Args:
    variables (list): Variable labels for the full adder as `[in1, in2, in3, sum, carry]`,
        where `in1, in2, in3` are inputs to be added and `sum` and `carry` the
        resultant outputs.
    vartype (Vartype, optional, default='BINARY'): Variable type. Accepted
        input values:

        * Vartype.SPIN, 'SPIN', {-1, 1}
        * Vartype.BINARY, 'BINARY', {0, 1}
    name (str, optional, default='FULL_ADDER'): Name for the constraint.

Returns:
    Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are
    assigned values that match the valid states of a Boolean full adder.

Examples:
    >>> import dwavebinarycsp
    >>> import dwavebinarycsp.factories.constraint.gates as gates
    >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
    >>> csp.add_constraint(gates.fulladder_gate(['a', 'b', 'c_in', 'total', 'c_out'], name='FA1'))
    >>> csp.check({'a': 1, 'b': 0, 'c_in': 1, 'total': 0, 'c_out': 1})
    True
dwavebinarycsp/factories/constraint/gates.py
def fulladder_gate(variables, vartype=dimod.BINARY, name='FULL_ADDER'): """Full adder. Args: variables (list): Variable labels for the and gate as `[in1, in2, in3, sum, carry]`, where `in1, in2, in3` are inputs to be added and `sum` and 'carry' the resultant outputs. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='FULL_ADDER'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of a Boolean full adder. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.fulladder_gate(['a', 'b', 'c_in', 'total', 'c_out'], name='FA1')) >>> csp.check({'a': 1, 'b': 0, 'c_in': 1, 'total': 0, 'c_out': 1}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0, 0, 0), (0, 0, 1, 1, 0), (0, 1, 0, 1, 0), (0, 1, 1, 0, 1), (1, 0, 0, 1, 0), (1, 0, 1, 0, 1), (1, 1, 0, 0, 1), (1, 1, 1, 1, 1)]) else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1, -1, -1), (-1, -1, +1, +1, -1), (-1, +1, -1, +1, -1), (-1, +1, +1, -1, +1), (+1, -1, -1, +1, -1), (+1, -1, +1, -1, +1), (+1, +1, -1, -1, +1), (+1, +1, +1, +1, +1)]) def func(in1, in2, in3, sum_, carry): total = (in1 > 0) + (in2 > 0) + (in3 > 0) if total == 0: return (sum_ <= 0) and (carry <= 0) elif total == 1: return (sum_ > 0) and (carry <= 0) elif total == 2: return (sum_ <= 0) and (carry > 0) elif total == 3: return (sum_ > 0) and (carry > 0) else: raise ValueError("func recieved unexpected values") return Constraint(func, configs, variables, vartype=vartype, name=name)
def fulladder_gate(variables, vartype=dimod.BINARY, name='FULL_ADDER'): """Full adder. Args: variables (list): Variable labels for the and gate as `[in1, in2, in3, sum, carry]`, where `in1, in2, in3` are inputs to be added and `sum` and 'carry' the resultant outputs. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='FULL_ADDER'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of a Boolean full adder. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.fulladder_gate(['a', 'b', 'c_in', 'total', 'c_out'], name='FA1')) >>> csp.check({'a': 1, 'b': 0, 'c_in': 1, 'total': 0, 'c_out': 1}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0, 0, 0), (0, 0, 1, 1, 0), (0, 1, 0, 1, 0), (0, 1, 1, 0, 1), (1, 0, 0, 1, 0), (1, 0, 1, 0, 1), (1, 1, 0, 0, 1), (1, 1, 1, 1, 1)]) else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1, -1, -1), (-1, -1, +1, +1, -1), (-1, +1, -1, +1, -1), (-1, +1, +1, -1, +1), (+1, -1, -1, +1, -1), (+1, -1, +1, -1, +1), (+1, +1, -1, -1, +1), (+1, +1, +1, +1, +1)]) def func(in1, in2, in3, sum_, carry): total = (in1 > 0) + (in2 > 0) + (in3 > 0) if total == 0: return (sum_ <= 0) and (carry <= 0) elif total == 1: return (sum_ > 0) and (carry <= 0) elif total == 2: return (sum_ <= 0) and (carry > 0) elif total == 3: return (sum_ > 0) and (carry > 0) else: raise ValueError("func recieved unexpected values") return Constraint(func, configs, variables, vartype=vartype, name=name)
[ "Full", "adder", "." ]
dwavesystems/dwavebinarycsp
python
https://github.com/dwavesystems/dwavebinarycsp/blob/d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2/dwavebinarycsp/factories/constraint/gates.py#L233-L297
[ "def", "fulladder_gate", "(", "variables", ",", "vartype", "=", "dimod", ".", "BINARY", ",", "name", "=", "'FULL_ADDER'", ")", ":", "variables", "=", "tuple", "(", "variables", ")", "if", "vartype", "is", "dimod", ".", "BINARY", ":", "configs", "=", "frozenset", "(", "[", "(", "0", ",", "0", ",", "0", ",", "0", ",", "0", ")", ",", "(", "0", ",", "0", ",", "1", ",", "1", ",", "0", ")", ",", "(", "0", ",", "1", ",", "0", ",", "1", ",", "0", ")", ",", "(", "0", ",", "1", ",", "1", ",", "0", ",", "1", ")", ",", "(", "1", ",", "0", ",", "0", ",", "1", ",", "0", ")", ",", "(", "1", ",", "0", ",", "1", ",", "0", ",", "1", ")", ",", "(", "1", ",", "1", ",", "0", ",", "0", ",", "1", ")", ",", "(", "1", ",", "1", ",", "1", ",", "1", ",", "1", ")", "]", ")", "else", ":", "# SPIN, vartype is checked by the decorator", "configs", "=", "frozenset", "(", "[", "(", "-", "1", ",", "-", "1", ",", "-", "1", ",", "-", "1", ",", "-", "1", ")", ",", "(", "-", "1", ",", "-", "1", ",", "+", "1", ",", "+", "1", ",", "-", "1", ")", ",", "(", "-", "1", ",", "+", "1", ",", "-", "1", ",", "+", "1", ",", "-", "1", ")", ",", "(", "-", "1", ",", "+", "1", ",", "+", "1", ",", "-", "1", ",", "+", "1", ")", ",", "(", "+", "1", ",", "-", "1", ",", "-", "1", ",", "+", "1", ",", "-", "1", ")", ",", "(", "+", "1", ",", "-", "1", ",", "+", "1", ",", "-", "1", ",", "+", "1", ")", ",", "(", "+", "1", ",", "+", "1", ",", "-", "1", ",", "-", "1", ",", "+", "1", ")", ",", "(", "+", "1", ",", "+", "1", ",", "+", "1", ",", "+", "1", ",", "+", "1", ")", "]", ")", "def", "func", "(", "in1", ",", "in2", ",", "in3", ",", "sum_", ",", "carry", ")", ":", "total", "=", "(", "in1", ">", "0", ")", "+", "(", "in2", ">", "0", ")", "+", "(", "in3", ">", "0", ")", "if", "total", "==", "0", ":", "return", "(", "sum_", "<=", "0", ")", "and", "(", "carry", "<=", "0", ")", "elif", "total", "==", "1", ":", "return", "(", "sum_", ">", "0", ")", "and", "(", "carry", "<=", "0", ")", "elif", "total", "==", "2", ":", "return", "(", "sum_", "<=", "0", ")", "and", "(", "carry", ">", "0", ")", "elif", "total", "==", "3", ":", "return", "(", "sum_", ">", "0", ")", "and", "(", "carry", ">", "0", ")", "else", ":", "raise", "ValueError", "(", "\"func recieved unexpected values\"", ")", "return", "Constraint", "(", "func", ",", "configs", ",", "variables", ",", "vartype", "=", "vartype", ",", "name", "=", "name", ")" ]
d6b1e70ceaa8f451d7afaa87ea10c7fc948a64e2
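An editor-added sketch that chains the half adder and full adder from the two records above into a 2-bit ripple-carry adder CSP and checks one addition; the wiring labels (`a0`, `c0`, ...) are illustrative.

# Illustrative only: 2-bit ripple-carry adder checking 3 + 1 = 4
# (binary 11 + 01 = 100).
import dwavebinarycsp
from dwavebinarycsp.factories.constraint.gates import fulladder_gate, halfadder_gate

csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
csp.add_constraint(halfadder_gate(['a0', 'b0', 's0', 'c0']))        # low bit
csp.add_constraint(fulladder_gate(['a1', 'b1', 'c0', 's1', 'c1']))  # high bit with carry-in

sample = {'a0': 1, 'a1': 1,            # a = 3
          'b0': 1, 'b1': 0,            # b = 1
          'c0': 1,                     # internal carry out of the low bit
          's0': 0, 's1': 0, 'c1': 1}   # a + b = 4 -> bits s1 s0 = 00, carry-out 1
print(csp.check(sample))               # True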