Dataset columns:

partition           stringclasses   3 values
func_name           stringlengths   1 - 134
docstring           stringlengths   1 - 46.9k
path                stringlengths   4 - 223
original_string     stringlengths   75 - 104k
code                stringlengths   75 - 104k
docstring_tokens    listlengths     1 - 1.97k
repo                stringlengths   7 - 55
language            stringclasses   1 value
url                 stringlengths   87 - 315
code_tokens         listlengths     19 - 28.4k
sha                 stringlengths   40 - 40
test
RelationsGraph.connexity
A boolean matrix, m[i, j] == True if there is a relation term(i) -> term(j) :return: a np.matrix (len(dictionary), len(dictionary)) of boolean
ieml/dictionary/relations.py
def connexity(self):
    """
    A boolean matrix, m[i, j] == True if there is a relation term(i) -> term(j)
    :return: a np.matrix (len(dictionary), len(dictionary)) of boolean
    """
    return np.matrix(sum(self.relations.values()).todense(), dtype=bool)
[ "A", "boolean", "matrix", "m", "[", "i", "j", "]", "==", "True", "if", "there", "is", "a", "relation", "term", "(", "i", ")", "-", ">", "term", "(", "j", ")", ":", "return", ":", "a", "np", ".", "matrix", "(", "len", "(", "dictionary", ")", "len", "(", "dictionary", "))", "of", "boolean" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/dictionary/relations.py#L96-L101
[ "def", "connexity", "(", "self", ")", ":", "return", "np", ".", "matrix", "(", "sum", "(", "self", ".", "relations", ".", "values", "(", ")", ")", ".", "todense", "(", ")", ",", "dtype", "=", "bool", ")" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
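The pattern here — sum per-relation sparse matrices, densify, cast to bool — can be shown self-contained. A minimal sketch under the assumption of scipy-style sparse relation matrices (the 3-term dictionary and relation names are illustrative, not the ieml API):

import numpy as np
from scipy.sparse import csr_matrix

# Two hypothetical relation matrices over a 3-term dictionary:
# entry (i, j) == 1 encodes "term(i) -> term(j)" for that relation.
contains = csr_matrix(([1], ([0], [1])), shape=(3, 3))
opposed = csr_matrix(([1], ([2], [0])), shape=(3, 3))
relations = {'contains': contains, 'opposed': opposed}

# Summing the per-relation matrices and casting to bool yields
# "there exists some relation term(i) -> term(j)".
connexity = np.asarray(sum(relations.values()).todense(), dtype=bool)
print(connexity)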
test
_resolve_path
path is a mul of coord or a coord
ieml/grammar/paths/tools.py
def _resolve_path(obj, path):
    """path is a mul of coord or a coord"""
    if obj.__class__ not in path.context.accept:
        result = set()
        for ctx in path.context.accept:
            result |= {e for u in obj[ctx] for e in _resolve_path(u, path)}
        return result

    if isinstance(obj, Text):
        if path.index is not None:
            return {obj.children[path.index]}
        return set(obj.children)

    if isinstance(obj, (Fact, Theory)):
        return _resolve_path_tree_graph(obj.tree_graph, path)

    if isinstance(obj, Topic):
        if path.kind == 'r':
            if path.index is not None:
                return {obj.root[path.index]}
            return set(obj.root)
        else:
            if path.index is not None:
                return {obj.flexing[path.index]}
            return set(obj.flexing)
[ "path", "is", "a", "mul", "of", "coord", "or", "a", "coord" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/grammar/paths/tools.py#L77-L103
[ "def", "_resolve_path", "(", "obj", ",", "path", ")", ":", "if", "obj", ".", "__class__", "not", "in", "path", ".", "context", ".", "accept", ":", "result", "=", "set", "(", ")", "for", "ctx", "in", "path", ".", "context", ".", "accept", ":", "result", "|=", "{", "e", "for", "u", "in", "obj", "[", "ctx", "]", "for", "e", "in", "_resolve_path", "(", "u", ",", "path", ")", "}", "return", "result", "if", "isinstance", "(", "obj", ",", "Text", ")", ":", "if", "path", ".", "index", "is", "not", "None", ":", "return", "{", "obj", ".", "children", "[", "path", ".", "index", "]", "}", "return", "set", "(", "obj", ".", "children", ")", "if", "isinstance", "(", "obj", ",", "(", "Fact", ",", "Theory", ")", ")", ":", "return", "_resolve_path_tree_graph", "(", "obj", ".", "tree_graph", ",", "path", ")", "if", "isinstance", "(", "obj", ",", "Topic", ")", ":", "if", "path", ".", "kind", "==", "'r'", ":", "if", "path", ".", "index", "is", "not", "None", ":", "return", "{", "obj", ".", "root", "[", "path", ".", "index", "]", "}", "return", "set", "(", "obj", ".", "root", ")", "else", ":", "if", "path", ".", "index", "is", "not", "None", ":", "return", "{", "obj", ".", "flexing", "[", "path", ".", "index", "]", "}", "return", "set", "(", "obj", ".", "flexing", ")" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
test
_resolve_ctx
Resolve the context of the rules (the type of this element), and building the ieml element. :param rules: :return:
ieml/grammar/paths/tools.py
def _resolve_ctx(rules):
    """
    Resolve the context of the rules (the type of this element), and building the ieml element.
    :param rules:
    :return:
    """
    if not rules:
        raise ResolveError("Missing node definition.")

    # if rules == [(None, e)] --> e
    if len(rules) == 1 and rules[0][0] is None:
        return rules[0][1]

    if any(r[0] is None for r in rules):
        raise ResolveError("Multiple definition, multiple ieml object provided for the same node.")

    if any(not isinstance(r[0], Path) for r in rules):
        raise ResolveError("Must have only path instance.")

    # resolve all the possible types for this element
    r0 = rules[0]
    types = _inferred_types(*r0)
    for r in rules[1:]:
        types = types.intersection(_inferred_types(*r))

    if not types:
        raise ResolveError("No definition, no type inferred on rules list.")
    if len(types) > 1:
        raise ResolveError("Multiple definition, multiple type inferred on rules list.")

    type = next(types.__iter__())

    if type == Topic:
        error, deps = _build_deps_topic(rules)
        if error:
            return

        flexing = None
        if deps['f']:
            flexing = deps['f']

        if not deps['r']:
            raise ResolveError("No root for the topic node.")

        return topic(deps['r'], flexing)

    if type == Text:
        error, deps = _build_deps_text(rules)
        if error:
            return
        return text(deps)

    if type in (Theory, Fact):
        error, deps = _build_deps_tree_graph(rules)
        if error:
            return

        if type == Fact:
            clauses = []
            for s, a, m in deps:
                clauses.append((s, a, m))
            return fact(clauses)
        else:
            clauses = []
            for s, a, m in deps:
                clauses.append((s, a, m))
            return theory(clauses)

    raise ResolveError("Invalid type inferred %s" % type.__name__)
[ "Resolve", "the", "context", "of", "the", "rules", "(", "the", "type", "of", "this", "element", ")", "and", "building", "the", "ieml", "element", ".", ":", "param", "rules", ":", ":", "return", ":" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/grammar/paths/tools.py#L464-L533
[ "def", "_resolve_ctx", "(", "rules", ")", ":", "if", "not", "rules", ":", "raise", "ResolveError", "(", "\"Missing node definition.\"", ")", "# if rules == [(None, e)] --> e", "if", "len", "(", "rules", ")", "==", "1", "and", "rules", "[", "0", "]", "[", "0", "]", "is", "None", ":", "return", "rules", "[", "0", "]", "[", "1", "]", "if", "any", "(", "r", "[", "0", "]", "is", "None", "for", "r", "in", "rules", ")", ":", "raise", "ResolveError", "(", "\"Multiple definition, multiple ieml object provided for the same node.\"", ")", "if", "any", "(", "not", "isinstance", "(", "r", "[", "0", "]", ",", "Path", ")", "for", "r", "in", "rules", ")", ":", "raise", "ResolveError", "(", "\"Must have only path instance.\"", ")", "# resolve all the possible types for this element", "r0", "=", "rules", "[", "0", "]", "types", "=", "_inferred_types", "(", "*", "r0", ")", "for", "r", "in", "rules", "[", "1", ":", "]", ":", "types", "=", "types", ".", "intersection", "(", "_inferred_types", "(", "*", "r", ")", ")", "if", "not", "types", ":", "raise", "ResolveError", "(", "\"No definition, no type inferred on rules list.\"", ")", "if", "len", "(", "types", ")", ">", "1", ":", "raise", "ResolveError", "(", "\"Multiple definition, multiple type inferred on rules list.\"", ")", "type", "=", "next", "(", "types", ".", "__iter__", "(", ")", ")", "if", "type", "==", "Topic", ":", "error", ",", "deps", "=", "_build_deps_topic", "(", "rules", ")", "if", "error", ":", "return", "flexing", "=", "None", "if", "deps", "[", "'f'", "]", ":", "flexing", "=", "deps", "[", "'f'", "]", "if", "not", "deps", "[", "'r'", "]", ":", "raise", "ResolveError", "(", "\"No root for the topic node.\"", ")", "return", "topic", "(", "deps", "[", "'r'", "]", ",", "flexing", ")", "if", "type", "==", "Text", ":", "error", ",", "deps", "=", "_build_deps_text", "(", "rules", ")", "if", "error", ":", "return", "return", "text", "(", "deps", ")", "if", "type", "in", "(", "Theory", ",", "Fact", ")", ":", "error", ",", "deps", "=", "_build_deps_tree_graph", "(", "rules", ")", "if", "error", ":", "return", "if", "type", "==", "Fact", ":", "clauses", "=", "[", "]", "for", "s", ",", "a", ",", "m", "in", "deps", ":", "clauses", ".", "append", "(", "(", "s", ",", "a", ",", "m", ")", ")", "return", "fact", "(", "clauses", ")", "else", ":", "clauses", "=", "[", "]", "for", "s", ",", "a", ",", "m", "in", "deps", ":", "clauses", ".", "append", "(", "(", "s", ",", "a", ",", "m", ")", ")", "return", "theory", "(", "clauses", ")", "raise", "ResolveError", "(", "\"Invalid type inferred %s\"", "%", "type", ".", "__name__", ")" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
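The central move here — intersecting the candidate types inferred from each rule until exactly one survives — generalizes beyond this grammar. A minimal sketch, with a hypothetical infer_types callable standing in for _inferred_types:

def resolve_type(rules, infer_types):
    """Return the unique type consistent with every rule, or raise."""
    candidates = infer_types(rules[0])
    for rule in rules[1:]:
        candidates &= infer_types(rule)   # set intersection
    if not candidates:
        raise ValueError("no type inferred on rules list")
    if len(candidates) > 1:
        raise ValueError("multiple types inferred: %s"
                         % sorted(t.__name__ for t in candidates))
    return next(iter(candidates))

# Toy usage: the two rules' inferred types only agree on str.
rules = ['r1', 'r2']
print(resolve_type(rules, lambda r: {str, int} if r == 'r1' else {str}))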
test
project_usls_on_dictionary
`usls` is an iterable of usl. return a mapping term -> usl list
ieml/distance/projection.py
def project_usls_on_dictionary(usls, allowed_terms=None):
    """`usls` is an iterable of usl. return a mapping term -> usl list"""
    cells_to_usls = defaultdict(set)
    tables = set()
    for u in usls:
        for t in u.objects(Term):
            for c in t.singular_sequences:
                # This is the first time we meet the cell c
                if not cells_to_usls[c]:
                    tables.update(c.relations.contained)
                cells_to_usls[c].add(u)

    if allowed_terms:
        allowed_terms = set(allowed_terms)
        tables = tables & allowed_terms
        cells_to_usls = {c: l for c, l in cells_to_usls.items() if c in allowed_terms}

    tables_to_usls = {
        table: list(set(u for c in table.singular_sequences for u in cells_to_usls[c]))
        for table in tables if not isinstance(table, TableSet)
    }

    return tables_to_usls
[ "usls", "is", "an", "iterable", "of", "usl", "." ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/distance/projection.py#L25-L53
[ "def", "project_usls_on_dictionary", "(", "usls", ",", "allowed_terms", "=", "None", ")", ":", "cells_to_usls", "=", "defaultdict", "(", "set", ")", "tables", "=", "set", "(", ")", "for", "u", "in", "usls", ":", "for", "t", "in", "u", ".", "objects", "(", "Term", ")", ":", "for", "c", "in", "t", ".", "singular_sequences", ":", "# This is the first time we meet the cell c", "if", "not", "cells_to_usls", "[", "c", "]", ":", "tables", ".", "update", "(", "c", ".", "relations", ".", "contained", ")", "cells_to_usls", "[", "c", "]", ".", "add", "(", "u", ")", "if", "allowed_terms", ":", "allowed_terms", "=", "set", "(", "allowed_terms", ")", "tables", "=", "tables", "&", "allowed_terms", "cells_to_usls", "=", "{", "c", ":", "l", "for", "c", ",", "l", "in", "cells_to_usls", ".", "items", "(", ")", "if", "c", "in", "allowed_terms", "}", "tables_to_usls", "=", "{", "table", ":", "list", "(", "set", "(", "u", "for", "c", "in", "table", ".", "singular_sequences", "for", "u", "in", "cells_to_usls", "[", "c", "]", ")", ")", "for", "table", "in", "tables", "if", "not", "isinstance", "(", "table", ",", "TableSet", ")", "}", "return", "tables_to_usls" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
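One subtlety in the loop above: merely indexing the defaultdict inserts an empty set, so the emptiness test means "first time we meet this cell" only because .add() runs right afterwards. A generic inverted-index sketch of the same pattern (names are illustrative, not the ieml API):

from collections import defaultdict

def invert(items, keys_of):
    """Generic inverted index: key -> set of the items that contain it."""
    index = defaultdict(set)
    first_seen = []   # stands in for the original's tables.update(...)
    for item in items:
        for key in keys_of(item):
            # Indexing a defaultdict inserts an empty set, so emptiness
            # means "first encounter" -- but only because .add() runs
            # immediately afterwards.
            if not index[key]:
                first_seen.append(key)
            index[key].add(item)
    return index, first_seen

idx, order = invert(['u1', 'u2'], lambda u: ['a', 'b'] if u == 'u1' else ['b'])
print(order)                                   # ['a', 'b']
print({k: sorted(v) for k, v in idx.items()})  # {'a': ['u1'], 'b': ['u1', 'u2']}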
test
project_usl_with_data
usls_data: usl => data[] :param usls_data: :return:
ieml/distance/projection.py
def project_usl_with_data(usls_data, metric=None):
    """
    usls_data: usl => data[]
    :param usls_data:
    :return:
    """
    projection = project_usls_on_dictionary(usls_data)
    all_terms = set(c for u in usls_data for t in u.objects(Term) for c in t.singular_sequences)

    if metric is None:
        metric = lambda e: len(e['posts']) * len(all_terms.intersection(e['table'].singular_sequences))

    return sorted(({
        'table': table,
        'usls': usls,
        'posts': list(set(chain.from_iterable(usls_data[u] for u in usls)))
    } for table, usls in projection.items()), key=metric, reverse=True)
[ "usls_data", ":", "usl", "=", ">", "data", "[]", ":", "param", "usls_data", ":", ":", "return", ":" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/distance/projection.py#L56-L72
[ "def", "project_usl_with_data", "(", "usls_data", ",", "metric", "=", "None", ")", ":", "projection", "=", "project_usls_on_dictionary", "(", "usls_data", ")", "all_terms", "=", "set", "(", "c", "for", "u", "in", "usls_data", "for", "t", "in", "u", ".", "objects", "(", "Term", ")", "for", "c", "in", "t", ".", "singular_sequences", ")", "if", "metric", "is", "None", ":", "metric", "=", "lambda", "e", ":", "len", "(", "e", "[", "'posts'", "]", ")", "*", "len", "(", "all_terms", ".", "intersection", "(", "e", "[", "'table'", "]", ".", "singular_sequences", ")", ")", "return", "sorted", "(", "(", "{", "'table'", ":", "table", ",", "'usls'", ":", "usls", ",", "'posts'", ":", "list", "(", "set", "(", "chain", ".", "from_iterable", "(", "usls_data", "[", "u", "]", "for", "u", "in", "usls", ")", ")", ")", "}", "for", "table", ",", "usls", "in", "projection", ".", "items", "(", ")", ")", ",", "key", "=", "metric", ",", "reverse", "=", "True", ")" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
test
ScriptParser.p_script_lvl_0
script_lvl_0 : PRIMITIVE LAYER0_MARK | REMARKABLE_ADDITION LAYER0_MARK
ieml/dictionary/script/parser/parser.py
def p_script_lvl_0(self, p):
    """script_lvl_0 : PRIMITIVE LAYER0_MARK
                    | REMARKABLE_ADDITION LAYER0_MARK"""
    if p[1] == 'E':
        p[0] = NullScript(layer=0)
    elif p[1] in REMARKABLE_ADDITION:
        p[0] = AdditiveScript(character=p[1])
    else:
        p[0] = MultiplicativeScript(character=p[1])
[ "script_lvl_0", ":", "PRIMITIVE", "LAYER0_MARK", "|", "REMARKABLE_ADDITION", "LAYER0_MARK" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/dictionary/script/parser/parser.py#L68-L76
[ "def", "p_script_lvl_0", "(", "self", ",", "p", ")", ":", "if", "p", "[", "1", "]", "==", "'E'", ":", "p", "[", "0", "]", "=", "NullScript", "(", "layer", "=", "0", ")", "elif", "p", "[", "1", "]", "in", "REMARKABLE_ADDITION", ":", "p", "[", "0", "]", "=", "AdditiveScript", "(", "character", "=", "p", "[", "1", "]", ")", "else", ":", "p", "[", "0", "]", "=", "MultiplicativeScript", "(", "character", "=", "p", "[", "1", "]", ")" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
test
ScriptParser.p_sum_lvl_0
sum_lvl_0 : script_lvl_0 | script_lvl_0 PLUS sum_lvl_0
ieml/dictionary/script/parser/parser.py
def p_sum_lvl_0(self, p):
    """sum_lvl_0 : script_lvl_0
                 | script_lvl_0 PLUS sum_lvl_0"""
    if len(p) == 4:
        p[3].append(p[1])
        p[0] = p[3]
    else:
        p[0] = [p[1]]
[ "sum_lvl_0", ":", "script_lvl_0", "|", "script_lvl_0", "PLUS", "sum_lvl_0" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/dictionary/script/parser/parser.py#L83-L90
[ "def", "p_sum_lvl_0", "(", "self", ",", "p", ")", ":", "if", "len", "(", "p", ")", "==", "4", ":", "p", "[", "3", "]", ".", "append", "(", "p", "[", "1", "]", ")", "p", "[", "0", "]", "=", "p", "[", "3", "]", "else", ":", "p", "[", "0", "]", "=", "[", "p", "[", "1", "]", "]" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
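These parser methods look odd until you know that PLY derives the grammar from each p_* method's docstring, which is why the docstrings read as BNF. A standalone sketch of the same right-recursive sum rule (generic CHAR tokens, not the IEML lexer; requires the ply package). Note that appending the head to the tail list builds the list in reverse source order, presumably normalized downstream:

import ply.lex as lex
import ply.yacc as yacc

tokens = ('CHAR', 'PLUS')
t_CHAR = r'[a-z]'
t_PLUS = r'\+'
t_ignore = ' '

def t_error(t):
    t.lexer.skip(1)

def p_sum(p):
    """sum : CHAR
           | CHAR PLUS sum"""
    if len(p) == 4:
        p[3].append(p[1])   # head appended to tail list: reversed order
        p[0] = p[3]
    else:
        p[0] = [p[1]]

def p_error(p):
    raise SyntaxError(p)

lexer = lex.lex()
parser = yacc.yacc(write_tables=False, debug=False)
print(parser.parse("a+b+c"))   # ['c', 'b', 'a']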
test
ScriptParser.p_script_lvl_1
script_lvl_1 : additive_script_lvl_0 LAYER1_MARK | additive_script_lvl_0 additive_script_lvl_0 LAYER1_MARK | additive_script_lvl_0 additive_script_lvl_0 additive_script_lvl_0 LAYER1_MARK | REMARKABLE_MULTIPLICATION LAYER1_MARK
ieml/dictionary/script/parser/parser.py
def p_script_lvl_1(self, p):
    """script_lvl_1 : additive_script_lvl_0 LAYER1_MARK
                    | additive_script_lvl_0 additive_script_lvl_0 LAYER1_MARK
                    | additive_script_lvl_0 additive_script_lvl_0 additive_script_lvl_0 LAYER1_MARK
                    | REMARKABLE_MULTIPLICATION LAYER1_MARK"""
    if isinstance(p[1], AdditiveScript):
        if len(p) == 3:
            p[0] = MultiplicativeScript(substance=p[1])
        elif len(p) == 4:
            p[0] = MultiplicativeScript(substance=p[1], attribute=p[2])
        else:
            p[0] = MultiplicativeScript(substance=p[1], attribute=p[2], mode=p[3])
    else:
        p[0] = MultiplicativeScript(character=p[1])
[ "script_lvl_1", ":", "additive_script_lvl_0", "LAYER1_MARK", "|", "additive_script_lvl_0", "additive_script_lvl_0", "LAYER1_MARK", "|", "additive_script_lvl_0", "additive_script_lvl_0", "additive_script_lvl_0", "LAYER1_MARK", "|", "REMARKABLE_MULTIPLICATION", "LAYER1_MARK" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/dictionary/script/parser/parser.py#L92-L108
[ "def", "p_script_lvl_1", "(", "self", ",", "p", ")", ":", "if", "isinstance", "(", "p", "[", "1", "]", ",", "AdditiveScript", ")", ":", "if", "len", "(", "p", ")", "==", "3", ":", "p", "[", "0", "]", "=", "MultiplicativeScript", "(", "substance", "=", "p", "[", "1", "]", ")", "elif", "len", "(", "p", ")", "==", "4", ":", "p", "[", "0", "]", "=", "MultiplicativeScript", "(", "substance", "=", "p", "[", "1", "]", ",", "attribute", "=", "p", "[", "2", "]", ")", "else", ":", "p", "[", "0", "]", "=", "MultiplicativeScript", "(", "substance", "=", "p", "[", "1", "]", ",", "attribute", "=", "p", "[", "2", "]", ",", "mode", "=", "p", "[", "3", "]", ")", "else", ":", "p", "[", "0", "]", "=", "MultiplicativeScript", "(", "character", "=", "p", "[", "1", "]", ")" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
test
ScriptParser.p_sum_lvl_1
sum_lvl_1 : script_lvl_1 | script_lvl_1 PLUS sum_lvl_1
ieml/dictionary/script/parser/parser.py
def p_sum_lvl_1(self, p):
    """sum_lvl_1 : script_lvl_1
                 | script_lvl_1 PLUS sum_lvl_1"""
    if len(p) == 4:
        p[3].append(p[1])
        p[0] = p[3]
    else:
        p[0] = [p[1]]
[ "sum_lvl_1", ":", "script_lvl_1", "|", "script_lvl_1", "PLUS", "sum_lvl_1" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/dictionary/script/parser/parser.py#L110-L117
[ "def", "p_sum_lvl_1", "(", "self", ",", "p", ")", ":", "if", "len", "(", "p", ")", "==", "4", ":", "p", "[", "3", "]", ".", "append", "(", "p", "[", "1", "]", ")", "p", "[", "0", "]", "=", "p", "[", "3", "]", "else", ":", "p", "[", "0", "]", "=", "[", "p", "[", "1", "]", "]" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
test
square_order_matrix
Compute the ordering of a list of usls from each usl and return the matrix m s.t. for each u in usl_list at index i, [usl_list[j] for j in m[i, :]] is the list sorted by proximity from u. of the result :param usl_list: a list of usls :return: a (len(usl_list), len(usl_list)) np.array
ieml/distance/sort.py
def square_order_matrix(usl_list):
    """
    Compute the ordering of a list of usls from each usl and return the matrix m s.t.
    for each u in usl_list at index i, [usl_list[j] for j in m[i, :]] is the list
    sorted by proximity from u. of the result
    :param usl_list: a list of usls
    :return: a (len(usl_list), len(usl_list)) np.array
    """
    usl_list = list(usl_list)
    indexes = {
        u: i for i, u in enumerate(usl_list)
    }

    order_mat = np.zeros(shape=(len(usl_list), len(usl_list)), dtype=int)

    for u in usl_list:
        sorted_list = QuerySort(u).sort(collection=usl_list)
        for i, u_s in enumerate(sorted_list):
            order_mat[indexes[u], indexes[u_s]] = i

    return order_mat
[ "Compute", "the", "ordering", "of", "a", "list", "of", "usls", "from", "each", "usl", "and", "return", "the", "matrix", "m", "s", ".", "t", ".", "for", "each", "u", "in", "usl_list", "at", "index", "i", "[", "usl_list", "[", "j", "]", "for", "j", "in", "m", "[", "i", ":", "]]", "is", "the", "list", "sorted", "by", "proximity", "from", "u", ".", "of", "the", "result", ":", "param", "usl_list", ":", "a", "list", "of", "usls", ":", "return", ":", "a", "(", "len", "(", "usl_list", ")", "len", "(", "usl_list", "))", "np", ".", "array" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/distance/sort.py#L5-L27
[ "def", "square_order_matrix", "(", "usl_list", ")", ":", "usl_list", "=", "list", "(", "usl_list", ")", "indexes", "=", "{", "u", ":", "i", "for", "i", ",", "u", "in", "enumerate", "(", "usl_list", ")", "}", "order_mat", "=", "np", ".", "zeros", "(", "shape", "=", "(", "len", "(", "usl_list", ")", ",", "len", "(", "usl_list", ")", ")", ",", "dtype", "=", "int", ")", "for", "u", "in", "usl_list", ":", "sorted_list", "=", "QuerySort", "(", "u", ")", ".", "sort", "(", "collection", "=", "usl_list", ")", "for", "i", ",", "u_s", "in", "enumerate", "(", "sorted_list", ")", ":", "order_mat", "[", "indexes", "[", "u", "]", ",", "indexes", "[", "u_s", "]", "]", "=", "i", "return", "order_mat" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
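Note that the loop stores ranks (order_mat[i, j] is the position of usl j in the list sorted from usl i), whereas the docstring describes the inverse permutation; the two are each other's argsort. When proximity is available as an explicit pairwise distance matrix, the same rank matrix falls out of a double argsort — a generic numpy sketch, not QuerySort:

import numpy as np

# dist[i, j] = distance from item i to item j (hypothetical values).
dist = np.array([[0.0, 2.0, 1.0],
                 [2.0, 0.0, 3.0],
                 [1.0, 3.0, 0.0]])

# The inner argsort orders items by proximity; the outer one converts
# that ordering into rank positions, so order_mat[i, j] is the rank of
# j when sorted by distance from i (0 = closest, i.e. i itself).
order_mat = np.argsort(np.argsort(dist, axis=1), axis=1)
print(order_mat)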
test
TableSet.accept_script
True when the term is a subset of this term tables. If the parent of this term is already a TableSet,return always false (only one main tableset) :param term: :return:
ieml/dictionary/table.py
def accept_script(self, script):
    """
    True when the term is a subset of this term tables.
    If the parent of this term is already a TableSet, return always false (only one main tableset)
    :param term:
    :return:
    """
    if isinstance(self.parent, TableSet):
        return False, False

    tables = [table for table in self.script.tables_script if table in script]
    if len(tables) >= 1 and {ss for t in tables for ss in t.singular_sequences} == set(script.singular_sequences):
        return True, False

    return False, False
[ "True", "when", "the", "term", "is", "a", "subset", "of", "this", "term", "tables", ".", "If", "the", "parent", "of", "this", "term", "is", "already", "a", "TableSet", "return", "always", "false", "(", "only", "one", "main", "tableset", ")", ":", "param", "term", ":", ":", "return", ":" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/dictionary/table.py#L208-L223
[ "def", "accept_script", "(", "self", ",", "script", ")", ":", "if", "isinstance", "(", "self", ".", "parent", ",", "TableSet", ")", ":", "return", "False", ",", "False", "tables", "=", "[", "table", "for", "table", "in", "self", ".", "script", ".", "tables_script", "if", "table", "in", "script", "]", "if", "len", "(", "tables", ")", ">=", "1", "and", "{", "ss", "for", "t", "in", "tables", "for", "ss", "in", "t", ".", "singular_sequences", "}", "==", "set", "(", "script", ".", "singular_sequences", ")", ":", "return", "True", ",", "False", "return", "False", ",", "False" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
test
RandomPoolIEMLObjectGenerator._build_pools
Slow method, retrieve all the terms from the database. :return:
ieml/tools.py
def _build_pools(self):
    """
    Slow method, retrieve all the terms from the database.
    :return:
    """
    if self.level >= Topic:
        # words
        self.topics_pool = set(self.topic() for i in range(self.pool_size))

    if self.level >= Fact:
        # sentences
        self.facts_pool = set(self.fact() for i in range(self.pool_size))

    if self.level >= Theory:
        self.theories_pool = set(self.theory() for i in range(self.pool_size))

    if self.level >= Text:
        self.propositions_pool = set(chain.from_iterable((self.topics_pool, self.facts_pool, self.theories_pool)))
[ "Slow", "method", "retrieve", "all", "the", "terms", "from", "the", "database", ".", ":", "return", ":" ]
IEMLdev/ieml
python
https://github.com/IEMLdev/ieml/blob/4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25/ieml/tools.py#L59-L76
[ "def", "_build_pools", "(", "self", ")", ":", "if", "self", ".", "level", ">=", "Topic", ":", "# words", "self", ".", "topics_pool", "=", "set", "(", "self", ".", "topic", "(", ")", "for", "i", "in", "range", "(", "self", ".", "pool_size", ")", ")", "if", "self", ".", "level", ">=", "Fact", ":", "# sentences", "self", ".", "facts_pool", "=", "set", "(", "self", ".", "fact", "(", ")", "for", "i", "in", "range", "(", "self", ".", "pool_size", ")", ")", "if", "self", ".", "level", ">=", "Theory", ":", "self", ".", "theories_pool", "=", "set", "(", "self", ".", "theory", "(", ")", "for", "i", "in", "range", "(", "self", ".", "pool_size", ")", ")", "if", "self", ".", "level", ">=", "Text", ":", "self", ".", "propositions_pool", "=", "set", "(", "chain", ".", "from_iterable", "(", "(", "self", ".", "topics_pool", ",", "self", ".", "facts_pool", ",", "self", ".", "theories_pool", ")", ")", ")" ]
4c842ba7e6165e2f1b4a4e2e98759f9f33af5f25
test
Histogram.mean
Returns the mean value.
metrology/instruments/histogram.py
def mean(self):
    """Returns the mean value."""
    if self.counter.value > 0:
        return self.sum.value / self.counter.value
    return 0.0
[ "Returns", "the", "mean", "value", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/instruments/histogram.py#L92-L96
[ "def", "mean", "(", "self", ")", ":", "if", "self", ".", "counter", ".", "value", ">", "0", ":", "return", "self", ".", "sum", ".", "value", "/", "self", ".", "counter", ".", "value", "return", "0.0" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
test
Histogram.variance
Returns variance
metrology/instruments/histogram.py
def variance(self):
    """Returns variance"""
    if self.counter.value <= 1:
        return 0.0
    return self.var.value[1] / (self.counter.value - 1)
[ "Returns", "variance" ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/instruments/histogram.py#L106-L110
[ "def", "variance", "(", "self", ")", ":", "if", "self", ".", "counter", ".", "value", "<=", "1", ":", "return", "0.0", "return", "self", ".", "var", ".", "value", "[", "1", "]", "/", "(", "self", ".", "counter", ".", "value", "-", "1", ")" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
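Dividing var.value[1] by n - 1 is the shape of Welford's online algorithm, where the running state is (count, mean, M2). A minimal sketch of that textbook recurrence — not necessarily metrology's exact internals:

class OnlineVariance:
    """Welford's algorithm: numerically stable single-pass variance."""

    def __init__(self):
        self.n = 0
        self.mean = 0.0
        self.m2 = 0.0   # sum of squared deviations from the running mean

    def update(self, x):
        self.n += 1
        delta = x - self.mean
        self.mean += delta / self.n
        self.m2 += delta * (x - self.mean)

    @property
    def variance(self):
        return self.m2 / (self.n - 1) if self.n > 1 else 0.0

ov = OnlineVariance()
for x in (2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0):
    ov.update(x)
print(ov.variance)   # 4.571... (sample variance)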
test
Meter.mark
Record an event with the meter. By default it will record one event. :param value: number of event to record
metrology/instruments/meter.py
def mark(self, value=1):
    """Record an event with the meter. By default it will record one event.

    :param value: number of event to record
    """
    self.counter += value
    self.m1_rate.update(value)
    self.m5_rate.update(value)
    self.m15_rate.update(value)
[ "Record", "an", "event", "with", "the", "meter", ".", "By", "default", "it", "will", "record", "one", "event", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/instruments/meter.py#L78-L86
[ "def", "mark", "(", "self", ",", "value", "=", "1", ")", ":", "self", ".", "counter", "+=", "value", "self", ".", "m1_rate", ".", "update", "(", "value", ")", "self", ".", "m5_rate", ".", "update", "(", "value", ")", "self", ".", "m15_rate", ".", "update", "(", "value", ")" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
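The m1/m5/m15 rates follow the usual exponentially weighted moving average scheme (as in Coda Hale's Metrics): marks accumulate, and every tick interval they are folded into the rate with alpha = 1 - exp(-interval / window). A hedged sketch of one such EWMA — metrology's own class may differ in details:

import math

class EWMA:
    """Exponentially weighted moving average of an event rate (sketch)."""

    TICK_INTERVAL = 5.0  # seconds between rate folds

    def __init__(self, window_seconds):
        self.alpha = 1.0 - math.exp(-self.TICK_INTERVAL / window_seconds)
        self.rate = 0.0
        self.uncounted = 0
        self.initialized = False

    def update(self, value=1):
        self.uncounted += value   # what Meter.mark() feeds in

    def tick(self):
        """Called every TICK_INTERVAL seconds by a clock."""
        instant_rate = self.uncounted / self.TICK_INTERVAL
        self.uncounted = 0
        if self.initialized:
            self.rate += self.alpha * (instant_rate - self.rate)
        else:
            self.rate = instant_rate
            self.initialized = True

m1 = EWMA(window_seconds=60.0)
m1.update(10)
m1.tick()
print(m1.rate)   # 2.0 events/sec after the first tick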
test
Meter.mean_rate
Returns the mean rate of the events since the start of the process.
metrology/instruments/meter.py
def mean_rate(self):
    """
    Returns the mean rate of the events since the start of the process.
    """
    if self.counter.value == 0:
        return 0.0
    else:
        elapsed = time() - self.start_time
        return self.counter.value / elapsed
[ "Returns", "the", "mean", "rate", "of", "the", "events", "since", "the", "start", "of", "the", "process", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/instruments/meter.py#L112-L120
[ "def", "mean_rate", "(", "self", ")", ":", "if", "self", ".", "counter", ".", "value", "==", "0", ":", "return", "0.0", "else", ":", "elapsed", "=", "time", "(", ")", "-", "self", ".", "start_time", "return", "self", ".", "counter", ".", "value", "/", "elapsed" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
test
Derive.mark
Record an event with the derive. :param value: counter value to record
metrology/instruments/derive.py
def mark(self, value=1):
    """Record an event with the derive.

    :param value: counter value to record
    """
    last = self.last.get_and_set(value)
    if last <= value:
        value = value - last
    super(Derive, self).mark(value)
[ "Record", "an", "event", "with", "the", "derive", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/instruments/derive.py#L20-L28
[ "def", "mark", "(", "self", ",", "value", "=", "1", ")", ":", "last", "=", "self", ".", "last", ".", "get_and_set", "(", "value", ")", "if", "last", "<=", "value", ":", "value", "=", "value", "-", "last", "super", "(", "Derive", ",", "self", ")", ".", "mark", "(", "value", ")" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
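A derive treats each marked value as a reading of a monotonically increasing counter and records the delta; when the reading drops (a counter reset), it records the raw value instead. A compact sketch of that delta logic, with a plain attribute in place of the atomic get_and_set and a list in place of the underlying meter:

class DeriveSketch:
    """Delta-of-a-counter logic; not thread-safe like the original."""

    def __init__(self):
        self.last = 0
        self.marked = []   # stand-in for the underlying meter

    def mark(self, value=1):
        last, self.last = self.last, value   # get_and_set
        if last <= value:
            value = value - last             # normal case: record the delta
        # else: counter reset detected, record the raw reading as-is
        self.marked.append(value)

d = DeriveSketch()
d.mark(10)   # delta from 0 -> records 10
d.mark(15)   # delta -> records 5
d.mark(3)    # reset detected -> records 3
print(d.marked)   # [10, 5, 3]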
test
mmap
Wrapper to make map() behave the same on Py2 and Py3.
metrology/reporter/statsd.py
def mmap(func, iterable):
    """Wrapper to make map() behave the same on Py2 and Py3."""
    if sys.version_info[0] > 2:
        return [i for i in map(func, iterable)]
    else:
        return map(func, iterable)
[ "Wrapper", "to", "make", "map", "()", "behave", "the", "same", "on", "Py2", "and", "Py3", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/reporter/statsd.py#L19-L25
[ "def", "mmap", "(", "func", ",", "iterable", ")", ":", "if", "sys", ".", "version_info", "[", "0", "]", ">", "2", ":", "return", "[", "i", "for", "i", "in", "map", "(", "func", ",", "iterable", ")", "]", "else", ":", "return", "map", "(", "func", ",", "iterable", ")" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
test
StatsDReporter.send_metric
Send metric and its snapshot.
metrology/reporter/statsd.py
def send_metric(self, name, metric):
    """Send metric and its snapshot."""
    config = SERIALIZER_CONFIG[class_name(metric)]

    mmap(
        self._buffered_send_metric,
        self.serialize_metric(
            metric,
            name,
            config['keys'],
            config['serialized_type']
        )
    )

    if hasattr(metric, 'snapshot') and config.get('snapshot_keys'):
        mmap(
            self._buffered_send_metric,
            self.serialize_metric(
                metric.snapshot,
                name,
                config['snapshot_keys'],
                config['serialized_type']
            )
        )
[ "Send", "metric", "and", "its", "snapshot", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/reporter/statsd.py#L145-L168
[ "def", "send_metric", "(", "self", ",", "name", ",", "metric", ")", ":", "config", "=", "SERIALIZER_CONFIG", "[", "class_name", "(", "metric", ")", "]", "mmap", "(", "self", ".", "_buffered_send_metric", ",", "self", ".", "serialize_metric", "(", "metric", ",", "name", ",", "config", "[", "'keys'", "]", ",", "config", "[", "'serialized_type'", "]", ")", ")", "if", "hasattr", "(", "metric", ",", "'snapshot'", ")", "and", "config", ".", "get", "(", "'snapshot_keys'", ")", ":", "mmap", "(", "self", ".", "_buffered_send_metric", ",", "self", ".", "serialize_metric", "(", "metric", ".", "snapshot", ",", "name", ",", "config", "[", "'snapshot_keys'", "]", ",", "config", "[", "'serialized_type'", "]", ")", ")" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
test
StatsDReporter.serialize_metric
Serialize and send available measures of a metric.
metrology/reporter/statsd.py
def serialize_metric(self, metric, m_name, keys, m_type):
    """Serialize and send available measures of a metric."""
    return [
        self.format_metric_string(m_name, getattr(metric, key), m_type)
        for key in keys
    ]
[ "Serialize", "and", "send", "available", "measures", "of", "a", "metric", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/reporter/statsd.py#L170-L176
[ "def", "serialize_metric", "(", "self", ",", "metric", ",", "m_name", ",", "keys", ",", "m_type", ")", ":", "return", "[", "self", ".", "format_metric_string", "(", "m_name", ",", "getattr", "(", "metric", ",", "key", ")", ",", "m_type", ")", "for", "key", "in", "keys", "]" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
test
StatsDReporter.format_metric_string
Compose a statsd compatible string for a metric's measurement.
metrology/reporter/statsd.py
def format_metric_string(self, name, value, m_type):
    """Compose a statsd compatible string for a metric's measurement."""
    # NOTE(romcheg): This serialized metric template is based on
    # statsd's documentation.
    template = '{name}:{value}|{m_type}\n'

    if self.prefix:
        name = "{prefix}.{m_name}".format(prefix=self.prefix, m_name=name)

    return template.format(name=name, value=value, m_type=m_type)
[ "Compose", "a", "statsd", "compatible", "string", "for", "a", "metric", "s", "measurement", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/reporter/statsd.py#L178-L188
[ "def", "format_metric_string", "(", "self", ",", "name", ",", "value", ",", "m_type", ")", ":", "# NOTE(romcheg): This serialized metric template is based on", "# statsd's documentation.", "template", "=", "'{name}:{value}|{m_type}\\n'", "if", "self", ".", "prefix", ":", "name", "=", "\"{prefix}.{m_name}\"", ".", "format", "(", "prefix", "=", "self", ".", "prefix", ",", "m_name", "=", "name", ")", "return", "template", ".", "format", "(", "name", "=", "name", ",", "value", "=", "value", ",", "m_type", "=", "m_type", ")" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
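For example, with a hypothetical prefix 'myapp', a counter named 'requests' with value 12 serializes to the standard statsd wire line:

# Hypothetical values, showing the resulting wire format.
template = '{name}:{value}|{m_type}\n'
prefix, name, value, m_type = 'myapp', 'requests', 12, 'c'
line = template.format(name='%s.%s' % (prefix, name), value=value, m_type=m_type)
print(repr(line))   # 'myapp.requests:12|c\n'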
test
StatsDReporter._buffered_send_metric
Add a metric to the buffer.
metrology/reporter/statsd.py
def _buffered_send_metric(self, metric_str):
    """Add a metric to the buffer."""
    self.batch_count += 1
    self.batch_buffer += metric_str

    # NOTE(romcheg): Send metrics if the number of metrics in the buffer
    #                has reached the threshold for sending.
    if self.batch_count >= self.batch_size:
        self._send()
[ "Add", "a", "metric", "to", "the", "buffer", "." ]
cyberdelia/metrology
python
https://github.com/cyberdelia/metrology/blob/7599bea7de1fd59374c06e2f8041a217e3cf9c01/metrology/reporter/statsd.py#L190-L200
[ "def", "_buffered_send_metric", "(", "self", ",", "metric_str", ")", ":", "self", ".", "batch_count", "+=", "1", "self", ".", "batch_buffer", "+=", "metric_str", "# NOTE(romcheg): Send metrics if the number of metrics in the buffer", "# has reached the threshold for sending.", "if", "self", ".", "batch_count", ">=", "self", ".", "batch_size", ":", "self", ".", "_send", "(", ")" ]
7599bea7de1fd59374c06e2f8041a217e3cf9c01
test
IniStorage.get
Get method that raises MissingSetting if the value was unset. This differs from the SafeConfigParser which may raise either a NoOptionError or a NoSectionError. We take extra **kwargs because the Python 3.5 configparser extends the get method signature and it calls self with those parameters. def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
exactonline/storage/ini.py
def get(self, section, option, **kwargs):
    """
    Get method that raises MissingSetting if the value was unset.

    This differs from the SafeConfigParser which may raise either a
    NoOptionError or a NoSectionError.

    We take extra **kwargs because the Python 3.5 configparser extends
    the get method signature and it calls self with those parameters.

        def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
    """
    try:
        ret = super(ExactOnlineConfig, self).get(section, option, **kwargs)
    except (NoOptionError, NoSectionError):
        raise MissingSetting(option, section)

    return ret
[ "Get", "method", "that", "raises", "MissingSetting", "if", "the", "value", "was", "unset", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/storage/ini.py#L75-L93
[ "def", "get", "(", "self", ",", "section", ",", "option", ",", "*", "*", "kwargs", ")", ":", "try", ":", "ret", "=", "super", "(", "ExactOnlineConfig", ",", "self", ")", ".", "get", "(", "section", ",", "option", ",", "*", "*", "kwargs", ")", "except", "(", "NoOptionError", ",", "NoSectionError", ")", ":", "raise", "MissingSetting", "(", "option", ",", "section", ")", "return", "ret" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
IniStorage.set
Set method that (1) auto-saves if possible and (2) auto-creates sections.
exactonline/storage/ini.py
def set(self, section, option, value):
    """
    Set method that (1) auto-saves if possible and (2) auto-creates
    sections.
    """
    try:
        super(ExactOnlineConfig, self).set(section, option, value)
    except NoSectionError:
        self.add_section(section)
        super(ExactOnlineConfig, self).set(section, option, value)

    # Save automatically!
    self.save()
[ "Set", "method", "that", "(", "1", ")", "auto", "-", "saves", "if", "possible", "and", "(", "2", ")", "auto", "-", "creates", "sections", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/storage/ini.py#L95-L107
[ "def", "set", "(", "self", ",", "section", ",", "option", ",", "value", ")", ":", "try", ":", "super", "(", "ExactOnlineConfig", ",", "self", ")", ".", "set", "(", "section", ",", "option", ",", "value", ")", "except", "NoSectionError", ":", "self", ".", "add_section", "(", "section", ")", "super", "(", "ExactOnlineConfig", ",", "self", ")", ".", "set", "(", "section", ",", "option", ",", "value", ")", "# Save automatically!", "self", ".", "save", "(", ")" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
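The surrounding pattern — a ConfigParser subclass whose get raises a domain-specific error and whose set auto-creates sections — can be sketched standalone. MissingSetting here is a stand-in class, and the persistence hook is left as a comment:

from configparser import ConfigParser, NoOptionError, NoSectionError

class MissingSetting(Exception):
    pass

class IniSketch(ConfigParser):
    def get(self, section, option, **kwargs):
        try:
            return super().get(section, option, **kwargs)
        except (NoOptionError, NoSectionError):
            raise MissingSetting('%s/%s' % (section, option))

    def set(self, section, option, value):
        try:
            super().set(section, option, value)
        except NoSectionError:
            self.add_section(section)
            super().set(section, option, value)
        # a real implementation would persist to disk here

cfg = IniSketch()
cfg.set('auth', 'token', 'abc')     # auto-creates the [auth] section
print(cfg.get('auth', 'token'))     # abc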
test
_json_safe
json.loads wants an unistr in Python3. Convert it.
exactonline/rawapi.py
def _json_safe(data):
    """
    json.loads wants an unistr in Python3. Convert it.
    """
    if not hasattr(data, 'encode'):
        try:
            data = data.decode('utf-8')
        except UnicodeDecodeError:
            raise ValueError(
                'Expected valid UTF8 for JSON data, got %r' % (data,))
    return data
[ "json", ".", "loads", "wants", "an", "unistr", "in", "Python3", ".", "Convert", "it", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/rawapi.py#L18-L28
[ "def", "_json_safe", "(", "data", ")", ":", "if", "not", "hasattr", "(", "data", ",", "'encode'", ")", ":", "try", ":", "data", "=", "data", ".", "decode", "(", "'utf-8'", ")", "except", "UnicodeDecodeError", ":", "raise", "ValueError", "(", "'Expected valid UTF8 for JSON data, got %r'", "%", "(", "data", ",", ")", ")", "return", "data" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
http_post
Shortcut for urlopen (POST) + read. We'll probably want to add a nice timeout here later too.
exactonline/http.py
def http_post(url, data=None, opt=opt_default):
    """
    Shortcut for urlopen (POST) + read.

    We'll probably want to add a nice timeout here later too.
    """
    return _http_request(url, method='POST', data=_marshalled(data), opt=opt)
[ "Shortcut", "for", "urlopen", "(", "POST", ")", "+", "read", ".", "We", "ll", "probably", "want", "to", "add", "a", "nice", "timeout", "here", "later", "too", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/http.py#L251-L256
[ "def", "http_post", "(", "url", ",", "data", "=", "None", ",", "opt", "=", "opt_default", ")", ":", "return", "_http_request", "(", "url", ",", "method", "=", "'POST'", ",", "data", "=", "_marshalled", "(", "data", ")", ",", "opt", "=", "opt", ")" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
http_put
Shortcut for urlopen (PUT) + read. We'll probably want to add a nice timeout here later too.
exactonline/http.py
def http_put(url, data=None, opt=opt_default):
    """
    Shortcut for urlopen (PUT) + read.

    We'll probably want to add a nice timeout here later too.
    """
    return _http_request(url, method='PUT', data=_marshalled(data), opt=opt)
[ "Shortcut", "for", "urlopen", "(", "PUT", ")", "+", "read", ".", "We", "ll", "probably", "want", "to", "add", "a", "nice", "timeout", "here", "later", "too", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/http.py#L259-L264
[ "def", "http_put", "(", "url", ",", "data", "=", "None", ",", "opt", "=", "opt_default", ")", ":", "return", "_http_request", "(", "url", ",", "method", "=", "'PUT'", ",", "data", "=", "_marshalled", "(", "data", ")", ",", "opt", "=", "opt", ")" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
ValidHTTPSConnection.connect
Connect to a host on a given (SSL) port.
exactonline/http.py
def connect(self):
    "Connect to a host on a given (SSL) port."
    sock = socket.create_connection((self.host, self.port),
                                    self.timeout, self.source_address)
    if self._tunnel_host:
        self.sock = sock
        self._tunnel()

    # Python 2.7.9+
    if create_default_context:
        # Newer python will use the "right" cacert file automatically. So
        # the default of None can safely be passed along.
        ctx = create_default_context(cafile=self.cacert_file)
        sock = ctx.wrap_socket(sock, server_hostname=self.host)
    else:
        # Take the supplied file, or FALLBACK_CACERT_FILE if nothing
        # was supplied.
        cacert_file = self.cacert_file or FALLBACK_CACERT_FILE
        sock = ssl.wrap_socket(sock, ca_certs=cacert_file,
                               cert_reqs=ssl.CERT_REQUIRED)

    self.sock = sock
[ "Connect", "to", "a", "host", "on", "a", "given", "(", "SSL", ")", "port", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/http.py#L186-L207
[ "def", "connect", "(", "self", ")", ":", "sock", "=", "socket", ".", "create_connection", "(", "(", "self", ".", "host", ",", "self", ".", "port", ")", ",", "self", ".", "timeout", ",", "self", ".", "source_address", ")", "if", "self", ".", "_tunnel_host", ":", "self", ".", "sock", "=", "sock", "self", ".", "_tunnel", "(", ")", "# Python 2.7.9+", "if", "create_default_context", ":", "# Newer python will use the \"right\" cacert file automatically. So", "# the default of None can safely be passed along.", "ctx", "=", "create_default_context", "(", "cafile", "=", "self", ".", "cacert_file", ")", "sock", "=", "ctx", ".", "wrap_socket", "(", "sock", ",", "server_hostname", "=", "self", ".", "host", ")", "else", ":", "# Take the supplied file, or FALLBACK_CACERT_FILE if nothing", "# was supplied.", "cacert_file", "=", "self", ".", "cacert_file", "or", "FALLBACK_CACERT_FILE", "sock", "=", "ssl", ".", "wrap_socket", "(", "sock", ",", "ca_certs", "=", "cacert_file", ",", "cert_reqs", "=", "ssl", ".", "CERT_REQUIRED", ")", "self", ".", "sock", "=", "sock" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
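The fallback branch relies on ssl.wrap_socket, which was deprecated in Python 3.7 and removed in 3.12, so on any modern Python only the create_default_context path applies. A present-day sketch of the same connect logic as a standalone helper (hypothetical function, not this class):

import socket
import ssl

def tls_connect(host, port=443, cacert_file=None, timeout=10.0):
    """Open a certificate-verified TLS connection (modern Pythons)."""
    # With cafile=None the context uses the system's default CA bundle.
    ctx = ssl.create_default_context(cafile=cacert_file)
    sock = socket.create_connection((host, port), timeout)
    # server_hostname enables SNI and hostname verification.
    return ctx.wrap_socket(sock, server_hostname=host)

conn = tls_connect('example.com')
print(conn.version())   # e.g. 'TLSv1.3'
conn.close()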
test
ExactOnlineConfig.get_or_set_default
Base method to fetch values and to set defaults in case they don't exist.
exactonline/storage/base.py
def get_or_set_default(self, section, option, value):
    """
    Base method to fetch values and to set defaults in case they
    don't exist.
    """
    try:
        ret = self.get(section, option)
    except MissingSetting:
        self.set(section, option, value)
        ret = value

    return ret
[ "Base", "method", "to", "fetch", "values", "and", "to", "set", "defaults", "in", "case", "they", "don", "t", "exist", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/storage/base.py#L61-L72
[ "def", "get_or_set_default", "(", "self", ",", "section", ",", "option", ",", "value", ")", ":", "try", ":", "ret", "=", "self", ".", "get", "(", "section", ",", "option", ")", "except", "MissingSetting", ":", "self", ".", "set", "(", "section", ",", "option", ",", "value", ")", "ret", "=", "value", "return", "ret" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
ExactInvoice.get_ledger_code_to_guid_map
Convert set of human codes and to a dict of code to exactonline guid mappings. Example:: ret = inv.get_ledger_code_to_guid_map(['1234', '5555']) ret == {'1234': '<guid1_from_exactonline_ledgeraccounts>', '5555': '<guid2_from_exactonline_ledgeraccounts>'}
exactonline/elements/invoice.py
def get_ledger_code_to_guid_map(self, codes):
    """
    Convert set of human codes and to a dict of code to exactonline
    guid mappings.

    Example::

        ret = inv.get_ledger_code_to_guid_map(['1234', '5555'])
        ret == {'1234': '<guid1_from_exactonline_ledgeraccounts>',
                '5555': '<guid2_from_exactonline_ledgeraccounts>'}
    """
    if codes:
        codes = set(str(i) for i in codes)
        ledger_ids = self._api.ledgeraccounts.filter(code__in=codes)
        ret = dict((str(i['Code']), i['ID']) for i in ledger_ids)
        found = set(ret.keys())
        missing = (codes - found)
        if missing:
            raise UnknownLedgerCodes(missing)
        return ret
    return {}
[ "Convert", "set", "of", "human", "codes", "and", "to", "a", "dict", "of", "code", "to", "exactonline", "guid", "mappings", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/elements/invoice.py#L69-L89
[ "def", "get_ledger_code_to_guid_map", "(", "self", ",", "codes", ")", ":", "if", "codes", ":", "codes", "=", "set", "(", "str", "(", "i", ")", "for", "i", "in", "codes", ")", "ledger_ids", "=", "self", ".", "_api", ".", "ledgeraccounts", ".", "filter", "(", "code__in", "=", "codes", ")", "ret", "=", "dict", "(", "(", "str", "(", "i", "[", "'Code'", "]", ")", ",", "i", "[", "'ID'", "]", ")", "for", "i", "in", "ledger_ids", ")", "found", "=", "set", "(", "ret", ".", "keys", "(", ")", ")", "missing", "=", "(", "codes", "-", "found", ")", "if", "missing", ":", "raise", "UnknownLedgerCodes", "(", "missing", ")", "return", "ret", "return", "{", "}" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
ExactInvoice.get_vatcode_for_ledger_line
Get VATCode (up to three digit number) for the specified ledger line. Can be as simple as: return '0 ' # one VAT category only Or more complicated, like: if ledger_line['vat_percentage'] == 21: return '2 ' # high VAT assert ledger_line['vat_percentage'] == 0 customer = self._bosso_invoice.customer assert customer.has_vat_number() if customer.is_in_nl(): return '0 ' # no VAT elif customer.is_in_eu(): return '7 ' # inside EU, no VAT return '6 ' # outside EU, no VAT
exactonline/elements/invoice.py
def get_vatcode_for_ledger_line(self, ledger_line):
    """
    Get VATCode (up to three digit number) for the specified ledger line.

    Can be as simple as:

        return '0 '  # one VAT category only

    Or more complicated, like:

        if ledger_line['vat_percentage'] == 21:
            return '2 '  # high VAT
        assert ledger_line['vat_percentage'] == 0
        customer = self._bosso_invoice.customer
        assert customer.has_vat_number()
        if customer.is_in_nl():
            return '0 '  # no VAT
        elif customer.is_in_eu():
            return '7 '  # inside EU, no VAT
        return '6 '  # outside EU, no VAT
    """
    # Exact accepts receiving 'VATPercentage', but only when it is
    # higher than 0. Possibly because we have more than one match
    # for 0%? So, we'll have to fetch the right VATCode instead.
    vat_percentage = ledger_line['vat_percentage']
    if vat_percentage == 0:
        vatcode = '0 '  # FIXME: hardcoded.. fetch from API?
    elif vat_percentage == 21:
        vatcode = '2 '  # FIXME: hardcoded.. fetch from API?
    else:
        raise NotImplementedError('Unknown VAT: %s' % (vat_percentage,))

    return vatcode
def get_vatcode_for_ledger_line(self, ledger_line): """ Get VATCode (up to three digit number) for the specified ledger line. Can be as simple as: return '0 ' # one VAT category only Or more complicated, like: if ledger_line['vat_percentage'] == 21: return '2 ' # high VAT assert ledger_line['vat_percentage'] == 0 customer = self._bosso_invoice.customer assert customer.has_vat_number() if customer.is_in_nl(): return '0 ' # no VAT elif customer.is_in_eu(): return '7 ' # inside EU, no VAT return '6 ' # outside EU, no VAT """ # Exact accepts receiving 'VATPercentage', but only when it is # higher than 0. Possibly because we have more than one match # for 0%? So, we'll have to fetch the right VATCode instead. vat_percentage = ledger_line['vat_percentage'] if vat_percentage == 0: vatcode = '0 ' # FIXME: hardcoded.. fetch from API? elif vat_percentage == 21: vatcode = '2 ' # FIXME: hardcoded.. fetch from API? else: raise NotImplementedError('Unknown VAT: %s' % (vat_percentage,)) return vatcode
[ "Get", "VATCode", "(", "up", "to", "three", "digit", "number", ")", "for", "the", "specified", "ledger", "line", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/elements/invoice.py#L136-L169
[ "def", "get_vatcode_for_ledger_line", "(", "self", ",", "ledger_line", ")", ":", "# Exact accepts receiving 'VATPercentage', but only when it is", "# higher than 0. Possibly because we have more than one match", "# for 0%? So, we'll have to fetch the right VATCode instead.", "vat_percentage", "=", "ledger_line", "[", "'vat_percentage'", "]", "if", "vat_percentage", "==", "0", ":", "vatcode", "=", "'0 '", "# FIXME: hardcoded.. fetch from API?", "elif", "vat_percentage", "==", "21", ":", "vatcode", "=", "'2 '", "# FIXME: hardcoded.. fetch from API?", "else", ":", "raise", "NotImplementedError", "(", "'Unknown VAT: %s'", "%", "(", "vat_percentage", ",", ")", ")", "return", "vatcode" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
V1Division.get_divisions
Get the "current" division and return a dictionary of divisions so the user can select the right one.
exactonline/api/v1division.py
def get_divisions(self): """ Get the "current" division and return a dictionary of divisions so the user can select the right one. """ ret = self.rest(GET('v1/current/Me?$select=CurrentDivision')) current_division = ret[0]['CurrentDivision'] assert isinstance(current_division, int) urlbase = 'v1/%d/' % (current_division,) resource = urljoin(urlbase, 'hrm/Divisions?$select=Code,Description') ret = self.rest(GET(resource)) choices = dict((i['Code'], i['Description']) for i in ret) return choices, current_division
def get_divisions(self): """ Get the "current" division and return a dictionary of divisions so the user can select the right one. """ ret = self.rest(GET('v1/current/Me?$select=CurrentDivision')) current_division = ret[0]['CurrentDivision'] assert isinstance(current_division, int) urlbase = 'v1/%d/' % (current_division,) resource = urljoin(urlbase, 'hrm/Divisions?$select=Code,Description') ret = self.rest(GET(resource)) choices = dict((i['Code'], i['Description']) for i in ret) return choices, current_division
[ "Get", "the", "current", "division", "and", "return", "a", "dictionary", "of", "divisions", "so", "the", "user", "can", "select", "the", "right", "one", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/api/v1division.py#L32-L46
[ "def", "get_divisions", "(", "self", ")", ":", "ret", "=", "self", ".", "rest", "(", "GET", "(", "'v1/current/Me?$select=CurrentDivision'", ")", ")", "current_division", "=", "ret", "[", "0", "]", "[", "'CurrentDivision'", "]", "assert", "isinstance", "(", "current_division", ",", "int", ")", "urlbase", "=", "'v1/%d/'", "%", "(", "current_division", ",", ")", "resource", "=", "urljoin", "(", "urlbase", ",", "'hrm/Divisions?$select=Code,Description'", ")", "ret", "=", "self", ".", "rest", "(", "GET", "(", "resource", ")", ")", "choices", "=", "dict", "(", "(", "i", "[", "'Code'", "]", ",", "i", "[", "'Description'", "]", ")", "for", "i", "in", "ret", ")", "return", "choices", ",", "current_division" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
V1Division.set_division
Select the "current" division that we'll be working on/with.
exactonline/api/v1division.py
def set_division(self, division): """ Select the "current" division that we'll be working on/with. """ try: division = int(division) except (TypeError, ValueError): raise V1DivisionError('Supplied division %r is not a number' % (division,)) urlbase = 'v1/%d/' % (division,) resource = urljoin( urlbase, "crm/Accounts?$select=ID&$filter=Name+eq+'DOES_NOT_EXIST'") try: self.rest(GET(resource)) except AssertionError: raise V1DivisionError('Invalid division %r according to server' % (division,)) self.storage.set_division(division)
def set_division(self, division): """ Select the "current" division that we'll be working on/with. """ try: division = int(division) except (TypeError, ValueError): raise V1DivisionError('Supplied division %r is not a number' % (division,)) urlbase = 'v1/%d/' % (division,) resource = urljoin( urlbase, "crm/Accounts?$select=ID&$filter=Name+eq+'DOES_NOT_EXIST'") try: self.rest(GET(resource)) except AssertionError: raise V1DivisionError('Invalid division %r according to server' % (division,)) self.storage.set_division(division)
[ "Select", "the", "current", "division", "that", "we", "ll", "be", "working", "on", "/", "with", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/api/v1division.py#L48-L68
[ "def", "set_division", "(", "self", ",", "division", ")", ":", "try", ":", "division", "=", "int", "(", "division", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "V1DivisionError", "(", "'Supplied division %r is not a number'", "%", "(", "division", ",", ")", ")", "urlbase", "=", "'v1/%d/'", "%", "(", "division", ",", ")", "resource", "=", "urljoin", "(", "urlbase", ",", "\"crm/Accounts?$select=ID&$filter=Name+eq+'DOES_NOT_EXIST'\"", ")", "try", ":", "self", ".", "rest", "(", "GET", "(", "resource", ")", ")", "except", "AssertionError", ":", "raise", "V1DivisionError", "(", "'Invalid division %r according to server'", "%", "(", "division", ",", ")", ")", "self", ".", "storage", ".", "set_division", "(", "division", ")" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
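A hedged sketch of the division-selection flow built from the two methods above; api is assumed to be an ExactApi instance (these V1Division helpers are mixed into it), configured per the library's documented pattern:

from exactonline.api import ExactApi
from exactonline.storage import IniStorage

api = ExactApi(storage=IniStorage('config.ini'))
choices, current = api.get_divisions()
for code, description in sorted(choices.items()):
    print(code, description)
api.set_division(current)  # a non-numeric or unknown value raises V1DivisionError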
test
Invoices.map_exact2foreign_invoice_numbers
Optionally supply a list of ExactOnline invoice numbers. Returns a dictionary of ExactOnline invoice numbers to foreign (YourRef) invoice numbers.
exactonline/api/invoices.py
def map_exact2foreign_invoice_numbers(self, exact_invoice_numbers=None):
    """
    Optionally supply a list of ExactOnline invoice numbers.

    Returns a dictionary of ExactOnline invoice numbers to foreign
    (YourRef) invoice numbers.
    """
    # Quick, select all. Not the nicest to the server, though.
    if exact_invoice_numbers is None:
        ret = self.filter(select='InvoiceNumber,YourRef')
        return dict((i['InvoiceNumber'], i['YourRef']) for i in ret)

    # Slower, select what we want to know. More work for us.
    exact_to_foreign_map = {}

    # Do it in batches. If we append 300 InvoiceNumbers at once, we
    # get a 12kB URI. (If the list is empty, we skip the entire
    # for-loop and correctly return the empty dict.)
    exact_invoice_numbers = list(set(exact_invoice_numbers))  # unique
    for offset in range(0, len(exact_invoice_numbers), 40):
        batch = exact_invoice_numbers[offset:(offset + 40)]
        filter_ = ' or '.join(
            'InvoiceNumber eq %s' % (i,) for i in batch)
        assert filter_  # if filter was empty, we'd get all!
        ret = self.filter(filter=filter_, select='InvoiceNumber,YourRef')
        exact_to_foreign_map.update(
            dict((i['InvoiceNumber'], i['YourRef']) for i in ret))

    # Any values we missed?
    for exact_invoice_number in exact_invoice_numbers:
        if exact_invoice_number not in exact_to_foreign_map:
            exact_to_foreign_map[exact_invoice_number] = None

    return exact_to_foreign_map
def map_exact2foreign_invoice_numbers(self, exact_invoice_numbers=None):
    """
    Optionally supply a list of ExactOnline invoice numbers.

    Returns a dictionary of ExactOnline invoice numbers to foreign
    (YourRef) invoice numbers.
    """
    # Quick, select all. Not the nicest to the server, though.
    if exact_invoice_numbers is None:
        ret = self.filter(select='InvoiceNumber,YourRef')
        return dict((i['InvoiceNumber'], i['YourRef']) for i in ret)

    # Slower, select what we want to know. More work for us.
    exact_to_foreign_map = {}

    # Do it in batches. If we append 300 InvoiceNumbers at once, we
    # get a 12kB URI. (If the list is empty, we skip the entire
    # for-loop and correctly return the empty dict.)
    exact_invoice_numbers = list(set(exact_invoice_numbers))  # unique
    for offset in range(0, len(exact_invoice_numbers), 40):
        batch = exact_invoice_numbers[offset:(offset + 40)]
        filter_ = ' or '.join(
            'InvoiceNumber eq %s' % (i,) for i in batch)
        assert filter_  # if filter was empty, we'd get all!
        ret = self.filter(filter=filter_, select='InvoiceNumber,YourRef')
        exact_to_foreign_map.update(
            dict((i['InvoiceNumber'], i['YourRef']) for i in ret))

    # Any values we missed?
    for exact_invoice_number in exact_invoice_numbers:
        if exact_invoice_number not in exact_to_foreign_map:
            exact_to_foreign_map[exact_invoice_number] = None

    return exact_to_foreign_map
[ "Optionally", "supply", "a", "list", "of", "ExactOnline", "invoice", "numbers", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/api/invoices.py#L59-L92
[ "def", "map_exact2foreign_invoice_numbers", "(", "self", ",", "exact_invoice_numbers", "=", "None", ")", ":", "# Quick, select all. Not the most nice to the server though.", "if", "exact_invoice_numbers", "is", "None", ":", "ret", "=", "self", ".", "filter", "(", "select", "=", "'InvoiceNumber,YourRef'", ")", "return", "dict", "(", "(", "i", "[", "'InvoiceNumber'", "]", ",", "i", "[", "'YourRef'", "]", ")", "for", "i", "in", "ret", ")", "# Slower, select what we want to know. More work for us.", "exact_to_foreign_map", "=", "{", "}", "# Do it in batches. If we append 300 InvoiceNumbers at once, we", "# get a 12kB URI. (If the list is empty, we skip the entire", "# forloop and correctly return the empty dict.)", "exact_invoice_numbers", "=", "list", "(", "set", "(", "exact_invoice_numbers", ")", ")", "# unique", "for", "offset", "in", "range", "(", "0", ",", "len", "(", "exact_invoice_numbers", ")", ",", "40", ")", ":", "batch", "=", "exact_invoice_numbers", "[", "offset", ":", "(", "offset", "+", "40", ")", "]", "filter_", "=", "' or '", ".", "join", "(", "'InvoiceNumber eq %s'", "%", "(", "i", ",", ")", "for", "i", "in", "batch", ")", "assert", "filter_", "# if filter was empty, we'd get all!", "ret", "=", "self", ".", "filter", "(", "filter", "=", "filter_", ",", "select", "=", "'InvoiceNumber,YourRef'", ")", "exact_to_foreign_map", ".", "update", "(", "dict", "(", "(", "i", "[", "'InvoiceNumber'", "]", ",", "i", "[", "'YourRef'", "]", ")", "for", "i", "in", "ret", ")", ")", "# Any values we missed?", "for", "exact_invoice_number", "in", "exact_invoice_numbers", ":", "if", "exact_invoice_number", "not", "in", "exact_to_foreign_map", ":", "exact_to_foreign_map", "[", "exact_invoice_number", "]", "=", "None", "return", "exact_to_foreign_map" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
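A hedged usage sketch of the batched lookup above; api.invoices is assumed to be this Invoices helper hanging off a configured ExactApi, and the invoice numbers are placeholders:

from exactonline.api import ExactApi
from exactonline.storage import IniStorage

api = ExactApi(storage=IniStorage('config.ini'))
exact2yours = api.invoices.map_exact2foreign_invoice_numbers([15000001, 15000002])
# Numbers with no ExactOnline match map to None, per the fill-in loop above.
unmatched = sorted(k for k, v in exact2yours.items() if v is None)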
test
Invoices.map_foreign2exact_invoice_numbers
Optionally supply a list of foreign (your) invoice numbers. Returns a dictionary of your invoice numbers (YourRef) to Exact Online invoice numbers.
exactonline/api/invoices.py
def map_foreign2exact_invoice_numbers(self, foreign_invoice_numbers=None):
    """
    Optionally supply a list of foreign (your) invoice numbers.

    Returns a dictionary of your invoice numbers (YourRef) to Exact
    Online invoice numbers.
    """
    # Quick, select all. Not the nicest to the server, though.
    if foreign_invoice_numbers is None:
        ret = self.filter(select='InvoiceNumber,YourRef')
        return dict((i['YourRef'], i['InvoiceNumber']) for i in ret)

    # Slower, select what we want to know. More work for us.
    foreign_to_exact_map = {}

    # Do it in batches. If we append 300 InvoiceNumbers at once, we
    # get a 12kB URI. (If the list is empty, we skip the entire
    # for-loop and correctly return the empty dict.)
    foreign_invoice_numbers = list(set(foreign_invoice_numbers))  # unique
    for offset in range(0, len(foreign_invoice_numbers), 40):
        batch = foreign_invoice_numbers[offset:(offset + 40)]
        filter_ = ' or '.join(
            'YourRef eq %s' % (self._remote_invoice_number(i),)
            for i in batch)
        assert filter_  # if filter was empty, we'd get all!
        ret = self.filter(filter=filter_, select='InvoiceNumber,YourRef')
        foreign_to_exact_map.update(
            dict((i['YourRef'], i['InvoiceNumber']) for i in ret))

    # Any values we missed?
    for foreign_invoice_number in foreign_invoice_numbers:
        if foreign_invoice_number not in foreign_to_exact_map:
            foreign_to_exact_map[foreign_invoice_number] = None

    return foreign_to_exact_map
def map_foreign2exact_invoice_numbers(self, foreign_invoice_numbers=None):
    """
    Optionally supply a list of foreign (your) invoice numbers.

    Returns a dictionary of your invoice numbers (YourRef) to Exact
    Online invoice numbers.
    """
    # Quick, select all. Not the nicest to the server, though.
    if foreign_invoice_numbers is None:
        ret = self.filter(select='InvoiceNumber,YourRef')
        return dict((i['YourRef'], i['InvoiceNumber']) for i in ret)

    # Slower, select what we want to know. More work for us.
    foreign_to_exact_map = {}

    # Do it in batches. If we append 300 InvoiceNumbers at once, we
    # get a 12kB URI. (If the list is empty, we skip the entire
    # for-loop and correctly return the empty dict.)
    foreign_invoice_numbers = list(set(foreign_invoice_numbers))  # unique
    for offset in range(0, len(foreign_invoice_numbers), 40):
        batch = foreign_invoice_numbers[offset:(offset + 40)]
        filter_ = ' or '.join(
            'YourRef eq %s' % (self._remote_invoice_number(i),)
            for i in batch)
        assert filter_  # if filter was empty, we'd get all!
        ret = self.filter(filter=filter_, select='InvoiceNumber,YourRef')
        foreign_to_exact_map.update(
            dict((i['YourRef'], i['InvoiceNumber']) for i in ret))

    # Any values we missed?
    for foreign_invoice_number in foreign_invoice_numbers:
        if foreign_invoice_number not in foreign_to_exact_map:
            foreign_to_exact_map[foreign_invoice_number] = None

    return foreign_to_exact_map
[ "Optionally", "supply", "a", "list", "of", "foreign", "(", "your", ")", "invoice", "numbers", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/api/invoices.py#L94-L128
[ "def", "map_foreign2exact_invoice_numbers", "(", "self", ",", "foreign_invoice_numbers", "=", "None", ")", ":", "# Quick, select all. Not the most nice to the server though.", "if", "foreign_invoice_numbers", "is", "None", ":", "ret", "=", "self", ".", "filter", "(", "select", "=", "'InvoiceNumber,YourRef'", ")", "return", "dict", "(", "(", "i", "[", "'YourRef'", "]", ",", "i", "[", "'InvoiceNumber'", "]", ")", "for", "i", "in", "ret", ")", "# Slower, select what we want to know. More work for us.", "foreign_to_exact_map", "=", "{", "}", "# Do it in batches. If we append 300 InvoiceNumbers at once, we", "# get a 12kB URI. (If the list is empty, we skip the entire", "# forloop and correctly return the empty dict.)", "foreign_invoice_numbers", "=", "list", "(", "set", "(", "foreign_invoice_numbers", ")", ")", "# unique", "for", "offset", "in", "range", "(", "0", ",", "len", "(", "foreign_invoice_numbers", ")", ",", "40", ")", ":", "batch", "=", "foreign_invoice_numbers", "[", "offset", ":", "(", "offset", "+", "40", ")", "]", "filter_", "=", "' or '", ".", "join", "(", "'YourRef eq %s'", "%", "(", "self", ".", "_remote_invoice_number", "(", "i", ")", ",", ")", "for", "i", "in", "batch", ")", "assert", "filter_", "# if filter was empty, we'd get all!", "ret", "=", "self", ".", "filter", "(", "filter", "=", "filter_", ",", "select", "=", "'InvoiceNumber,YourRef'", ")", "foreign_to_exact_map", ".", "update", "(", "dict", "(", "(", "i", "[", "'YourRef'", "]", ",", "i", "[", "'InvoiceNumber'", "]", ")", "for", "i", "in", "ret", ")", ")", "# Any values we missed?", "for", "foreign_invoice_number", "in", "foreign_invoice_numbers", ":", "if", "foreign_invoice_number", "not", "in", "foreign_to_exact_map", ":", "foreign_to_exact_map", "[", "foreign_invoice_number", "]", "=", "None", "return", "foreign_to_exact_map" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
test
Receivables.filter
A common query would be duedate__lt=date(2015, 1, 1) to get all Receivables that are due in 2014 and earlier.
exactonline/api/receivables.py
def filter(self, relation_id=None, duedate__lt=None, duedate__gte=None, **kwargs): """ A common query would be duedate__lt=date(2015, 1, 1) to get all Receivables that are due in 2014 and earlier. """ if relation_id is not None: # Filter by (relation) account_id. There doesn't seem to be # any reason to prefer # 'read/financial/ReceivablesListByAccount?accountId=X' over # this. relation_id = self._remote_guid(relation_id) self._filter_append(kwargs, u'AccountId eq %s' % (relation_id,)) if duedate__lt is not None: # Not sure what the AgeGroup means in # ReceivablesListByAgeGroup, but we can certainly do # without. duedate__lt = self._remote_datetime(duedate__lt) self._filter_append(kwargs, u'DueDate lt %s' % (duedate__lt,)) if duedate__gte is not None: # Not sure what the AgeGroup means in # ReceivablesListByAgeGroup, but we can certainly do # without. duedate__gte = self._remote_datetime(duedate__gte) self._filter_append(kwargs, u'DueDate ge %s' % (duedate__gte,)) return super(Receivables, self).filter(**kwargs)
def filter(self, relation_id=None, duedate__lt=None, duedate__gte=None, **kwargs): """ A common query would be duedate__lt=date(2015, 1, 1) to get all Receivables that are due in 2014 and earlier. """ if relation_id is not None: # Filter by (relation) account_id. There doesn't seem to be # any reason to prefer # 'read/financial/ReceivablesListByAccount?accountId=X' over # this. relation_id = self._remote_guid(relation_id) self._filter_append(kwargs, u'AccountId eq %s' % (relation_id,)) if duedate__lt is not None: # Not sure what the AgeGroup means in # ReceivablesListByAgeGroup, but we can certainly do # without. duedate__lt = self._remote_datetime(duedate__lt) self._filter_append(kwargs, u'DueDate lt %s' % (duedate__lt,)) if duedate__gte is not None: # Not sure what the AgeGroup means in # ReceivablesListByAgeGroup, but we can certainly do # without. duedate__gte = self._remote_datetime(duedate__gte) self._filter_append(kwargs, u'DueDate ge %s' % (duedate__gte,)) return super(Receivables, self).filter(**kwargs)
[ "A", "common", "query", "would", "be", "duedate__lt", "=", "date", "(", "2015", "1", "1", ")", "to", "get", "all", "Receivables", "that", "are", "due", "in", "2014", "and", "earlier", "." ]
ossobv/exactonline
python
https://github.com/ossobv/exactonline/blob/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde/exactonline/api/receivables.py#L19-L47
[ "def", "filter", "(", "self", ",", "relation_id", "=", "None", ",", "duedate__lt", "=", "None", ",", "duedate__gte", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "relation_id", "is", "not", "None", ":", "# Filter by (relation) account_id. There doesn't seem to be", "# any reason to prefer", "# 'read/financial/ReceivablesListByAccount?accountId=X' over", "# this.", "relation_id", "=", "self", ".", "_remote_guid", "(", "relation_id", ")", "self", ".", "_filter_append", "(", "kwargs", ",", "u'AccountId eq %s'", "%", "(", "relation_id", ",", ")", ")", "if", "duedate__lt", "is", "not", "None", ":", "# Not sure what the AgeGroup means in", "# ReceivablesListByAgeGroup, but we can certainly do", "# without.", "duedate__lt", "=", "self", ".", "_remote_datetime", "(", "duedate__lt", ")", "self", ".", "_filter_append", "(", "kwargs", ",", "u'DueDate lt %s'", "%", "(", "duedate__lt", ",", ")", ")", "if", "duedate__gte", "is", "not", "None", ":", "# Not sure what the AgeGroup means in", "# ReceivablesListByAgeGroup, but we can certainly do", "# without.", "duedate__gte", "=", "self", ".", "_remote_datetime", "(", "duedate__gte", ")", "self", ".", "_filter_append", "(", "kwargs", ",", "u'DueDate ge %s'", "%", "(", "duedate__gte", ",", ")", ")", "return", "super", "(", "Receivables", ",", "self", ")", ".", "filter", "(", "*", "*", "kwargs", ")" ]
f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde
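A hedged usage sketch of the due-date filters; api.receivables is assumed to be this Receivables helper on a configured ExactApi:

from datetime import date

from exactonline.api import ExactApi
from exactonline.storage import IniStorage

api = ExactApi(storage=IniStorage('config.ini'))
overdue = api.receivables.filter(duedate__lt=date(2015, 1, 1))
q1_2015 = api.receivables.filter(duedate__gte=date(2015, 1, 1),
                                 duedate__lt=date(2015, 4, 1))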
test
sudoku_clauses
Create the (11745) Sudoku clauses, and return them as a list. Note that these clauses are *independent* of the particular Sudoku puzzle at hand.
examples/sudoku.py
def sudoku_clauses():
    """
    Create the (11745) Sudoku clauses, and return them as a list.
    Note that these clauses are *independent* of the particular
    Sudoku puzzle at hand.
    """
    res = []
    # for all cells, ensure that each cell:
    for i in range(1, 10):
        for j in range(1, 10):
            # denotes (at least) one of the 9 digits (1 clause)
            res.append([v(i, j, d) for d in range(1, 10)])
            # does not denote two different digits at once (36 clauses)
            for d in range(1, 10):
                for dp in range(d + 1, 10):
                    res.append([-v(i, j, d), -v(i, j, dp)])

    def valid(cells):
        # Append 324 clauses, corresponding to 9 cells, to the result.
        # The 9 cells are represented by a list of tuples.  The new clauses
        # ensure that the cells contain distinct values.
        for i, xi in enumerate(cells):
            for j, xj in enumerate(cells):
                if i < j:
                    for d in range(1, 10):
                        res.append([-v(xi[0], xi[1], d), -v(xj[0], xj[1], d)])

    # ensure rows and columns have distinct values
    for i in range(1, 10):
        valid([(i, j) for j in range(1, 10)])
        valid([(j, i) for j in range(1, 10)])
    # ensure 3x3 sub-grids "regions" have distinct values
    for i in 1, 4, 7:
        for j in 1, 4, 7:
            valid([(i + k % 3, j + k // 3) for k in range(9)])

    assert len(res) == 81 * (1 + 36) + 27 * 324
    return res
def sudoku_clauses():
    """
    Create the (11745) Sudoku clauses, and return them as a list.
    Note that these clauses are *independent* of the particular
    Sudoku puzzle at hand.
    """
    res = []
    # for all cells, ensure that each cell:
    for i in range(1, 10):
        for j in range(1, 10):
            # denotes (at least) one of the 9 digits (1 clause)
            res.append([v(i, j, d) for d in range(1, 10)])
            # does not denote two different digits at once (36 clauses)
            for d in range(1, 10):
                for dp in range(d + 1, 10):
                    res.append([-v(i, j, d), -v(i, j, dp)])

    def valid(cells):
        # Append 324 clauses, corresponding to 9 cells, to the result.
        # The 9 cells are represented by a list of tuples.  The new clauses
        # ensure that the cells contain distinct values.
        for i, xi in enumerate(cells):
            for j, xj in enumerate(cells):
                if i < j:
                    for d in range(1, 10):
                        res.append([-v(xi[0], xi[1], d), -v(xj[0], xj[1], d)])

    # ensure rows and columns have distinct values
    for i in range(1, 10):
        valid([(i, j) for j in range(1, 10)])
        valid([(j, i) for j in range(1, 10)])
    # ensure 3x3 sub-grids "regions" have distinct values
    for i in 1, 4, 7:
        for j in 1, 4, 7:
            valid([(i + k % 3, j + k // 3) for k in range(9)])

    assert len(res) == 81 * (1 + 36) + 27 * 324
    return res
[ "Create", "the", "(", "11745", ")", "Sudoku", "clauses", "and", "return", "them", "as", "a", "list", ".", "Note", "that", "these", "clauses", "are", "*", "independent", "*", "of", "the", "particular", "Sudoku", "puzzle", "at", "hand", "." ]
ContinuumIO/pycosat
python
https://github.com/ContinuumIO/pycosat/blob/b38fd85b6f4dcc18efd6027e96e5785104f53bb0/examples/sudoku.py#L24-L61
[ "def", "sudoku_clauses", "(", ")", ":", "res", "=", "[", "]", "# for all cells, ensure that the each cell:", "for", "i", "in", "range", "(", "1", ",", "10", ")", ":", "for", "j", "in", "range", "(", "1", ",", "10", ")", ":", "# denotes (at least) one of the 9 digits (1 clause)", "res", ".", "append", "(", "[", "v", "(", "i", ",", "j", ",", "d", ")", "for", "d", "in", "range", "(", "1", ",", "10", ")", "]", ")", "# does not denote two different digits at once (36 clauses)", "for", "d", "in", "range", "(", "1", ",", "10", ")", ":", "for", "dp", "in", "range", "(", "d", "+", "1", ",", "10", ")", ":", "res", ".", "append", "(", "[", "-", "v", "(", "i", ",", "j", ",", "d", ")", ",", "-", "v", "(", "i", ",", "j", ",", "dp", ")", "]", ")", "def", "valid", "(", "cells", ")", ":", "# Append 324 clauses, corresponding to 9 cells, to the result.", "# The 9 cells are represented by a list tuples. The new clauses", "# ensure that the cells contain distinct values.", "for", "i", ",", "xi", "in", "enumerate", "(", "cells", ")", ":", "for", "j", ",", "xj", "in", "enumerate", "(", "cells", ")", ":", "if", "i", "<", "j", ":", "for", "d", "in", "range", "(", "1", ",", "10", ")", ":", "res", ".", "append", "(", "[", "-", "v", "(", "xi", "[", "0", "]", ",", "xi", "[", "1", "]", ",", "d", ")", ",", "-", "v", "(", "xj", "[", "0", "]", ",", "xj", "[", "1", "]", ",", "d", ")", "]", ")", "# ensure rows and columns have distinct values", "for", "i", "in", "range", "(", "1", ",", "10", ")", ":", "valid", "(", "[", "(", "i", ",", "j", ")", "for", "j", "in", "range", "(", "1", ",", "10", ")", "]", ")", "valid", "(", "[", "(", "j", ",", "i", ")", "for", "j", "in", "range", "(", "1", ",", "10", ")", "]", ")", "# ensure 3x3 sub-grids \"regions\" have distinct values", "for", "i", "in", "1", ",", "4", ",", "7", ":", "for", "j", "in", "1", ",", "4", ",", "7", ":", "valid", "(", "[", "(", "i", "+", "k", "%", "3", ",", "j", "+", "k", "//", "3", ")", "for", "k", "in", "range", "(", "9", ")", "]", ")", "assert", "len", "(", "res", ")", "==", "81", "*", "(", "1", "+", "36", ")", "+", "27", "*", "324", "return", "res" ]
b38fd85b6f4dcc18efd6027e96e5785104f53bb0
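The clause counts above depend on the helper v(), defined earlier in examples/sudoku.py; the standard encoding, as used in the pycosat example, maps each (cell, digit) pair to a unique positive DIMACS literal:

def v(i, j, d):
    # i, j, d are 1-based, so literals range over 1..729
    assert 1 <= i <= 9 and 1 <= j <= 9 and 1 <= d <= 9
    return 81 * (i - 1) + 9 * (j - 1) + d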
test
solve
solve a Sudoku grid in place
examples/sudoku.py
def solve(grid):
    """
    solve a Sudoku grid in place
    """
    clauses = sudoku_clauses()
    for i in range(1, 10):
        for j in range(1, 10):
            d = grid[i - 1][j - 1]
            # For each digit already known, a clause (with one literal).
            # Note:
            #     We could also remove all variables for the known cells
            #     altogether (which would be more efficient).  However, for
            #     the sake of simplicity, we decided not to do that.
            if d:
                clauses.append([v(i, j, d)])

    # solve the SAT problem
    sol = set(pycosat.solve(clauses))

    def read_cell(i, j):
        # return the digit of cell i, j according to the solution
        for d in range(1, 10):
            if v(i, j, d) in sol:
                return d

    for i in range(1, 10):
        for j in range(1, 10):
            grid[i - 1][j - 1] = read_cell(i, j)
def solve(grid):
    """
    solve a Sudoku grid in place
    """
    clauses = sudoku_clauses()
    for i in range(1, 10):
        for j in range(1, 10):
            d = grid[i - 1][j - 1]
            # For each digit already known, a clause (with one literal).
            # Note:
            #     We could also remove all variables for the known cells
            #     altogether (which would be more efficient).  However, for
            #     the sake of simplicity, we decided not to do that.
            if d:
                clauses.append([v(i, j, d)])

    # solve the SAT problem
    sol = set(pycosat.solve(clauses))

    def read_cell(i, j):
        # return the digit of cell i, j according to the solution
        for d in range(1, 10):
            if v(i, j, d) in sol:
                return d

    for i in range(1, 10):
        for j in range(1, 10):
            grid[i - 1][j - 1] = read_cell(i, j)
[ "solve", "a", "Sudoku", "grid", "inplace" ]
ContinuumIO/pycosat
python
https://github.com/ContinuumIO/pycosat/blob/b38fd85b6f4dcc18efd6027e96e5785104f53bb0/examples/sudoku.py#L64-L91
[ "def", "solve", "(", "grid", ")", ":", "clauses", "=", "sudoku_clauses", "(", ")", "for", "i", "in", "range", "(", "1", ",", "10", ")", ":", "for", "j", "in", "range", "(", "1", ",", "10", ")", ":", "d", "=", "grid", "[", "i", "-", "1", "]", "[", "j", "-", "1", "]", "# For each digit already known, a clause (with one literal).", "# Note:", "# We could also remove all variables for the known cells", "# altogether (which would be more efficient). However, for", "# the sake of simplicity, we decided not to do that.", "if", "d", ":", "clauses", ".", "append", "(", "[", "v", "(", "i", ",", "j", ",", "d", ")", "]", ")", "# solve the SAT problem", "sol", "=", "set", "(", "pycosat", ".", "solve", "(", "clauses", ")", ")", "def", "read_cell", "(", "i", ",", "j", ")", ":", "# return the digit of cell i, j according to the solution", "for", "d", "in", "range", "(", "1", ",", "10", ")", ":", "if", "v", "(", "i", ",", "j", ",", "d", ")", "in", "sol", ":", "return", "d", "for", "i", "in", "range", "(", "1", ",", "10", ")", ":", "for", "j", "in", "range", "(", "1", ",", "10", ")", ":", "grid", "[", "i", "-", "1", "]", "[", "j", "-", "1", "]", "=", "read_cell", "(", "i", ",", "j", ")" ]
b38fd85b6f4dcc18efd6027e96e5785104f53bb0
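A minimal runnable sketch of solve(), using the fully unconstrained grid so no hand-picked puzzle needs to be assumed (0 marks an empty cell):

# sudoku_clauses(), v() and solve() as defined above; requires pycosat.
grid = [[0] * 9 for _ in range(9)]
solve(grid)
# every row of the completed grid is a permutation of 1..9
assert all(sorted(row) == list(range(1, 10)) for row in grid)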
test
view
Create Django class-based view from injector class.
src/dependencies/contrib/_django.py
def view(injector): """Create Django class-based view from injector class.""" handler = create_handler(View, injector) apply_http_methods(handler, injector) return injector.let(as_view=handler.as_view)
def view(injector): """Create Django class-based view from injector class.""" handler = create_handler(View, injector) apply_http_methods(handler, injector) return injector.let(as_view=handler.as_view)
[ "Create", "Django", "class", "-", "based", "view", "from", "injector", "class", "." ]
dry-python/dependencies
python
https://github.com/dry-python/dependencies/blob/297912cbc6482ba26b3104729645f3a2aba5facc/src/dependencies/contrib/_django.py#L8-L13
[ "def", "view", "(", "injector", ")", ":", "handler", "=", "create_handler", "(", "View", ",", "injector", ")", "apply_http_methods", "(", "handler", ",", "injector", ")", "return", "injector", ".", "let", "(", "as_view", "=", "handler", ".", "as_view", ")" ]
297912cbc6482ba26b3104729645f3a2aba5facc
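Only as_view is pinned down by the let() call above; a heavily hedged wiring sketch, with the injector's HTTP-method attributes elided because their exact contract lives in create_handler/apply_http_methods, which are not shown here (the public dependencies.contrib.django import path is also an assumption):

from django.urls import path
from dependencies import Injector
from dependencies.contrib.django import view

@view
class HelloView(Injector):
    # apply_http_methods() is assumed to map attributes such as 'get'
    # onto the generated View subclass; that contract is not shown above.
    pass

urlpatterns = [
    path('hello/', HelloView.as_view()),  # as_view comes from injector.let()
]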
test
form_view
Create Django form processing class-based view from injector class.
src/dependencies/contrib/_django.py
def form_view(injector): """Create Django form processing class-based view from injector class.""" handler = create_handler(FormView, injector) apply_form_methods(handler, injector) return injector.let(as_view=handler.as_view)
def form_view(injector): """Create Django form processing class-based view from injector class.""" handler = create_handler(FormView, injector) apply_form_methods(handler, injector) return injector.let(as_view=handler.as_view)
[ "Create", "Django", "form", "processing", "class", "-", "based", "view", "from", "injector", "class", "." ]
dry-python/dependencies
python
https://github.com/dry-python/dependencies/blob/297912cbc6482ba26b3104729645f3a2aba5facc/src/dependencies/contrib/_django.py#L16-L21
[ "def", "form_view", "(", "injector", ")", ":", "handler", "=", "create_handler", "(", "FormView", ",", "injector", ")", "apply_form_methods", "(", "handler", ",", "injector", ")", "return", "injector", ".", "let", "(", "as_view", "=", "handler", ".", "as_view", ")" ]
297912cbc6482ba26b3104729645f3a2aba5facc
test
method_view
Create Flask method based dispatching view from injector class.
src/dependencies/contrib/_flask.py
def method_view(injector): """Create Flask method based dispatching view from injector class.""" handler = create_handler(MethodView) apply_http_methods(handler, injector) return injector.let(as_view=handler.as_view)
def method_view(injector): """Create Flask method based dispatching view from injector class.""" handler = create_handler(MethodView) apply_http_methods(handler, injector) return injector.let(as_view=handler.as_view)
[ "Create", "Flask", "method", "based", "dispatching", "view", "from", "injector", "class", "." ]
dry-python/dependencies
python
https://github.com/dry-python/dependencies/blob/297912cbc6482ba26b3104729645f3a2aba5facc/src/dependencies/contrib/_flask.py#L7-L12
[ "def", "method_view", "(", "injector", ")", ":", "handler", "=", "create_handler", "(", "MethodView", ")", "apply_http_methods", "(", "handler", ",", "injector", ")", "return", "injector", ".", "let", "(", "as_view", "=", "handler", ".", "as_view", ")" ]
297912cbc6482ba26b3104729645f3a2aba5facc
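The Flask variant mirrors the Django one, except that flask.views.MethodView.as_view() takes an endpoint name; the same hedges apply (the injector attribute contract and the public import path are assumptions):

from flask import Flask
from dependencies import Injector
from dependencies.contrib.flask import method_view

app = Flask(__name__)

@method_view
class HelloView(Injector):
    # HTTP-method attributes are assumed to be wired up by
    # apply_http_methods(); the contract is not shown above.
    pass

app.add_url_rule('/hello', view_func=HelloView.as_view('hello'))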
test
api_view
Create DRF class-based API view from injector class.
src/dependencies/contrib/_rest_framework.py
def api_view(injector): """Create DRF class-based API view from injector class.""" handler = create_handler(APIView, injector) apply_http_methods(handler, injector) apply_api_view_methods(handler, injector) return injector.let(as_view=handler.as_view)
def api_view(injector): """Create DRF class-based API view from injector class.""" handler = create_handler(APIView, injector) apply_http_methods(handler, injector) apply_api_view_methods(handler, injector) return injector.let(as_view=handler.as_view)
[ "Create", "DRF", "class", "-", "based", "API", "view", "from", "injector", "class", "." ]
dry-python/dependencies
python
https://github.com/dry-python/dependencies/blob/297912cbc6482ba26b3104729645f3a2aba5facc/src/dependencies/contrib/_rest_framework.py#L12-L18
[ "def", "api_view", "(", "injector", ")", ":", "handler", "=", "create_handler", "(", "APIView", ",", "injector", ")", "apply_http_methods", "(", "handler", ",", "injector", ")", "apply_api_view_methods", "(", "handler", ",", "injector", ")", "return", "injector", ".", "let", "(", "as_view", "=", "handler", ".", "as_view", ")" ]
297912cbc6482ba26b3104729645f3a2aba5facc
test
generic_api_view
Create DRF generic class-based API view from injector class.
src/dependencies/contrib/_rest_framework.py
def generic_api_view(injector): """Create DRF generic class-based API view from injector class.""" handler = create_handler(GenericAPIView, injector) apply_http_methods(handler, injector) apply_api_view_methods(handler, injector) apply_generic_api_view_methods(handler, injector) return injector.let(as_view=handler.as_view)
def generic_api_view(injector): """Create DRF generic class-based API view from injector class.""" handler = create_handler(GenericAPIView, injector) apply_http_methods(handler, injector) apply_api_view_methods(handler, injector) apply_generic_api_view_methods(handler, injector) return injector.let(as_view=handler.as_view)
[ "Create", "DRF", "generic", "class", "-", "based", "API", "view", "from", "injector", "class", "." ]
dry-python/dependencies
python
https://github.com/dry-python/dependencies/blob/297912cbc6482ba26b3104729645f3a2aba5facc/src/dependencies/contrib/_rest_framework.py#L21-L28
[ "def", "generic_api_view", "(", "injector", ")", ":", "handler", "=", "create_handler", "(", "GenericAPIView", ",", "injector", ")", "apply_http_methods", "(", "handler", ",", "injector", ")", "apply_api_view_methods", "(", "handler", ",", "injector", ")", "apply_generic_api_view_methods", "(", "handler", ",", "injector", ")", "return", "injector", ".", "let", "(", "as_view", "=", "handler", ".", "as_view", ")" ]
297912cbc6482ba26b3104729645f3a2aba5facc
test
model_view_set
Create DRF model view set from injector class.
src/dependencies/contrib/_rest_framework.py
def model_view_set(injector): """Create DRF model view set from injector class.""" handler = create_handler(ModelViewSet, injector) apply_api_view_methods(handler, injector) apply_generic_api_view_methods(handler, injector) apply_model_view_set_methods(handler, injector) return injector.let(as_viewset=lambda: handler)
def model_view_set(injector): """Create DRF model view set from injector class.""" handler = create_handler(ModelViewSet, injector) apply_api_view_methods(handler, injector) apply_generic_api_view_methods(handler, injector) apply_model_view_set_methods(handler, injector) return injector.let(as_viewset=lambda: handler)
[ "Create", "DRF", "model", "view", "set", "from", "injector", "class", "." ]
dry-python/dependencies
python
https://github.com/dry-python/dependencies/blob/297912cbc6482ba26b3104729645f3a2aba5facc/src/dependencies/contrib/_rest_framework.py#L31-L38
[ "def", "model_view_set", "(", "injector", ")", ":", "handler", "=", "create_handler", "(", "ModelViewSet", ",", "injector", ")", "apply_api_view_methods", "(", "handler", ",", "injector", ")", "apply_generic_api_view_methods", "(", "handler", ",", "injector", ")", "apply_model_view_set_methods", "(", "handler", ",", "injector", ")", "return", "injector", ".", "let", "(", "as_viewset", "=", "lambda", ":", "handler", ")" ]
297912cbc6482ba26b3104729645f3a2aba5facc
test
stream_from_fd
Receive a streamer for a given file descriptor.
aionotify/aioutils.py
def stream_from_fd(fd, loop):
    """Receive a streamer for a given file descriptor."""
    reader = asyncio.StreamReader(loop=loop)
    protocol = asyncio.StreamReaderProtocol(reader, loop=loop)
    waiter = asyncio.futures.Future(loop=loop)

    transport = UnixFileDescriptorTransport(
        loop=loop,
        fileno=fd,
        protocol=protocol,
        waiter=waiter,
    )

    try:
        yield from waiter
    except Exception:
        transport.close()
        raise

    if loop.get_debug():
        logger.debug("Read fd %r connected: (%r, %r)", fd, transport, protocol)
    return reader, transport
def stream_from_fd(fd, loop):
    """Receive a streamer for a given file descriptor."""
    reader = asyncio.StreamReader(loop=loop)
    protocol = asyncio.StreamReaderProtocol(reader, loop=loop)
    waiter = asyncio.futures.Future(loop=loop)

    transport = UnixFileDescriptorTransport(
        loop=loop,
        fileno=fd,
        protocol=protocol,
        waiter=waiter,
    )

    try:
        yield from waiter
    except Exception:
        transport.close()
        raise

    if loop.get_debug():
        logger.debug("Read fd %r connected: (%r, %r)", fd, transport, protocol)
    return reader, transport
[ "Recieve", "a", "streamer", "for", "a", "given", "file", "descriptor", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/aioutils.py#L123-L143
[ "def", "stream_from_fd", "(", "fd", ",", "loop", ")", ":", "reader", "=", "asyncio", ".", "StreamReader", "(", "loop", "=", "loop", ")", "protocol", "=", "asyncio", ".", "StreamReaderProtocol", "(", "reader", ",", "loop", "=", "loop", ")", "waiter", "=", "asyncio", ".", "futures", ".", "Future", "(", "loop", "=", "loop", ")", "transport", "=", "UnixFileDescriptorTransport", "(", "loop", "=", "loop", ",", "fileno", "=", "fd", ",", "protocol", "=", "protocol", ",", "waiter", "=", "waiter", ",", ")", "try", ":", "yield", "from", "waiter", "except", "Exception", ":", "transport", ".", "close", "(", ")", "if", "loop", ".", "get_debug", "(", ")", ":", "logger", ".", "debug", "(", "\"Read fd %r connected: (%r, %r)\"", ",", "fd", ",", "transport", ",", "protocol", ")", "return", "reader", ",", "transport" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
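A hedged sketch driving stream_from_fd() with one end of an os.pipe(); it is written in the pre-await, yield-from style this module itself uses (the function above is assumed to carry an @asyncio.coroutine decorator in the source file):

import asyncio
import os

from aionotify.aioutils import stream_from_fd

@asyncio.coroutine
def demo(loop):
    rfd, wfd = os.pipe()
    reader, transport = yield from stream_from_fd(rfd, loop)
    os.write(wfd, b'hello')
    data = yield from reader.readexactly(5)
    transport.close()  # the transport owns rfd and closes it
    os.close(wfd)
    return data

loop = asyncio.get_event_loop()
print(loop.run_until_complete(demo(loop)))  # b'hello'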
test
UnixFileDescriptorTransport._read_ready
Called by the event loop whenever the fd is ready for reading.
aionotify/aioutils.py
def _read_ready(self): """Called by the event loop whenever the fd is ready for reading.""" try: data = os.read(self._fileno, self.max_size) except InterruptedError: # No worries ;) pass except OSError as exc: # Some OS-level problem, crash. self._fatal_error(exc, "Fatal read error on file descriptor read") else: if data: self._protocol.data_received(data) else: # We reached end-of-file. if self._loop.get_debug(): logger.info("%r was closed by the kernel", self) self._closing = False self.pause_reading() self._loop.call_soon(self._protocol.eof_received) self._loop.call_soon(self._call_connection_lost, None)
def _read_ready(self): """Called by the event loop whenever the fd is ready for reading.""" try: data = os.read(self._fileno, self.max_size) except InterruptedError: # No worries ;) pass except OSError as exc: # Some OS-level problem, crash. self._fatal_error(exc, "Fatal read error on file descriptor read") else: if data: self._protocol.data_received(data) else: # We reached end-of-file. if self._loop.get_debug(): logger.info("%r was closed by the kernel", self) self._closing = False self.pause_reading() self._loop.call_soon(self._protocol.eof_received) self._loop.call_soon(self._call_connection_lost, None)
[ "Called", "by", "the", "event", "loop", "whenever", "the", "fd", "is", "ready", "for", "reading", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/aioutils.py#L37-L58
[ "def", "_read_ready", "(", "self", ")", ":", "try", ":", "data", "=", "os", ".", "read", "(", "self", ".", "_fileno", ",", "self", ".", "max_size", ")", "except", "InterruptedError", ":", "# No worries ;)", "pass", "except", "OSError", "as", "exc", ":", "# Some OS-level problem, crash.", "self", ".", "_fatal_error", "(", "exc", ",", "\"Fatal read error on file descriptor read\"", ")", "else", ":", "if", "data", ":", "self", ".", "_protocol", ".", "data_received", "(", "data", ")", "else", ":", "# We reached end-of-file.", "if", "self", ".", "_loop", ".", "get_debug", "(", ")", ":", "logger", ".", "info", "(", "\"%r was closed by the kernel\"", ",", "self", ")", "self", ".", "_closing", "=", "False", "self", ".", "pause_reading", "(", ")", "self", ".", "_loop", ".", "call_soon", "(", "self", ".", "_protocol", ".", "eof_received", ")", "self", ".", "_loop", ".", "call_soon", "(", "self", ".", "_call_connection_lost", ",", "None", ")" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
UnixFileDescriptorTransport.pause_reading
Public API: pause reading the transport.
aionotify/aioutils.py
def pause_reading(self): """Public API: pause reading the transport.""" self._loop.remove_reader(self._fileno) self._active = False
def pause_reading(self): """Public API: pause reading the transport.""" self._loop.remove_reader(self._fileno) self._active = False
[ "Public", "API", ":", "pause", "reading", "the", "transport", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/aioutils.py#L60-L63
[ "def", "pause_reading", "(", "self", ")", ":", "self", ".", "_loop", ".", "remove_reader", "(", "self", ".", "_fileno", ")", "self", ".", "_active", "=", "False" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
UnixFileDescriptorTransport.resume_reading
Public API: resume transport reading.
aionotify/aioutils.py
def resume_reading(self): """Public API: resume transport reading.""" self._loop.add_reader(self._fileno, self._read_ready) self._active = True
def resume_reading(self): """Public API: resume transport reading.""" self._loop.add_reader(self._fileno, self._read_ready) self._active = True
[ "Public", "API", ":", "resume", "transport", "reading", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/aioutils.py#L65-L68
[ "def", "resume_reading", "(", "self", ")", ":", "self", ".", "_loop", ".", "add_reader", "(", "self", ".", "_fileno", ",", "self", ".", "_read_ready", ")", "self", ".", "_active", "=", "True" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
UnixFileDescriptorTransport._close
Actual closing code, both from manual close and errors.
aionotify/aioutils.py
def _close(self, error=None): """Actual closing code, both from manual close and errors.""" self._closing = True self.pause_reading() self._loop.call_soon(self._call_connection_lost, error)
def _close(self, error=None): """Actual closing code, both from manual close and errors.""" self._closing = True self.pause_reading() self._loop.call_soon(self._call_connection_lost, error)
[ "Actual", "closing", "code", "both", "from", "manual", "close", "and", "errors", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/aioutils.py#L88-L92
[ "def", "_close", "(", "self", ",", "error", "=", "None", ")", ":", "self", ".", "_closing", "=", "True", "self", ".", "pause_reading", "(", ")", "self", ".", "_loop", ".", "call_soon", "(", "self", ".", "_call_connection_lost", ",", "error", ")" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
UnixFileDescriptorTransport._call_connection_lost
Finalize closing.
aionotify/aioutils.py
def _call_connection_lost(self, error): """Finalize closing.""" try: self._protocol.connection_lost(error) finally: os.close(self._fileno) self._fileno = None self._protocol = None self._loop = None
def _call_connection_lost(self, error): """Finalize closing.""" try: self._protocol.connection_lost(error) finally: os.close(self._fileno) self._fileno = None self._protocol = None self._loop = None
[ "Finalize", "closing", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/aioutils.py#L94-L102
[ "def", "_call_connection_lost", "(", "self", ",", "error", ")", ":", "try", ":", "self", ".", "_protocol", ".", "connection_lost", "(", "error", ")", "finally", ":", "os", ".", "close", "(", "self", ".", "_fileno", ")", "self", ".", "_fileno", "=", "None", "self", ".", "_protocol", "=", "None", "self", ".", "_loop", "=", "None" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
Watcher.watch
Add a new watching rule.
aionotify/base.py
def watch(self, path, flags, *, alias=None): """Add a new watching rule.""" if alias is None: alias = path if alias in self.requests: raise ValueError("A watch request is already scheduled for alias %s" % alias) self.requests[alias] = (path, flags) if self._fd is not None: # We've started, register the watch immediately. self._setup_watch(alias, path, flags)
def watch(self, path, flags, *, alias=None): """Add a new watching rule.""" if alias is None: alias = path if alias in self.requests: raise ValueError("A watch request is already scheduled for alias %s" % alias) self.requests[alias] = (path, flags) if self._fd is not None: # We've started, register the watch immediately. self._setup_watch(alias, path, flags)
[ "Add", "a", "new", "watching", "rule", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/base.py#L50-L59
[ "def", "watch", "(", "self", ",", "path", ",", "flags", ",", "*", ",", "alias", "=", "None", ")", ":", "if", "alias", "is", "None", ":", "alias", "=", "path", "if", "alias", "in", "self", ".", "requests", ":", "raise", "ValueError", "(", "\"A watch request is already scheduled for alias %s\"", "%", "alias", ")", "self", ".", "requests", "[", "alias", "]", "=", "(", "path", ",", "flags", ")", "if", "self", ".", "_fd", "is", "not", "None", ":", "# We've started, register the watch immediately.", "self", ".", "_setup_watch", "(", "alias", ",", "path", ",", "flags", ")" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
Watcher.unwatch
Stop watching a given rule.
aionotify/base.py
def unwatch(self, alias): """Stop watching a given rule.""" if alias not in self.descriptors: raise ValueError("Unknown watch alias %s; current set is %r" % (alias, list(self.descriptors.keys()))) wd = self.descriptors[alias] errno = LibC.inotify_rm_watch(self._fd, wd) if errno != 0: raise IOError("Failed to close watcher %d: errno=%d" % (wd, errno)) del self.descriptors[alias] del self.requests[alias] del self.aliases[wd]
def unwatch(self, alias): """Stop watching a given rule.""" if alias not in self.descriptors: raise ValueError("Unknown watch alias %s; current set is %r" % (alias, list(self.descriptors.keys()))) wd = self.descriptors[alias] errno = LibC.inotify_rm_watch(self._fd, wd) if errno != 0: raise IOError("Failed to close watcher %d: errno=%d" % (wd, errno)) del self.descriptors[alias] del self.requests[alias] del self.aliases[wd]
[ "Stop", "watching", "a", "given", "rule", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/base.py#L61-L71
[ "def", "unwatch", "(", "self", ",", "alias", ")", ":", "if", "alias", "not", "in", "self", ".", "descriptors", ":", "raise", "ValueError", "(", "\"Unknown watch alias %s; current set is %r\"", "%", "(", "alias", ",", "list", "(", "self", ".", "descriptors", ".", "keys", "(", ")", ")", ")", ")", "wd", "=", "self", ".", "descriptors", "[", "alias", "]", "errno", "=", "LibC", ".", "inotify_rm_watch", "(", "self", ".", "_fd", ",", "wd", ")", "if", "errno", "!=", "0", ":", "raise", "IOError", "(", "\"Failed to close watcher %d: errno=%d\"", "%", "(", "wd", ",", "errno", ")", ")", "del", "self", ".", "descriptors", "[", "alias", "]", "del", "self", ".", "requests", "[", "alias", "]", "del", "self", ".", "aliases", "[", "wd", "]" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
Watcher._setup_watch
Actual rule setup.
aionotify/base.py
def _setup_watch(self, alias, path, flags): """Actual rule setup.""" assert alias not in self.descriptors, "Registering alias %s twice!" % alias wd = LibC.inotify_add_watch(self._fd, path, flags) if wd < 0: raise IOError("Error setting up watch on %s with flags %s: wd=%s" % ( path, flags, wd)) self.descriptors[alias] = wd self.aliases[wd] = alias
def _setup_watch(self, alias, path, flags): """Actual rule setup.""" assert alias not in self.descriptors, "Registering alias %s twice!" % alias wd = LibC.inotify_add_watch(self._fd, path, flags) if wd < 0: raise IOError("Error setting up watch on %s with flags %s: wd=%s" % ( path, flags, wd)) self.descriptors[alias] = wd self.aliases[wd] = alias
[ "Actual", "rule", "setup", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/base.py#L73-L81
[ "def", "_setup_watch", "(", "self", ",", "alias", ",", "path", ",", "flags", ")", ":", "assert", "alias", "not", "in", "self", ".", "descriptors", ",", "\"Registering alias %s twice!\"", "%", "alias", "wd", "=", "LibC", ".", "inotify_add_watch", "(", "self", ".", "_fd", ",", "path", ",", "flags", ")", "if", "wd", "<", "0", ":", "raise", "IOError", "(", "\"Error setting up watch on %s with flags %s: wd=%s\"", "%", "(", "path", ",", "flags", ",", "wd", ")", ")", "self", ".", "descriptors", "[", "alias", "]", "=", "wd", "self", ".", "aliases", "[", "wd", "]", "=", "alias" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
Watcher.setup
Start the watcher, registering new watches if any.
aionotify/base.py
def setup(self, loop): """Start the watcher, registering new watches if any.""" self._loop = loop self._fd = LibC.inotify_init() for alias, (path, flags) in self.requests.items(): self._setup_watch(alias, path, flags) # We pass ownership of the fd to the transport; it will close it. self._stream, self._transport = yield from aioutils.stream_from_fd(self._fd, loop)
def setup(self, loop): """Start the watcher, registering new watches if any.""" self._loop = loop self._fd = LibC.inotify_init() for alias, (path, flags) in self.requests.items(): self._setup_watch(alias, path, flags) # We pass ownership of the fd to the transport; it will close it. self._stream, self._transport = yield from aioutils.stream_from_fd(self._fd, loop)
[ "Start", "the", "watcher", "registering", "new", "watches", "if", "any", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/base.py#L84-L93
[ "def", "setup", "(", "self", ",", "loop", ")", ":", "self", ".", "_loop", "=", "loop", "self", ".", "_fd", "=", "LibC", ".", "inotify_init", "(", ")", "for", "alias", ",", "(", "path", ",", "flags", ")", "in", "self", ".", "requests", ".", "items", "(", ")", ":", "self", ".", "_setup_watch", "(", "alias", ",", "path", ",", "flags", ")", "# We pass ownership of the fd to the transport; it will close it.", "self", ".", "_stream", ",", "self", ".", "_transport", "=", "yield", "from", "aioutils", ".", "stream_from_fd", "(", "self", ".", "_fd", ",", "loop", ")" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
test
Watcher.get_event
Fetch an event. This coroutine will swallow events for removed watches.
aionotify/base.py
def get_event(self): """Fetch an event. This coroutine will swallow events for removed watches. """ while True: prefix = yield from self._stream.readexactly(PREFIX.size) if prefix == b'': # We got closed, return None. return wd, flags, cookie, length = PREFIX.unpack(prefix) path = yield from self._stream.readexactly(length) # All async performed, time to look at the event's content. if wd not in self.aliases: # Event for a removed watch, skip it. continue decoded_path = struct.unpack('%ds' % length, path)[0].rstrip(b'\x00').decode('utf-8') return Event( flags=flags, cookie=cookie, name=decoded_path, alias=self.aliases[wd], )
def get_event(self): """Fetch an event. This coroutine will swallow events for removed watches. """ while True: prefix = yield from self._stream.readexactly(PREFIX.size) if prefix == b'': # We got closed, return None. return wd, flags, cookie, length = PREFIX.unpack(prefix) path = yield from self._stream.readexactly(length) # All async performed, time to look at the event's content. if wd not in self.aliases: # Event for a removed watch, skip it. continue decoded_path = struct.unpack('%ds' % length, path)[0].rstrip(b'\x00').decode('utf-8') return Event( flags=flags, cookie=cookie, name=decoded_path, alias=self.aliases[wd], )
[ "Fetch", "an", "event", "." ]
rbarrois/aionotify
python
https://github.com/rbarrois/aionotify/blob/6cfa35b26a2660f77f29a92d3efb7d1dde685b43/aionotify/base.py#L109-L133
[ "def", "get_event", "(", "self", ")", ":", "while", "True", ":", "prefix", "=", "yield", "from", "self", ".", "_stream", ".", "readexactly", "(", "PREFIX", ".", "size", ")", "if", "prefix", "==", "b''", ":", "# We got closed, return None.", "return", "wd", ",", "flags", ",", "cookie", ",", "length", "=", "PREFIX", ".", "unpack", "(", "prefix", ")", "path", "=", "yield", "from", "self", ".", "_stream", ".", "readexactly", "(", "length", ")", "# All async performed, time to look at the event's content.", "if", "wd", "not", "in", "self", ".", "aliases", ":", "# Event for a removed watch, skip it.", "continue", "decoded_path", "=", "struct", ".", "unpack", "(", "'%ds'", "%", "length", ",", "path", ")", "[", "0", "]", ".", "rstrip", "(", "b'\\x00'", ")", ".", "decode", "(", "'utf-8'", ")", "return", "Event", "(", "flags", "=", "flags", ",", "cookie", "=", "cookie", ",", "name", "=", "decoded_path", ",", "alias", "=", "self", ".", "aliases", "[", "wd", "]", ",", ")" ]
6cfa35b26a2660f77f29a92d3efb7d1dde685b43
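Putting the watcher pieces together, following the pattern from the project's README (the watched path, flags and event count are arbitrary choices, and watcher.close() is assumed from that same README):

import asyncio
import aionotify

watcher = aionotify.Watcher()
watcher.watch(alias='tmp', path='/tmp',
              flags=aionotify.Flags.CREATE | aionotify.Flags.DELETE)

loop = asyncio.get_event_loop()

async def work():
    await watcher.setup(loop)
    for _ in range(4):
        event = await watcher.get_event()  # Event(flags, cookie, name, alias)
        print(event)
    watcher.close()

loop.run_until_complete(work())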
test
Message.finish
Respond to ``nsqd`` that you've processed this message successfully (or would like to silently discard it).
nsq/message.py
def finish(self): """ Respond to ``nsqd`` that you've processed this message successfully (or would like to silently discard it). """ assert not self._has_responded self._has_responded = True self.trigger(event.FINISH, message=self)
def finish(self): """ Respond to ``nsqd`` that you've processed this message successfully (or would like to silently discard it). """ assert not self._has_responded self._has_responded = True self.trigger(event.FINISH, message=self)
[ "Respond", "to", "nsqd", "that", "you", "ve", "processed", "this", "message", "successfully", "(", "or", "would", "like", "to", "silently", "discard", "it", ")", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/message.py#L82-L89
[ "def", "finish", "(", "self", ")", ":", "assert", "not", "self", ".", "_has_responded", "self", ".", "_has_responded", "=", "True", "self", ".", "trigger", "(", "event", ".", "FINISH", ",", "message", "=", "self", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
Message.requeue
Respond to ``nsqd`` that you've failed to process this message successfully (and would like it to be requeued). :param backoff: whether or not :class:`nsq.Reader` should apply backoff handling :type backoff: bool :param delay: the amount of time (in seconds) that this message should be delayed; if -1 it will be calculated based on # of attempts :type delay: int
nsq/message.py
def requeue(self, **kwargs):
    """
    Respond to ``nsqd`` that you've failed to process this message
    successfully (and would like it to be requeued).

    :param backoff: whether or not :class:`nsq.Reader` should apply backoff handling
    :type backoff: bool

    :param delay: the amount of time (in seconds) that this message should be delayed;
        if -1 it will be calculated based on # of attempts
    :type delay: int
    """
    # convert delay to time_ms for fixing
    # https://github.com/nsqio/pynsq/issues/71 and maintaining
    # backward compatibility
    if 'delay' in kwargs and isinstance(kwargs['delay'], int) and kwargs['delay'] >= 0:
        kwargs['time_ms'] = kwargs['delay'] * 1000

    assert not self._has_responded
    self._has_responded = True
    self.trigger(event.REQUEUE, message=self, **kwargs)
def requeue(self, **kwargs):
    """
    Respond to ``nsqd`` that you've failed to process this message
    successfully (and would like it to be requeued).

    :param backoff: whether or not :class:`nsq.Reader` should apply backoff handling
    :type backoff: bool

    :param delay: the amount of time (in seconds) that this message should be delayed;
        if -1 it will be calculated based on # of attempts
    :type delay: int
    """
    # convert delay to time_ms for fixing
    # https://github.com/nsqio/pynsq/issues/71 and maintaining
    # backward compatibility
    if 'delay' in kwargs and isinstance(kwargs['delay'], int) and kwargs['delay'] >= 0:
        kwargs['time_ms'] = kwargs['delay'] * 1000

    assert not self._has_responded
    self._has_responded = True
    self.trigger(event.REQUEUE, message=self, **kwargs)
[ "Respond", "to", "nsqd", "that", "you", "ve", "failed", "to", "process", "this", "message", "successfully", "(", "and", "would", "like", "it", "to", "be", "requeued", ")", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/message.py#L91-L112
[ "def", "requeue", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# convert delay to time_ms for fixing", "# https://github.com/nsqio/pynsq/issues/71 and maintaining", "# backward compatibility", "if", "'delay'", "in", "kwargs", "and", "isinstance", "(", "kwargs", "[", "'delay'", "]", ",", "int", ")", "and", "kwargs", "[", "'delay'", "]", ">=", "0", ":", "kwargs", "[", "'time_ms'", "]", "=", "kwargs", "[", "'delay'", "]", "*", "1000", "assert", "not", "self", ".", "_has_responded", "self", ".", "_has_responded", "=", "True", "self", ".", "trigger", "(", "event", ".", "REQUEUE", ",", "message", "=", "self", ",", "*", "*", "kwargs", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
Message.touch
Respond to ``nsqd`` that you need more time to process the message.
nsq/message.py
def touch(self): """ Respond to ``nsqd`` that you need more time to process the message. """ assert not self._has_responded self.trigger(event.TOUCH, message=self)
def touch(self): """ Respond to ``nsqd`` that you need more time to process the message. """ assert not self._has_responded self.trigger(event.TOUCH, message=self)
[ "Respond", "to", "nsqd", "that", "you", "need", "more", "time", "to", "process", "the", "message", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/message.py#L114-L119
[ "def", "touch", "(", "self", ")", ":", "assert", "not", "self", ".", "_has_responded", "self", ".", "trigger", "(", "event", ".", "TOUCH", ",", "message", "=", "self", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
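Taken together, finish(), requeue() and touch() are the three ways a handler can respond to an in-flight message: finish() and requeue() may each be sent at most once, while touch() only requires that no final response has gone out yet (the assertions above enforce this). A minimal handler sketch, assuming pynsq's message.enable_async() to defer the response; do_work and the exception names are hypothetical placeholders:

def handler(message):
    message.enable_async()           # respond explicitly instead of via return value
    try:
        do_work(message.body)        # hypothetical processing step
    except TransientError:           # hypothetical retryable failure
        message.requeue(delay=30)    # retry in ~30s (sent to nsqd as time_ms=30000)
    except NeedMoreTime:             # hypothetical slow path
        message.touch()              # reset the server-side timeout and keep working
    else:
        message.finish()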
test
run
Starts any instantiated :class:`nsq.Reader` or :class:`nsq.Writer`
nsq/__init__.py
def run(): """ Starts any instantiated :class:`nsq.Reader` or :class:`nsq.Writer` """ signal.signal(signal.SIGTERM, _handle_term_signal) signal.signal(signal.SIGINT, _handle_term_signal) tornado.ioloop.IOLoop.instance().start()
def run(): """ Starts any instantiated :class:`nsq.Reader` or :class:`nsq.Writer` """ signal.signal(signal.SIGTERM, _handle_term_signal) signal.signal(signal.SIGINT, _handle_term_signal) tornado.ioloop.IOLoop.instance().start()
[ "Starts", "any", "instantiated", ":", "class", ":", "nsq", ".", "Reader", "or", ":", "class", ":", "nsq", ".", "Writer" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/__init__.py#L42-L48
[ "def", "run", "(", ")", ":", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "_handle_term_signal", ")", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "_handle_term_signal", ")", "tornado", ".", "ioloop", ".", "IOLoop", ".", "instance", "(", ")", ".", "start", "(", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
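run() is the blocking entry point: construct your readers and writers first, then hand control to the IOLoop. A minimal sketch, assuming a local nsqlookupd on its default HTTP port and the pynsq convention that a truthy return from the handler finishes the message:

import nsq

def handler(message):
    print(message.body)
    return True  # truthy return finishes the message

reader = nsq.Reader(topic='test', channel='ch',
                    lookupd_http_addresses=['http://127.0.0.1:4161'],
                    message_handler=handler)
nsq.run()  # installs the SIGTERM/SIGINT handlers above and starts the IOLoop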
test
BackoffTimer.success
Update the timer to reflect a successful call
nsq/backoff_timer.py
def success(self): """Update the timer to reflect a successfull call""" if self.interval == 0.0: return self.short_interval -= self.short_unit self.long_interval -= self.long_unit self.short_interval = max(self.short_interval, Decimal(0)) self.long_interval = max(self.long_interval, Decimal(0)) self.update_interval()
def success(self): """Update the timer to reflect a successfull call""" if self.interval == 0.0: return self.short_interval -= self.short_unit self.long_interval -= self.long_unit self.short_interval = max(self.short_interval, Decimal(0)) self.long_interval = max(self.long_interval, Decimal(0)) self.update_interval()
[ "Update", "the", "timer", "to", "reflect", "a", "successfull", "call" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/backoff_timer.py#L32-L40
[ "def", "success", "(", "self", ")", ":", "if", "self", ".", "interval", "==", "0.0", ":", "return", "self", ".", "short_interval", "-=", "self", ".", "short_unit", "self", ".", "long_interval", "-=", "self", ".", "long_unit", "self", ".", "short_interval", "=", "max", "(", "self", ".", "short_interval", ",", "Decimal", "(", "0", ")", ")", "self", ".", "long_interval", "=", "max", "(", "self", ".", "long_interval", ",", "Decimal", "(", "0", ")", ")", "self", ".", "update_interval", "(", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
BackoffTimer.failure
Update the timer to reflect a failed call
nsq/backoff_timer.py
def failure(self): """Update the timer to reflect a failed call""" self.short_interval += self.short_unit self.long_interval += self.long_unit self.short_interval = min(self.short_interval, self.max_short_timer) self.long_interval = min(self.long_interval, self.max_long_timer) self.update_interval()
def failure(self): """Update the timer to reflect a failed call""" self.short_interval += self.short_unit self.long_interval += self.long_unit self.short_interval = min(self.short_interval, self.max_short_timer) self.long_interval = min(self.long_interval, self.max_long_timer) self.update_interval()
[ "Update", "the", "timer", "to", "reflect", "a", "failed", "call" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/backoff_timer.py#L42-L48
[ "def", "failure", "(", "self", ")", ":", "self", ".", "short_interval", "+=", "self", ".", "short_unit", "self", ".", "long_interval", "+=", "self", ".", "long_unit", "self", ".", "short_interval", "=", "min", "(", "self", ".", "short_interval", ",", "self", ".", "max_short_timer", ")", "self", ".", "long_interval", "=", "min", "(", "self", ".", "long_interval", ",", "self", ".", "max_long_timer", ")", "self", ".", "update_interval", "(", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
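success() and failure() move the interval in opposite directions, clamped at zero and at the configured maxima. A sketch of the interplay; the constructor signature (min_interval, max_interval, ...) and the get_interval() accessor are assumptions about the parts of the class not shown here:

from nsq.backoff_timer import BackoffTimer

t = BackoffTimer(0.1, 120)   # assumed signature: (min_interval, max_interval, ...)
for _ in range(3):
    t.failure()              # each failure pushes both interval components up
print(t.get_interval())      # assumed accessor; somewhere between 0.1 and 120
t.success()                  # one success walks the interval partially back down
print(t.get_interval())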
test
_utf8_params
encode a dictionary of URL parameters (including iterables) as utf-8
nsq/reader.py
def _utf8_params(params): """encode a dictionary of URL parameters (including iterables) as utf-8""" assert isinstance(params, dict) encoded_params = [] for k, v in params.items(): if v is None: continue if isinstance(v, integer_types + (float,)): v = str(v) if isinstance(v, (list, tuple)): v = [to_bytes(x) for x in v] else: v = to_bytes(v) encoded_params.append((k, v)) return dict(encoded_params)
def _utf8_params(params): """encode a dictionary of URL parameters (including iterables) as utf-8""" assert isinstance(params, dict) encoded_params = [] for k, v in params.items(): if v is None: continue if isinstance(v, integer_types + (float,)): v = str(v) if isinstance(v, (list, tuple)): v = [to_bytes(x) for x in v] else: v = to_bytes(v) encoded_params.append((k, v)) return dict(encoded_params)
[ "encode", "a", "dictionary", "of", "URL", "parameters", "(", "including", "iterables", ")", "as", "utf", "-", "8" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/reader.py#L754-L768
[ "def", "_utf8_params", "(", "params", ")", ":", "assert", "isinstance", "(", "params", ",", "dict", ")", "encoded_params", "=", "[", "]", "for", "k", ",", "v", "in", "params", ".", "items", "(", ")", ":", "if", "v", "is", "None", ":", "continue", "if", "isinstance", "(", "v", ",", "integer_types", "+", "(", "float", ",", ")", ")", ":", "v", "=", "str", "(", "v", ")", "if", "isinstance", "(", "v", ",", "(", "list", ",", "tuple", ")", ")", ":", "v", "=", "[", "to_bytes", "(", "x", ")", "for", "x", "in", "v", "]", "else", ":", "v", "=", "to_bytes", "(", "v", ")", "encoded_params", ".", "append", "(", "(", "k", ",", "v", ")", ")", "return", "dict", "(", "encoded_params", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
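The helper is module-private, so the call below is for illustration only: None values are dropped, numbers are stringified, and every value (including each element of a list or tuple) ends up as bytes:

_utf8_params({'topic': u'news', 'count': 5, 'tags': ['a', u'b'], 'skip': None})
# -> {'topic': b'news', 'count': b'5', 'tags': [b'a', b'b']}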
test
Reader.close
Closes all connections and stops all periodic callbacks
nsq/reader.py
def close(self): """ Closes all connections stops all periodic callbacks """ for conn in self.conns.values(): conn.close() self.redist_periodic.stop() if self.query_periodic is not None: self.query_periodic.stop()
def close(self): """ Closes all connections stops all periodic callbacks """ for conn in self.conns.values(): conn.close() self.redist_periodic.stop() if self.query_periodic is not None: self.query_periodic.stop()
[ "Closes", "all", "connections", "stops", "all", "periodic", "callbacks" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/reader.py#L258-L267
[ "def", "close", "(", "self", ")", ":", "for", "conn", "in", "self", ".", "conns", ".", "values", "(", ")", ":", "conn", ".", "close", "(", ")", "self", ".", "redist_periodic", ".", "stop", "(", ")", "if", "self", ".", "query_periodic", "is", "not", "None", ":", "self", ".", "query_periodic", ".", "stop", "(", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
Reader.is_starved
Used to identify when buffered messages should be processed and responded to. When max_in_flight > 1 and you're batching messages together to perform work, it isn't possible to just compare the len of your list of buffered messages against your configured max_in_flight (because max_in_flight may not be evenly divisible by the number of producers you're connected to, i.e. you might never get that many messages... it's a *max*). Example:: def message_handler(self, nsq_msg, reader): # buffer messages if reader.is_starved(): # perform work reader = nsq.Reader(...) reader.set_message_handler(functools.partial(message_handler, reader=reader)) nsq.run()
nsq/reader.py
def is_starved(self): """ Used to identify when buffered messages should be processed and responded to. When max_in_flight > 1 and you're batching messages together to perform work, it isn't possible to just compare the len of your list of buffered messages against your configured max_in_flight (because max_in_flight may not be evenly divisible by the number of producers you're connected to, i.e. you might never get that many messages... it's a *max*). Example:: def message_handler(self, nsq_msg, reader): # buffer messages if reader.is_starved(): # perform work reader = nsq.Reader(...) reader.set_message_handler(functools.partial(message_handler, reader=reader)) nsq.run() """ for conn in itervalues(self.conns): if conn.in_flight > 0 and conn.in_flight >= (conn.last_rdy * 0.85): return True return False
def is_starved(self): """ Used to identify when buffered messages should be processed and responded to. When max_in_flight > 1 and you're batching messages together to perform work, it isn't possible to just compare the len of your list of buffered messages against your configured max_in_flight (because max_in_flight may not be evenly divisible by the number of producers you're connected to, i.e. you might never get that many messages... it's a *max*). Example:: def message_handler(self, nsq_msg, reader): # buffer messages if reader.is_starved(): # perform work reader = nsq.Reader(...) reader.set_message_handler(functools.partial(message_handler, reader=reader)) nsq.run() """ for conn in itervalues(self.conns): if conn.in_flight > 0 and conn.in_flight >= (conn.last_rdy * 0.85): return True return False
[ "Used", "to", "identify", "when", "buffered", "messages", "should", "be", "processed", "and", "responded", "to", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/reader.py#L281-L305
[ "def", "is_starved", "(", "self", ")", ":", "for", "conn", "in", "itervalues", "(", "self", ".", "conns", ")", ":", "if", "conn", ".", "in_flight", ">", "0", "and", "conn", ".", "in_flight", ">=", "(", "conn", ".", "last_rdy", "*", "0.85", ")", ":", "return", "True", "return", "False" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
Reader.connect_to_nsqd
Adds a connection to ``nsqd`` at the specified address. :param host: the address to connect to :param port: the port to connect to
nsq/reader.py
def connect_to_nsqd(self, host, port): """ Adds a connection to ``nsqd`` at the specified address. :param host: the address to connect to :param port: the port to connect to """ assert isinstance(host, string_types) assert isinstance(port, int) conn = AsyncConn(host, port, **self.conn_kwargs) conn.on('identify', self._on_connection_identify) conn.on('identify_response', self._on_connection_identify_response) conn.on('auth', self._on_connection_auth) conn.on('auth_response', self._on_connection_auth_response) conn.on('error', self._on_connection_error) conn.on('close', self._on_connection_close) conn.on('ready', self._on_connection_ready) conn.on('message', self._on_message) conn.on('heartbeat', self._on_heartbeat) conn.on('backoff', functools.partial(self._on_backoff_resume, success=False)) conn.on('resume', functools.partial(self._on_backoff_resume, success=True)) conn.on('continue', functools.partial(self._on_backoff_resume, success=None)) if conn.id in self.conns: return # only attempt to re-connect once every 10s per destination # this throttles reconnects to failed endpoints now = time.time() last_connect_attempt = self.connection_attempts.get(conn.id) if last_connect_attempt and last_connect_attempt > now - 10: return self.connection_attempts[conn.id] = now logger.info('[%s:%s] connecting to nsqd', conn.id, self.name) conn.connect() return conn
def connect_to_nsqd(self, host, port): """ Adds a connection to ``nsqd`` at the specified address. :param host: the address to connect to :param port: the port to connect to """ assert isinstance(host, string_types) assert isinstance(port, int) conn = AsyncConn(host, port, **self.conn_kwargs) conn.on('identify', self._on_connection_identify) conn.on('identify_response', self._on_connection_identify_response) conn.on('auth', self._on_connection_auth) conn.on('auth_response', self._on_connection_auth_response) conn.on('error', self._on_connection_error) conn.on('close', self._on_connection_close) conn.on('ready', self._on_connection_ready) conn.on('message', self._on_message) conn.on('heartbeat', self._on_heartbeat) conn.on('backoff', functools.partial(self._on_backoff_resume, success=False)) conn.on('resume', functools.partial(self._on_backoff_resume, success=True)) conn.on('continue', functools.partial(self._on_backoff_resume, success=None)) if conn.id in self.conns: return # only attempt to re-connect once every 10s per destination # this throttles reconnects to failed endpoints now = time.time() last_connect_attempt = self.connection_attempts.get(conn.id) if last_connect_attempt and last_connect_attempt > now - 10: return self.connection_attempts[conn.id] = now logger.info('[%s:%s] connecting to nsqd', conn.id, self.name) conn.connect() return conn
[ "Adds", "a", "connection", "to", "nsqd", "at", "the", "specified", "address", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/reader.py#L446-L484
[ "def", "connect_to_nsqd", "(", "self", ",", "host", ",", "port", ")", ":", "assert", "isinstance", "(", "host", ",", "string_types", ")", "assert", "isinstance", "(", "port", ",", "int", ")", "conn", "=", "AsyncConn", "(", "host", ",", "port", ",", "*", "*", "self", ".", "conn_kwargs", ")", "conn", ".", "on", "(", "'identify'", ",", "self", ".", "_on_connection_identify", ")", "conn", ".", "on", "(", "'identify_response'", ",", "self", ".", "_on_connection_identify_response", ")", "conn", ".", "on", "(", "'auth'", ",", "self", ".", "_on_connection_auth", ")", "conn", ".", "on", "(", "'auth_response'", ",", "self", ".", "_on_connection_auth_response", ")", "conn", ".", "on", "(", "'error'", ",", "self", ".", "_on_connection_error", ")", "conn", ".", "on", "(", "'close'", ",", "self", ".", "_on_connection_close", ")", "conn", ".", "on", "(", "'ready'", ",", "self", ".", "_on_connection_ready", ")", "conn", ".", "on", "(", "'message'", ",", "self", ".", "_on_message", ")", "conn", ".", "on", "(", "'heartbeat'", ",", "self", ".", "_on_heartbeat", ")", "conn", ".", "on", "(", "'backoff'", ",", "functools", ".", "partial", "(", "self", ".", "_on_backoff_resume", ",", "success", "=", "False", ")", ")", "conn", ".", "on", "(", "'resume'", ",", "functools", ".", "partial", "(", "self", ".", "_on_backoff_resume", ",", "success", "=", "True", ")", ")", "conn", ".", "on", "(", "'continue'", ",", "functools", ".", "partial", "(", "self", ".", "_on_backoff_resume", ",", "success", "=", "None", ")", ")", "if", "conn", ".", "id", "in", "self", ".", "conns", ":", "return", "# only attempt to re-connect once every 10s per destination", "# this throttles reconnects to failed endpoints", "now", "=", "time", ".", "time", "(", ")", "last_connect_attempt", "=", "self", ".", "connection_attempts", ".", "get", "(", "conn", ".", "id", ")", "if", "last_connect_attempt", "and", "last_connect_attempt", ">", "now", "-", "10", ":", "return", "self", ".", "connection_attempts", "[", "conn", ".", "id", "]", "=", "now", "logger", ".", "info", "(", "'[%s:%s] connecting to nsqd'", ",", "conn", ".", "id", ",", "self", ".", "name", ")", "conn", ".", "connect", "(", ")", "return", "conn" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
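Callers rarely invoke this directly; the Reader calls it for every configured nsqd_tcp_address and for every producer returned by lookupd. A sketch of the direct form, with reader standing in for an already-constructed nsq.Reader:

conn = reader.connect_to_nsqd('127.0.0.1', 4150)
# returns None if this (host, port) is already connected, or if a connect was
# attempted within the last 10 seconds; otherwise it returns the new AsyncConn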
test
Reader.query_lookupd
Trigger a query of the configured ``nsq_lookupd_http_addresses``.
nsq/reader.py
def query_lookupd(self): """ Trigger a query of the configured ``nsq_lookupd_http_addresses``. """ endpoint = self.lookupd_http_addresses[self.lookupd_query_index] self.lookupd_query_index = (self.lookupd_query_index + 1) % len(self.lookupd_http_addresses) # urlsplit() is faulty if scheme not present if '://' not in endpoint: endpoint = 'http://' + endpoint scheme, netloc, path, query, fragment = urlparse.urlsplit(endpoint) if not path or path == "/": path = "/lookup" params = parse_qs(query) params['topic'] = self.topic query = urlencode(_utf8_params(params), doseq=1) lookupd_url = urlparse.urlunsplit((scheme, netloc, path, query, fragment)) req = tornado.httpclient.HTTPRequest( lookupd_url, method='GET', headers={'Accept': 'application/vnd.nsq; version=1.0'}, connect_timeout=self.lookupd_connect_timeout, request_timeout=self.lookupd_request_timeout) callback = functools.partial(self._finish_query_lookupd, lookupd_url=lookupd_url) self.http_client.fetch(req, callback=callback)
def query_lookupd(self): """ Trigger a query of the configured ``nsq_lookupd_http_addresses``. """ endpoint = self.lookupd_http_addresses[self.lookupd_query_index] self.lookupd_query_index = (self.lookupd_query_index + 1) % len(self.lookupd_http_addresses) # urlsplit() is faulty if scheme not present if '://' not in endpoint: endpoint = 'http://' + endpoint scheme, netloc, path, query, fragment = urlparse.urlsplit(endpoint) if not path or path == "/": path = "/lookup" params = parse_qs(query) params['topic'] = self.topic query = urlencode(_utf8_params(params), doseq=1) lookupd_url = urlparse.urlunsplit((scheme, netloc, path, query, fragment)) req = tornado.httpclient.HTTPRequest( lookupd_url, method='GET', headers={'Accept': 'application/vnd.nsq; version=1.0'}, connect_timeout=self.lookupd_connect_timeout, request_timeout=self.lookupd_request_timeout) callback = functools.partial(self._finish_query_lookupd, lookupd_url=lookupd_url) self.http_client.fetch(req, callback=callback)
[ "Trigger", "a", "query", "of", "the", "configured", "nsq_lookupd_http_addresses", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/reader.py#L548-L575
[ "def", "query_lookupd", "(", "self", ")", ":", "endpoint", "=", "self", ".", "lookupd_http_addresses", "[", "self", ".", "lookupd_query_index", "]", "self", ".", "lookupd_query_index", "=", "(", "self", ".", "lookupd_query_index", "+", "1", ")", "%", "len", "(", "self", ".", "lookupd_http_addresses", ")", "# urlsplit() is faulty if scheme not present", "if", "'://'", "not", "in", "endpoint", ":", "endpoint", "=", "'http://'", "+", "endpoint", "scheme", ",", "netloc", ",", "path", ",", "query", ",", "fragment", "=", "urlparse", ".", "urlsplit", "(", "endpoint", ")", "if", "not", "path", "or", "path", "==", "\"/\"", ":", "path", "=", "\"/lookup\"", "params", "=", "parse_qs", "(", "query", ")", "params", "[", "'topic'", "]", "=", "self", ".", "topic", "query", "=", "urlencode", "(", "_utf8_params", "(", "params", ")", ",", "doseq", "=", "1", ")", "lookupd_url", "=", "urlparse", ".", "urlunsplit", "(", "(", "scheme", ",", "netloc", ",", "path", ",", "query", ",", "fragment", ")", ")", "req", "=", "tornado", ".", "httpclient", ".", "HTTPRequest", "(", "lookupd_url", ",", "method", "=", "'GET'", ",", "headers", "=", "{", "'Accept'", ":", "'application/vnd.nsq; version=1.0'", "}", ",", "connect_timeout", "=", "self", ".", "lookupd_connect_timeout", ",", "request_timeout", "=", "self", ".", "lookupd_request_timeout", ")", "callback", "=", "functools", ".", "partial", "(", "self", ".", "_finish_query_lookupd", ",", "lookupd_url", "=", "lookupd_url", ")", "self", ".", "http_client", ".", "fetch", "(", "req", ",", "callback", "=", "callback", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
Reader.set_max_in_flight
Dynamically adjust the reader max_in_flight. Set to 0 to immediately disable a Reader
nsq/reader.py
def set_max_in_flight(self, max_in_flight): """Dynamically adjust the reader max_in_flight. Set to 0 to immediately disable a Reader""" assert isinstance(max_in_flight, int) self.max_in_flight = max_in_flight if max_in_flight == 0: # set RDY 0 to all connections for conn in itervalues(self.conns): if conn.rdy > 0: logger.debug('[%s:%s] rdy: %d -> 0', conn.id, self.name, conn.rdy) self._send_rdy(conn, 0) self.total_rdy = 0 else: self.need_rdy_redistributed = True self._redistribute_rdy_state()
def set_max_in_flight(self, max_in_flight): """Dynamically adjust the reader max_in_flight. Set to 0 to immediately disable a Reader""" assert isinstance(max_in_flight, int) self.max_in_flight = max_in_flight if max_in_flight == 0: # set RDY 0 to all connections for conn in itervalues(self.conns): if conn.rdy > 0: logger.debug('[%s:%s] rdy: %d -> 0', conn.id, self.name, conn.rdy) self._send_rdy(conn, 0) self.total_rdy = 0 else: self.need_rdy_redistributed = True self._redistribute_rdy_state()
[ "Dynamically", "adjust", "the", "reader", "max_in_flight", ".", "Set", "to", "0", "to", "immediately", "disable", "a", "Reader" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/reader.py#L596-L610
[ "def", "set_max_in_flight", "(", "self", ",", "max_in_flight", ")", ":", "assert", "isinstance", "(", "max_in_flight", ",", "int", ")", "self", ".", "max_in_flight", "=", "max_in_flight", "if", "max_in_flight", "==", "0", ":", "# set RDY 0 to all connections", "for", "conn", "in", "itervalues", "(", "self", ".", "conns", ")", ":", "if", "conn", ".", "rdy", ">", "0", ":", "logger", ".", "debug", "(", "'[%s:%s] rdy: %d -> 0'", ",", "conn", ".", "id", ",", "self", ".", "name", ",", "conn", ".", "rdy", ")", "self", ".", "_send_rdy", "(", "conn", ",", "0", ")", "self", ".", "total_rdy", "=", "0", "else", ":", "self", ".", "need_rdy_redistributed", "=", "True", "self", ".", "_redistribute_rdy_state", "(", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
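This is the natural hook for pausing and resuming consumption at runtime, e.g. while a downstream dependency recovers. A sketch, with reader as before:

reader.set_max_in_flight(0)     # RDY 0 on every connection: fully paused
# ... wait for downstream to recover ...
reader.set_max_in_flight(100)   # resume; RDY is redistributed across connections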
test
Reader.giving_up
Called when a message has been received where ``msg.attempts > max_tries`` This is useful to subclass and override to perform a task (such as writing to disk, etc.) :param message: the :class:`nsq.Message` received
nsq/reader.py
def giving_up(self, message): """ Called when a message has been received where ``msg.attempts > max_tries`` This is useful to subclass and override to perform a task (such as writing to disk, etc.) :param message: the :class:`nsq.Message` received """ logger.warning('[%s] giving up on message %s after %d tries (max:%d) %r', self.name, message.id, message.attempts, self.max_tries, message.body)
def giving_up(self, message): """ Called when a message has been received where ``msg.attempts > max_tries`` This is useful to subclass and override to perform a task (such as writing to disk, etc.) :param message: the :class:`nsq.Message` received """ logger.warning('[%s] giving up on message %s after %d tries (max:%d) %r', self.name, message.id, message.attempts, self.max_tries, message.body)
[ "Called", "when", "a", "message", "has", "been", "received", "where", "msg", ".", "attempts", ">", "max_tries" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/reader.py#L706-L715
[ "def", "giving_up", "(", "self", ",", "message", ")", ":", "logger", ".", "warning", "(", "'[%s] giving up on message %s after %d tries (max:%d) %r'", ",", "self", ".", "name", ",", "message", ".", "id", ",", "message", ".", "attempts", ",", "self", ".", "max_tries", ",", "message", ".", "body", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
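The default implementation only logs, so the message is effectively dropped once max_tries is exceeded. A sketch of overriding it for dead-letter handling; the file-based persistence is a hypothetical stand-in for whatever your application needs:

import nsq

class DurableReader(nsq.Reader):
    def giving_up(self, message):
        # hypothetical dead-letter step: keep the body before the message is dropped
        with open('dead_letters.log', 'ab') as f:
            f.write(message.body + b'\n')
        super(DurableReader, self).giving_up(message)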
test
EventedMixin.on
Listen for the named event with the specified callback. :param name: the name of the event :type name: string :param callback: the callback to execute when the event is triggered :type callback: callable
nsq/event.py
def on(self, name, callback): """ Listen for the named event with the specified callback. :param name: the name of the event :type name: string :param callback: the callback to execute when the event is triggered :type callback: callable """ assert callable(callback), 'callback is not callable' if callback in self.__listeners[name]: raise DuplicateListenerError self.__listeners[name].append(callback)
def on(self, name, callback): """ Listen for the named event with the specified callback. :param name: the name of the event :type name: string :param callback: the callback to execute when the event is triggered :type callback: callable """ assert callable(callback), 'callback is not callable' if callback in self.__listeners[name]: raise DuplicateListenerError self.__listeners[name].append(callback)
[ "Listen", "for", "the", "named", "event", "with", "the", "specified", "callback", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/event.py#L44-L57
[ "def", "on", "(", "self", ",", "name", ",", "callback", ")", ":", "assert", "callable", "(", "callback", ")", ",", "'callback is not callable'", "if", "callback", "in", "self", ".", "__listeners", "[", "name", "]", ":", "raise", "DuplicateListenerError", "self", ".", "__listeners", "[", "name", "]", ".", "append", "(", "callback", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
EventedMixin.off
Stop listening for the named event via the specified callback. :param name: the name of the event :type name: string :param callback: the callback that was originally used :type callback: callable
nsq/event.py
def off(self, name, callback): """ Stop listening for the named event via the specified callback. :param name: the name of the event :type name: string :param callback: the callback that was originally used :type callback: callable """ if callback not in self.__listeners[name]: raise InvalidListenerError self.__listeners[name].remove(callback)
def off(self, name, callback): """ Stop listening for the named event via the specified callback. :param name: the name of the event :type name: string :param callback: the callback that was originally used :type callback: callable """ if callback not in self.__listeners[name]: raise InvalidListenerError self.__listeners[name].remove(callback)
[ "Stop", "listening", "for", "the", "named", "event", "via", "the", "specified", "callback", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/event.py#L59-L71
[ "def", "off", "(", "self", ",", "name", ",", "callback", ")", ":", "if", "callback", "not", "in", "self", ".", "__listeners", "[", "name", "]", ":", "raise", "InvalidListenerError", "self", ".", "__listeners", "[", "name", "]", ".", "remove", "(", "callback", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
EventedMixin.trigger
Execute the callbacks for the listeners on the specified event with the supplied arguments. All extra arguments are passed through to each callback. :param name: the name of the event :type name: string
nsq/event.py
def trigger(self, name, *args, **kwargs): """ Execute the callbacks for the listeners on the specified event with the supplied arguments. All extra arguments are passed through to each callback. :param name: the name of the event :type name: string """ for ev in self.__listeners[name]: ev(*args, **kwargs)
def trigger(self, name, *args, **kwargs): """ Execute the callbacks for the listeners on the specified event with the supplied arguments. All extra arguments are passed through to each callback. :param name: the name of the event :type name: string """ for ev in self.__listeners[name]: ev(*args, **kwargs)
[ "Execute", "the", "callbacks", "for", "the", "listeners", "on", "the", "specified", "event", "with", "the", "supplied", "arguments", "." ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/event.py#L73-L84
[ "def", "trigger", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "ev", "in", "self", ".", "__listeners", "[", "name", "]", ":", "ev", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
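on(), off() and trigger() make up a small synchronous pub/sub mixin that the connection, reader and message classes all build on. A self-contained sketch with a hypothetical subclass (assuming EventedMixin initializes its listener table in __init__):

from nsq.event import EventedMixin

class Button(EventedMixin):                 # hypothetical subclass for illustration
    def press(self):
        self.trigger('press', source=self)

def on_press(source):
    print('pressed by', source)

b = Button()
b.on('press', on_press)    # registering the same callback twice raises DuplicateListenerError
b.press()                  # -> pressed by <__main__.Button object ...>
b.off('press', on_press)   # removing an unregistered callback raises InvalidListenerError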
test
Writer.pub
publish a message to nsq :param topic: nsq topic :param msg: message body (bytes) :param callback: function which takes (conn, data) (data may be nsq.Error)
nsq/writer.py
def pub(self, topic, msg, callback=None): """ publish a message to nsq :param topic: nsq topic :param msg: message body (bytes) :param callback: function which takes (conn, data) (data may be nsq.Error) """ self._pub('pub', topic, msg, callback=callback)
def pub(self, topic, msg, callback=None): """ publish a message to nsq :param topic: nsq topic :param msg: message body (bytes) :param callback: function which takes (conn, data) (data may be nsq.Error) """ self._pub('pub', topic, msg, callback=callback)
[ "publish", "a", "message", "to", "nsq" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/writer.py#L116-L124
[ "def", "pub", "(", "self", ",", "topic", ",", "msg", ",", "callback", "=", "None", ")", ":", "self", ".", "_pub", "(", "'pub'", ",", "topic", ",", "msg", ",", "callback", "=", "callback", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
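A minimal publisher sketch in the usual pynsq shape: publish on a timer once the IOLoop is running, and check the callback's data for an nsq.Error (the contract stated in the docstring above); the local nsqd address is an assumption:

import time
import nsq
import tornado.ioloop

def finish_pub(conn, data):
    if isinstance(data, nsq.Error):
        print('publish failed:', data)

def pub_message():
    writer.pub('test', time.strftime('%H:%M:%S').encode('utf-8'), callback=finish_pub)

writer = nsq.Writer(['127.0.0.1:4150'])
tornado.ioloop.PeriodicCallback(pub_message, 1000).start()
nsq.run()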
test
Writer.mpub
publish multiple messages in one command (efficiently) :param topic: nsq topic :param msg: list of message bodies (which are bytes) :param callback: function which takes (conn, data) (data may be nsq.Error)
nsq/writer.py
def mpub(self, topic, msg, callback=None): """ publish multiple messages in one command (efficiently) :param topic: nsq topic :param msg: list of message bodies (which are bytes) :param callback: function which takes (conn, data) (data may be nsq.Error) """ if isinstance(msg, bytes_types): msg = [msg] assert isinstance(msg, (list, set, tuple)) self._pub('mpub', topic, msg, callback=callback)
def mpub(self, topic, msg, callback=None): """ publish multiple messages in one command (efficiently) :param topic: nsq topic :param msg: list of message bodies (which are bytes) :param callback: function which takes (conn, data) (data may be nsq.Error) """ if isinstance(msg, bytes_types): msg = [msg] assert isinstance(msg, (list, set, tuple)) self._pub('mpub', topic, msg, callback=callback)
[ "publish", "multiple", "messages", "in", "one", "command", "(", "efficiently", ")" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/writer.py#L126-L138
[ "def", "mpub", "(", "self", ",", "topic", ",", "msg", ",", "callback", "=", "None", ")", ":", "if", "isinstance", "(", "msg", ",", "bytes_types", ")", ":", "msg", "=", "[", "msg", "]", "assert", "isinstance", "(", "msg", ",", "(", "list", ",", "set", ",", "tuple", ")", ")", "self", ".", "_pub", "(", "'mpub'", ",", "topic", ",", "msg", ",", "callback", "=", "callback", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
test
Writer.dpub
publish a single message with a delivery delay :param topic: nsq topic :param delay_ms: tell nsqd to delay delivery for this long (integer milliseconds) :param msg: message body (bytes) :param callback: function which takes (conn, data) (data may be nsq.Error)
nsq/writer.py
def dpub(self, topic, delay_ms, msg, callback=None): """ publish a single message with a delivery delay :param topic: nsq topic :param delay_ms: tell nsqd to delay delivery for this long (integer milliseconds) :param msg: message body (bytes) :param callback: function which takes (conn, data) (data may be nsq.Error) """ self._pub('dpub', topic, msg, delay_ms, callback=callback)
def dpub(self, topic, delay_ms, msg, callback=None): """ publish a single message with a delivery delay :param topic: nsq topic :param delay_ms: tell nsqd to delay delivery for this long (integer milliseconds) :param msg: message body (bytes) :param callback: function which takes (conn, data) (data may be nsq.Error) """ self._pub('dpub', topic, msg, delay_ms, callback=callback)
[ "publish", "multiple", "messages", "in", "one", "command", "(", "efficiently", ")" ]
nsqio/pynsq
python
https://github.com/nsqio/pynsq/blob/48bf62d65ea63cddaa401efb23187b95511dbc84/nsq/writer.py#L140-L149
[ "def", "dpub", "(", "self", ",", "topic", ",", "delay_ms", ",", "msg", ",", "callback", "=", "None", ")", ":", "self", ".", "_pub", "(", "'dpub'", ",", "topic", ",", "msg", ",", "delay_ms", ",", "callback", "=", "callback", ")" ]
48bf62d65ea63cddaa401efb23187b95511dbc84
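The three publish variants differ only in payload shape and timing; reusing the writer and callback from the sketch above:

writer.pub('test', b'now', callback=finish_pub)                # one message, immediately
writer.mpub('test', [b'a', b'b', b'c'], callback=finish_pub)   # several messages, one command
writer.dpub('test', 5000, b'later', callback=finish_pub)       # one message, delayed 5000 ms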
test
BinaryClassifier.score_function
Score function to calculate score
FukuML/SupportVectorMachine.py
def score_function(self, x, W): # need refactor ''' Score function to calculate score ''' if (self.svm_kernel == 'polynomial_kernel' or self.svm_kernel == 'gaussian_kernel' or self.svm_kernel == 'soft_polynomial_kernel' or self.svm_kernel == 'soft_gaussian_kernel'): x = x[1:] ''' original_X = self.train_X[:, 1:] score = 0 for i in range(len(self.sv_alpha)): if (self.svm_kernel == 'polynomial_kernel' or self.svm_kernel == 'soft_polynomial_kernel'): score += self.sv_alpha[i] * self.sv_Y[i] * utility.Kernel.polynomial_kernel(self, original_X[self.sv_index[i]], x) elif (self.svm_kernel == 'gaussian_kernel' or self.svm_kernel == 'soft_gaussian_kernel'): score += self.sv_alpha[i] * self.sv_Y[i] * utility.Kernel.gaussian_kernel(self, original_X[self.sv_index[i]], x) score = np.sign(score + self.sv_avg_b) ''' score = np.sign(np.sum(self.sv_alpha * self.sv_Y * utility.Kernel.kernel_matrix_xX(self, x, self.sv_X)) + self.sv_avg_b) else: score = np.sign(np.inner(x, W)) return score
def score_function(self, x, W): # need refactor ''' Score function to calculate score ''' if (self.svm_kernel == 'polynomial_kernel' or self.svm_kernel == 'gaussian_kernel' or self.svm_kernel == 'soft_polynomial_kernel' or self.svm_kernel == 'soft_gaussian_kernel'): x = x[1:] ''' original_X = self.train_X[:, 1:] score = 0 for i in range(len(self.sv_alpha)): if (self.svm_kernel == 'polynomial_kernel' or self.svm_kernel == 'soft_polynomial_kernel'): score += self.sv_alpha[i] * self.sv_Y[i] * utility.Kernel.polynomial_kernel(self, original_X[self.sv_index[i]], x) elif (self.svm_kernel == 'gaussian_kernel' or self.svm_kernel == 'soft_gaussian_kernel'): score += self.sv_alpha[i] * self.sv_Y[i] * utility.Kernel.gaussian_kernel(self, original_X[self.sv_index[i]], x) score = np.sign(score + self.sv_avg_b) ''' score = np.sign(np.sum(self.sv_alpha * self.sv_Y * utility.Kernel.kernel_matrix_xX(self, x, self.sv_X)) + self.sv_avg_b) else: score = np.sign(np.inner(x, W)) return score
[ "Score", "function", "to", "calculate", "score" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/SupportVectorMachine.py#L132-L155
[ "def", "score_function", "(", "self", ",", "x", ",", "W", ")", ":", "# need refector", "if", "(", "self", ".", "svm_kernel", "==", "'polynomial_kernel'", "or", "self", ".", "svm_kernel", "==", "'gaussian_kernel'", "or", "self", ".", "svm_kernel", "==", "'soft_polynomial_kernel'", "or", "self", ".", "svm_kernel", "==", "'soft_gaussian_kernel'", ")", ":", "x", "=", "x", "[", "1", ":", "]", "'''\n original_X = self.train_X[:, 1:]\n score = 0\n for i in range(len(self.sv_alpha)):\n if (self.svm_kernel == 'polynomial_kernel' or self.svm_kernel == 'soft_polynomial_kernel'):\n score += self.sv_alpha[i] * self.sv_Y[i] * utility.Kernel.polynomial_kernel(self, original_X[self.sv_index[i]], x)\n elif (self.svm_kernel == 'gaussian_kernel' or self.svm_kernel == 'soft_gaussian_kernel'):\n score += self.sv_alpha[i] * self.sv_Y[i] * utility.Kernel.gaussian_kernel(self, original_X[self.sv_index[i]], x)\n score = np.sign(score + self.sv_avg_b)\n '''", "score", "=", "np", ".", "sign", "(", "np", ".", "sum", "(", "self", ".", "sv_alpha", "*", "self", ".", "sv_Y", "*", "utility", ".", "Kernel", ".", "kernel_matrix_xX", "(", "self", ",", "x", ",", "self", ".", "sv_X", ")", ")", "+", "self", ".", "sv_avg_b", ")", "else", ":", "score", "=", "np", ".", "sign", "(", "np", ".", "inner", "(", "x", ",", "W", ")", ")", "return", "score" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
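The kernelized branch computes sign(sum_i alpha_i * y_i * K(x_i, x) + b) over the stored support vectors. A standalone numpy sketch of that decision function with a Gaussian kernel, independent of the FukuML API:

import numpy as np

def gaussian_decision(sv_X, sv_Y, sv_alpha, b, gamma, x):
    K = np.exp(-gamma * np.sum((sv_X - x) ** 2, axis=1))  # K(sv_i, x) per support vector
    return np.sign(np.sum(sv_alpha * sv_Y * K) + b)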
test
BinaryClassifier.score_function
Score function to calculate score
FukuML/L2RLogisticRegression.py
def score_function(self, x, W): ''' Score function to calculate score ''' score = super(BinaryClassifier, self).score_function(x, W) if score >= 0.5: score = 1.0 else: score = -1.0 return score
def score_function(self, x, W): ''' Score function to calculate score ''' score = super(BinaryClassifier, self).score_function(x, W) if score >= 0.5: score = 1.0 else: score = -1.0 return score
[ "Score", "function", "to", "calculate", "score" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/L2RLogisticRegression.py#L136-L148
[ "def", "score_function", "(", "self", ",", "x", ",", "W", ")", ":", "score", "=", "super", "(", "BinaryClassifier", ",", "self", ")", ".", "score_function", "(", "x", ",", "W", ")", "if", "score", ">=", "0.5", ":", "score", "=", "1.0", "else", ":", "score", "=", "-", "1.0", "return", "score" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
test
BinaryClassifier.train
Train Pocket Perceptron Learning Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W
FukuML/PocketPLA.py
def train(self): ''' Train Pocket Perceptron Learning Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W ''' if (self.status != 'init'): print("Please load train data and init W first.") return self.W self.status = 'train' new_W = self.W self.temp_avg_error = self.calculate_avg_error(self.train_X, self.train_Y, new_W) for _ in range(self.updates): if (self.loop_mode == 'naive_cycle'): data_check_order = range(self.data_num) elif (self.loop_mode == 'random'): data_check_order = range(self.data_num) data_check_order = random.sample(data_check_order, self.data_num) else: data_check_order = range(self.data_num) data_check_order = random.sample(data_check_order, self.data_num) for i in data_check_order: if self.error_function(self.score_function(self.train_X[i], new_W), self.train_Y[i]): self.tune_times += 1 new_W = new_W + self.step_alpha * (self.train_Y[i] * self.train_X[i]) new_avg_error = self.calculate_avg_error(self.train_X, self.train_Y, new_W) if new_avg_error < self.temp_avg_error: self.put_in_pocket_times += 1 self.temp_avg_error = new_avg_error self.W = new_W break return self.W
def train(self): ''' Train Pocket Perceptron Learning Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W ''' if (self.status != 'init'): print("Please load train data and init W first.") return self.W self.status = 'train' new_W = self.W self.temp_avg_error = self.calculate_avg_error(self.train_X, self.train_Y, new_W) for _ in range(self.updates): if (self.loop_mode == 'naive_cycle'): data_check_order = range(self.data_num) elif (self.loop_mode == 'random'): data_check_order = range(self.data_num) data_check_order = random.sample(data_check_order, self.data_num) else: data_check_order = range(self.data_num) data_check_order = random.sample(data_check_order, self.data_num) for i in data_check_order: if self.error_function(self.score_function(self.train_X[i], new_W), self.train_Y[i]): self.tune_times += 1 new_W = new_W + self.step_alpha * (self.train_Y[i] * self.train_X[i]) new_avg_error = self.calculate_avg_error(self.train_X, self.train_Y, new_W) if new_avg_error < self.temp_avg_error: self.put_in_pocket_times += 1 self.temp_avg_error = new_avg_error self.W = new_W break return self.W
[ "Train", "Pocket", "Perceptron", "Learning", "Algorithm", "From", "f", "(", "x", ")", "=", "WX", "Find", "best", "h", "(", "x", ")", "=", "WX", "similar", "to", "f", "(", "x", ")", "Output", "W" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/PocketPLA.py#L154-L194
[ "def", "train", "(", "self", ")", ":", "if", "(", "self", ".", "status", "!=", "'init'", ")", ":", "print", "(", "\"Please load train data and init W first.\"", ")", "return", "self", ".", "W", "self", ".", "status", "=", "'train'", "new_W", "=", "self", ".", "W", "self", ".", "temp_avg_error", "=", "self", ".", "calculate_avg_error", "(", "self", ".", "train_X", ",", "self", ".", "train_Y", ",", "new_W", ")", "for", "_", "in", "range", "(", "self", ".", "updates", ")", ":", "if", "(", "self", ".", "loop_mode", "is", "'naive_cycle'", ")", ":", "data_check_order", "=", "range", "(", "self", ".", "data_num", ")", "elif", "(", "self", ".", "loop_mode", "is", "'random'", ")", ":", "data_check_order", "=", "range", "(", "self", ".", "data_num", ")", "data_check_order", "=", "random", ".", "sample", "(", "data_check_order", ",", "self", ".", "data_num", ")", "else", ":", "data_check_order", "=", "range", "(", "self", ".", "data_num", ")", "data_check_order", "=", "random", ".", "sample", "(", "data_check_order", ",", "self", ".", "data_num", ")", "for", "i", "in", "data_check_order", ":", "if", "self", ".", "error_function", "(", "self", ".", "score_function", "(", "self", ".", "train_X", "[", "i", "]", ",", "new_W", ")", ",", "self", ".", "train_Y", "[", "i", "]", ")", ":", "self", ".", "tune_times", "+=", "1", "new_W", "=", "new_W", "+", "self", ".", "step_alpha", "*", "(", "self", ".", "train_Y", "[", "i", "]", "*", "self", ".", "train_X", "[", "i", "]", ")", "new_avg_error", "=", "self", ".", "calculate_avg_error", "(", "self", ".", "train_X", ",", "self", ".", "train_Y", ",", "new_W", ")", "if", "new_avg_error", "<", "self", ".", "temp_avg_error", ":", "self", ".", "put_in_pocket_times", "+=", "1", "self", ".", "temp_avg_error", "=", "new_avg_error", "self", ".", "W", "=", "new_W", "break", "return", "self", ".", "W" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
test
ProbabilisticSVM.svm_score
original_X = self.svm_processor.train_X[:, 1:] score = 0 for i in range(len(self.svm_processor.sv_alpha)): score += self.svm_processor.sv_alpha[i] * self.svm_processor.sv_Y[i] * utility.Kernel.gaussian_kernel(self, original_X[self.svm_processor.sv_index[i]], x) score = score + self.svm_processor.sv_avg_b
FukuML/ProbabilisticSVM.py
def svm_score(self, x): x = x[1:] ''' original_X = self.svm_processor.train_X[:, 1:] score = 0 for i in range(len(self.svm_processor.sv_alpha)): score += self.svm_processor.sv_alpha[i] * self.svm_processor.sv_Y[i] * utility.Kernel.gaussian_kernel(self, original_X[self.svm_processor.sv_index[i]], x) score = score + self.svm_processor.sv_avg_b ''' score = np.sum(self.svm_processor.sv_alpha * self.svm_processor.sv_Y * utility.Kernel.kernel_matrix_xX(self, x, self.svm_processor.sv_X)) + self.svm_processor.sv_avg_b return score
def svm_score(self, x): x = x[1:] ''' original_X = self.svm_processor.train_X[:, 1:] score = 0 for i in range(len(self.svm_processor.sv_alpha)): score += self.svm_processor.sv_alpha[i] * self.svm_processor.sv_Y[i] * utility.Kernel.gaussian_kernel(self, original_X[self.svm_processor.sv_index[i]], x) score = score + self.svm_processor.sv_avg_b ''' score = np.sum(self.svm_processor.sv_alpha * self.svm_processor.sv_Y * utility.Kernel.kernel_matrix_xX(self, x, self.svm_processor.sv_X)) + self.svm_processor.sv_avg_b return score
[ "original_X", "=", "self", ".", "svm_processor", ".", "train_X", "[", ":", "1", ":", "]", "score", "=", "0", "for", "i", "in", "range", "(", "len", "(", "self", ".", "svm_processor", ".", "sv_alpha", "))", ":", "score", "+", "=", "self", ".", "svm_processor", ".", "sv_alpha", "[", "i", "]", "*", "self", ".", "svm_processor", ".", "sv_Y", "[", "i", "]", "*", "utility", ".", "Kernel", ".", "gaussian_kernel", "(", "self", "original_X", "[", "self", ".", "svm_processor", ".", "sv_index", "[", "i", "]]", "x", ")", "score", "=", "score", "+", "self", ".", "svm_processor", ".", "sv_avg_b" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/ProbabilisticSVM.py#L134-L148
[ "def", "svm_score", "(", "self", ",", "x", ")", ":", "x", "=", "x", "[", "1", ":", "]", "score", "=", "np", ".", "sum", "(", "self", ".", "svm_processor", ".", "sv_alpha", "*", "self", ".", "svm_processor", ".", "sv_Y", "*", "utility", ".", "Kernel", ".", "kernel_matrix_xX", "(", "self", ",", "x", ",", "self", ".", "svm_processor", ".", "sv_X", ")", ")", "+", "self", ".", "svm_processor", ".", "sv_avg_b", "return", "score" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
test
LinearRegression.train
Train Linear Regression Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W
FukuML/LinearRegression.py
def train(self): ''' Train Linear Regression Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W ''' if (self.status != 'init'): print("Please load train data and init W first.") return self.W self.status = 'train' self.xpsedo = self.calculate_psedo_X(self.train_X) self.W = np.dot(self.xpsedo, self.train_Y) return self.W
def train(self): ''' Train Linear Regression Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W ''' if (self.status != 'init'): print("Please load train data and init W first.") return self.W self.status = 'train' self.xpsedo = self.calculate_psedo_X(self.train_X) self.W = np.dot(self.xpsedo, self.train_Y) return self.W
[ "Train", "Linear", "Regression", "Algorithm", "From", "f", "(", "x", ")", "=", "WX", "Find", "best", "h", "(", "x", ")", "=", "WX", "similar", "to", "f", "(", "x", ")", "Output", "W" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/LinearRegression.py#L135-L153
[ "def", "train", "(", "self", ")", ":", "if", "(", "self", ".", "status", "!=", "'init'", ")", ":", "print", "(", "\"Please load train data and init W first.\"", ")", "return", "self", ".", "W", "self", ".", "status", "=", "'train'", "self", ".", "xpsedo", "=", "self", ".", "calculate_psedo_X", "(", "self", ".", "train_X", ")", "self", ".", "W", "=", "np", ".", "dot", "(", "self", ".", "xpsedo", ",", "self", ".", "train_Y", ")", "return", "self", ".", "W" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
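calculate_psedo_X appears to compute the Moore-Penrose pseudo-inverse of the design matrix (presumably np.linalg.pinv or the normal equations), so training is the one-shot closed form W = pinv(X) . Y. A tiny numpy check on three collinear points:

import numpy as np

X = np.array([[1., 0.], [1., 1.], [1., 2.]])  # bias column already prepended
Y = np.array([1., 2., 3.])                    # lies exactly on y = 1 + x
W = np.dot(np.linalg.pinv(X), Y)              # -> approximately [1., 1.]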
test
BinaryClassifier.score_function
Score function to calculate score
FukuML/DecisionStump.py
def score_function(self, x, W): # need refactor ''' Score function to calculate score ''' score = self.sign * np.sign(x[self.feature_index] - self.theta) return score
def score_function(self, x, W): # need refactor ''' Score function to calculate score ''' score = self.sign * np.sign(x[self.feature_index] - self.theta) return score
[ "Score", "function", "to", "calculate", "score" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/DecisionStump.py#L112-L121
[ "def", "score_function", "(", "self", ",", "x", ",", "W", ")", ":", "# need refector", "score", "=", "self", ".", "sign", "*", "np", ".", "sign", "(", "x", "[", "self", ".", "feature_index", "]", "-", "self", ".", "theta", ")", "return", "score" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
test
BinaryClassifier.train
Train Perceptron Learning Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W
FukuML/PLA.py
def train(self): ''' Train Perceptron Learning Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W ''' if (self.status != 'init'): print("Please load train data and init W first.") return self.W self.status = 'train' if (self.loop_mode == 'random'): data_check_order = range(self.data_num) data_check_order = random.sample(data_check_order, self.data_num) elif (self.loop_mode == 'naive_cycle'): data_check_order = range(self.data_num) else: data_check_order = range(self.data_num) self.tune_times = 0 k = 0 flag = True while True: if (self.tune_times > (2 * self.data_num)): print("Dataset not linearly separable.") break if k == self.data_num: if flag: break k = 0 flag = True point_wise_i = data_check_order[k] if self.error_function(self.score_function(self.train_X[point_wise_i], self.W), self.train_Y[point_wise_i]): flag = False self.tune_times += 1 self.W = self.W + self.step_alpha * (self.train_Y[point_wise_i] * self.train_X[point_wise_i]) k += 1 return self.W
def train(self): ''' Train Perceptron Learning Algorithm From f(x) = WX Find best h(x) = WX similar to f(x) Output W ''' if (self.status != 'init'): print("Please load train data and init W first.") return self.W self.status = 'train' if (self.loop_mode == 'random'): data_check_order = range(self.data_num) data_check_order = random.sample(data_check_order, self.data_num) elif (self.loop_mode == 'naive_cycle'): data_check_order = range(self.data_num) else: data_check_order = range(self.data_num) self.tune_times = 0 k = 0 flag = True while True: if (self.tune_times > (2 * self.data_num)): print("Dataset not linearly separable.") break if k == self.data_num: if flag: break k = 0 flag = True point_wise_i = data_check_order[k] if self.error_function(self.score_function(self.train_X[point_wise_i], self.W), self.train_Y[point_wise_i]): flag = False self.tune_times += 1 self.W = self.W + self.step_alpha * (self.train_Y[point_wise_i] * self.train_X[point_wise_i]) k += 1 return self.W
[ "Train", "Perceptron", "Learning", "Algorithm", "From", "f", "(", "x", ")", "=", "WX", "Find", "best", "h", "(", "x", ")", "=", "WX", "similar", "to", "f", "(", "x", ")", "Output", "W" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/PLA.py#L147-L193
[ "def", "train", "(", "self", ")", ":", "if", "(", "self", ".", "status", "!=", "'init'", ")", ":", "print", "(", "\"Please load train data and init W first.\"", ")", "return", "self", ".", "W", "self", ".", "status", "=", "'train'", "if", "(", "self", ".", "loop_mode", "is", "'random'", ")", ":", "data_check_order", "=", "range", "(", "self", ".", "data_num", ")", "data_check_order", "=", "random", ".", "sample", "(", "data_check_order", ",", "self", ".", "data_num", ")", "elif", "(", "self", ".", "loop_mode", "is", "'naive_cycle'", ")", ":", "data_check_order", "=", "range", "(", "self", ".", "data_num", ")", "else", ":", "data_check_order", "=", "range", "(", "self", ".", "data_num", ")", "self", ".", "tune_times", "=", "0", "k", "=", "0", "flag", "=", "True", "while", "True", ":", "if", "(", "self", ".", "tune_times", ">", "(", "2", "*", "self", ".", "data_num", ")", ")", ":", "print", "(", "\"Dataset not linear separable.\"", ")", "break", "if", "k", "==", "self", ".", "data_num", ":", "if", "flag", ":", "break", "k", "=", "0", "flag", "=", "True", "point_wise_i", "=", "data_check_order", "[", "k", "]", "if", "self", ".", "error_function", "(", "self", ".", "score_function", "(", "self", ".", "train_X", "[", "point_wise_i", "]", ",", "self", ".", "W", ")", ",", "self", ".", "train_Y", "[", "point_wise_i", "]", ")", ":", "flag", "=", "False", "self", ".", "tune_times", "+=", "1", "self", ".", "W", "=", "self", ".", "W", "+", "self", ".", "step_alpha", "*", "(", "self", ".", "train_Y", "[", "point_wise_i", "]", "*", "self", ".", "train_X", "[", "point_wise_i", "]", ")", "k", "+=", "1", "return", "self", ".", "W" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
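Stripped of the loop-order bookkeeping and the halting check, the PLA update is just W <- W + y * x on each misclassified point. A toy numpy version on a small linearly separable set:

import numpy as np

X = np.array([[1., 2., 1.], [1., -1., -2.], [1., 0., 3.], [1., -2., -1.]])  # bias first
Y = np.array([1., -1., 1., -1.])
W = np.zeros(3)
changed = True
while changed:
    changed = False
    for x, y in zip(X, Y):
        if np.sign(np.inner(x, W)) != y:  # misclassified: rotate W toward y * x
            W = W + y * x
            changed = True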
test
DatasetLoader.load
load file
FukuML/Utility.py
def load(input_data_file='', data_type='float'): """load file""" X = [] Y = [] if data_type == 'float': with open(input_data_file) as f: for line in f: data = line.split() x = [1] + [float(v) for v in data[:-1]] X.append(x) Y.append(float(data[-1])) else: with open(input_data_file) as f: for line in f: data = line.split() x = [1] + [v for v in data[:-1]] X.append(x) Y.append(data[-1]) return np.array(X), np.array(Y)
def load(input_data_file='', data_type='float'): """load file""" X = [] Y = [] if data_type == 'float': with open(input_data_file) as f: for line in f: data = line.split() x = [1] + [float(v) for v in data[:-1]] X.append(x) Y.append(float(data[-1])) else: with open(input_data_file) as f: for line in f: data = line.split() x = [1] + [v for v in data[:-1]] X.append(x) Y.append(data[-1]) return np.array(X), np.array(Y)
[ "load", "file" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/Utility.py#L20-L40
[ "def", "load", "(", "input_data_file", "=", "''", ",", "data_type", "=", "'float'", ")", ":", "X", "=", "[", "]", "Y", "=", "[", "]", "if", "data_type", "==", "'float'", ":", "with", "open", "(", "input_data_file", ")", "as", "f", ":", "for", "line", "in", "f", ":", "data", "=", "line", ".", "split", "(", ")", "x", "=", "[", "1", "]", "+", "[", "float", "(", "v", ")", "for", "v", "in", "data", "[", ":", "-", "1", "]", "]", "X", ".", "append", "(", "x", ")", "Y", ".", "append", "(", "float", "(", "data", "[", "-", "1", "]", ")", ")", "else", ":", "with", "open", "(", "input_data_file", ")", "as", "f", ":", "for", "line", "in", "f", ":", "data", "=", "line", ".", "split", "(", ")", "x", "=", "[", "1", "]", "+", "[", "v", "for", "v", "in", "data", "[", ":", "-", "1", "]", "]", "X", ".", "append", "(", "x", ")", "Y", ".", "append", "(", "data", "[", "-", "1", "]", ")", "return", "np", ".", "array", "(", "X", ")", ",", "np", ".", "array", "(", "Y", ")" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
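The expected file format is one whitespace-separated sample per line, features first and label last; a bias 1 is prepended to every row. A sketch, assuming load is usable as a static method on DatasetLoader:

from FukuML.Utility import DatasetLoader

with open('demo.dat', 'w') as f:
    f.write('0.5 1.5 +1\n-0.5 -1.5 -1\n')   # two samples of the form "x1 x2 y"

X, Y = DatasetLoader.load('demo.dat')
# X == [[1.0, 0.5, 1.5], [1.0, -0.5, -1.5]] and Y == [1.0, -1.0]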
test
Kernel.kernel_matrix
K = np.zeros((svm_model.data_num, svm_model.data_num)) for i in range(svm_model.data_num): for j in range(svm_model.data_num): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K[i, j] = Kernel.polynomial_kernel(svm_model, original_X[i], original_X[j]) elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): K[i, j] = Kernel.gaussian_kernel(svm_model, original_X[i], original_X[j])
FukuML/Utility.py
def kernel_matrix(svm_model, original_X): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K = (svm_model.zeta + svm_model.gamma * np.dot(original_X, original_X.T)) ** svm_model.Q elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): pairwise_dists = squareform(pdist(original_X, 'euclidean')) K = np.exp(-svm_model.gamma * (pairwise_dists ** 2)) ''' K = np.zeros((svm_model.data_num, svm_model.data_num)) for i in range(svm_model.data_num): for j in range(svm_model.data_num): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K[i, j] = Kernel.polynomial_kernel(svm_model, original_X[i], original_X[j]) elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): K[i, j] = Kernel.gaussian_kernel(svm_model, original_X[i], original_X[j]) ''' return K
def kernel_matrix(svm_model, original_X): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K = (svm_model.zeta + svm_model.gamma * np.dot(original_X, original_X.T)) ** svm_model.Q elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): pairwise_dists = squareform(pdist(original_X, 'euclidean')) K = np.exp(-svm_model.gamma * (pairwise_dists ** 2)) ''' K = np.zeros((svm_model.data_num, svm_model.data_num)) for i in range(svm_model.data_num): for j in range(svm_model.data_num): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K[i, j] = Kernel.polynomial_kernel(svm_model, original_X[i], original_X[j]) elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): K[i, j] = Kernel.gaussian_kernel(svm_model, original_X[i], original_X[j]) ''' return K
[ "K", "=", "np", ".", "zeros", "((", "svm_model", ".", "data_num", "svm_model", ".", "data_num", "))" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/Utility.py#L229-L248
[ "def", "kernel_matrix", "(", "svm_model", ",", "original_X", ")", ":", "if", "(", "svm_model", ".", "svm_kernel", "==", "'polynomial_kernel'", "or", "svm_model", ".", "svm_kernel", "==", "'soft_polynomial_kernel'", ")", ":", "K", "=", "(", "svm_model", ".", "zeta", "+", "svm_model", ".", "gamma", "*", "np", ".", "dot", "(", "original_X", ",", "original_X", ".", "T", ")", ")", "**", "svm_model", ".", "Q", "elif", "(", "svm_model", ".", "svm_kernel", "==", "'gaussian_kernel'", "or", "svm_model", ".", "svm_kernel", "==", "'soft_gaussian_kernel'", ")", ":", "pairwise_dists", "=", "squareform", "(", "pdist", "(", "original_X", ",", "'euclidean'", ")", ")", "K", "=", "np", ".", "exp", "(", "-", "svm_model", ".", "gamma", "*", "(", "pairwise_dists", "**", "2", ")", ")", "return", "K" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
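The kernel_matrix record above replaces the commented-out quadratic double loop with fully vectorized NumPy/SciPy expressions. A minimal standalone sketch of the same two Gram-matrix computations follows; the gamma, zeta and Q values are illustrative defaults, not FukuML's stored parameters:

import numpy as np
from scipy.spatial.distance import pdist, squareform

def polynomial_gram(X, zeta=1.0, gamma=1.0, Q=2):
    # (zeta + gamma * <x_i, x_j>)^Q for every pair, via one matrix product
    return (zeta + gamma * np.dot(X, X.T)) ** Q

def gaussian_gram(X, gamma=1.0):
    # exp(-gamma * ||x_i - x_j||^2); pdist returns condensed distances,
    # squareform expands them to the full N x N matrix
    D = squareform(pdist(X, 'euclidean'))
    return np.exp(-gamma * D ** 2)

X = np.random.rand(5, 3)
K = gaussian_gram(X, gamma=0.5)
assert K.shape == (5, 5) and np.allclose(K, K.T)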
test
Kernel.kernel_matrix_xX
K = np.zeros((svm_model.data_num, svm_model.data_num)) for i in range(svm_model.data_num): for j in range(svm_model.data_num): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K[i, j] = Kernel.polynomial_kernel(svm_model, original_x, original_X[j]) elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): K[i, j] = Kernel.gaussian_kernel(svm_model, original_x, original_X[j])
FukuML/Utility.py
def kernel_matrix_xX(svm_model, original_x, original_X): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K = (svm_model.zeta + svm_model.gamma * np.dot(original_x, original_X.T)) ** svm_model.Q elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): K = np.exp(-svm_model.gamma * (cdist(original_X, np.atleast_2d(original_x), 'euclidean').T ** 2)).ravel() ''' K = np.zeros((svm_model.data_num, svm_model.data_num)) for i in range(svm_model.data_num): for j in range(svm_model.data_num): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K[i, j] = Kernel.polynomial_kernel(svm_model, original_x, original_X[j]) elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): K[i, j] = Kernel.gaussian_kernel(svm_model, original_x, original_X[j]) ''' return K
def kernel_matrix_xX(svm_model, original_x, original_X): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K = (svm_model.zeta + svm_model.gamma * np.dot(original_x, original_X.T)) ** svm_model.Q elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): K = np.exp(-svm_model.gamma * (cdist(original_X, np.atleast_2d(original_x), 'euclidean').T ** 2)).ravel() ''' K = np.zeros((svm_model.data_num, svm_model.data_num)) for i in range(svm_model.data_num): for j in range(svm_model.data_num): if (svm_model.svm_kernel == 'polynomial_kernel' or svm_model.svm_kernel == 'soft_polynomial_kernel'): K[i, j] = Kernel.polynomial_kernel(svm_model, original_x, original_X[j]) elif (svm_model.svm_kernel == 'gaussian_kernel' or svm_model.svm_kernel == 'soft_gaussian_kernel'): K[i, j] = Kernel.gaussian_kernel(svm_model, original_x, original_X[j]) ''' return K
[ "K", "=", "np", ".", "zeros", "((", "svm_model", ".", "data_num", "svm_model", ".", "data_num", "))" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/Utility.py#L251-L269
[ "def", "kernel_matrix_xX", "(", "svm_model", ",", "original_x", ",", "original_X", ")", ":", "if", "(", "svm_model", ".", "svm_kernel", "==", "'polynomial_kernel'", "or", "svm_model", ".", "svm_kernel", "==", "'soft_polynomial_kernel'", ")", ":", "K", "=", "(", "svm_model", ".", "zeta", "+", "svm_model", ".", "gamma", "*", "np", ".", "dot", "(", "original_x", ",", "original_X", ".", "T", ")", ")", "**", "svm_model", ".", "Q", "elif", "(", "svm_model", ".", "svm_kernel", "==", "'gaussian_kernel'", "or", "svm_model", ".", "svm_kernel", "==", "'soft_gaussian_kernel'", ")", ":", "K", "=", "np", ".", "exp", "(", "-", "svm_model", ".", "gamma", "*", "(", "cdist", "(", "original_X", ",", "np", ".", "atleast_2d", "(", "original_x", ")", ",", "'euclidean'", ")", ".", "T", "**", "2", ")", ")", ".", "ravel", "(", ")", "return", "K" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
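kernel_matrix_xX computes a kernel vector between one query point and the whole training set; cdist plus ravel() does in one call what the commented-out loop did pairwise. A hedged sketch of the Gaussian case, with an illustrative gamma:

import numpy as np
from scipy.spatial.distance import cdist

def gaussian_kernel_vector(x, X, gamma=1.0):
    # distances from the single point x to every row of X, then the RBF map;
    # ravel() flattens the (1, N) result to shape (N,), as in the record above
    d = cdist(X, np.atleast_2d(x), 'euclidean').T
    return np.exp(-gamma * d ** 2).ravel()

X = np.random.rand(10, 3)
k = gaussian_kernel_vector(X[0], X, gamma=0.5)
assert k.shape == (10,) and np.isclose(k[0], 1.0)  # distance to itself is 0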
test
Learner.set_feature_transform
Transform data features to a higher level
FukuML/MLBase.py
def set_feature_transform(self, mode='polynomial', degree=1): ''' Transform data features to a higher level ''' if self.status != 'load_train_data': print("Please load train data first.") return self.train_X self.feature_transform_mode = mode self.feature_transform_degree = degree self.train_X = self.train_X[:, 1:] self.train_X = utility.DatasetLoader.feature_transform( self.train_X, self.feature_transform_mode, self.feature_transform_degree ) return self.train_X
def set_feature_transform(self, mode='polynomial', degree=1): ''' Transform data features to a higher level ''' if self.status != 'load_train_data': print("Please load train data first.") return self.train_X self.feature_transform_mode = mode self.feature_transform_degree = degree self.train_X = self.train_X[:, 1:] self.train_X = utility.DatasetLoader.feature_transform( self.train_X, self.feature_transform_mode, self.feature_transform_degree ) return self.train_X
[ "Transform", "data", "feature", "to", "high", "level" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/MLBase.py#L34-L55
[ "def", "set_feature_transform", "(", "self", ",", "mode", "=", "'polynomial'", ",", "degree", "=", "1", ")", ":", "if", "self", ".", "status", "!=", "'load_train_data'", ":", "print", "(", "\"Please load train data first.\"", ")", "return", "self", ".", "train_X", "self", ".", "feature_transform_mode", "=", "mode", "self", ".", "feature_transform_degree", "=", "degree", "self", ".", "train_X", "=", "self", ".", "train_X", "[", ":", ",", "1", ":", "]", "self", ".", "train_X", "=", "utility", ".", "DatasetLoader", ".", "feature_transform", "(", "self", ".", "train_X", ",", "self", ".", "feature_transform_mode", ",", "self", ".", "feature_transform_degree", ")", "return", "self", ".", "train_X" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
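set_feature_transform strips the bias column and delegates to DatasetLoader.feature_transform, whose exact output is not shown in this record. The sketch below is therefore only one plausible polynomial expansion with a leading bias term, not FukuML's actual transform:

import numpy as np
from itertools import combinations_with_replacement

def polynomial_transform(X, degree=2):
    # expand each row into all monomials up to the given degree,
    # with a leading 1 acting as the bias term
    out = []
    for row in X:
        feats = [1.0]
        for d in range(1, degree + 1):
            for combo in combinations_with_replacement(row, d):
                feats.append(np.prod(combo))
        out.append(feats)
    return np.array(out)

X = np.array([[2.0, 3.0]])
# degree 2 on (x1, x2): [1, x1, x2, x1^2, x1*x2, x2^2]
print(polynomial_transform(X, degree=2))  # [[1. 2. 3. 4. 6. 9.]]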
test
Learner.prediction
Make a prediction: input test data, output the prediction
FukuML/MLBase.py
def prediction(self, input_data='', mode='test_data'): ''' Make a prediction: input test data, output the prediction ''' prediction = {} if (self.status != 'train'): print("Please load train data and init W then train the W first.") return prediction if (input_data == ''): print("Please input test data for prediction.") return prediction if mode == 'future_data': data = input_data.split() input_data_x = [float(v) for v in data] input_data_x = utility.DatasetLoader.feature_transform( np.array(input_data_x).reshape(1, -1), self.feature_transform_mode, self.feature_transform_degree ) input_data_x = np.ravel(input_data_x) prediction = self.score_function(input_data_x, self.W) return {"input_data_x": input_data_x, "input_data_y": None, "prediction": prediction} else: data = input_data.split() input_data_x = [float(v) for v in data[:-1]] input_data_x = utility.DatasetLoader.feature_transform( np.array(input_data_x).reshape(1, -1), self.feature_transform_mode, self.feature_transform_degree ) input_data_x = np.ravel(input_data_x) input_data_y = float(data[-1]) prediction = self.score_function(input_data_x, self.W) return {"input_data_x": input_data_x, "input_data_y": input_data_y, "prediction": prediction}
def prediction(self, input_data='', mode='test_data'): ''' Make a prediction: input test data, output the prediction ''' prediction = {} if (self.status != 'train'): print("Please load train data and init W then train the W first.") return prediction if (input_data == ''): print("Please input test data for prediction.") return prediction if mode == 'future_data': data = input_data.split() input_data_x = [float(v) for v in data] input_data_x = utility.DatasetLoader.feature_transform( np.array(input_data_x).reshape(1, -1), self.feature_transform_mode, self.feature_transform_degree ) input_data_x = np.ravel(input_data_x) prediction = self.score_function(input_data_x, self.W) return {"input_data_x": input_data_x, "input_data_y": None, "prediction": prediction} else: data = input_data.split() input_data_x = [float(v) for v in data[:-1]] input_data_x = utility.DatasetLoader.feature_transform( np.array(input_data_x).reshape(1, -1), self.feature_transform_mode, self.feature_transform_degree ) input_data_x = np.ravel(input_data_x) input_data_y = float(data[-1]) prediction = self.score_function(input_data_x, self.W) return {"input_data_x": input_data_x, "input_data_y": input_data_y, "prediction": prediction}
[ "Make", "prediction", "input", "test", "data", "output", "the", "prediction" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/MLBase.py#L102-L142
[ "def", "prediction", "(", "self", ",", "input_data", "=", "''", ",", "mode", "=", "'test_data'", ")", ":", "prediction", "=", "{", "}", "if", "(", "self", ".", "status", "!=", "'train'", ")", ":", "print", "(", "\"Please load train data and init W then train the W first.\"", ")", "return", "prediction", "if", "(", "input_data", "==", "''", ")", ":", "print", "(", "\"Please input test data for prediction.\"", ")", "return", "prediction", "if", "mode", "==", "'future_data'", ":", "data", "=", "input_data", ".", "split", "(", ")", "input_data_x", "=", "[", "float", "(", "v", ")", "for", "v", "in", "data", "]", "input_data_x", "=", "utility", ".", "DatasetLoader", ".", "feature_transform", "(", "np", ".", "array", "(", "input_data_x", ")", ".", "reshape", "(", "1", ",", "-", "1", ")", ",", "self", ".", "feature_transform_mode", ",", "self", ".", "feature_transform_degree", ")", "input_data_x", "=", "np", ".", "ravel", "(", "input_data_x", ")", "prediction", "=", "self", ".", "score_function", "(", "input_data_x", ",", "self", ".", "W", ")", "return", "{", "\"input_data_x\"", ":", "input_data_x", ",", "\"input_data_y\"", ":", "None", ",", "\"prediction\"", ":", "prediction", "}", "else", ":", "data", "=", "input_data", ".", "split", "(", ")", "input_data_x", "=", "[", "float", "(", "v", ")", "for", "v", "in", "data", "[", ":", "-", "1", "]", "]", "input_data_x", "=", "utility", ".", "DatasetLoader", ".", "feature_transform", "(", "np", ".", "array", "(", "input_data_x", ")", ".", "reshape", "(", "1", ",", "-", "1", ")", ",", "self", ".", "feature_transform_mode", ",", "self", ".", "feature_transform_degree", ")", "input_data_x", "=", "np", ".", "ravel", "(", "input_data_x", ")", "input_data_y", "=", "float", "(", "data", "[", "-", "1", "]", ")", "prediction", "=", "self", ".", "score_function", "(", "input_data_x", ",", "self", ".", "W", ")", "return", "{", "\"input_data_x\"", ":", "input_data_x", ",", "\"input_data_y\"", ":", "input_data_y", ",", "\"prediction\"", ":", "prediction", "}" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
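prediction parses a whitespace-separated record, where 'future_data' mode treats every field as a feature and 'test_data' mode treats the last field as the label. A small sketch of just that parsing step, with a made-up input line:

import numpy as np

def parse_input_line(line, has_label=True):
    # split a whitespace-separated record into features (and optional label),
    # mirroring the "test_data" / "future_data" modes above
    values = line.split()
    if has_label:
        x, y = [float(v) for v in values[:-1]], float(values[-1])
    else:
        x, y = [float(v) for v in values], None
    return np.array(x), y

x, y = parse_input_line("0.97681 0.10723 0.64385 1")
print(x, y)  # [0.97681 0.10723 0.64385] 1.0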
test
LogisticRegression.theta
Theta sigmoid function
FukuML/LogisticRegression.py
def theta(self, s): ''' Theta sigmoid function ''' s = np.where(s < -709, -709, s) return 1 / (1 + np.exp((-1) * s))
def theta(self, s): ''' Theta sigmoid function ''' s = np.where(s < -709, -709, s) return 1 / (1 + np.exp((-1) * s))
[ "Theta", "sigmoid", "function" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/LogisticRegression.py#L120-L128
[ "def", "theta", "(", "self", ",", "s", ")", ":", "s", "=", "np", ".", "where", "(", "s", "<", "-", "709", ",", "-", "709", ",", "s", ")", "return", "1", "/", "(", "1", "+", "np", ".", "exp", "(", "(", "-", "1", ")", "*", "s", ")", ")" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
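theta clamps its argument at -709 before exponentiating because np.exp(709) sits just below the float64 maximum (about 1.8e308); without the clamp, large negative scores would overflow to inf. A self-contained sketch that clips on both sides:

import numpy as np

def sigmoid(s):
    # clamp the argument so np.exp never overflows float64
    # (exp(709) is close to the float64 maximum of ~1.8e308)
    s = np.clip(s, -709, 709)
    return 1.0 / (1.0 + np.exp(-s))

print(sigmoid(np.array([-1000.0, 0.0, 1000.0])))  # [~0.  0.5  ~1.]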
test
LogisticRegression.score_function
Score function to calculate score
FukuML/LogisticRegression.py
def score_function(self, x, W): # need refactor ''' Score function to calculate score ''' score = self.theta(np.inner(x, W)) return score
def score_function(self, x, W): # need refactor ''' Score function to calculate score ''' score = self.theta(np.inner(x, W)) return score
[ "Score", "function", "to", "calculate", "score" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/LogisticRegression.py#L130-L139
[ "def", "score_function", "(", "self", ",", "x", ",", "W", ")", ":", "# need refector", "score", "=", "self", ".", "theta", "(", "np", ".", "inner", "(", "x", ",", "W", ")", ")", "return", "score" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
test
LogisticRegression.error_function
Error function to calculate error: cross entropy error
FukuML/LogisticRegression.py
def error_function(self, x, y, W): # need refactor ''' Error function to calculate error: cross entropy error ''' error = np.log(1 + np.exp((-1) * y * np.inner(x, W))) return error
def error_function(self, x, y, W): # need refactor ''' Error function to calculate error: cross entropy error ''' error = np.log(1 + np.exp((-1) * y * np.inner(x, W))) return error
[ "Error", "function", "to", "calculate", "error", ":", "cross", "entropy", "error" ]
fukuball/fuku-ml
python
https://github.com/fukuball/fuku-ml/blob/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0/FukuML/LogisticRegression.py#L141-L150
[ "def", "error_function", "(", "self", ",", "x", ",", "y", ",", "W", ")", ":", "# need refector", "error", "=", "np", ".", "log", "(", "1", "+", "np", ".", "exp", "(", "(", "-", "1", ")", "*", "y", "*", "np", ".", "inner", "(", "x", ",", "W", ")", ")", ")", "return", "error" ]
0da15ad7af76adf344b5a6b3f3dbabbbab3446b0
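error_function computes the cross-entropy (logistic) loss log(1 + exp(-y * <W, x>)). As written it can overflow for strongly misclassified points; np.logaddexp gives the same value stably. The stable variant below is my suggestion, not FukuML's code:

import numpy as np

def cross_entropy_error(x, y, W):
    # log(1 + exp(-y * <W, x>)) computed stably:
    # np.logaddexp(0, t) == log(exp(0) + exp(t)) without overflow
    return np.logaddexp(0.0, -y * np.inner(x, W))

x = np.array([1.0, 0.5, -0.2])
W = np.array([0.1, 0.4, 0.3])
print(cross_entropy_error(x, y=1, W=W))   # smaller error, correct side
print(cross_entropy_error(x, y=-1, W=W))  # larger error, wrong side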
test
parse_log
Retrieves some statistics from a single Trimmomatic log file. This function parses Trimmomatic's log file and stores some trimming statistics in an :py:class:`OrderedDict` object. This object contains the following keys: - ``clean_len``: Total length after trimming. - ``total_trim``: Total trimmed base pairs. - ``total_trim_perc``: Total trimmed base pairs in percentage. - ``5trim``: Total base pairs trimmed at 5' end. - ``3trim``: Total base pairs trimmed at 3' end. Parameters ---------- log_file : str Path to trimmomatic log file. Returns ------- x : :py:class:`OrderedDict` Object storing the trimming statistics.
flowcraft/templates/trimmomatic.py
def parse_log(log_file): """Retrieves some statistics from a single Trimmomatic log file. This function parses Trimmomatic's log file and stores some trimming statistics in an :py:class:`OrderedDict` object. This object contains the following keys: - ``clean_len``: Total length after trimming. - ``total_trim``: Total trimmed base pairs. - ``total_trim_perc``: Total trimmed base pairs in percentage. - ``5trim``: Total base pairs trimmed at 5' end. - ``3trim``: Total base pairs trimmed at 3' end. Parameters ---------- log_file : str Path to trimmomatic log file. Returns ------- x : :py:class:`OrderedDict` Object storing the trimming statistics. """ template = OrderedDict([ # Total length after trimming ("clean_len", 0), # Total trimmed base pairs ("total_trim", 0), # Total trimmed base pairs in percentage ("total_trim_perc", 0), # Total trimmed at 5' end ("5trim", 0), # Total trimmed at 3' end ("3trim", 0), # Bad reads (completely trimmed) ("bad_reads", 0) ]) with open(log_file) as fh: for line in fh: # This will split the log fields into: # 0. read length after trimming # 1. amount trimmed from the start # 2. last surviving base # 3. amount trimmed from the end fields = [int(x) for x in line.strip().split()[-4:]] if not fields[0]: template["bad_reads"] += 1 template["5trim"] += fields[1] template["3trim"] += fields[3] template["total_trim"] += fields[1] + fields[3] template["clean_len"] += fields[0] total_len = template["clean_len"] + template["total_trim"] if total_len: template["total_trim_perc"] = round( (template["total_trim"] / total_len) * 100, 2) else: template["total_trim_perc"] = 0 return template
def parse_log(log_file): """Retrieves some statistics from a single Trimmomatic log file. This function parses Trimmomatic's log file and stores some trimming statistics in an :py:class:`OrderedDict` object. This object contains the following keys: - ``clean_len``: Total length after trimming. - ``total_trim``: Total trimmed base pairs. - ``total_trim_perc``: Total trimmed base pairs in percentage. - ``5trim``: Total base pairs trimmed at 5' end. - ``3trim``: Total base pairs trimmed at 3' end. Parameters ---------- log_file : str Path to trimmomatic log file. Returns ------- x : :py:class:`OrderedDict` Object storing the trimming statistics. """ template = OrderedDict([ # Total length after trimming ("clean_len", 0), # Total trimmed base pairs ("total_trim", 0), # Total trimmed base pairs in percentage ("total_trim_perc", 0), # Total trimmed at 5' end ("5trim", 0), # Total trimmed at 3' end ("3trim", 0), # Bad reads (completely trimmed) ("bad_reads", 0) ]) with open(log_file) as fh: for line in fh: # This will split the log fields into: # 0. read length after trimming # 1. amount trimmed from the start # 2. last surviving base # 3. amount trimmed from the end fields = [int(x) for x in line.strip().split()[-4:]] if not fields[0]: template["bad_reads"] += 1 template["5trim"] += fields[1] template["3trim"] += fields[3] template["total_trim"] += fields[1] + fields[3] template["clean_len"] += fields[0] total_len = template["clean_len"] + template["total_trim"] if total_len: template["total_trim_perc"] = round( (template["total_trim"] / total_len) * 100, 2) else: template["total_trim_perc"] = 0 return template
[ "Retrieves", "some", "statistics", "from", "a", "single", "Trimmomatic", "log", "file", "." ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/templates/trimmomatic.py#L113-L179
[ "def", "parse_log", "(", "log_file", ")", ":", "template", "=", "OrderedDict", "(", "[", "# Total length after trimming", "(", "\"clean_len\"", ",", "0", ")", ",", "# Total trimmed base pairs", "(", "\"total_trim\"", ",", "0", ")", ",", "# Total trimmed base pairs in percentage", "(", "\"total_trim_perc\"", ",", "0", ")", ",", "# Total trimmed at 5' end", "(", "\"5trim\"", ",", "0", ")", ",", "# Total trimmed at 3' end", "(", "\"3trim\"", ",", "0", ")", ",", "# Bad reads (completely trimmed)", "(", "\"bad_reads\"", ",", "0", ")", "]", ")", "with", "open", "(", "log_file", ")", "as", "fh", ":", "for", "line", "in", "fh", ":", "# This will split the log fields into:", "# 0. read length after trimming", "# 1. amount trimmed from the start", "# 2. last surviving base", "# 3. amount trimmed from the end", "fields", "=", "[", "int", "(", "x", ")", "for", "x", "in", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "[", "-", "4", ":", "]", "]", "if", "not", "fields", "[", "0", "]", ":", "template", "[", "\"bad_reads\"", "]", "+=", "1", "template", "[", "\"5trim\"", "]", "+=", "fields", "[", "1", "]", "template", "[", "\"3trim\"", "]", "+=", "fields", "[", "3", "]", "template", "[", "\"total_trim\"", "]", "+=", "fields", "[", "1", "]", "+", "fields", "[", "3", "]", "template", "[", "\"clean_len\"", "]", "+=", "fields", "[", "0", "]", "total_len", "=", "template", "[", "\"clean_len\"", "]", "+", "template", "[", "\"total_trim\"", "]", "if", "total_len", ":", "template", "[", "\"total_trim_perc\"", "]", "=", "round", "(", "(", "template", "[", "\"total_trim\"", "]", "/", "total_len", ")", "*", "100", ",", "2", ")", "else", ":", "template", "[", "\"total_trim_perc\"", "]", "=", "0", "return", "template" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
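parse_log relies on the last four whitespace-separated fields of each Trimmomatic -trimlog line: surviving length, bases trimmed from the start, last surviving base, and bases trimmed from the end. A sketch of the per-line arithmetic on one made-up log line:

# Hypothetical trimlog line: surviving length 95, 5 bp trimmed from the
# start, last surviving base 99, 0 bp trimmed from the end
line = "read_1/1 95 5 99 0"
clean_len, start_trim, last_base, end_trim = [int(v) for v in line.split()[-4:]]
total_trim = start_trim + end_trim
pct = round(total_trim / (clean_len + total_trim) * 100, 2)
print(clean_len, total_trim, pct)  # 95 5 5.0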
test
clean_up
Cleans the working directory of unwanted temporary files
flowcraft/templates/trimmomatic.py
def clean_up(fastq_pairs, clear): """Cleans the working directory of unwanted temporary files""" # Find unpaired fastq files unpaired_fastq = [f for f in os.listdir(".") if f.endswith("_U.fastq.gz")] # Remove unpaired fastq files, if any for fpath in unpaired_fastq: os.remove(fpath) # Expected output to assess whether it is safe to remove temporary input expected_out = [f for f in os.listdir(".") if f.endswith("_trim.fastq.gz")] if clear == "true" and len(expected_out) == 2: for fq in fastq_pairs: # Get real path of fastq files, following symlinks rp = os.path.realpath(fq) logger.debug("Removing temporary fastq file path: {}".format(rp)) if re.match(".*/work/.{2}/.{30}/.*", rp): os.remove(rp)
def clean_up(fastq_pairs, clear): """Cleans the working directory of unwanted temporary files""" # Find unpaired fastq files unpaired_fastq = [f for f in os.listdir(".") if f.endswith("_U.fastq.gz")] # Remove unpaired fastq files, if any for fpath in unpaired_fastq: os.remove(fpath) # Expected output to assess whether it is safe to remove temporary input expected_out = [f for f in os.listdir(".") if f.endswith("_trim.fastq.gz")] if clear == "true" and len(expected_out) == 2: for fq in fastq_pairs: # Get real path of fastq files, following symlinks rp = os.path.realpath(fq) logger.debug("Removing temporary fastq file path: {}".format(rp)) if re.match(".*/work/.{2}/.{30}/.*", rp): os.remove(rp)
[ "Cleans", "the", "working", "directory", "of", "unwanted", "temporary", "files" ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/templates/trimmomatic.py#L242-L262
[ "def", "clean_up", "(", "fastq_pairs", ",", "clear", ")", ":", "# Find unpaired fastq files", "unpaired_fastq", "=", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "\".\"", ")", "if", "f", ".", "endswith", "(", "\"_U.fastq.gz\"", ")", "]", "# Remove unpaired fastq files, if any", "for", "fpath", "in", "unpaired_fastq", ":", "os", ".", "remove", "(", "fpath", ")", "# Expected output to assess whether it is safe to remove temporary input", "expected_out", "=", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "\".\"", ")", "if", "f", ".", "endswith", "(", "\"_trim.fastq.gz\"", ")", "]", "if", "clear", "==", "\"true\"", "and", "len", "(", "expected_out", ")", "==", "2", ":", "for", "fq", "in", "fastq_pairs", ":", "# Get real path of fastq files, following symlinks", "rp", "=", "os", ".", "path", ".", "realpath", "(", "fq", ")", "logger", ".", "debug", "(", "\"Removing temporary fastq file path: {}\"", ".", "format", "(", "rp", ")", ")", "if", "re", ".", "match", "(", "\".*/work/.{2}/.{30}/.*\"", ",", "rp", ")", ":", "os", ".", "remove", "(", "rp", ")" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
test
merge_default_adapters
Merges the default adapter files in the trimmomatic adapters directory Returns ------- str Path with the merged adapters file.
flowcraft/templates/trimmomatic.py
def merge_default_adapters(): """Merges the default adapter files in the trimmomatic adapters directory Returns ------- str Path with the merged adapters file. """ default_adapters = [os.path.join(ADAPTERS_PATH, x) for x in os.listdir(ADAPTERS_PATH)] filepath = os.path.join(os.getcwd(), "default_adapters.fasta") with open(filepath, "w") as fh, \ fileinput.input(default_adapters) as in_fh: for line in in_fh: fh.write("{}{}".format(line, "\\n")) return filepath
def merge_default_adapters(): """Merges the default adapter files in the trimmomatic adapters directory Returns ------- str Path with the merged adapters file. """ default_adapters = [os.path.join(ADAPTERS_PATH, x) for x in os.listdir(ADAPTERS_PATH)] filepath = os.path.join(os.getcwd(), "default_adapters.fasta") with open(filepath, "w") as fh, \ fileinput.input(default_adapters) as in_fh: for line in in_fh: fh.write("{}{}".format(line, "\\n")) return filepath
[ "Merges", "the", "default", "adapters", "file", "in", "the", "trimmomatic", "adapters", "directory" ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/templates/trimmomatic.py#L265-L283
[ "def", "merge_default_adapters", "(", ")", ":", "default_adapters", "=", "[", "os", ".", "path", ".", "join", "(", "ADAPTERS_PATH", ",", "x", ")", "for", "x", "in", "os", ".", "listdir", "(", "ADAPTERS_PATH", ")", "]", "filepath", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "\"default_adapters.fasta\"", ")", "with", "open", "(", "filepath", ",", "\"w\"", ")", "as", "fh", ",", "fileinput", ".", "input", "(", "default_adapters", ")", "as", "in_fh", ":", "for", "line", "in", "in_fh", ":", "fh", ".", "write", "(", "\"{}{}\"", ".", "format", "(", "line", ",", "\"\\\\n\"", ")", ")", "return", "filepath" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
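merge_default_adapters concatenates every adapter FASTA with fileinput.input, which chains several files into one iterator. The record writes the literal "\\n" because this template is rendered by Nextflow before Python runs it; plain Python would write a real newline, as in this self-contained sketch with made-up file names:

import fileinput

# create two tiny hypothetical adapter files so the sketch is self-contained
for name, seq in [("adapters_a.fasta", ">A\nACGT\n"), ("adapters_b.fasta", ">B\nTTGC\n")]:
    with open(name, "w") as fh:
        fh.write(seq)

with open("default_adapters.fasta", "w") as out, \
        fileinput.input(["adapters_a.fasta", "adapters_b.fasta"]) as in_fh:
    for line in in_fh:
        # normalize each line to end with exactly one newline
        out.write(line.rstrip("\n") + "\n")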
test
main
Main executor of the trimmomatic template. Parameters ---------- sample_id : str Sample Identification string. fastq_pair : list Two element list containing the paired FastQ files. trim_range : list Two element list containing the trimming range. trim_opts : list Four element list containing several trimmomatic options: [*SLIDINGWINDOW*; *LEADING*; *TRAILING*; *MINLEN*] phred : int Guessed phred score for the sample. The phred score is a generated output from :py:class:`templates.integrity_coverage`. adapters_file : str Path to adapters file. If not provided, or the path is not available, the default adapters bundled with Trimmomatic will be used. clear : str Can be either 'true' or 'false'. If 'true', the input fastq files will be removed at the end of the run, IF they are in the working directory
flowcraft/templates/trimmomatic.py
def main(sample_id, fastq_pair, trim_range, trim_opts, phred, adapters_file, clear): """ Main executor of the trimmomatic template. Parameters ---------- sample_id : str Sample Identification string. fastq_pair : list Two element list containing the paired FastQ files. trim_range : list Two element list containing the trimming range. trim_opts : list Four element list containing several trimmomatic options: [*SLIDINGWINDOW*; *LEADING*; *TRAILING*; *MINLEN*] phred : int Guessed phred score for the sample. The phred score is a generated output from :py:class:`templates.integrity_coverage`. adapters_file : str Path to adapters file. If not provided, or the path is not available, the default adapters bundled with Trimmomatic will be used. clear : str Can be either 'true' or 'false'. If 'true', the input fastq files will be removed at the end of the run, IF they are in the working directory """ logger.info("Starting trimmomatic") # Create base CLI cli = [ "java", "-Xmx{}".format("$task.memory"[:-1].lower().replace(" ", "")), "-jar", TRIM_PATH.strip(), "PE", "-threads", "$task.cpus" ] # If the phred encoding was detected, provide it try: # Check if the provided PHRED can be converted to int phred = int(phred) phred_flag = "-phred{}".format(str(phred)) cli += [phred_flag] # Could not detect phred encoding. Do not add explicit encoding to # trimmomatic and let it guess except ValueError: pass # Add input samples to CLI cli += fastq_pair # Add output file names output_names = [] for i in range(len(fastq_pair)): output_names.append("{}_{}_trim.fastq.gz".format( SAMPLE_ID, str(i + 1))) output_names.append("{}_{}_U.fastq.gz".format( SAMPLE_ID, str(i + 1))) cli += output_names if trim_range != ["None"]: cli += [ "CROP:{}".format(trim_range[1]), "HEADCROP:{}".format(trim_range[0]), ] if os.path.exists(adapters_file): logger.debug("Using the provided adapters file '{}'".format( adapters_file)) else: logger.debug("Adapters file '{}' not provided or does not exist. Using" " default adapters".format(adapters_file)) adapters_file = merge_default_adapters() cli += [ "ILLUMINACLIP:{}:3:30:10:6:true".format(adapters_file) ] # create log file in temporary dir to avoid issues when running on a docker container in macOS logfile = os.path.join(tempfile.mkdtemp(prefix='tmp'), "{}_trimlog.txt".format(sample_id)) # Add trimmomatic options cli += [ "SLIDINGWINDOW:{}".format(trim_opts[0]), "LEADING:{}".format(trim_opts[1]), "TRAILING:{}".format(trim_opts[2]), "MINLEN:{}".format(trim_opts[3]), "TOPHRED33", "-trimlog", logfile ] logger.debug("Running trimmomatic subprocess with command: {}".format(cli)) p = subprocess.Popen(cli, stdout=PIPE, stderr=PIPE) stdout, stderr = p.communicate() # Attempt to decode STDERR output from bytes. If unsuccessful, coerce to # string try: stderr = stderr.decode("utf8") except (UnicodeDecodeError, AttributeError): stderr = str(stderr) logger.info("Finished trimmomatic subprocess with STDOUT:\\n" "======================================\\n{}".format(stdout)) logger.info("Finished trimmomatic subprocess with STDERR:\\n" "======================================\\n{}".format(stderr)) logger.info("Finished trimmomatic with return code: {}".format( p.returncode)) trimmomatic_log(logfile, sample_id) if p.returncode == 0 and os.path.exists("{}_1_trim.fastq.gz".format( SAMPLE_ID)): clean_up(fastq_pair, clear) # Check if trimmomatic ran successfully. If not, write the error message # to the status channel and exit. with open(".status", "w") as status_fh: if p.returncode != 0: status_fh.write("fail") return else: status_fh.write("pass")
def main(sample_id, fastq_pair, trim_range, trim_opts, phred, adapters_file, clear): """ Main executor of the trimmomatic template. Parameters ---------- sample_id : str Sample Identification string. fastq_pair : list Two element list containing the paired FastQ files. trim_range : list Two element list containing the trimming range. trim_opts : list Four element list containing several trimmomatic options: [*SLIDINGWINDOW*; *LEADING*; *TRAILING*; *MINLEN*] phred : int Guessed phred score for the sample. The phred score is a generated output from :py:class:`templates.integrity_coverage`. adapters_file : str Path to adapters file. If not provided, or the path is not available, the default adapters bundled with Trimmomatic will be used. clear : str Can be either 'true' or 'false'. If 'true', the input fastq files will be removed at the end of the run, IF they are in the working directory """ logger.info("Starting trimmomatic") # Create base CLI cli = [ "java", "-Xmx{}".format("$task.memory"[:-1].lower().replace(" ", "")), "-jar", TRIM_PATH.strip(), "PE", "-threads", "$task.cpus" ] # If the phred encoding was detected, provide it try: # Check if the provided PHRED can be converted to int phred = int(phred) phred_flag = "-phred{}".format(str(phred)) cli += [phred_flag] # Could not detect phred encoding. Do not add explicit encoding to # trimmomatic and let it guess except ValueError: pass # Add input samples to CLI cli += fastq_pair # Add output file names output_names = [] for i in range(len(fastq_pair)): output_names.append("{}_{}_trim.fastq.gz".format( SAMPLE_ID, str(i + 1))) output_names.append("{}_{}_U.fastq.gz".format( SAMPLE_ID, str(i + 1))) cli += output_names if trim_range != ["None"]: cli += [ "CROP:{}".format(trim_range[1]), "HEADCROP:{}".format(trim_range[0]), ] if os.path.exists(adapters_file): logger.debug("Using the provided adapters file '{}'".format( adapters_file)) else: logger.debug("Adapters file '{}' not provided or does not exist. Using" " default adapters".format(adapters_file)) adapters_file = merge_default_adapters() cli += [ "ILLUMINACLIP:{}:3:30:10:6:true".format(adapters_file) ] # create log file in temporary dir to avoid issues when running on a docker container in macOS logfile = os.path.join(tempfile.mkdtemp(prefix='tmp'), "{}_trimlog.txt".format(sample_id)) # Add trimmomatic options cli += [ "SLIDINGWINDOW:{}".format(trim_opts[0]), "LEADING:{}".format(trim_opts[1]), "TRAILING:{}".format(trim_opts[2]), "MINLEN:{}".format(trim_opts[3]), "TOPHRED33", "-trimlog", logfile ] logger.debug("Running trimmomatic subprocess with command: {}".format(cli)) p = subprocess.Popen(cli, stdout=PIPE, stderr=PIPE) stdout, stderr = p.communicate() # Attempt to decode STDERR output from bytes. If unsuccessful, coerce to # string try: stderr = stderr.decode("utf8") except (UnicodeDecodeError, AttributeError): stderr = str(stderr) logger.info("Finished trimmomatic subprocess with STDOUT:\\n" "======================================\\n{}".format(stdout)) logger.info("Finished trimmomatic subprocess with STDERR:\\n" "======================================\\n{}".format(stderr)) logger.info("Finished trimmomatic with return code: {}".format( p.returncode)) trimmomatic_log(logfile, sample_id) if p.returncode == 0 and os.path.exists("{}_1_trim.fastq.gz".format( SAMPLE_ID)): clean_up(fastq_pair, clear) # Check if trimmomatic ran successfully. If not, write the error message # to the status channel and exit. with open(".status", "w") as status_fh: if p.returncode != 0: status_fh.write("fail") return else: status_fh.write("pass")
[ "Main", "executor", "of", "the", "trimmomatic", "template", "." ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/templates/trimmomatic.py#L287-L413
[ "def", "main", "(", "sample_id", ",", "fastq_pair", ",", "trim_range", ",", "trim_opts", ",", "phred", ",", "adapters_file", ",", "clear", ")", ":", "logger", ".", "info", "(", "\"Starting trimmomatic\"", ")", "# Create base CLI", "cli", "=", "[", "\"java\"", ",", "\"-Xmx{}\"", ".", "format", "(", "\"$task.memory\"", "[", ":", "-", "1", "]", ".", "lower", "(", ")", ".", "replace", "(", "\" \"", ",", "\"\"", ")", ")", ",", "\"-jar\"", ",", "TRIM_PATH", ".", "strip", "(", ")", ",", "\"PE\"", ",", "\"-threads\"", ",", "\"$task.cpus\"", "]", "# If the phred encoding was detected, provide it", "try", ":", "# Check if the provided PHRED can be converted to int", "phred", "=", "int", "(", "phred", ")", "phred_flag", "=", "\"-phred{}\"", ".", "format", "(", "str", "(", "phred", ")", ")", "cli", "+=", "[", "phred_flag", "]", "# Could not detect phred encoding. Do not add explicit encoding to", "# trimmomatic and let it guess", "except", "ValueError", ":", "pass", "# Add input samples to CLI", "cli", "+=", "fastq_pair", "# Add output file names", "output_names", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "fastq_pair", ")", ")", ":", "output_names", ".", "append", "(", "\"{}_{}_trim.fastq.gz\"", ".", "format", "(", "SAMPLE_ID", ",", "str", "(", "i", "+", "1", ")", ")", ")", "output_names", ".", "append", "(", "\"{}_{}_U.fastq.gz\"", ".", "format", "(", "SAMPLE_ID", ",", "str", "(", "i", "+", "1", ")", ")", ")", "cli", "+=", "output_names", "if", "trim_range", "!=", "[", "\"None\"", "]", ":", "cli", "+=", "[", "\"CROP:{}\"", ".", "format", "(", "trim_range", "[", "1", "]", ")", ",", "\"HEADCROP:{}\"", ".", "format", "(", "trim_range", "[", "0", "]", ")", ",", "]", "if", "os", ".", "path", ".", "exists", "(", "adapters_file", ")", ":", "logger", ".", "debug", "(", "\"Using the provided adapters file '{}'\"", ".", "format", "(", "adapters_file", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"Adapters file '{}' not provided or does not exist. Using\"", "\" default adapters\"", ".", "format", "(", "adapters_file", ")", ")", "adapters_file", "=", "merge_default_adapters", "(", ")", "cli", "+=", "[", "\"ILLUMINACLIP:{}:3:30:10:6:true\"", ".", "format", "(", "adapters_file", ")", "]", "#create log file im temporary dir to avoid issues when running on a docker container in macOS", "logfile", "=", "os", ".", "path", ".", "join", "(", "tempfile", ".", "mkdtemp", "(", "prefix", "=", "'tmp'", ")", ",", "\"{}_trimlog.txt\"", ".", "format", "(", "sample_id", ")", ")", "# Add trimmomatic options", "cli", "+=", "[", "\"SLIDINGWINDOW:{}\"", ".", "format", "(", "trim_opts", "[", "0", "]", ")", ",", "\"LEADING:{}\"", ".", "format", "(", "trim_opts", "[", "1", "]", ")", ",", "\"TRAILING:{}\"", ".", "format", "(", "trim_opts", "[", "2", "]", ")", ",", "\"MINLEN:{}\"", ".", "format", "(", "trim_opts", "[", "3", "]", ")", ",", "\"TOPHRED33\"", ",", "\"-trimlog\"", ",", "logfile", "]", "logger", ".", "debug", "(", "\"Running trimmomatic subprocess with command: {}\"", ".", "format", "(", "cli", ")", ")", "p", "=", "subprocess", ".", "Popen", "(", "cli", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ")", "stdout", ",", "stderr", "=", "p", ".", "communicate", "(", ")", "# Attempt to decode STDERR output from bytes. 
If unsuccessful, coerce to", "# string", "try", ":", "stderr", "=", "stderr", ".", "decode", "(", "\"utf8\"", ")", "except", "(", "UnicodeDecodeError", ",", "AttributeError", ")", ":", "stderr", "=", "str", "(", "stderr", ")", "logger", ".", "info", "(", "\"Finished trimmomatic subprocess with STDOUT:\\\\n\"", "\"======================================\\\\n{}\"", ".", "format", "(", "stdout", ")", ")", "logger", ".", "info", "(", "\"Finished trimmomatic subprocesswith STDERR:\\\\n\"", "\"======================================\\\\n{}\"", ".", "format", "(", "stderr", ")", ")", "logger", ".", "info", "(", "\"Finished trimmomatic with return code: {}\"", ".", "format", "(", "p", ".", "returncode", ")", ")", "trimmomatic_log", "(", "logfile", ",", "sample_id", ")", "if", "p", ".", "returncode", "==", "0", "and", "os", ".", "path", ".", "exists", "(", "\"{}_1_trim.fastq.gz\"", ".", "format", "(", "SAMPLE_ID", ")", ")", ":", "clean_up", "(", "fastq_pair", ",", "clear", ")", "# Check if trimmomatic ran successfully. If not, write the error message", "# to the status channel and exit.", "with", "open", "(", "\".status\"", ",", "\"w\"", ")", "as", "status_fh", ":", "if", "p", ".", "returncode", "!=", "0", ":", "status_fh", ".", "write", "(", "\"fail\"", ")", "return", "else", ":", "status_fh", ".", "write", "(", "\"pass\"", ")" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
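The main template builds the Trimmomatic command as an argument list (so no shell quoting is involved), runs it with subprocess.Popen, and decodes stderr defensively. A reduced sketch of that pattern, using the Python interpreter itself as a stand-in command so the example runs anywhere; a real call would substitute the java/Trimmomatic invocation:

import subprocess
import sys
from subprocess import PIPE

# build the command as a list so no shell quoting is needed; a real call
# would look like ["java", "-Xmx4g", "-jar", trimmomatic_jar, "PE", ...]
cli = [sys.executable, "--version"]

p = subprocess.Popen(cli, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()

# bytes in, text out: decode defensively, as the record does
try:
    stderr = stderr.decode("utf8")
except (UnicodeDecodeError, AttributeError):
    stderr = str(stderr)

print(p.returncode, stdout.decode("utf8").strip() or stderr.strip())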
test
depth_file_reader
Function that parses the samtools depth file and creates 3 dictionaries that will be useful to make the outputs of this script, both the tabular file and the json file that may be imported by pATLAS Parameters ---------- depth_file: textIO the path to depth file for each sample Returns ------- depth_dic_coverage: dict dictionary with the coverage per position for each plasmid
flowcraft/templates/mapping2json.py
def depth_file_reader(depth_file): """ Function that parses the samtools depth file and creates 3 dictionaries that will be useful to make the outputs of this script, both the tabular file and the json file that may be imported by pATLAS Parameters ---------- depth_file: textIO the path to depth file for each sample Returns ------- depth_dic_coverage: dict dictionary with the coverage per position for each plasmid """ # dict to store the mean coverage for each reference depth_dic_coverage = {} for line in depth_file: tab_split = line.split() # split by any white space reference = "_".join(tab_split[0].strip().split("_")[0:3]) # store # only the gi for the reference position = tab_split[1] num_reads_align = float(tab_split[2].rstrip()) if reference not in depth_dic_coverage: depth_dic_coverage[reference] = {} depth_dic_coverage[reference][position] = num_reads_align logger.info("Finished parsing depth file.") depth_file.close() logger.debug("Size of dict_cov: {} kb".format( asizeof(depth_dic_coverage)/1024)) return depth_dic_coverage
def depth_file_reader(depth_file): """ Function that parses the samtools depth file and creates 3 dictionaries that will be useful to make the outputs of this script, both the tabular file and the json file that may be imported by pATLAS Parameters ---------- depth_file: textIO the path to depth file for each sample Returns ------- depth_dic_coverage: dict dictionary with the coverage per position for each plasmid """ # dict to store the mean coverage for each reference depth_dic_coverage = {} for line in depth_file: tab_split = line.split() # split by any white space reference = "_".join(tab_split[0].strip().split("_")[0:3]) # store # only the gi for the reference position = tab_split[1] num_reads_align = float(tab_split[2].rstrip()) if reference not in depth_dic_coverage: depth_dic_coverage[reference] = {} depth_dic_coverage[reference][position] = num_reads_align logger.info("Finished parsing depth file.") depth_file.close() logger.debug("Size of dict_cov: {} kb".format( asizeof(depth_dic_coverage)/1024)) return depth_dic_coverage
[ "Function", "that", "parse", "samtools", "depth", "file", "and", "creates", "3", "dictionaries", "that", "will", "be", "useful", "to", "make", "the", "outputs", "of", "this", "script", "both", "the", "tabular", "file", "and", "the", "json", "file", "that", "may", "be", "imported", "by", "pATLAS" ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/templates/mapping2json.py#L74-L112
[ "def", "depth_file_reader", "(", "depth_file", ")", ":", "# dict to store the mean coverage for each reference", "depth_dic_coverage", "=", "{", "}", "for", "line", "in", "depth_file", ":", "tab_split", "=", "line", ".", "split", "(", ")", "# split by any white space", "reference", "=", "\"_\"", ".", "join", "(", "tab_split", "[", "0", "]", ".", "strip", "(", ")", ".", "split", "(", "\"_\"", ")", "[", "0", ":", "3", "]", ")", "# store", "# only the gi for the reference", "position", "=", "tab_split", "[", "1", "]", "num_reads_align", "=", "float", "(", "tab_split", "[", "2", "]", ".", "rstrip", "(", ")", ")", "if", "reference", "not", "in", "depth_dic_coverage", ":", "depth_dic_coverage", "[", "reference", "]", "=", "{", "}", "depth_dic_coverage", "[", "reference", "]", "[", "position", "]", "=", "num_reads_align", "logger", ".", "info", "(", "\"Finished parsing depth file.\"", ")", "depth_file", ".", "close", "(", ")", "logger", ".", "debug", "(", "\"Size of dict_cov: {} kb\"", ".", "format", "(", "asizeof", "(", "depth_dic_coverage", ")", "/", "1024", ")", ")", "return", "depth_dic_coverage" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
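samtools depth emits "reference<TAB>position<TAB>coverage" lines, and the record above folds them into a dict of dicts keyed by reference, then position. A couple of made-up records are enough to show that shape; setdefault is an equivalent alternative to the explicit if-not-in check:

lines = [
    "ACC_1_gi\t1\t12",
    "ACC_1_gi\t2\t15",
    "ACC_2_gi\t1\t3",
]

coverage = {}
for line in lines:
    ref, pos, depth = line.split()
    # setdefault creates the inner dict on first sight of each reference
    coverage.setdefault(ref, {})[pos] = float(depth)

print(coverage)
# {'ACC_1_gi': {'1': 12.0, '2': 15.0}, 'ACC_2_gi': {'1': 3.0}}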
test
main
Function that handles the inputs required to parse depth files from bowtie and dumps a dict to a json file that can be imported into pATLAS. Parameters ---------- depth_file: str the path to depth file for each sample json_dict: str the file that contains the dictionary with keys and values for accessions and their respective lengths cutoff: str the cutoff used to trim the unwanted matches for the minimum coverage results from mapping. This value may range between 0 and 1. sample_id: str the id of the sample being parsed
flowcraft/templates/mapping2json.py
def main(depth_file, json_dict, cutoff, sample_id): """ Function that handles the inputs required to parse depth files from bowtie and dumps a dict to a json file that can be imported into pATLAS. Parameters ---------- depth_file: str the path to depth file for each sample json_dict: str the file that contains the dictionary with keys and values for accessions and their respective lengths cutoff: str the cutoff used to trim the unwanted matches for the minimum coverage results from mapping. This value may range between 0 and 1. sample_id: str the id of the sample being parsed """ # check for the appropriate value for the cutoff value for coverage results logger.debug("Cutoff value: {}. Type: {}".format(cutoff, type(cutoff))) try: cutoff_val = float(cutoff) if cutoff_val < 0.4: logger.warning("This cutoff value will generate a high volume of " "plot data. Therefore '.report.json' can be too big") except ValueError: logger.error("Cutoff value should be a string such as: '0.6'. " "The outputted value: {}. Make sure to provide an " "appropriate value for --cov_cutoff".format(cutoff)) sys.exit(1) # loads dict from file, this file is provided in docker image plasmid_length = json.load(open(json_dict)) if plasmid_length: logger.info("Loaded dictionary of plasmid lengths") else: logger.error("Something went wrong and plasmid lengths dictionary " "could not be loaded. Check if process received this " "param successfully.") sys.exit(1) # read depth file depth_file_in = open(depth_file) # first reads the depth file and generates dictionaries to handle the input # to a simpler format logger.info("Reading depth file and creating dictionary to dump.") depth_dic_coverage = depth_file_reader(depth_file_in) percentage_bases_covered, dict_cov = generate_jsons(depth_dic_coverage, plasmid_length, cutoff_val) if percentage_bases_covered and dict_cov: logger.info("percentage_bases_covered length: {}".format( str(len(percentage_bases_covered)))) logger.info("dict_cov length: {}".format(str(len(dict_cov)))) else: logger.error("Both dicts that dump to JSON file or .report.json are " "empty.") # then dump to file logger.info("Dumping to {}".format("{}_mapping.json".format(depth_file))) with open("{}_mapping.json".format(depth_file), "w") as output_json: output_json.write(json.dumps(percentage_bases_covered)) json_dic = { "tableRow": [{ "sample": sample_id, "data": [{ "header": "Mapping", "table": "plasmids", "patlas_mapping": percentage_bases_covered, "value": len(percentage_bases_covered) }] }], "sample": sample_id, "patlas_mapping": percentage_bases_covered, "plotData": [{ "sample": sample_id, "data": { "patlasMappingSliding": dict_cov }, }] } logger.debug("Size of dict_cov: {} kb".format(asizeof(json_dic)/1024)) logger.info("Writing to .report.json") with open(".report.json", "w") as json_report: json_report.write(json.dumps(json_dic, separators=(",", ":")))
def main(depth_file, json_dict, cutoff, sample_id): """ Function that handles the inputs required to parse depth files from bowtie and dumps a dict to a json file that can be imported into pATLAS. Parameters ---------- depth_file: str the path to depth file for each sample json_dict: str the file that contains the dictionary with keys and values for accessions and their respective lengths cutoff: str the cutoff used to trim the unwanted matches for the minimum coverage results from mapping. This value may range between 0 and 1. sample_id: str the id of the sample being parsed """ # check for the appropriate value for the cutoff value for coverage results logger.debug("Cutoff value: {}. Type: {}".format(cutoff, type(cutoff))) try: cutoff_val = float(cutoff) if cutoff_val < 0.4: logger.warning("This cutoff value will generate a high volume of " "plot data. Therefore '.report.json' can be too big") except ValueError: logger.error("Cutoff value should be a string such as: '0.6'. " "The outputted value: {}. Make sure to provide an " "appropriate value for --cov_cutoff".format(cutoff)) sys.exit(1) # loads dict from file, this file is provided in docker image plasmid_length = json.load(open(json_dict)) if plasmid_length: logger.info("Loaded dictionary of plasmid lengths") else: logger.error("Something went wrong and plasmid lengths dictionary " "could not be loaded. Check if process received this " "param successfully.") sys.exit(1) # read depth file depth_file_in = open(depth_file) # first reads the depth file and generates dictionaries to handle the input # to a simpler format logger.info("Reading depth file and creating dictionary to dump.") depth_dic_coverage = depth_file_reader(depth_file_in) percentage_bases_covered, dict_cov = generate_jsons(depth_dic_coverage, plasmid_length, cutoff_val) if percentage_bases_covered and dict_cov: logger.info("percentage_bases_covered length: {}".format( str(len(percentage_bases_covered)))) logger.info("dict_cov length: {}".format(str(len(dict_cov)))) else: logger.error("Both dicts that dump to JSON file or .report.json are " "empty.") # then dump to file logger.info("Dumping to {}".format("{}_mapping.json".format(depth_file))) with open("{}_mapping.json".format(depth_file), "w") as output_json: output_json.write(json.dumps(percentage_bases_covered)) json_dic = { "tableRow": [{ "sample": sample_id, "data": [{ "header": "Mapping", "table": "plasmids", "patlas_mapping": percentage_bases_covered, "value": len(percentage_bases_covered) }] }], "sample": sample_id, "patlas_mapping": percentage_bases_covered, "plotData": [{ "sample": sample_id, "data": { "patlasMappingSliding": dict_cov }, }] } logger.debug("Size of dict_cov: {} kb".format(asizeof(json_dic)/1024)) logger.info("Writing to .report.json") with open(".report.json", "w") as json_report: json_report.write(json.dumps(json_dic, separators=(",", ":")))
[ "Function", "that", "handles", "the", "inputs", "required", "to", "parse", "depth", "files", "from", "bowtie", "and", "dumps", "a", "dict", "to", "a", "json", "file", "that", "can", "be", "imported", "into", "pATLAS", "." ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/templates/mapping2json.py#L195-L287
[ "def", "main", "(", "depth_file", ",", "json_dict", ",", "cutoff", ",", "sample_id", ")", ":", "# check for the appropriate value for the cutoff value for coverage results", "logger", ".", "debug", "(", "\"Cutoff value: {}. Type: {}\"", ".", "format", "(", "cutoff", ",", "type", "(", "cutoff", ")", ")", ")", "try", ":", "cutoff_val", "=", "float", "(", "cutoff", ")", "if", "cutoff_val", "<", "0.4", ":", "logger", ".", "warning", "(", "\"This cutoff value will generate a high volume of \"", "\"plot data. Therefore '.report.json' can be too big\"", ")", "except", "ValueError", ":", "logger", ".", "error", "(", "\"Cutoff value should be a string such as: '0.6'. \"", "\"The outputted value: {}. Make sure to provide an \"", "\"appropriate value for --cov_cutoff\"", ".", "format", "(", "cutoff", ")", ")", "sys", ".", "exit", "(", "1", ")", "# loads dict from file, this file is provided in docker image", "plasmid_length", "=", "json", ".", "load", "(", "open", "(", "json_dict", ")", ")", "if", "plasmid_length", ":", "logger", ".", "info", "(", "\"Loaded dictionary of plasmid lengths\"", ")", "else", ":", "logger", ".", "error", "(", "\"Something went wrong and plasmid lengths dictionary\"", "\"could not be loaded. Check if process received this\"", "\"param successfully.\"", ")", "sys", ".", "exit", "(", "1", ")", "# read depth file", "depth_file_in", "=", "open", "(", "depth_file", ")", "# first reads the depth file and generates dictionaries to handle the input", "# to a simpler format", "logger", ".", "info", "(", "\"Reading depth file and creating dictionary to dump.\"", ")", "depth_dic_coverage", "=", "depth_file_reader", "(", "depth_file_in", ")", "percentage_bases_covered", ",", "dict_cov", "=", "generate_jsons", "(", "depth_dic_coverage", ",", "plasmid_length", ",", "cutoff_val", ")", "if", "percentage_bases_covered", "and", "dict_cov", ":", "logger", ".", "info", "(", "\"percentage_bases_covered length: {}\"", ".", "format", "(", "str", "(", "len", "(", "percentage_bases_covered", ")", ")", ")", ")", "logger", ".", "info", "(", "\"dict_cov length: {}\"", ".", "format", "(", "str", "(", "len", "(", "dict_cov", ")", ")", ")", ")", "else", ":", "logger", ".", "error", "(", "\"Both dicts that dump to JSON file or .report.json are \"", "\"empty.\"", ")", "# then dump do file", "logger", ".", "info", "(", "\"Dumping to {}\"", ".", "format", "(", "\"{}_mapping.json\"", ".", "format", "(", "depth_file", ")", ")", ")", "with", "open", "(", "\"{}_mapping.json\"", ".", "format", "(", "depth_file", ")", ",", "\"w\"", ")", "as", "output_json", ":", "output_json", ".", "write", "(", "json", ".", "dumps", "(", "percentage_bases_covered", ")", ")", "json_dic", "=", "{", "\"tableRow\"", ":", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"data\"", ":", "[", "{", "\"header\"", ":", "\"Mapping\"", ",", "\"table\"", ":", "\"plasmids\"", ",", "\"patlas_mapping\"", ":", "percentage_bases_covered", ",", "\"value\"", ":", "len", "(", "percentage_bases_covered", ")", "}", "]", "}", "]", ",", "\"sample\"", ":", "sample_id", ",", "\"patlas_mapping\"", ":", "percentage_bases_covered", ",", "\"plotData\"", ":", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"data\"", ":", "{", "\"patlasMappingSliding\"", ":", "dict_cov", "}", ",", "}", "]", "}", "logger", ".", "debug", "(", "\"Size of dict_cov: {} kb\"", ".", "format", "(", "asizeof", "(", "json_dic", ")", "/", "1024", ")", ")", "logger", ".", "info", "(", "\"Writing to .report.json\"", ")", "with", "open", "(", "\".report.json\"", ",", "\"w\"", ")", "as", 
"json_report", ":", "json_report", ".", "write", "(", "json", ".", "dumps", "(", "json_dic", ",", "separators", "=", "(", "\",\"", ",", "\":\"", ")", ")", ")" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
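The record above serializes the report with json.dumps(..., separators=(",", ":")), which strips the default whitespace; that matters when the .report.json payload carries per-position coverage for many plasmids. A small sketch of the size difference, using a made-up report dict:

import json

report = {"sample": "sample_A", "patlas_mapping": {"ACC_1": 0.87}}

# separators=(",", ":") drops the spaces json.dumps inserts by default
compact = json.dumps(report, separators=(",", ":"))
print(compact)  # {"sample":"sample_A","patlas_mapping":{"ACC_1":0.87}}
print(len(compact), len(json.dumps(report)))  # compact form is shorter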
test
Process._set_template
Sets the path to the appropriate jinja template file When a Process instance is initialized, this method will fetch the location of the appropriate template file, based on the ``template`` argument. It will raise an exception if the template file is not found. Otherwise, it will set the :py:attr:`Process.template_path` attribute.
flowcraft/generator/process.py
def _set_template(self, template): """Sets the path to the appropriate jinja template file When a Process instance is initialized, this method will fetch the location of the appropriate template file, based on the ``template`` argument. It will raise an exception if the template file is not found. Otherwise, it will set the :py:attr:`Process.template_path` attribute. """ # Set template directory tpl_dir = join(dirname(abspath(__file__)), "templates") # Set template file path tpl_path = join(tpl_dir, template + ".nf") if not os.path.exists(tpl_path): raise eh.ProcessError( "Template {} does not exist".format(tpl_path)) self._template_path = join(tpl_dir, template + ".nf")
def _set_template(self, template): """Sets the path to the appropriate jinja template file When a Process instance is initialized, this method will fetch the location of the appropriate template file, based on the ``template`` argument. It will raise an exception if the template file is not found. Otherwise, it will set the :py:attr:`Process.template_path` attribute. """ # Set template directory tpl_dir = join(dirname(abspath(__file__)), "templates") # Set template file path tpl_path = join(tpl_dir, template + ".nf") if not os.path.exists(tpl_path): raise eh.ProcessError( "Template {} does not exist".format(tpl_path)) self._template_path = join(tpl_dir, template + ".nf")
[ "Sets", "the", "path", "to", "the", "appropriate", "jinja", "template", "file" ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/generator/process.py#L308-L328
[ "def", "_set_template", "(", "self", ",", "template", ")", ":", "# Set template directory", "tpl_dir", "=", "join", "(", "dirname", "(", "abspath", "(", "__file__", ")", ")", ",", "\"templates\"", ")", "# Set template file path", "tpl_path", "=", "join", "(", "tpl_dir", ",", "template", "+", "\".nf\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "tpl_path", ")", ":", "raise", "eh", ".", "ProcessError", "(", "\"Template {} does not exist\"", ".", "format", "(", "tpl_path", ")", ")", "self", ".", "_template_path", "=", "join", "(", "tpl_dir", ",", "template", "+", "\".nf\"", ")" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
test
Process.set_main_channel_names
Sets the main channel names based on the provided input and output channel suffixes. This is performed when connecting processes. Parameters ---------- input_suffix : str Suffix added to the input channel. Should be based on the lane and an arbitrary unique id output_suffix : str Suffix added to the output channel. Should be based on the lane and an arbitrary unique id lane : int Sets the lane of the process.
flowcraft/generator/process.py
def set_main_channel_names(self, input_suffix, output_suffix, lane): """Sets the main channel names based on the provided input and output channel suffixes. This is performed when connecting processes. Parameters ---------- input_suffix : str Suffix added to the input channel. Should be based on the lane and an arbitrary unique id output_suffix : str Suffix added to the output channel. Should be based on the lane and an arbitrary unique id lane : int Sets the lane of the process. """ self.input_channel = "{}_in_{}".format(self.template, input_suffix) self.output_channel = "{}_out_{}".format(self.template, output_suffix) self.lane = lane
def set_main_channel_names(self, input_suffix, output_suffix, lane): """Sets the main channel names based on the provided input and output channel suffixes. This is performed when connecting processes. Parameters ---------- input_suffix : str Suffix added to the input channel. Should be based on the lane and an arbitrary unique id output_suffix : str Suffix added to the output channel. Should be based on the lane and an arbitrary unique id lane : int Sets the lane of the process. """ self.input_channel = "{}_in_{}".format(self.template, input_suffix) self.output_channel = "{}_out_{}".format(self.template, output_suffix) self.lane = lane
[ "Sets", "the", "main", "channel", "names", "based", "on", "the", "provide", "input", "and", "output", "channel", "suffixes", ".", "This", "is", "performed", "when", "connecting", "processes", "." ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/generator/process.py#L330-L348
[ "def", "set_main_channel_names", "(", "self", ",", "input_suffix", ",", "output_suffix", ",", "lane", ")", ":", "self", ".", "input_channel", "=", "\"{}_in_{}\"", ".", "format", "(", "self", ".", "template", ",", "input_suffix", ")", "self", ".", "output_channel", "=", "\"{}_out_{}\"", ".", "format", "(", "self", ".", "template", ",", "output_suffix", ")", "self", ".", "lane", "=", "lane" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
test
Process.get_user_channel
Returns the main raw channel for the process Provided with at least a channel name, this method returns the raw channel name and specification (the nextflow string definition) for the process. By default, it will fork from the raw input of the process' :attr:`~Process.input_type` attribute. However, this behaviour can be overridden by providing the ``input_type`` argument. If the specified or inferred input type exists in the :attr:`~Process.RAW_MAPPING` dictionary, the channel info dictionary will be retrieved along with the specified input channel. Otherwise, it will return None. An example of the returned dictionary is:: {"input_channel": "myChannel", "params": "fastq", "channel": "IN_fastq_raw", "channel_str":"IN_fastq_raw = Channel.fromFilePairs(params.fastq)" } Returns ------- dict or None Dictionary with the complete raw channel info. None if no channel is found.
flowcraft/generator/process.py
def get_user_channel(self, input_channel, input_type=None): """Returns the main raw channel for the process Provided with at least a channel name, this method returns the raw channel name and specification (the nextflow string definition) for the process. By default, it will fork from the raw input of the process' :attr:`~Process.input_type` attribute. However, this behaviour can be overridden by providing the ``input_type`` argument. If the specified or inferred input type exists in the :attr:`~Process.RAW_MAPPING` dictionary, the channel info dictionary will be retrieved along with the specified input channel. Otherwise, it will return None. An example of the returned dictionary is:: {"input_channel": "myChannel", "params": "fastq", "channel": "IN_fastq_raw", "channel_str":"IN_fastq_raw = Channel.fromFilePairs(params.fastq)" } Returns ------- dict or None Dictionary with the complete raw channel info. None if no channel is found. """ res = {"input_channel": input_channel} itype = input_type if input_type else self.input_type if itype in self.RAW_MAPPING: channel_info = self.RAW_MAPPING[itype] return {**res, **channel_info}
def get_user_channel(self, input_channel, input_type=None): """Returns the main raw channel for the process Provided with at least a channel name, this method returns the raw channel name and specification (the nextflow string definition) for the process. By default, it will fork from the raw input of the process' :attr:`~Process.input_type` attribute. However, this behaviour can be overridden by providing the ``input_type`` argument. If the specified or inferred input type exists in the :attr:`~Process.RAW_MAPPING` dictionary, the channel info dictionary will be retrieved along with the specified input channel. Otherwise, it will return None. An example of the returned dictionary is:: {"input_channel": "myChannel", "params": "fastq", "channel": "IN_fastq_raw", "channel_str":"IN_fastq_raw = Channel.fromFilePairs(params.fastq)" } Returns ------- dict or None Dictionary with the complete raw channel info. None if no channel is found. """ res = {"input_channel": input_channel} itype = input_type if input_type else self.input_type if itype in self.RAW_MAPPING: channel_info = self.RAW_MAPPING[itype] return {**res, **channel_info}
[ "Returns", "the", "main", "raw", "channel", "for", "the", "process" ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/generator/process.py#L362-L399
[ "def", "get_user_channel", "(", "self", ",", "input_channel", ",", "input_type", "=", "None", ")", ":", "res", "=", "{", "\"input_channel\"", ":", "input_channel", "}", "itype", "=", "input_type", "if", "input_type", "else", "self", ".", "input_type", "if", "itype", "in", "self", ".", "RAW_MAPPING", ":", "channel_info", "=", "self", ".", "RAW_MAPPING", "[", "itype", "]", "return", "{", "*", "*", "res", ",", "*", "*", "channel_info", "}" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
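The return value of `get_user_channel` is just the caller's channel name merged with the class-level `RAW_MAPPING` entry for the input type. A self-contained sketch of that merge, reusing the dictionary from the docstring's own example (the real mapping lives on the Process class):

RAW_MAPPING = {
    "fastq": {
        "params": "fastq",
        "channel": "IN_fastq_raw",
        "channel_str": "IN_fastq_raw = Channel.fromFilePairs(params.fastq)",
    }
}

def get_user_channel(input_channel, input_type):
    res = {"input_channel": input_channel}
    if input_type in RAW_MAPPING:
        return {**res, **RAW_MAPPING[input_type]}
    # Falls through to an implicit None, as in the method above

print(get_user_channel("myChannel", "fastq"))
# {'input_channel': 'myChannel', 'params': 'fastq', 'channel': 'IN_fastq_raw',
#  'channel_str': 'IN_fastq_raw = Channel.fromFilePairs(params.fastq)'}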
test
Process.render
Wrapper to the jinja2 render method from a template file Parameters ---------- template : str Path to template file. context : dict Dictionary with kwargs context to populate the template
flowcraft/generator/process.py
def render(template, context): """Wrapper to the jinja2 render method from a template file Parameters ---------- template : str Path to template file. context : dict Dictionary with kwargs context to populate the template """ path, filename = os.path.split(template) return jinja2.Environment( loader=jinja2.FileSystemLoader(path or './') ).get_template(filename).render(context)
def render(template, context): """Wrapper to the jinja2 render method from a template file Parameters ---------- template : str Path to template file. context : dict Dictionary with kwargs context to populate the template """ path, filename = os.path.split(template) return jinja2.Environment( loader=jinja2.FileSystemLoader(path or './') ).get_template(filename).render(context)
[ "Wrapper", "to", "the", "jinja2", "render", "method", "from", "a", "template", "file" ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/generator/process.py#L402-L417
[ "def", "render", "(", "template", ",", "context", ")", ":", "path", ",", "filename", "=", "os", ".", "path", ".", "split", "(", "template", ")", "return", "jinja2", ".", "Environment", "(", "loader", "=", "jinja2", ".", "FileSystemLoader", "(", "path", "or", "'./'", ")", ")", ".", "get_template", "(", "filename", ")", ".", "render", "(", "context", ")" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
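Since `render` is defined without `self`, it behaves as a plain function when accessed on the class. A minimal check using a throwaway template written to a temporary directory; the file name, its contents, and the `{{ name }}` placeholder are invented for the demo, and the import assumes the flowcraft package is installed:

import os
import tempfile
from flowcraft.generator.process import Process  # module path taken from the record above

with tempfile.TemporaryDirectory() as d:
    tpl = os.path.join(d, "demo.nf")
    with open(tpl, "w") as fh:
        fh.write("process {{ name }} { echo true }")
    # Single braces are literal in Jinja2, so only {{ name }} is substituted
    print(Process.render(tpl, {"name": "fastqc"}))
    # -> process fastqc { echo true }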
test
Process.template_str
Class property that returns a populated template string This property allows the template of a particular process to be dynamically generated and returned when doing ``Process.template_str``. Returns ------- x : str String with the complete and populated process template
flowcraft/generator/process.py
def template_str(self): """Class property that returns a populated template string This property allows the template of a particular process to be dynamically generated and returned when doing ``Process.template_str``. Returns ------- x : str String with the complete and populated process template """ if not self._context: raise eh.ProcessError("Channels must be setup first using the " "set_channels method") logger.debug("Setting context for template {}: {}".format( self.template, self._context )) x = self.render(self._template_path, self._context) return x
def template_str(self): """Class property that returns a populated template string This property allows the template of a particular process to be dynamically generated and returned when doing ``Process.template_str``. Returns ------- x : str String with the complete and populated process template """ if not self._context: raise eh.ProcessError("Channels must be setup first using the " "set_channels method") logger.debug("Setting context for template {}: {}".format( self.template, self._context )) x = self.render(self._template_path, self._context) return x
[ "Class", "property", "that", "returns", "a", "populated", "template", "string" ]
assemblerflow/flowcraft
python
https://github.com/assemblerflow/flowcraft/blob/fc3f4bddded1efc76006600016dc71a06dd908c0/flowcraft/generator/process.py#L420-L442
[ "def", "template_str", "(", "self", ")", ":", "if", "not", "self", ".", "_context", ":", "raise", "eh", ".", "ProcessError", "(", "\"Channels must be setup first using the \"", "\"set_channels method\"", ")", "logger", ".", "debug", "(", "\"Setting context for template {}: {}\"", ".", "format", "(", "self", ".", "template", ",", "self", ".", "_context", ")", ")", "x", "=", "self", ".", "render", "(", "self", ".", "_template_path", ",", "self", ".", "_context", ")", "return", "x" ]
fc3f4bddded1efc76006600016dc71a06dd908c0
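Putting the last two records together: `template_str` refuses to render until a context exists, then delegates to `render`. A toy stand-in reproducing that guard-then-render flow; the class, its attributes being set by hand, and `RuntimeError` (in place of `eh.ProcessError`) are all assumptions for the demo:

import os
import jinja2

class TemplateHolder:
    """Toy stand-in for the guard and render call in Process.template_str."""
    def __init__(self, template_path):
        self._template_path = template_path
        self._context = None  # normally populated by set_channels

    @property
    def template_str(self):
        if not self._context:
            # The source raises eh.ProcessError here
            raise RuntimeError("Channels must be setup first using the set_channels method")
        path, filename = os.path.split(self._template_path)
        return jinja2.Environment(
            loader=jinja2.FileSystemLoader(path or "./")
        ).get_template(filename).render(self._context)

holder = TemplateHolder("templates/example.nf")  # hypothetical template file
holder._context = {"name": "fastqc"}             # hand-set; set_channels does this in the source
print(holder.template_str)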