partition
stringclasses
3 values
func_name
stringlengths
1
134
docstring
stringlengths
1
46.9k
path
stringlengths
4
223
original_string
stringlengths
75
104k
code
stringlengths
75
104k
docstring_tokens
listlengths
1
1.97k
repo
stringlengths
7
55
language
stringclasses
1 value
url
stringlengths
87
315
code_tokens
listlengths
19
28.4k
sha
stringlengths
40
40
valid
dict_as_tuple_list
Format a dict to a list of tuples :param d: the dictionary :param as_list: return a list of lists rather than a list of tuples :return: formatted dictionary list
deeputils/common.py
def dict_as_tuple_list(d, as_list=False):
    """
    Format a dict to a list of tuples.

    :param d: the dictionary
    :param as_list: return a list of lists rather than a list of tuples
    :return: formatted dictionary list
    """
    # Comprehension / list() replaces the manual append loop; iteration
    # order is that of d.items() (insertion order on Python 3.7+).
    if as_list:
        return [[k, v] for k, v in d.items()]
    return list(d.items())
def dict_as_tuple_list(d, as_list=False): """ Format a dict to a list of tuples :param d: the dictionary :param as_list: return a list of lists rather than a list of tuples :return: formatted dictionary list """ dd = list() for k, v in d.items(): dd.append([k, v] if as_list else (k, v)) return dd
[ "Format", "a", "dict", "to", "a", "list", "of", "tuples", ":", "param", "d", ":", "the", "dictionary", ":", "param", "as_list", ":", "return", "a", "list", "of", "lists", "rather", "than", "a", "list", "of", "tuples", ":", "return", ":", "formatted", "dictionary", "list" ]
valency/deeputils
python
https://github.com/valency/deeputils/blob/27efd91668de0223ed8b07cfadf2151632521520/deeputils/common.py#L199-L209
[ "def", "dict_as_tuple_list", "(", "d", ",", "as_list", "=", "False", ")", ":", "dd", "=", "list", "(", ")", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", ":", "dd", ".", "append", "(", "[", "k", ",", "v", "]", "if", "as_list", "else", "(", "k", ",", "v", ")", ")", "return", "dd" ]
27efd91668de0223ed8b07cfadf2151632521520
valid
tuple_search
Search tuple array by index and value :param t: tuple array :param i: index of the value in each tuple :param v: value :return: the first tuple in the array with the specific index / value
deeputils/common.py
def tuple_search(t, i, v):
    """
    Search tuple array by index and value.

    :param t: tuple array
    :param i: index of the value in each tuple
    :param v: value
    :return: the first tuple in the array with the specific index / value
    """
    # Lazily scan and stop at the first match; default to None when absent.
    return next((entry for entry in t if entry[i] == v), None)
def tuple_search(t, i, v): """ Search tuple array by index and value :param t: tuple array :param i: index of the value in each tuple :param v: value :return: the first tuple in the array with the specific index / value """ for e in t: if e[i] == v: return e return None
[ "Search", "tuple", "array", "by", "index", "and", "value", ":", "param", "t", ":", "tuple", "array", ":", "param", "i", ":", "index", "of", "the", "value", "in", "each", "tuple", ":", "param", "v", ":", "value", ":", "return", ":", "the", "first", "tuple", "in", "the", "array", "with", "the", "specific", "index", "/", "value" ]
valency/deeputils
python
https://github.com/valency/deeputils/blob/27efd91668de0223ed8b07cfadf2151632521520/deeputils/common.py#L212-L223
[ "def", "tuple_search", "(", "t", ",", "i", ",", "v", ")", ":", "for", "e", "in", "t", ":", "if", "e", "[", "i", "]", "==", "v", ":", "return", "e", "return", "None" ]
27efd91668de0223ed8b07cfadf2151632521520
valid
parse_comment_telemetry
Looks for base91 telemetry found in comment field Returns [remaining_text, telemetry]
aprslib/parsing/telemetry.py
def parse_comment_telemetry(text):
    """
    Looks for base91 telemetry found in comment field
    Returns [remaining_text, telemetry]
    """
    parsed = {}
    # Telemetry is an even-length run of base91 chars ('!'..'{') between two
    # '|' delimiters; the groups capture text before, the payload, and after.
    match = re.findall(r"^(.*?)\|([!-{]{4,14})\|(.*)$", text)
    if match and len(match[0][1]) % 2 == 0:
        text, telemetry, post = match[0]
        # Strip the |...| block out of the comment, keep surrounding text.
        text += post
        temp = [0] * 7
        for i in range(7):
            # Decode up to 7 two-char base91 values; slices past the end of
            # `telemetry` are '' and decode to 0.
            temp[i] = base91.to_decimal(telemetry[i*2:i*2+2])
        parsed.update({
            'telemetry': {
                'seq': temp[0],
                'vals': temp[1:6]
            }
        })
        # NOTE(review): temp[6] is always an int here (never ''), so this
        # comparison is always True and 'bits' is always emitted — presumably
        # the intent was to skip 'bits' when the 7th value pair is absent;
        # confirm against upstream before changing.
        if temp[6] != '':
            parsed['telemetry'].update({
                'bits': "{0:08b}".format(temp[6] & 0xFF)[::-1]
            })
    return (text, parsed)
def parse_comment_telemetry(text): """ Looks for base91 telemetry found in comment field Returns [remaining_text, telemetry] """ parsed = {} match = re.findall(r"^(.*?)\|([!-{]{4,14})\|(.*)$", text) if match and len(match[0][1]) % 2 == 0: text, telemetry, post = match[0] text += post temp = [0] * 7 for i in range(7): temp[i] = base91.to_decimal(telemetry[i*2:i*2+2]) parsed.update({ 'telemetry': { 'seq': temp[0], 'vals': temp[1:6] } }) if temp[6] != '': parsed['telemetry'].update({ 'bits': "{0:08b}".format(temp[6] & 0xFF)[::-1] }) return (text, parsed)
[ "Looks", "for", "base91", "telemetry", "found", "in", "comment", "field", "Returns", "[", "remaining_text", "telemetry", "]" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/parsing/telemetry.py#L12-L40
[ "def", "parse_comment_telemetry", "(", "text", ")", ":", "parsed", "=", "{", "}", "match", "=", "re", ".", "findall", "(", "r\"^(.*?)\\|([!-{]{4,14})\\|(.*)$\"", ",", "text", ")", "if", "match", "and", "len", "(", "match", "[", "0", "]", "[", "1", "]", ")", "%", "2", "==", "0", ":", "text", ",", "telemetry", ",", "post", "=", "match", "[", "0", "]", "text", "+=", "post", "temp", "=", "[", "0", "]", "*", "7", "for", "i", "in", "range", "(", "7", ")", ":", "temp", "[", "i", "]", "=", "base91", ".", "to_decimal", "(", "telemetry", "[", "i", "*", "2", ":", "i", "*", "2", "+", "2", "]", ")", "parsed", ".", "update", "(", "{", "'telemetry'", ":", "{", "'seq'", ":", "temp", "[", "0", "]", ",", "'vals'", ":", "temp", "[", "1", ":", "6", "]", "}", "}", ")", "if", "temp", "[", "6", "]", "!=", "''", ":", "parsed", "[", "'telemetry'", "]", ".", "update", "(", "{", "'bits'", ":", "\"{0:08b}\"", ".", "format", "(", "temp", "[", "6", "]", "&", "0xFF", ")", "[", ":", ":", "-", "1", "]", "}", ")", "return", "(", "text", ",", "parsed", ")" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
parse
Parses an APRS packet and returns a dict with decoded data - All attributes are in metric units
aprslib/parsing/__init__.py
def parse(packet):
    """
    Parses an APRS packet and returns a dict with decoded data
    - All attributes are in metric units
    """
    # Only str/unicode/bytes input is accepted (string_type_parse is a
    # module-level tuple of accepted types).
    if not isinstance(packet, string_type_parse):
        raise TypeError("Expected packet to be str/unicode/bytes, got %s", type(packet))

    if len(packet) == 0:
        raise ParseError("packet is empty", packet)

    # attempt to detect encoding
    if isinstance(packet, bytes):
        packet = _unicode_packet(packet)

    packet = packet.rstrip("\r\n")
    logger.debug("Parsing: %s", packet)

    # split into head and body
    try:
        (head, body) = packet.split(':', 1)
    except:
        # NOTE(review): only ValueError can occur here (no ':' in packet);
        # the bare except is broader than needed.
        raise ParseError("packet has no body", packet)

    if len(body) == 0:
        raise ParseError("packet body is empty", packet)

    parsed = {
        'raw': packet,
        }

    # parse head
    try:
        parsed.update(parse_header(head))
    except ParseError as msg:
        # Re-raise with the raw packet attached for the caller's benefit.
        raise ParseError(str(msg), packet)

    # parse body
    packet_type = body[0]
    body = body[1:]

    # '>' packets are the only type allowed an empty body (presumably the
    # status-report type — confirm against the APRS spec).
    if len(body) == 0 and packet_type != '>':
        raise ParseError("packet body is empty after packet type character", packet)

    # attempt to parse the body
    try:
        _try_toparse_body(packet_type, body, parsed)

    # capture ParseErrors and attach the packet
    except (UnknownFormat, ParseError) as exp:
        exp.packet = packet
        raise

    # if we fail all attempts to parse, try beacon packet
    if 'format' not in parsed:
        # Beacon fallback only accepted for well-known generic destination
        # callsign prefixes.
        if not re.match(r"^(AIR.*|ALL.*|AP.*|BEACON|CQ.*|GPS.*|DF.*|DGPS.*|"
                        "DRILL.*|DX.*|ID.*|JAVA.*|MAIL.*|MICE.*|QST.*|QTH.*|"
                        "RTCM.*|SKY.*|SPACE.*|SPC.*|SYM.*|TEL.*|TEST.*|TLM.*|"
                        "WX.*|ZIP.*|UIDIGI)$", parsed['to']):
            raise UnknownFormat("format is not supported", packet)

        parsed.update({
            'format': 'beacon',
            'text': packet_type + body,
            })

    logger.debug("Parsed ok.")
    return parsed
def parse(packet): """ Parses an APRS packet and returns a dict with decoded data - All attributes are in metric units """ if not isinstance(packet, string_type_parse): raise TypeError("Expected packet to be str/unicode/bytes, got %s", type(packet)) if len(packet) == 0: raise ParseError("packet is empty", packet) # attempt to detect encoding if isinstance(packet, bytes): packet = _unicode_packet(packet) packet = packet.rstrip("\r\n") logger.debug("Parsing: %s", packet) # split into head and body try: (head, body) = packet.split(':', 1) except: raise ParseError("packet has no body", packet) if len(body) == 0: raise ParseError("packet body is empty", packet) parsed = { 'raw': packet, } # parse head try: parsed.update(parse_header(head)) except ParseError as msg: raise ParseError(str(msg), packet) # parse body packet_type = body[0] body = body[1:] if len(body) == 0 and packet_type != '>': raise ParseError("packet body is empty after packet type character", packet) # attempt to parse the body try: _try_toparse_body(packet_type, body, parsed) # capture ParseErrors and attach the packet except (UnknownFormat, ParseError) as exp: exp.packet = packet raise # if we fail all attempts to parse, try beacon packet if 'format' not in parsed: if not re.match(r"^(AIR.*|ALL.*|AP.*|BEACON|CQ.*|GPS.*|DF.*|DGPS.*|" "DRILL.*|DX.*|ID.*|JAVA.*|MAIL.*|MICE.*|QST.*|QTH.*|" "RTCM.*|SKY.*|SPACE.*|SPC.*|SYM.*|TEL.*|TEST.*|TLM.*|" "WX.*|ZIP.*|UIDIGI)$", parsed['to']): raise UnknownFormat("format is not supported", packet) parsed.update({ 'format': 'beacon', 'text': packet_type + body, }) logger.debug("Parsed ok.") return parsed
[ "Parses", "an", "APRS", "packet", "and", "returns", "a", "dict", "with", "decoded", "data" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/parsing/__init__.py#L66-L135
[ "def", "parse", "(", "packet", ")", ":", "if", "not", "isinstance", "(", "packet", ",", "string_type_parse", ")", ":", "raise", "TypeError", "(", "\"Expected packet to be str/unicode/bytes, got %s\"", ",", "type", "(", "packet", ")", ")", "if", "len", "(", "packet", ")", "==", "0", ":", "raise", "ParseError", "(", "\"packet is empty\"", ",", "packet", ")", "# attempt to detect encoding", "if", "isinstance", "(", "packet", ",", "bytes", ")", ":", "packet", "=", "_unicode_packet", "(", "packet", ")", "packet", "=", "packet", ".", "rstrip", "(", "\"\\r\\n\"", ")", "logger", ".", "debug", "(", "\"Parsing: %s\"", ",", "packet", ")", "# split into head and body", "try", ":", "(", "head", ",", "body", ")", "=", "packet", ".", "split", "(", "':'", ",", "1", ")", "except", ":", "raise", "ParseError", "(", "\"packet has no body\"", ",", "packet", ")", "if", "len", "(", "body", ")", "==", "0", ":", "raise", "ParseError", "(", "\"packet body is empty\"", ",", "packet", ")", "parsed", "=", "{", "'raw'", ":", "packet", ",", "}", "# parse head", "try", ":", "parsed", ".", "update", "(", "parse_header", "(", "head", ")", ")", "except", "ParseError", "as", "msg", ":", "raise", "ParseError", "(", "str", "(", "msg", ")", ",", "packet", ")", "# parse body", "packet_type", "=", "body", "[", "0", "]", "body", "=", "body", "[", "1", ":", "]", "if", "len", "(", "body", ")", "==", "0", "and", "packet_type", "!=", "'>'", ":", "raise", "ParseError", "(", "\"packet body is empty after packet type character\"", ",", "packet", ")", "# attempt to parse the body", "try", ":", "_try_toparse_body", "(", "packet_type", ",", "body", ",", "parsed", ")", "# capture ParseErrors and attach the packet", "except", "(", "UnknownFormat", ",", "ParseError", ")", "as", "exp", ":", "exp", ".", "packet", "=", "packet", "raise", "# if we fail all attempts to parse, try beacon packet", "if", "'format'", "not", "in", "parsed", ":", "if", "not", "re", ".", "match", "(", 
"r\"^(AIR.*|ALL.*|AP.*|BEACON|CQ.*|GPS.*|DF.*|DGPS.*|\"", "\"DRILL.*|DX.*|ID.*|JAVA.*|MAIL.*|MICE.*|QST.*|QTH.*|\"", "\"RTCM.*|SKY.*|SPACE.*|SPC.*|SYM.*|TEL.*|TEST.*|TLM.*|\"", "\"WX.*|ZIP.*|UIDIGI)$\"", ",", "parsed", "[", "'to'", "]", ")", ":", "raise", "UnknownFormat", "(", "\"format is not supported\"", ",", "packet", ")", "parsed", ".", "update", "(", "{", "'format'", ":", "'beacon'", ",", "'text'", ":", "packet_type", "+", "body", ",", "}", ")", "logger", ".", "debug", "(", "\"Parsed ok.\"", ")", "return", "parsed" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
to_decimal
Takes a base91 char string and returns decimal
aprslib/base91.py
def to_decimal(text):
    """
    Takes a base91 char string and returns decimal
    """
    if not isinstance(text, string_type):
        raise TypeError("expected str or unicode, %s given" % type(text))

    # Characters outside '!'(0x21)..'{'(0x7b) are not valid base91 digits.
    if findall(r"[\x00-\x20\x7c-\xff]", text):
        raise ValueError("invalid character in sequence")

    # Leading '!' digits are zeros and carry no value.
    digits = text.lstrip('!')

    # Horner's scheme: accumulate left to right instead of summing powers.
    result = 0
    for char in digits:
        result = result * 91 + (ord(char) - 33)

    return result
def to_decimal(text): """ Takes a base91 char string and returns decimal """ if not isinstance(text, string_type): raise TypeError("expected str or unicode, %s given" % type(text)) if findall(r"[\x00-\x20\x7c-\xff]", text): raise ValueError("invalid character in sequence") text = text.lstrip('!') decimal = 0 length = len(text) - 1 for i, char in enumerate(text): decimal += (ord(char) - 33) * (91 ** (length - i)) return decimal if text != '' else 0
[ "Takes", "a", "base91", "char", "string", "and", "returns", "decimal" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/base91.py#L34-L51
[ "def", "to_decimal", "(", "text", ")", ":", "if", "not", "isinstance", "(", "text", ",", "string_type", ")", ":", "raise", "TypeError", "(", "\"expected str or unicode, %s given\"", "%", "type", "(", "text", ")", ")", "if", "findall", "(", "r\"[\\x00-\\x20\\x7c-\\xff]\"", ",", "text", ")", ":", "raise", "ValueError", "(", "\"invalid character in sequence\"", ")", "text", "=", "text", ".", "lstrip", "(", "'!'", ")", "decimal", "=", "0", "length", "=", "len", "(", "text", ")", "-", "1", "for", "i", ",", "char", "in", "enumerate", "(", "text", ")", ":", "decimal", "+=", "(", "ord", "(", "char", ")", "-", "33", ")", "*", "(", "91", "**", "(", "length", "-", "i", ")", ")", "return", "decimal", "if", "text", "!=", "''", "else", "0" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
from_decimal
Takes a decimal and returns base91 char string. With optional parameter for fix with output
aprslib/base91.py
def from_decimal(number, width=1):
    """
    Takes a decimal and returns base91 char string.
    With optional parameter for fixed-width output.

    :param number: non-negative integer to encode
    :param width: minimum length of the result, left-padded with '!'
    :return: base91-encoded string
    """
    text = []

    if not isinstance(number, int_type):
        raise TypeError("Expected number to be int, got %s", type(number))
    elif not isinstance(width, int_type):
        # Bug fix: this error previously reported type(number) instead of
        # type(width), producing a misleading message.
        raise TypeError("Expected width to be int, got %s", type(width))
    elif number < 0:
        raise ValueError("Expected number to be positive integer")
    elif number > 0:
        # Highest base91 digit position needed for this number.
        max_n = ceil(log(number) / log(91))

        for n in _range(int(max_n), -1, -1):
            quotient, number = divmod(number, 91**n)
            text.append(chr(33 + quotient))

    # lstrip drops leading zero digits ('!'); rjust pads to requested width.
    return "".join(text).lstrip('!').rjust(max(1, width), '!')
def from_decimal(number, width=1): """ Takes a decimal and returns base91 char string. With optional parameter for fix with output """ text = [] if not isinstance(number, int_type): raise TypeError("Expected number to be int, got %s", type(number)) elif not isinstance(width, int_type): raise TypeError("Expected width to be int, got %s", type(number)) elif number < 0: raise ValueError("Expected number to be positive integer") elif number > 0: max_n = ceil(log(number) / log(91)) for n in _range(int(max_n), -1, -1): quotient, number = divmod(number, 91**n) text.append(chr(33 + quotient)) return "".join(text).lstrip('!').rjust(max(1, width), '!')
[ "Takes", "a", "decimal", "and", "returns", "base91", "char", "string", ".", "With", "optional", "parameter", "for", "fix", "with", "output" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/base91.py#L54-L74
[ "def", "from_decimal", "(", "number", ",", "width", "=", "1", ")", ":", "text", "=", "[", "]", "if", "not", "isinstance", "(", "number", ",", "int_type", ")", ":", "raise", "TypeError", "(", "\"Expected number to be int, got %s\"", ",", "type", "(", "number", ")", ")", "elif", "not", "isinstance", "(", "width", ",", "int_type", ")", ":", "raise", "TypeError", "(", "\"Expected width to be int, got %s\"", ",", "type", "(", "number", ")", ")", "elif", "number", "<", "0", ":", "raise", "ValueError", "(", "\"Expected number to be positive integer\"", ")", "elif", "number", ">", "0", ":", "max_n", "=", "ceil", "(", "log", "(", "number", ")", "/", "log", "(", "91", ")", ")", "for", "n", "in", "_range", "(", "int", "(", "max_n", ")", ",", "-", "1", ",", "-", "1", ")", ":", "quotient", ",", "number", "=", "divmod", "(", "number", ",", "91", "**", "n", ")", "text", ".", "append", "(", "chr", "(", "33", "+", "quotient", ")", ")", "return", "\"\"", ".", "join", "(", "text", ")", ".", "lstrip", "(", "'!'", ")", ".", "rjust", "(", "max", "(", "1", ",", "width", ")", ",", "'!'", ")" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
passcode
Takes a CALLSIGN and returns passcode
aprslib/passcode.py
def passcode(callsign):
    """
    Takes a CALLSIGN and returns passcode
    """
    assert isinstance(callsign, str)

    # Only the base callsign (SSID stripped) is hashed, case-insensitively.
    base = callsign.split('-')[0].upper()

    result = 0x73e2
    for index, letter in enumerate(base):
        # Even positions XOR into the high byte, odd into the low byte.
        shift = 8 if index % 2 == 0 else 0
        result ^= ord(letter) << shift

    return result & 0x7fff
def passcode(callsign): """ Takes a CALLSIGN and returns passcode """ assert isinstance(callsign, str) callsign = callsign.split('-')[0].upper() code = 0x73e2 for i, char in enumerate(callsign): code ^= ord(char) << (8 if not i % 2 else 0) return code & 0x7fff
[ "Takes", "a", "CALLSIGN", "and", "returns", "passcode" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/passcode.py#L22-L34
[ "def", "passcode", "(", "callsign", ")", ":", "assert", "isinstance", "(", "callsign", ",", "str", ")", "callsign", "=", "callsign", ".", "split", "(", "'-'", ")", "[", "0", "]", ".", "upper", "(", ")", "code", "=", "0x73e2", "for", "i", ",", "char", "in", "enumerate", "(", "callsign", ")", ":", "code", "^=", "ord", "(", "char", ")", "<<", "(", "8", "if", "not", "i", "%", "2", "else", "0", ")", "return", "code", "&", "0x7fff" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
parse_header
Parses the header part of packet Returns a dict
aprslib/parsing/common.py
def parse_header(head):
    """
    Parses the header part of packet
    Returns a dict
    """
    try:
        (fromcall, path) = head.split('>', 1)
    except:
        # NOTE(review): only ValueError can occur here (no '>' present).
        raise ParseError("invalid packet header")

    # Source callsign: 1-9 chars, alphanumeric base plus optional -SSID.
    if (not 1 <= len(fromcall) <= 9 or
       not re.findall(r"^[a-z0-9]{0,9}(\-[a-z0-9]{1,8})?$", fromcall, re.I)):

        raise ParseError("fromcallsign is invalid")

    path = path.split(',')

    if len(path[0]) == 0:
        raise ParseError("no tocallsign in header")

    # First element after '>' is the destination; the rest is the digi path.
    tocall = path[0]
    path = path[1:]

    validate_callsign(tocall, "tocallsign")

    # Each path hop may carry a trailing '*' (heard/digipeated marker).
    for digi in path:
        if not re.findall(r"^[A-Z0-9\-]{1,9}\*?$", digi, re.I):
            raise ParseError("invalid callsign in path")

    parsed = {
        'from': fromcall,
        'to': tocall,
        'path': path,
        }

    # When the second-to-last hop is a q-code (q??), the last hop is taken
    # as the 'via' station — presumably the igate per APRS-IS q-construct
    # conventions; confirm against the APRS-IS spec.
    viacall = ""
    if len(path) >= 2 and re.match(r"^q..$", path[-2]):
        viacall = path[-1]

    parsed.update({'via': viacall})

    return parsed
def parse_header(head): """ Parses the header part of packet Returns a dict """ try: (fromcall, path) = head.split('>', 1) except: raise ParseError("invalid packet header") if (not 1 <= len(fromcall) <= 9 or not re.findall(r"^[a-z0-9]{0,9}(\-[a-z0-9]{1,8})?$", fromcall, re.I)): raise ParseError("fromcallsign is invalid") path = path.split(',') if len(path[0]) == 0: raise ParseError("no tocallsign in header") tocall = path[0] path = path[1:] validate_callsign(tocall, "tocallsign") for digi in path: if not re.findall(r"^[A-Z0-9\-]{1,9}\*?$", digi, re.I): raise ParseError("invalid callsign in path") parsed = { 'from': fromcall, 'to': tocall, 'path': path, } viacall = "" if len(path) >= 2 and re.match(r"^q..$", path[-2]): viacall = path[-1] parsed.update({'via': viacall}) return parsed
[ "Parses", "the", "header", "part", "of", "packet", "Returns", "a", "dict" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/parsing/common.py#L32-L73
[ "def", "parse_header", "(", "head", ")", ":", "try", ":", "(", "fromcall", ",", "path", ")", "=", "head", ".", "split", "(", "'>'", ",", "1", ")", "except", ":", "raise", "ParseError", "(", "\"invalid packet header\"", ")", "if", "(", "not", "1", "<=", "len", "(", "fromcall", ")", "<=", "9", "or", "not", "re", ".", "findall", "(", "r\"^[a-z0-9]{0,9}(\\-[a-z0-9]{1,8})?$\"", ",", "fromcall", ",", "re", ".", "I", ")", ")", ":", "raise", "ParseError", "(", "\"fromcallsign is invalid\"", ")", "path", "=", "path", ".", "split", "(", "','", ")", "if", "len", "(", "path", "[", "0", "]", ")", "==", "0", ":", "raise", "ParseError", "(", "\"no tocallsign in header\"", ")", "tocall", "=", "path", "[", "0", "]", "path", "=", "path", "[", "1", ":", "]", "validate_callsign", "(", "tocall", ",", "\"tocallsign\"", ")", "for", "digi", "in", "path", ":", "if", "not", "re", ".", "findall", "(", "r\"^[A-Z0-9\\-]{1,9}\\*?$\"", ",", "digi", ",", "re", ".", "I", ")", ":", "raise", "ParseError", "(", "\"invalid callsign in path\"", ")", "parsed", "=", "{", "'from'", ":", "fromcall", ",", "'to'", ":", "tocall", ",", "'path'", ":", "path", ",", "}", "viacall", "=", "\"\"", "if", "len", "(", "path", ")", ">=", "2", "and", "re", ".", "match", "(", "r\"^q..$\"", ",", "path", "[", "-", "2", "]", ")", ":", "viacall", "=", "path", "[", "-", "1", "]", "parsed", ".", "update", "(", "{", "'via'", ":", "viacall", "}", ")", "return", "parsed" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS.set_filter
Set a specified aprs-is filter for this connection
aprslib/inet.py
def set_filter(self, filter_text):
    """
    Set a specified aprs-is filter for this connection
    """
    self.filter = filter_text
    self.logger.info("Setting filter to: %s", self.filter)

    # Nothing to push while offline; the filter takes effect on connect.
    if not self._connected:
        return

    self._sendall("#filter %s\r\n" % self.filter)
def set_filter(self, filter_text): """ Set a specified aprs-is filter for this connection """ self.filter = filter_text self.logger.info("Setting filter to: %s", self.filter) if self._connected: self._sendall("#filter %s\r\n" % self.filter)
[ "Set", "a", "specified", "aprs", "-", "is", "filter", "for", "this", "connection" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L77-L86
[ "def", "set_filter", "(", "self", ",", "filter_text", ")", ":", "self", ".", "filter", "=", "filter_text", "self", ".", "logger", ".", "info", "(", "\"Setting filter to: %s\"", ",", "self", ".", "filter", ")", "if", "self", ".", "_connected", ":", "self", ".", "_sendall", "(", "\"#filter %s\\r\\n\"", "%", "self", ".", "filter", ")" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS.set_login
Set callsign and password
aprslib/inet.py
def set_login(self, callsign, passwd="-1", skip_login=False):
    """
    Set callsign and password.

    :param callsign: station callsign used for the APRS-IS login
    :param passwd: APRS-IS passcode (defaults to "-1")
    :param skip_login: when True, no login line is sent on connect
    """
    # Assign explicitly instead of self.__dict__.update(locals()), which
    # also injected a spurious `self` attribute (a reference cycle) —
    # presumably unintended; no caller should rely on it.
    self.callsign = callsign
    self.passwd = passwd
    self.skip_login = skip_login
def set_login(self, callsign, passwd="-1", skip_login=False): """ Set callsign and password """ self.__dict__.update(locals())
[ "Set", "callsign", "and", "password" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L88-L92
[ "def", "set_login", "(", "self", ",", "callsign", ",", "passwd", "=", "\"-1\"", ",", "skip_login", "=", "False", ")", ":", "self", ".", "__dict__", ".", "update", "(", "locals", "(", ")", ")" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS.connect
Initiate connection to APRS server and attempt to login blocking = False - Should we block until connected and logged-in retry = 30 - Retry interval in seconds
aprslib/inet.py
def connect(self, blocking=False, retry=30):
    """
    Initiate connection to APRS server and attempt to login

    blocking = False - Should we block until connected and logged-in
    retry = 30 - Retry interval in seconds
    """
    # Already connected: nothing to do.
    if self._connected:
        return

    while True:
        try:
            self._connect()
            if not self.skip_login:
                self._send_login()
            break
        except (LoginError, ConnectionError):
            # Non-blocking callers get the failure immediately.
            if not blocking:
                raise

            # Typo fix: message previously read "Retrying connection is %d
            # seconds."; also use logging's lazy %-args instead of eager
            # string interpolation.
            self.logger.info("Retrying connection in %d seconds.", retry)
            time.sleep(retry)
def connect(self, blocking=False, retry=30): """ Initiate connection to APRS server and attempt to login blocking = False - Should we block until connected and logged-in retry = 30 - Retry interval in seconds """ if self._connected: return while True: try: self._connect() if not self.skip_login: self._send_login() break except (LoginError, ConnectionError): if not blocking: raise self.logger.info("Retrying connection is %d seconds." % retry) time.sleep(retry)
[ "Initiate", "connection", "to", "APRS", "server", "and", "attempt", "to", "login" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L100-L122
[ "def", "connect", "(", "self", ",", "blocking", "=", "False", ",", "retry", "=", "30", ")", ":", "if", "self", ".", "_connected", ":", "return", "while", "True", ":", "try", ":", "self", ".", "_connect", "(", ")", "if", "not", "self", ".", "skip_login", ":", "self", ".", "_send_login", "(", ")", "break", "except", "(", "LoginError", ",", "ConnectionError", ")", ":", "if", "not", "blocking", ":", "raise", "self", ".", "logger", ".", "info", "(", "\"Retrying connection is %d seconds.\"", "%", "retry", ")", "time", ".", "sleep", "(", "retry", ")" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS.close
Closes the socket Called internally when Exceptions are raised
aprslib/inet.py
def close(self):
    """
    Closes the socket
    Called internally when Exceptions are raised
    """
    # Mark the session as down and drop any partially received buffer.
    self._connected = False
    self.buf = b''

    # The socket may never have been opened; only close a real one.
    current = self.sock
    if current is not None:
        current.close()
def close(self): """ Closes the socket Called internally when Exceptions are raised """ self._connected = False self.buf = b'' if self.sock is not None: self.sock.close()
[ "Closes", "the", "socket", "Called", "internally", "when", "Exceptions", "are", "raised" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L124-L134
[ "def", "close", "(", "self", ")", ":", "self", ".", "_connected", "=", "False", "self", ".", "buf", "=", "b''", "if", "self", ".", "sock", "is", "not", "None", ":", "self", ".", "sock", ".", "close", "(", ")" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS.sendall
Send a line, or multiple lines sperapted by '\\r\\n'
aprslib/inet.py
def sendall(self, line):
    """
    Send a line, or multiple lines separated by '\\r\\n'
    """
    # APRSPacket objects serialize to their string form first.
    if isinstance(line, APRSPacket):
        line = str(line)
    elif not isinstance(line, string_type):
        raise TypeError("Expected line to be str or APRSPacket, got %s", type(line))

    if not self._connected:
        raise ConnectionError("not connected")

    # Empty input is a silent no-op.
    if line == "":
        return

    # Normalize the terminator so exactly one CRLF is sent.
    payload = line.rstrip("\r\n") + "\r\n"

    try:
        self.sock.setblocking(1)
        self.sock.settimeout(5)
        self._sendall(payload)
    except socket.error as exp:
        # Any socket failure tears down the connection before re-raising.
        self.close()
        raise ConnectionError(str(exp))
def sendall(self, line): """ Send a line, or multiple lines sperapted by '\\r\\n' """ if isinstance(line, APRSPacket): line = str(line) elif not isinstance(line, string_type): raise TypeError("Expected line to be str or APRSPacket, got %s", type(line)) if not self._connected: raise ConnectionError("not connected") if line == "": return line = line.rstrip("\r\n") + "\r\n" try: self.sock.setblocking(1) self.sock.settimeout(5) self._sendall(line) except socket.error as exp: self.close() raise ConnectionError(str(exp))
[ "Send", "a", "line", "or", "multiple", "lines", "sperapted", "by", "\\\\", "r", "\\\\", "n" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L136-L158
[ "def", "sendall", "(", "self", ",", "line", ")", ":", "if", "isinstance", "(", "line", ",", "APRSPacket", ")", ":", "line", "=", "str", "(", "line", ")", "elif", "not", "isinstance", "(", "line", ",", "string_type", ")", ":", "raise", "TypeError", "(", "\"Expected line to be str or APRSPacket, got %s\"", ",", "type", "(", "line", ")", ")", "if", "not", "self", ".", "_connected", ":", "raise", "ConnectionError", "(", "\"not connected\"", ")", "if", "line", "==", "\"\"", ":", "return", "line", "=", "line", ".", "rstrip", "(", "\"\\r\\n\"", ")", "+", "\"\\r\\n\"", "try", ":", "self", ".", "sock", ".", "setblocking", "(", "1", ")", "self", ".", "sock", ".", "settimeout", "(", "5", ")", "self", ".", "_sendall", "(", "line", ")", "except", "socket", ".", "error", "as", "exp", ":", "self", ".", "close", "(", ")", "raise", "ConnectionError", "(", "str", "(", "exp", ")", ")" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS.consumer
When a position sentence is received, it will be passed to the callback function blocking: if true (default), runs forever, otherwise will return after one sentence You can still exit the loop, by raising StopIteration in the callback function immortal: When true, consumer will try to reconnect and stop propagation of Parse exceptions if false (default), consumer will return raw: when true, raw packet is passed to callback, otherwise the result from aprs.parse()
aprslib/inet.py
def consumer(self, callback, blocking=True, immortal=False, raw=False):
    """
    When a position sentence is received, it will be passed to the callback function

    blocking: if true (default), runs forever, otherwise will return after one sentence
              You can still exit the loop, by raising StopIteration in the callback function

    immortal: When true, consumer will try to reconnect and stop propagation of Parse exceptions
              if false (default), consumer will return

    raw: when true, raw packet is passed to callback, otherwise the result from aprs.parse()
    """
    if not self._connected:
        raise ConnectionError("not connected to a server")

    # Keep the last line around so the final except can log the offender.
    line = b''

    while True:
        try:
            for line in self._socket_readlines(blocking):
                # Lines starting with '#' are server chatter, not packets.
                if line[0:1] != b'#':
                    if raw:
                        callback(line)
                    else:
                        callback(self._parse(line))
                else:
                    self.logger.debug("Server: %s", line.decode('utf8'))
        except ParseError as exp:
            # Custom numeric levels: 11 sits just above DEBUG(10), 9 below.
            self.logger.log(11, "%s\n    Packet: %s", exp.message, exp.packet)
        except UnknownFormat as exp:
            self.logger.log(9, "%s\n    Packet: %s", exp.message, exp.packet)
        except LoginError as exp:
            self.logger.error("%s: %s", exp.__class__.__name__, exp.message)
        except (KeyboardInterrupt, SystemExit):
            # Never swallow interpreter shutdown signals.
            raise
        except (ConnectionDrop, ConnectionError):
            self.close()

            # Reconnect when immortal; otherwise surface the drop.
            if not immortal:
                raise
            else:
                self.connect(blocking=blocking)
                continue
        except GenericError:
            pass
        except StopIteration:
            # The callback's documented way to exit the loop.
            break
        except:
            # Unknown failure: log the raw line that triggered it, re-raise.
            self.logger.error("APRS Packet: %s", line)
            raise

        if not blocking:
            break
def consumer(self, callback, blocking=True, immortal=False, raw=False): """ When a position sentence is received, it will be passed to the callback function blocking: if true (default), runs forever, otherwise will return after one sentence You can still exit the loop, by raising StopIteration in the callback function immortal: When true, consumer will try to reconnect and stop propagation of Parse exceptions if false (default), consumer will return raw: when true, raw packet is passed to callback, otherwise the result from aprs.parse() """ if not self._connected: raise ConnectionError("not connected to a server") line = b'' while True: try: for line in self._socket_readlines(blocking): if line[0:1] != b'#': if raw: callback(line) else: callback(self._parse(line)) else: self.logger.debug("Server: %s", line.decode('utf8')) except ParseError as exp: self.logger.log(11, "%s\n Packet: %s", exp.message, exp.packet) except UnknownFormat as exp: self.logger.log(9, "%s\n Packet: %s", exp.message, exp.packet) except LoginError as exp: self.logger.error("%s: %s", exp.__class__.__name__, exp.message) except (KeyboardInterrupt, SystemExit): raise except (ConnectionDrop, ConnectionError): self.close() if not immortal: raise else: self.connect(blocking=blocking) continue except GenericError: pass except StopIteration: break except: self.logger.error("APRS Packet: %s", line) raise if not blocking: break
[ "When", "a", "position", "sentence", "is", "received", "it", "will", "be", "passed", "to", "the", "callback", "function" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L160-L213
[ "def", "consumer", "(", "self", ",", "callback", ",", "blocking", "=", "True", ",", "immortal", "=", "False", ",", "raw", "=", "False", ")", ":", "if", "not", "self", ".", "_connected", ":", "raise", "ConnectionError", "(", "\"not connected to a server\"", ")", "line", "=", "b''", "while", "True", ":", "try", ":", "for", "line", "in", "self", ".", "_socket_readlines", "(", "blocking", ")", ":", "if", "line", "[", "0", ":", "1", "]", "!=", "b'#'", ":", "if", "raw", ":", "callback", "(", "line", ")", "else", ":", "callback", "(", "self", ".", "_parse", "(", "line", ")", ")", "else", ":", "self", ".", "logger", ".", "debug", "(", "\"Server: %s\"", ",", "line", ".", "decode", "(", "'utf8'", ")", ")", "except", "ParseError", "as", "exp", ":", "self", ".", "logger", ".", "log", "(", "11", ",", "\"%s\\n Packet: %s\"", ",", "exp", ".", "message", ",", "exp", ".", "packet", ")", "except", "UnknownFormat", "as", "exp", ":", "self", ".", "logger", ".", "log", "(", "9", ",", "\"%s\\n Packet: %s\"", ",", "exp", ".", "message", ",", "exp", ".", "packet", ")", "except", "LoginError", "as", "exp", ":", "self", ".", "logger", ".", "error", "(", "\"%s: %s\"", ",", "exp", ".", "__class__", ".", "__name__", ",", "exp", ".", "message", ")", "except", "(", "KeyboardInterrupt", ",", "SystemExit", ")", ":", "raise", "except", "(", "ConnectionDrop", ",", "ConnectionError", ")", ":", "self", ".", "close", "(", ")", "if", "not", "immortal", ":", "raise", "else", ":", "self", ".", "connect", "(", "blocking", "=", "blocking", ")", "continue", "except", "GenericError", ":", "pass", "except", "StopIteration", ":", "break", "except", ":", "self", ".", "logger", ".", "error", "(", "\"APRS Packet: %s\"", ",", "line", ")", "raise", "if", "not", "blocking", ":", "break" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS._connect
Attemps connection to the server
aprslib/inet.py
def _connect(self): """ Attemps connection to the server """ self.logger.info("Attempting connection to %s:%s", self.server[0], self.server[1]) try: self._open_socket() peer = self.sock.getpeername() self.logger.info("Connected to %s", str(peer)) # 5 second timeout to receive server banner self.sock.setblocking(1) self.sock.settimeout(5) self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) banner = self.sock.recv(512) if is_py3: banner = banner.decode('latin-1') if banner[0] == "#": self.logger.debug("Banner: %s", banner.rstrip()) else: raise ConnectionError("invalid banner from server") except ConnectionError as e: self.logger.error(str(e)) self.close() raise except (socket.error, socket.timeout) as e: self.close() self.logger.error("Socket error: %s" % str(e)) if str(e) == "timed out": raise ConnectionError("no banner from server") else: raise ConnectionError(e) self._connected = True
def _connect(self): """ Attemps connection to the server """ self.logger.info("Attempting connection to %s:%s", self.server[0], self.server[1]) try: self._open_socket() peer = self.sock.getpeername() self.logger.info("Connected to %s", str(peer)) # 5 second timeout to receive server banner self.sock.setblocking(1) self.sock.settimeout(5) self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) banner = self.sock.recv(512) if is_py3: banner = banner.decode('latin-1') if banner[0] == "#": self.logger.debug("Banner: %s", banner.rstrip()) else: raise ConnectionError("invalid banner from server") except ConnectionError as e: self.logger.error(str(e)) self.close() raise except (socket.error, socket.timeout) as e: self.close() self.logger.error("Socket error: %s" % str(e)) if str(e) == "timed out": raise ConnectionError("no banner from server") else: raise ConnectionError(e) self._connected = True
[ "Attemps", "connection", "to", "the", "server" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L221-L263
[ "def", "_connect", "(", "self", ")", ":", "self", ".", "logger", ".", "info", "(", "\"Attempting connection to %s:%s\"", ",", "self", ".", "server", "[", "0", "]", ",", "self", ".", "server", "[", "1", "]", ")", "try", ":", "self", ".", "_open_socket", "(", ")", "peer", "=", "self", ".", "sock", ".", "getpeername", "(", ")", "self", ".", "logger", ".", "info", "(", "\"Connected to %s\"", ",", "str", "(", "peer", ")", ")", "# 5 second timeout to receive server banner", "self", ".", "sock", ".", "setblocking", "(", "1", ")", "self", ".", "sock", ".", "settimeout", "(", "5", ")", "self", ".", "sock", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_KEEPALIVE", ",", "1", ")", "banner", "=", "self", ".", "sock", ".", "recv", "(", "512", ")", "if", "is_py3", ":", "banner", "=", "banner", ".", "decode", "(", "'latin-1'", ")", "if", "banner", "[", "0", "]", "==", "\"#\"", ":", "self", ".", "logger", ".", "debug", "(", "\"Banner: %s\"", ",", "banner", ".", "rstrip", "(", ")", ")", "else", ":", "raise", "ConnectionError", "(", "\"invalid banner from server\"", ")", "except", "ConnectionError", "as", "e", ":", "self", ".", "logger", ".", "error", "(", "str", "(", "e", ")", ")", "self", ".", "close", "(", ")", "raise", "except", "(", "socket", ".", "error", ",", "socket", ".", "timeout", ")", "as", "e", ":", "self", ".", "close", "(", ")", "self", ".", "logger", ".", "error", "(", "\"Socket error: %s\"", "%", "str", "(", "e", ")", ")", "if", "str", "(", "e", ")", "==", "\"timed out\"", ":", "raise", "ConnectionError", "(", "\"no banner from server\"", ")", "else", ":", "raise", "ConnectionError", "(", "e", ")", "self", ".", "_connected", "=", "True" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS._send_login
Sends login string to server
aprslib/inet.py
def _send_login(self): """ Sends login string to server """ login_str = "user {0} pass {1} vers aprslib {3}{2}\r\n" login_str = login_str.format( self.callsign, self.passwd, (" filter " + self.filter) if self.filter != "" else "", __version__ ) self.logger.info("Sending login information") try: self._sendall(login_str) self.sock.settimeout(5) test = self.sock.recv(len(login_str) + 100) if is_py3: test = test.decode('latin-1') test = test.rstrip() self.logger.debug("Server: %s", test) _, _, callsign, status, _ = test.split(' ', 4) if callsign == "": raise LoginError("Server responded with empty callsign???") if callsign != self.callsign: raise LoginError("Server: %s" % test) if status != "verified," and self.passwd != "-1": raise LoginError("Password is incorrect") if self.passwd == "-1": self.logger.info("Login successful (receive only)") else: self.logger.info("Login successful") except LoginError as e: self.logger.error(str(e)) self.close() raise except: self.close() self.logger.error("Failed to login") raise LoginError("Failed to login")
def _send_login(self): """ Sends login string to server """ login_str = "user {0} pass {1} vers aprslib {3}{2}\r\n" login_str = login_str.format( self.callsign, self.passwd, (" filter " + self.filter) if self.filter != "" else "", __version__ ) self.logger.info("Sending login information") try: self._sendall(login_str) self.sock.settimeout(5) test = self.sock.recv(len(login_str) + 100) if is_py3: test = test.decode('latin-1') test = test.rstrip() self.logger.debug("Server: %s", test) _, _, callsign, status, _ = test.split(' ', 4) if callsign == "": raise LoginError("Server responded with empty callsign???") if callsign != self.callsign: raise LoginError("Server: %s" % test) if status != "verified," and self.passwd != "-1": raise LoginError("Password is incorrect") if self.passwd == "-1": self.logger.info("Login successful (receive only)") else: self.logger.info("Login successful") except LoginError as e: self.logger.error(str(e)) self.close() raise except: self.close() self.logger.error("Failed to login") raise LoginError("Failed to login")
[ "Sends", "login", "string", "to", "server" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L265-L310
[ "def", "_send_login", "(", "self", ")", ":", "login_str", "=", "\"user {0} pass {1} vers aprslib {3}{2}\\r\\n\"", "login_str", "=", "login_str", ".", "format", "(", "self", ".", "callsign", ",", "self", ".", "passwd", ",", "(", "\" filter \"", "+", "self", ".", "filter", ")", "if", "self", ".", "filter", "!=", "\"\"", "else", "\"\"", ",", "__version__", ")", "self", ".", "logger", ".", "info", "(", "\"Sending login information\"", ")", "try", ":", "self", ".", "_sendall", "(", "login_str", ")", "self", ".", "sock", ".", "settimeout", "(", "5", ")", "test", "=", "self", ".", "sock", ".", "recv", "(", "len", "(", "login_str", ")", "+", "100", ")", "if", "is_py3", ":", "test", "=", "test", ".", "decode", "(", "'latin-1'", ")", "test", "=", "test", ".", "rstrip", "(", ")", "self", ".", "logger", ".", "debug", "(", "\"Server: %s\"", ",", "test", ")", "_", ",", "_", ",", "callsign", ",", "status", ",", "_", "=", "test", ".", "split", "(", "' '", ",", "4", ")", "if", "callsign", "==", "\"\"", ":", "raise", "LoginError", "(", "\"Server responded with empty callsign???\"", ")", "if", "callsign", "!=", "self", ".", "callsign", ":", "raise", "LoginError", "(", "\"Server: %s\"", "%", "test", ")", "if", "status", "!=", "\"verified,\"", "and", "self", ".", "passwd", "!=", "\"-1\"", ":", "raise", "LoginError", "(", "\"Password is incorrect\"", ")", "if", "self", ".", "passwd", "==", "\"-1\"", ":", "self", ".", "logger", ".", "info", "(", "\"Login successful (receive only)\"", ")", "else", ":", "self", ".", "logger", ".", "info", "(", "\"Login successful\"", ")", "except", "LoginError", "as", "e", ":", "self", ".", "logger", ".", "error", "(", "str", "(", "e", ")", ")", "self", ".", "close", "(", ")", "raise", "except", ":", "self", ".", "close", "(", ")", "self", ".", "logger", ".", "error", "(", "\"Failed to login\"", ")", "raise", "LoginError", "(", "\"Failed to login\"", ")" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
IS._socket_readlines
Generator for complete lines, received from the server
aprslib/inet.py
def _socket_readlines(self, blocking=False): """ Generator for complete lines, received from the server """ try: self.sock.setblocking(0) except socket.error as e: self.logger.error("socket error when setblocking(0): %s" % str(e)) raise ConnectionDrop("connection dropped") while True: short_buf = b'' newline = b'\r\n' select.select([self.sock], [], [], None if blocking else 0) try: short_buf = self.sock.recv(4096) # sock.recv returns empty if the connection drops if not short_buf: self.logger.error("socket.recv(): returned empty") raise ConnectionDrop("connection dropped") except socket.error as e: self.logger.error("socket error on recv(): %s" % str(e)) if "Resource temporarily unavailable" in str(e): if not blocking: if len(self.buf) == 0: break self.buf += short_buf while newline in self.buf: line, self.buf = self.buf.split(newline, 1) yield line
def _socket_readlines(self, blocking=False): """ Generator for complete lines, received from the server """ try: self.sock.setblocking(0) except socket.error as e: self.logger.error("socket error when setblocking(0): %s" % str(e)) raise ConnectionDrop("connection dropped") while True: short_buf = b'' newline = b'\r\n' select.select([self.sock], [], [], None if blocking else 0) try: short_buf = self.sock.recv(4096) # sock.recv returns empty if the connection drops if not short_buf: self.logger.error("socket.recv(): returned empty") raise ConnectionDrop("connection dropped") except socket.error as e: self.logger.error("socket error on recv(): %s" % str(e)) if "Resource temporarily unavailable" in str(e): if not blocking: if len(self.buf) == 0: break self.buf += short_buf while newline in self.buf: line, self.buf = self.buf.split(newline, 1) yield line
[ "Generator", "for", "complete", "lines", "received", "from", "the", "server" ]
rossengeorgiev/aprs-python
python
https://github.com/rossengeorgiev/aprs-python/blob/94b89a6da47a322129484efcaf1e82f6a9932891/aprslib/inet.py#L312-L347
[ "def", "_socket_readlines", "(", "self", ",", "blocking", "=", "False", ")", ":", "try", ":", "self", ".", "sock", ".", "setblocking", "(", "0", ")", "except", "socket", ".", "error", "as", "e", ":", "self", ".", "logger", ".", "error", "(", "\"socket error when setblocking(0): %s\"", "%", "str", "(", "e", ")", ")", "raise", "ConnectionDrop", "(", "\"connection dropped\"", ")", "while", "True", ":", "short_buf", "=", "b''", "newline", "=", "b'\\r\\n'", "select", ".", "select", "(", "[", "self", ".", "sock", "]", ",", "[", "]", ",", "[", "]", ",", "None", "if", "blocking", "else", "0", ")", "try", ":", "short_buf", "=", "self", ".", "sock", ".", "recv", "(", "4096", ")", "# sock.recv returns empty if the connection drops", "if", "not", "short_buf", ":", "self", ".", "logger", ".", "error", "(", "\"socket.recv(): returned empty\"", ")", "raise", "ConnectionDrop", "(", "\"connection dropped\"", ")", "except", "socket", ".", "error", "as", "e", ":", "self", ".", "logger", ".", "error", "(", "\"socket error on recv(): %s\"", "%", "str", "(", "e", ")", ")", "if", "\"Resource temporarily unavailable\"", "in", "str", "(", "e", ")", ":", "if", "not", "blocking", ":", "if", "len", "(", "self", ".", "buf", ")", "==", "0", ":", "break", "self", ".", "buf", "+=", "short_buf", "while", "newline", "in", "self", ".", "buf", ":", "line", ",", "self", ".", "buf", "=", "self", ".", "buf", ".", "split", "(", "newline", ",", "1", ")", "yield", "line" ]
94b89a6da47a322129484efcaf1e82f6a9932891
valid
OrderedUUIDField.db_value
Convert UUID to binary blob
old/old.py
def db_value(self, value): """ Convert UUID to binary blob """ # ensure we have a valid UUID if not isinstance(value, UUID): value = UUID(value) # reconstruct for optimal indexing parts = str(value).split("-") reordered = ''.join([parts[2], parts[1], parts[0], parts[3], parts[4]]) value = binascii.unhexlify(reordered) return super(OrderedUUIDField, self).db_value(value)
def db_value(self, value): """ Convert UUID to binary blob """ # ensure we have a valid UUID if not isinstance(value, UUID): value = UUID(value) # reconstruct for optimal indexing parts = str(value).split("-") reordered = ''.join([parts[2], parts[1], parts[0], parts[3], parts[4]]) value = binascii.unhexlify(reordered) return super(OrderedUUIDField, self).db_value(value)
[ "Convert", "UUID", "to", "binary", "blob" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/old/old.py#L13-L26
[ "def", "db_value", "(", "self", ",", "value", ")", ":", "# ensure we have a valid UUID", "if", "not", "isinstance", "(", "value", ",", "UUID", ")", ":", "value", "=", "UUID", "(", "value", ")", "# reconstruct for optimal indexing", "parts", "=", "str", "(", "value", ")", ".", "split", "(", "\"-\"", ")", "reordered", "=", "''", ".", "join", "(", "[", "parts", "[", "2", "]", ",", "parts", "[", "1", "]", ",", "parts", "[", "0", "]", ",", "parts", "[", "3", "]", ",", "parts", "[", "4", "]", "]", ")", "value", "=", "binascii", ".", "unhexlify", "(", "reordered", ")", "return", "super", "(", "OrderedUUIDField", ",", "self", ")", ".", "db_value", "(", "value", ")" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
OrderedUUIDField.python_value
Convert binary blob to UUID instance
old/old.py
def python_value(self, value): """ Convert binary blob to UUID instance """ value = super(OrderedUUIDField, self).python_value(value) u = binascii.b2a_hex(value) value = u[8:16] + u[4:8] + u[0:4] + u[16:22] + u[22:32] return UUID(value.decode())
def python_value(self, value): """ Convert binary blob to UUID instance """ value = super(OrderedUUIDField, self).python_value(value) u = binascii.b2a_hex(value) value = u[8:16] + u[4:8] + u[0:4] + u[16:22] + u[22:32] return UUID(value.decode())
[ "Convert", "binary", "blob", "to", "UUID", "instance" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/old/old.py#L28-L35
[ "def", "python_value", "(", "self", ",", "value", ")", ":", "value", "=", "super", "(", "OrderedUUIDField", ",", "self", ")", ".", "python_value", "(", "value", ")", "u", "=", "binascii", ".", "b2a_hex", "(", "value", ")", "value", "=", "u", "[", "8", ":", "16", "]", "+", "u", "[", "4", ":", "8", "]", "+", "u", "[", "0", ":", "4", "]", "+", "u", "[", "16", ":", "22", "]", "+", "u", "[", "22", ":", "32", "]", "return", "UUID", "(", "value", ".", "decode", "(", ")", ")" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
HashField.db_value
Convert the python value for storage in the database.
old/old.py
def db_value(self, value): """Convert the python value for storage in the database.""" value = self.transform_value(value) return self.hhash.encrypt(value, salt_size=self.salt_size, rounds=self.rounds)
def db_value(self, value): """Convert the python value for storage in the database.""" value = self.transform_value(value) return self.hhash.encrypt(value, salt_size=self.salt_size, rounds=self.rounds)
[ "Convert", "the", "python", "value", "for", "storage", "in", "the", "database", "." ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/old/old.py#L135-L139
[ "def", "db_value", "(", "self", ",", "value", ")", ":", "value", "=", "self", ".", "transform_value", "(", "value", ")", "return", "self", ".", "hhash", ".", "encrypt", "(", "value", ",", "salt_size", "=", "self", ".", "salt_size", ",", "rounds", "=", "self", ".", "rounds", ")" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
HashField.python_value
Convert the database value to a pythonic value.
old/old.py
def python_value(self, value): """Convert the database value to a pythonic value.""" value = coerce_to_bytes(value) obj = HashValue(value) obj.field = self return obj
def python_value(self, value): """Convert the database value to a pythonic value.""" value = coerce_to_bytes(value) obj = HashValue(value) obj.field = self return obj
[ "Convert", "the", "database", "value", "to", "a", "pythonic", "value", "." ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/old/old.py#L141-L146
[ "def", "python_value", "(", "self", ",", "value", ")", ":", "value", "=", "coerce_to_bytes", "(", "value", ")", "obj", "=", "HashValue", "(", "value", ")", "obj", ".", "field", "=", "self", "return", "obj" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
ModelManager.register
Register model(s) with app
peewee_extras.py
def register(self, model_cls): """Register model(s) with app""" assert issubclass(model_cls, peewee.Model) assert not hasattr(model_cls._meta, 'database_manager') if model_cls in self: raise RuntimeError("Model already registered") self.append(model_cls) model_cls._meta.database = self.dbm return model_cls
def register(self, model_cls): """Register model(s) with app""" assert issubclass(model_cls, peewee.Model) assert not hasattr(model_cls._meta, 'database_manager') if model_cls in self: raise RuntimeError("Model already registered") self.append(model_cls) model_cls._meta.database = self.dbm return model_cls
[ "Register", "model", "(", "s", ")", "with", "app" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/peewee_extras.py#L28-L36
[ "def", "register", "(", "self", ",", "model_cls", ")", ":", "assert", "issubclass", "(", "model_cls", ",", "peewee", ".", "Model", ")", "assert", "not", "hasattr", "(", "model_cls", ".", "_meta", ",", "'database_manager'", ")", "if", "model_cls", "in", "self", ":", "raise", "RuntimeError", "(", "\"Model already registered\"", ")", "self", ".", "append", "(", "model_cls", ")", "model_cls", ".", "_meta", ".", "database", "=", "self", ".", "dbm", "return", "model_cls" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
DatabaseManager.disconnect
Disconnect from all databases
peewee_extras.py
def disconnect(self): """Disconnect from all databases""" for name, connection in self.items(): if not connection.is_closed(): connection.close()
def disconnect(self): """Disconnect from all databases""" for name, connection in self.items(): if not connection.is_closed(): connection.close()
[ "Disconnect", "from", "all", "databases" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/peewee_extras.py#L56-L60
[ "def", "disconnect", "(", "self", ")", ":", "for", "name", ",", "connection", "in", "self", ".", "items", "(", ")", ":", "if", "not", "connection", ".", "is_closed", "(", ")", ":", "connection", ".", "close", "(", ")" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
DatabaseManager.get_database
Find matching database router
peewee_extras.py
def get_database(self, model): """Find matching database router""" for router in self.routers: r = router.get_database(model) if r is not None: return r return self.get('default')
def get_database(self, model): """Find matching database router""" for router in self.routers: r = router.get_database(model) if r is not None: return r return self.get('default')
[ "Find", "matching", "database", "router" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/peewee_extras.py#L62-L68
[ "def", "get_database", "(", "self", ",", "model", ")", ":", "for", "router", "in", "self", ".", "routers", ":", "r", "=", "router", ".", "get_database", "(", "model", ")", "if", "r", "is", "not", "None", ":", "return", "r", "return", "self", ".", "get", "(", "'default'", ")" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
Model.to_cursor_ref
Returns dict of values to uniquely reference this item
peewee_extras.py
def to_cursor_ref(self): """Returns dict of values to uniquely reference this item""" fields = self._meta.get_primary_keys() assert fields values = {field.name:self.__data__[field.name] for field in fields} return values
def to_cursor_ref(self): """Returns dict of values to uniquely reference this item""" fields = self._meta.get_primary_keys() assert fields values = {field.name:self.__data__[field.name] for field in fields} return values
[ "Returns", "dict", "of", "values", "to", "uniquely", "reference", "this", "item" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/peewee_extras.py#L143-L148
[ "def", "to_cursor_ref", "(", "self", ")", ":", "fields", "=", "self", ".", "_meta", ".", "get_primary_keys", "(", ")", "assert", "fields", "values", "=", "{", "field", ".", "name", ":", "self", ".", "__data__", "[", "field", ".", "name", "]", "for", "field", "in", "fields", "}", "return", "values" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
PrimaryKeyPagination.paginate_query
Apply pagination to query :attr query: Instance of `peewee.Query` :attr count: Max rows to return :attr offset: Pagination offset, str/int :attr sort: List of tuples, e.g. [('id', 'asc')] :returns: Instance of `peewee.Query`
peewee_extras.py
def paginate_query(self, query, count, offset=None, sort=None): """ Apply pagination to query :attr query: Instance of `peewee.Query` :attr count: Max rows to return :attr offset: Pagination offset, str/int :attr sort: List of tuples, e.g. [('id', 'asc')] :returns: Instance of `peewee.Query` """ assert isinstance(query, peewee.Query) assert isinstance(count, int) assert isinstance(offset, (str, int, type(None))) assert isinstance(sort, (list, set, tuple, type(None))) # ensure our model has a primary key fields = query.model._meta.get_primary_keys() if len(fields) == 0: raise peewee.ProgrammingError( 'Cannot apply pagination on model without primary key') # ensure our model doesn't use a compound primary key if len(fields) > 1: raise peewee.ProgrammingError( 'Cannot apply pagination on model with compound primary key') # apply offset if offset is not None: query = query.where(fields[0] >= offset) # do we need to apply sorting? order_bys = [] if sort: for field, direction in sort: # does this field have a valid sort direction? if not isinstance(direction, str): raise ValueError("Invalid sort direction on field '{}'".format(field)) direction = direction.lower().strip() if direction not in ['asc', 'desc']: raise ValueError("Invalid sort direction on field '{}'".format(field)) # apply sorting order_by = peewee.SQL(field) order_by = getattr(order_by, direction)() order_bys += [order_by] # add primary key ordering after user sorting order_bys += [fields[0].asc()] # apply ordering and limits query = query.order_by(*order_bys) query = query.limit(count) return query
def paginate_query(self, query, count, offset=None, sort=None): """ Apply pagination to query :attr query: Instance of `peewee.Query` :attr count: Max rows to return :attr offset: Pagination offset, str/int :attr sort: List of tuples, e.g. [('id', 'asc')] :returns: Instance of `peewee.Query` """ assert isinstance(query, peewee.Query) assert isinstance(count, int) assert isinstance(offset, (str, int, type(None))) assert isinstance(sort, (list, set, tuple, type(None))) # ensure our model has a primary key fields = query.model._meta.get_primary_keys() if len(fields) == 0: raise peewee.ProgrammingError( 'Cannot apply pagination on model without primary key') # ensure our model doesn't use a compound primary key if len(fields) > 1: raise peewee.ProgrammingError( 'Cannot apply pagination on model with compound primary key') # apply offset if offset is not None: query = query.where(fields[0] >= offset) # do we need to apply sorting? order_bys = [] if sort: for field, direction in sort: # does this field have a valid sort direction? if not isinstance(direction, str): raise ValueError("Invalid sort direction on field '{}'".format(field)) direction = direction.lower().strip() if direction not in ['asc', 'desc']: raise ValueError("Invalid sort direction on field '{}'".format(field)) # apply sorting order_by = peewee.SQL(field) order_by = getattr(order_by, direction)() order_bys += [order_by] # add primary key ordering after user sorting order_bys += [fields[0].asc()] # apply ordering and limits query = query.order_by(*order_bys) query = query.limit(count) return query
[ "Apply", "pagination", "to", "query" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/peewee_extras.py#L204-L258
[ "def", "paginate_query", "(", "self", ",", "query", ",", "count", ",", "offset", "=", "None", ",", "sort", "=", "None", ")", ":", "assert", "isinstance", "(", "query", ",", "peewee", ".", "Query", ")", "assert", "isinstance", "(", "count", ",", "int", ")", "assert", "isinstance", "(", "offset", ",", "(", "str", ",", "int", ",", "type", "(", "None", ")", ")", ")", "assert", "isinstance", "(", "sort", ",", "(", "list", ",", "set", ",", "tuple", ",", "type", "(", "None", ")", ")", ")", "# ensure our model has a primary key", "fields", "=", "query", ".", "model", ".", "_meta", ".", "get_primary_keys", "(", ")", "if", "len", "(", "fields", ")", "==", "0", ":", "raise", "peewee", ".", "ProgrammingError", "(", "'Cannot apply pagination on model without primary key'", ")", "# ensure our model doesn't use a compound primary key", "if", "len", "(", "fields", ")", ">", "1", ":", "raise", "peewee", ".", "ProgrammingError", "(", "'Cannot apply pagination on model with compound primary key'", ")", "# apply offset", "if", "offset", "is", "not", "None", ":", "query", "=", "query", ".", "where", "(", "fields", "[", "0", "]", ">=", "offset", ")", "# do we need to apply sorting?", "order_bys", "=", "[", "]", "if", "sort", ":", "for", "field", ",", "direction", "in", "sort", ":", "# does this field have a valid sort direction?", "if", "not", "isinstance", "(", "direction", ",", "str", ")", ":", "raise", "ValueError", "(", "\"Invalid sort direction on field '{}'\"", ".", "format", "(", "field", ")", ")", "direction", "=", "direction", ".", "lower", "(", ")", ".", "strip", "(", ")", "if", "direction", "not", "in", "[", "'asc'", ",", "'desc'", "]", ":", "raise", "ValueError", "(", "\"Invalid sort direction on field '{}'\"", ".", "format", "(", "field", ")", ")", "# apply sorting", "order_by", "=", "peewee", ".", "SQL", "(", "field", ")", "order_by", "=", "getattr", "(", "order_by", ",", "direction", ")", "(", ")", "order_bys", "+=", "[", "order_by", "]", "# add primary key ordering 
after user sorting", "order_bys", "+=", "[", "fields", "[", "0", "]", ".", "asc", "(", ")", "]", "# apply ordering and limits", "query", "=", "query", ".", "order_by", "(", "*", "order_bys", ")", "query", "=", "query", ".", "limit", "(", "count", ")", "return", "query" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
ModelCRUD.apply_filters
Apply user specified filters to query
peewee_extras.py
def apply_filters(self, query, filters): """ Apply user specified filters to query """ assert isinstance(query, peewee.Query) assert isinstance(filters, dict)
def apply_filters(self, query, filters): """ Apply user specified filters to query """ assert isinstance(query, peewee.Query) assert isinstance(filters, dict)
[ "Apply", "user", "specified", "filters", "to", "query" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/peewee_extras.py#L315-L320
[ "def", "apply_filters", "(", "self", ",", "query", ",", "filters", ")", ":", "assert", "isinstance", "(", "query", ",", "peewee", ".", "Query", ")", "assert", "isinstance", "(", "filters", ",", "dict", ")" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
ModelCRUD.list
List items from query
peewee_extras.py
def list(self, filters, cursor, count): """ List items from query """ assert isinstance(filters, dict), "expected filters type 'dict'" assert isinstance(cursor, dict), "expected cursor type 'dict'" # start with our base query query = self.get_query() assert isinstance(query, peewee.Query) # XXX: convert and apply user specified filters #filters = {field.name: cursor[field.name] for field in fields} #query.where( paginator = self.get_paginator() assert isinstance(paginator, Pagination) # always include an extra row for next cursor position count += 1 # apply pagination to query pquery = paginator.filter_query(query, cursor, count) items = [ item for item in pquery ] # determine next cursor position next_item = items.pop(1) next_cursor = next_item.to_cursor_ref() ''' # is this field allowed for sort? if field not in self.sort_fields: raise ValueError("Cannot sort on field '{}'".format(field)) ''' return items, next_cursor
def list(self, filters, cursor, count): """ List items from query """ assert isinstance(filters, dict), "expected filters type 'dict'" assert isinstance(cursor, dict), "expected cursor type 'dict'" # start with our base query query = self.get_query() assert isinstance(query, peewee.Query) # XXX: convert and apply user specified filters #filters = {field.name: cursor[field.name] for field in fields} #query.where( paginator = self.get_paginator() assert isinstance(paginator, Pagination) # always include an extra row for next cursor position count += 1 # apply pagination to query pquery = paginator.filter_query(query, cursor, count) items = [ item for item in pquery ] # determine next cursor position next_item = items.pop(1) next_cursor = next_item.to_cursor_ref() ''' # is this field allowed for sort? if field not in self.sort_fields: raise ValueError("Cannot sort on field '{}'".format(field)) ''' return items, next_cursor
[ "List", "items", "from", "query" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/peewee_extras.py#L322-L357
[ "def", "list", "(", "self", ",", "filters", ",", "cursor", ",", "count", ")", ":", "assert", "isinstance", "(", "filters", ",", "dict", ")", ",", "\"expected filters type 'dict'\"", "assert", "isinstance", "(", "cursor", ",", "dict", ")", ",", "\"expected cursor type 'dict'\"", "# start with our base query", "query", "=", "self", ".", "get_query", "(", ")", "assert", "isinstance", "(", "query", ",", "peewee", ".", "Query", ")", "# XXX: convert and apply user specified filters", "#filters = {field.name: cursor[field.name] for field in fields}", "#query.where(", "paginator", "=", "self", ".", "get_paginator", "(", ")", "assert", "isinstance", "(", "paginator", ",", "Pagination", ")", "# always include an extra row for next cursor position", "count", "+=", "1", "# apply pagination to query", "pquery", "=", "paginator", ".", "filter_query", "(", "query", ",", "cursor", ",", "count", ")", "items", "=", "[", "item", "for", "item", "in", "pquery", "]", "# determine next cursor position", "next_item", "=", "items", ".", "pop", "(", "1", ")", "next_cursor", "=", "next_item", ".", "to_cursor_ref", "(", ")", "'''\n # is this field allowed for sort?\n if field not in self.sort_fields:\n raise ValueError(\"Cannot sort on field '{}'\".format(field))\n '''", "return", "items", ",", "next_cursor" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
ModelCRUD.retrieve
Retrieve items from query
peewee_extras.py
def retrieve(self, cursor): """ Retrieve items from query """ assert isinstance(cursor, dict), "expected cursor type 'dict'" # look for record in query query = self.get_query() assert isinstance(query, peewee.Query) query return query.get(**cursor)
def retrieve(self, cursor): """ Retrieve items from query """ assert isinstance(cursor, dict), "expected cursor type 'dict'" # look for record in query query = self.get_query() assert isinstance(query, peewee.Query) query return query.get(**cursor)
[ "Retrieve", "items", "from", "query" ]
foxx/peewee-extras
python
https://github.com/foxx/peewee-extras/blob/327e7e63465b3f6e1afc0e6a651f4cb5c8c60889/peewee_extras.py#L359-L370
[ "def", "retrieve", "(", "self", ",", "cursor", ")", ":", "assert", "isinstance", "(", "cursor", ",", "dict", ")", ",", "\"expected cursor type 'dict'\"", "# look for record in query", "query", "=", "self", ".", "get_query", "(", ")", "assert", "isinstance", "(", "query", ",", "peewee", ".", "Query", ")", "query", "return", "query", ".", "get", "(", "*", "*", "cursor", ")" ]
327e7e63465b3f6e1afc0e6a651f4cb5c8c60889
valid
AWS4Auth.regenerate_signing_key
Regenerate the signing key for this instance. Store the new key in signing_key property. Take scope elements of the new key from the equivalent properties (region, service, date) of the current AWS4Auth instance. Scope elements can be overridden for the new key by supplying arguments to this function. If overrides are supplied update the current AWS4Auth instance's equivalent properties to match the new values. If secret_key is not specified use the value of the secret_key property of the current AWS4Auth instance's signing key. If the existing signing key is not storing its secret key (i.e. store_secret_key was set to False at instantiation) then raise a NoSecretKeyError and do not regenerate the key. In order to regenerate a key which is not storing its secret key, secret_key must be supplied to this function. Use the value of the existing key's store_secret_key property when generating the new key. If there is no existing key, then default to setting store_secret_key to True for new key.
requests_aws4auth/aws4auth.py
def regenerate_signing_key(self, secret_key=None, region=None, service=None, date=None): """ Regenerate the signing key for this instance. Store the new key in signing_key property. Take scope elements of the new key from the equivalent properties (region, service, date) of the current AWS4Auth instance. Scope elements can be overridden for the new key by supplying arguments to this function. If overrides are supplied update the current AWS4Auth instance's equivalent properties to match the new values. If secret_key is not specified use the value of the secret_key property of the current AWS4Auth instance's signing key. If the existing signing key is not storing its secret key (i.e. store_secret_key was set to False at instantiation) then raise a NoSecretKeyError and do not regenerate the key. In order to regenerate a key which is not storing its secret key, secret_key must be supplied to this function. Use the value of the existing key's store_secret_key property when generating the new key. If there is no existing key, then default to setting store_secret_key to True for new key. """ if secret_key is None and (self.signing_key is None or self.signing_key.secret_key is None): raise NoSecretKeyError secret_key = secret_key or self.signing_key.secret_key region = region or self.region service = service or self.service date = date or self.date if self.signing_key is None: store_secret_key = True else: store_secret_key = self.signing_key.store_secret_key self.signing_key = AWS4SigningKey(secret_key, region, service, date, store_secret_key) self.region = region self.service = service self.date = self.signing_key.date
def regenerate_signing_key(self, secret_key=None, region=None, service=None, date=None): """ Regenerate the signing key for this instance. Store the new key in signing_key property. Take scope elements of the new key from the equivalent properties (region, service, date) of the current AWS4Auth instance. Scope elements can be overridden for the new key by supplying arguments to this function. If overrides are supplied update the current AWS4Auth instance's equivalent properties to match the new values. If secret_key is not specified use the value of the secret_key property of the current AWS4Auth instance's signing key. If the existing signing key is not storing its secret key (i.e. store_secret_key was set to False at instantiation) then raise a NoSecretKeyError and do not regenerate the key. In order to regenerate a key which is not storing its secret key, secret_key must be supplied to this function. Use the value of the existing key's store_secret_key property when generating the new key. If there is no existing key, then default to setting store_secret_key to True for new key. """ if secret_key is None and (self.signing_key is None or self.signing_key.secret_key is None): raise NoSecretKeyError secret_key = secret_key or self.signing_key.secret_key region = region or self.region service = service or self.service date = date or self.date if self.signing_key is None: store_secret_key = True else: store_secret_key = self.signing_key.store_secret_key self.signing_key = AWS4SigningKey(secret_key, region, service, date, store_secret_key) self.region = region self.service = service self.date = self.signing_key.date
[ "Regenerate", "the", "signing", "key", "for", "this", "instance", ".", "Store", "the", "new", "key", "in", "signing_key", "property", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L264-L306
[ "def", "regenerate_signing_key", "(", "self", ",", "secret_key", "=", "None", ",", "region", "=", "None", ",", "service", "=", "None", ",", "date", "=", "None", ")", ":", "if", "secret_key", "is", "None", "and", "(", "self", ".", "signing_key", "is", "None", "or", "self", ".", "signing_key", ".", "secret_key", "is", "None", ")", ":", "raise", "NoSecretKeyError", "secret_key", "=", "secret_key", "or", "self", ".", "signing_key", ".", "secret_key", "region", "=", "region", "or", "self", ".", "region", "service", "=", "service", "or", "self", ".", "service", "date", "=", "date", "or", "self", ".", "date", "if", "self", ".", "signing_key", "is", "None", ":", "store_secret_key", "=", "True", "else", ":", "store_secret_key", "=", "self", ".", "signing_key", ".", "store_secret_key", "self", ".", "signing_key", "=", "AWS4SigningKey", "(", "secret_key", ",", "region", ",", "service", ",", "date", ",", "store_secret_key", ")", "self", ".", "region", "=", "region", "self", ".", "service", "=", "service", "self", ".", "date", "=", "self", ".", "signing_key", ".", "date" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.get_request_date
Try to pull a date from the request by looking first at the x-amz-date header, and if that's not present then the Date header. Return a datetime.date object, or None if neither date header is found or is in a recognisable format. req -- a requests PreparedRequest object
requests_aws4auth/aws4auth.py
def get_request_date(cls, req): """ Try to pull a date from the request by looking first at the x-amz-date header, and if that's not present then the Date header. Return a datetime.date object, or None if neither date header is found or is in a recognisable format. req -- a requests PreparedRequest object """ date = None for header in ['x-amz-date', 'date']: if header not in req.headers: continue try: date_str = cls.parse_date(req.headers[header]) except DateFormatError: continue try: date = datetime.datetime.strptime(date_str, '%Y-%m-%d').date() except ValueError: continue else: break return date
def get_request_date(cls, req): """ Try to pull a date from the request by looking first at the x-amz-date header, and if that's not present then the Date header. Return a datetime.date object, or None if neither date header is found or is in a recognisable format. req -- a requests PreparedRequest object """ date = None for header in ['x-amz-date', 'date']: if header not in req.headers: continue try: date_str = cls.parse_date(req.headers[header]) except DateFormatError: continue try: date = datetime.datetime.strptime(date_str, '%Y-%m-%d').date() except ValueError: continue else: break return date
[ "Try", "to", "pull", "a", "date", "from", "the", "request", "by", "looking", "first", "at", "the", "x", "-", "amz", "-", "date", "header", "and", "if", "that", "s", "not", "present", "then", "the", "Date", "header", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L368-L394
[ "def", "get_request_date", "(", "cls", ",", "req", ")", ":", "date", "=", "None", "for", "header", "in", "[", "'x-amz-date'", ",", "'date'", "]", ":", "if", "header", "not", "in", "req", ".", "headers", ":", "continue", "try", ":", "date_str", "=", "cls", ".", "parse_date", "(", "req", ".", "headers", "[", "header", "]", ")", "except", "DateFormatError", ":", "continue", "try", ":", "date", "=", "datetime", ".", "datetime", ".", "strptime", "(", "date_str", ",", "'%Y-%m-%d'", ")", ".", "date", "(", ")", "except", "ValueError", ":", "continue", "else", ":", "break", "return", "date" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.parse_date
Check if date_str is in a recognised format and return an ISO yyyy-mm-dd format version if so. Raise DateFormatError if not. Recognised formats are: * RFC 7231 (e.g. Mon, 09 Sep 2011 23:36:00 GMT) * RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT) * C time (e.g. Wed Dec 4 00:00:00 2002) * Amz-Date format (e.g. 20090325T010101Z) * ISO 8601 / RFC 3339 (e.g. 2009-03-25T10:11:12.13-01:00) date_str -- Str containing a date and optional time
requests_aws4auth/aws4auth.py
def parse_date(date_str): """ Check if date_str is in a recognised format and return an ISO yyyy-mm-dd format version if so. Raise DateFormatError if not. Recognised formats are: * RFC 7231 (e.g. Mon, 09 Sep 2011 23:36:00 GMT) * RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT) * C time (e.g. Wed Dec 4 00:00:00 2002) * Amz-Date format (e.g. 20090325T010101Z) * ISO 8601 / RFC 3339 (e.g. 2009-03-25T10:11:12.13-01:00) date_str -- Str containing a date and optional time """ months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'] formats = { # RFC 7231, e.g. 'Mon, 09 Sep 2011 23:36:00 GMT' r'^(?:\w{3}, )?(\d{2}) (\w{3}) (\d{4})\D.*$': lambda m: '{}-{:02d}-{}'.format( m.group(3), months.index(m.group(2).lower())+1, m.group(1)), # RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT) # assumes current century r'^\w+day, (\d{2})-(\w{3})-(\d{2})\D.*$': lambda m: '{}{}-{:02d}-{}'.format( str(datetime.date.today().year)[:2], m.group(3), months.index(m.group(2).lower())+1, m.group(1)), # C time, e.g. 'Wed Dec 4 00:00:00 2002' r'^\w{3} (\w{3}) (\d{1,2}) \d{2}:\d{2}:\d{2} (\d{4})$': lambda m: '{}-{:02d}-{:02d}'.format( m.group(3), months.index(m.group(1).lower())+1, int(m.group(2))), # x-amz-date format dates, e.g. 20100325T010101Z r'^(\d{4})(\d{2})(\d{2})T\d{6}Z$': lambda m: '{}-{}-{}'.format(*m.groups()), # ISO 8601 / RFC 3339, e.g. '2009-03-25T10:11:12.13-01:00' r'^(\d{4}-\d{2}-\d{2})(?:[Tt].*)?$': lambda m: m.group(1), } out_date = None for regex, xform in formats.items(): m = re.search(regex, date_str) if m: out_date = xform(m) break if out_date is None: raise DateFormatError else: return out_date
def parse_date(date_str): """ Check if date_str is in a recognised format and return an ISO yyyy-mm-dd format version if so. Raise DateFormatError if not. Recognised formats are: * RFC 7231 (e.g. Mon, 09 Sep 2011 23:36:00 GMT) * RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT) * C time (e.g. Wed Dec 4 00:00:00 2002) * Amz-Date format (e.g. 20090325T010101Z) * ISO 8601 / RFC 3339 (e.g. 2009-03-25T10:11:12.13-01:00) date_str -- Str containing a date and optional time """ months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'] formats = { # RFC 7231, e.g. 'Mon, 09 Sep 2011 23:36:00 GMT' r'^(?:\w{3}, )?(\d{2}) (\w{3}) (\d{4})\D.*$': lambda m: '{}-{:02d}-{}'.format( m.group(3), months.index(m.group(2).lower())+1, m.group(1)), # RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT) # assumes current century r'^\w+day, (\d{2})-(\w{3})-(\d{2})\D.*$': lambda m: '{}{}-{:02d}-{}'.format( str(datetime.date.today().year)[:2], m.group(3), months.index(m.group(2).lower())+1, m.group(1)), # C time, e.g. 'Wed Dec 4 00:00:00 2002' r'^\w{3} (\w{3}) (\d{1,2}) \d{2}:\d{2}:\d{2} (\d{4})$': lambda m: '{}-{:02d}-{:02d}'.format( m.group(3), months.index(m.group(1).lower())+1, int(m.group(2))), # x-amz-date format dates, e.g. 20100325T010101Z r'^(\d{4})(\d{2})(\d{2})T\d{6}Z$': lambda m: '{}-{}-{}'.format(*m.groups()), # ISO 8601 / RFC 3339, e.g. '2009-03-25T10:11:12.13-01:00' r'^(\d{4}-\d{2}-\d{2})(?:[Tt].*)?$': lambda m: m.group(1), } out_date = None for regex, xform in formats.items(): m = re.search(regex, date_str) if m: out_date = xform(m) break if out_date is None: raise DateFormatError else: return out_date
[ "Check", "if", "date_str", "is", "in", "a", "recognised", "format", "and", "return", "an", "ISO", "yyyy", "-", "mm", "-", "dd", "format", "version", "if", "so", ".", "Raise", "DateFormatError", "if", "not", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L397-L452
[ "def", "parse_date", "(", "date_str", ")", ":", "months", "=", "[", "'jan'", ",", "'feb'", ",", "'mar'", ",", "'apr'", ",", "'may'", ",", "'jun'", ",", "'jul'", ",", "'aug'", ",", "'sep'", ",", "'oct'", ",", "'nov'", ",", "'dec'", "]", "formats", "=", "{", "# RFC 7231, e.g. 'Mon, 09 Sep 2011 23:36:00 GMT'", "r'^(?:\\w{3}, )?(\\d{2}) (\\w{3}) (\\d{4})\\D.*$'", ":", "lambda", "m", ":", "'{}-{:02d}-{}'", ".", "format", "(", "m", ".", "group", "(", "3", ")", ",", "months", ".", "index", "(", "m", ".", "group", "(", "2", ")", ".", "lower", "(", ")", ")", "+", "1", ",", "m", ".", "group", "(", "1", ")", ")", ",", "# RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT)", "# assumes current century", "r'^\\w+day, (\\d{2})-(\\w{3})-(\\d{2})\\D.*$'", ":", "lambda", "m", ":", "'{}{}-{:02d}-{}'", ".", "format", "(", "str", "(", "datetime", ".", "date", ".", "today", "(", ")", ".", "year", ")", "[", ":", "2", "]", ",", "m", ".", "group", "(", "3", ")", ",", "months", ".", "index", "(", "m", ".", "group", "(", "2", ")", ".", "lower", "(", ")", ")", "+", "1", ",", "m", ".", "group", "(", "1", ")", ")", ",", "# C time, e.g. 'Wed Dec 4 00:00:00 2002'", "r'^\\w{3} (\\w{3}) (\\d{1,2}) \\d{2}:\\d{2}:\\d{2} (\\d{4})$'", ":", "lambda", "m", ":", "'{}-{:02d}-{:02d}'", ".", "format", "(", "m", ".", "group", "(", "3", ")", ",", "months", ".", "index", "(", "m", ".", "group", "(", "1", ")", ".", "lower", "(", ")", ")", "+", "1", ",", "int", "(", "m", ".", "group", "(", "2", ")", ")", ")", ",", "# x-amz-date format dates, e.g. 20100325T010101Z", "r'^(\\d{4})(\\d{2})(\\d{2})T\\d{6}Z$'", ":", "lambda", "m", ":", "'{}-{}-{}'", ".", "format", "(", "*", "m", ".", "groups", "(", ")", ")", ",", "# ISO 8601 / RFC 3339, e.g. 
'2009-03-25T10:11:12.13-01:00'", "r'^(\\d{4}-\\d{2}-\\d{2})(?:[Tt].*)?$'", ":", "lambda", "m", ":", "m", ".", "group", "(", "1", ")", ",", "}", "out_date", "=", "None", "for", "regex", ",", "xform", "in", "formats", ".", "items", "(", ")", ":", "m", "=", "re", ".", "search", "(", "regex", ",", "date_str", ")", "if", "m", ":", "out_date", "=", "xform", "(", "m", ")", "break", "if", "out_date", "is", "None", ":", "raise", "DateFormatError", "else", ":", "return", "out_date" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.handle_date_mismatch
Handle a request whose date doesn't match the signing key scope date. This AWS4Auth class implementation regenerates the signing key. See StrictAWS4Auth class if you would prefer an exception to be raised. req -- a requests prepared request object
requests_aws4auth/aws4auth.py
def handle_date_mismatch(self, req): """ Handle a request whose date doesn't match the signing key scope date. This AWS4Auth class implementation regenerates the signing key. See StrictAWS4Auth class if you would prefer an exception to be raised. req -- a requests prepared request object """ req_datetime = self.get_request_date(req) new_key_date = req_datetime.strftime('%Y%m%d') self.regenerate_signing_key(date=new_key_date)
def handle_date_mismatch(self, req): """ Handle a request whose date doesn't match the signing key scope date. This AWS4Auth class implementation regenerates the signing key. See StrictAWS4Auth class if you would prefer an exception to be raised. req -- a requests prepared request object """ req_datetime = self.get_request_date(req) new_key_date = req_datetime.strftime('%Y%m%d') self.regenerate_signing_key(date=new_key_date)
[ "Handle", "a", "request", "whose", "date", "doesn", "t", "match", "the", "signing", "key", "scope", "date", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L454-L466
[ "def", "handle_date_mismatch", "(", "self", ",", "req", ")", ":", "req_datetime", "=", "self", ".", "get_request_date", "(", "req", ")", "new_key_date", "=", "req_datetime", ".", "strftime", "(", "'%Y%m%d'", ")", "self", ".", "regenerate_signing_key", "(", "date", "=", "new_key_date", ")" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.encode_body
Encode body of request to bytes and update content-type if required. If the body of req is Unicode then encode to the charset found in content-type header if present, otherwise UTF-8, or ASCII if content-type is application/x-www-form-urlencoded. If encoding to UTF-8 then add charset to content-type. Modifies req directly, does not return a modified copy. req -- Requests PreparedRequest object
requests_aws4auth/aws4auth.py
def encode_body(req): """ Encode body of request to bytes and update content-type if required. If the body of req is Unicode then encode to the charset found in content-type header if present, otherwise UTF-8, or ASCII if content-type is application/x-www-form-urlencoded. If encoding to UTF-8 then add charset to content-type. Modifies req directly, does not return a modified copy. req -- Requests PreparedRequest object """ if isinstance(req.body, text_type): split = req.headers.get('content-type', 'text/plain').split(';') if len(split) == 2: ct, cs = split cs = cs.split('=')[1] req.body = req.body.encode(cs) else: ct = split[0] if (ct == 'application/x-www-form-urlencoded' or 'x-amz-' in ct): req.body = req.body.encode() else: req.body = req.body.encode('utf-8') req.headers['content-type'] = ct + '; charset=utf-8'
def encode_body(req): """ Encode body of request to bytes and update content-type if required. If the body of req is Unicode then encode to the charset found in content-type header if present, otherwise UTF-8, or ASCII if content-type is application/x-www-form-urlencoded. If encoding to UTF-8 then add charset to content-type. Modifies req directly, does not return a modified copy. req -- Requests PreparedRequest object """ if isinstance(req.body, text_type): split = req.headers.get('content-type', 'text/plain').split(';') if len(split) == 2: ct, cs = split cs = cs.split('=')[1] req.body = req.body.encode(cs) else: ct = split[0] if (ct == 'application/x-www-form-urlencoded' or 'x-amz-' in ct): req.body = req.body.encode() else: req.body = req.body.encode('utf-8') req.headers['content-type'] = ct + '; charset=utf-8'
[ "Encode", "body", "of", "request", "to", "bytes", "and", "update", "content", "-", "type", "if", "required", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L469-L495
[ "def", "encode_body", "(", "req", ")", ":", "if", "isinstance", "(", "req", ".", "body", ",", "text_type", ")", ":", "split", "=", "req", ".", "headers", ".", "get", "(", "'content-type'", ",", "'text/plain'", ")", ".", "split", "(", "';'", ")", "if", "len", "(", "split", ")", "==", "2", ":", "ct", ",", "cs", "=", "split", "cs", "=", "cs", ".", "split", "(", "'='", ")", "[", "1", "]", "req", ".", "body", "=", "req", ".", "body", ".", "encode", "(", "cs", ")", "else", ":", "ct", "=", "split", "[", "0", "]", "if", "(", "ct", "==", "'application/x-www-form-urlencoded'", "or", "'x-amz-'", "in", "ct", ")", ":", "req", ".", "body", "=", "req", ".", "body", ".", "encode", "(", ")", "else", ":", "req", ".", "body", "=", "req", ".", "body", ".", "encode", "(", "'utf-8'", ")", "req", ".", "headers", "[", "'content-type'", "]", "=", "ct", "+", "'; charset=utf-8'" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.get_canonical_request
Create the AWS authentication Canonical Request string. req -- Requests PreparedRequest object. Should already include an x-amz-content-sha256 header cano_headers -- Canonical Headers section of Canonical Request, as returned by get_canonical_headers() signed_headers -- Signed Headers, as returned by get_canonical_headers()
requests_aws4auth/aws4auth.py
def get_canonical_request(self, req, cano_headers, signed_headers): """ Create the AWS authentication Canonical Request string. req -- Requests PreparedRequest object. Should already include an x-amz-content-sha256 header cano_headers -- Canonical Headers section of Canonical Request, as returned by get_canonical_headers() signed_headers -- Signed Headers, as returned by get_canonical_headers() """ url = urlparse(req.url) path = self.amz_cano_path(url.path) # AWS handles "extreme" querystrings differently to urlparse # (see post-vanilla-query-nonunreserved test in aws_testsuite) split = req.url.split('?', 1) qs = split[1] if len(split) == 2 else '' qs = self.amz_cano_querystring(qs) payload_hash = req.headers['x-amz-content-sha256'] req_parts = [req.method.upper(), path, qs, cano_headers, signed_headers, payload_hash] cano_req = '\n'.join(req_parts) return cano_req
def get_canonical_request(self, req, cano_headers, signed_headers): """ Create the AWS authentication Canonical Request string. req -- Requests PreparedRequest object. Should already include an x-amz-content-sha256 header cano_headers -- Canonical Headers section of Canonical Request, as returned by get_canonical_headers() signed_headers -- Signed Headers, as returned by get_canonical_headers() """ url = urlparse(req.url) path = self.amz_cano_path(url.path) # AWS handles "extreme" querystrings differently to urlparse # (see post-vanilla-query-nonunreserved test in aws_testsuite) split = req.url.split('?', 1) qs = split[1] if len(split) == 2 else '' qs = self.amz_cano_querystring(qs) payload_hash = req.headers['x-amz-content-sha256'] req_parts = [req.method.upper(), path, qs, cano_headers, signed_headers, payload_hash] cano_req = '\n'.join(req_parts) return cano_req
[ "Create", "the", "AWS", "authentication", "Canonical", "Request", "string", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L497-L520
[ "def", "get_canonical_request", "(", "self", ",", "req", ",", "cano_headers", ",", "signed_headers", ")", ":", "url", "=", "urlparse", "(", "req", ".", "url", ")", "path", "=", "self", ".", "amz_cano_path", "(", "url", ".", "path", ")", "# AWS handles \"extreme\" querystrings differently to urlparse", "# (see post-vanilla-query-nonunreserved test in aws_testsuite)", "split", "=", "req", ".", "url", ".", "split", "(", "'?'", ",", "1", ")", "qs", "=", "split", "[", "1", "]", "if", "len", "(", "split", ")", "==", "2", "else", "''", "qs", "=", "self", ".", "amz_cano_querystring", "(", "qs", ")", "payload_hash", "=", "req", ".", "headers", "[", "'x-amz-content-sha256'", "]", "req_parts", "=", "[", "req", ".", "method", ".", "upper", "(", ")", ",", "path", ",", "qs", ",", "cano_headers", ",", "signed_headers", ",", "payload_hash", "]", "cano_req", "=", "'\\n'", ".", "join", "(", "req_parts", ")", "return", "cano_req" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.get_canonical_headers
Generate the Canonical Headers section of the Canonical Request. Return the Canonical Headers and the Signed Headers strs as a tuple (canonical_headers, signed_headers). req -- Requests PreparedRequest object include -- List of headers to include in the canonical and signed headers. It's primarily included to allow testing against specific examples from Amazon. If omitted or None it includes host, content-type and any header starting 'x-amz-' except for x-amz-client context, which appears to break mobile analytics auth if included. Except for the x-amz-client-context exclusion these defaults are per the AWS documentation.
requests_aws4auth/aws4auth.py
def get_canonical_headers(cls, req, include=None): """ Generate the Canonical Headers section of the Canonical Request. Return the Canonical Headers and the Signed Headers strs as a tuple (canonical_headers, signed_headers). req -- Requests PreparedRequest object include -- List of headers to include in the canonical and signed headers. It's primarily included to allow testing against specific examples from Amazon. If omitted or None it includes host, content-type and any header starting 'x-amz-' except for x-amz-client context, which appears to break mobile analytics auth if included. Except for the x-amz-client-context exclusion these defaults are per the AWS documentation. """ if include is None: include = cls.default_include_headers include = [x.lower() for x in include] headers = req.headers.copy() # Temporarily include the host header - AWS requires it to be included # in the signed headers, but Requests doesn't include it in a # PreparedRequest if 'host' not in headers: headers['host'] = urlparse(req.url).netloc.split(':')[0] # Aggregate for upper/lowercase header name collisions in header names, # AMZ requires values of colliding headers be concatenated into a # single header with lowercase name. 
Although this is not possible with # Requests, since it uses a case-insensitive dict to hold headers, this # is here just in case you duck type with a regular dict cano_headers_dict = {} for hdr, val in headers.items(): hdr = hdr.strip().lower() val = cls.amz_norm_whitespace(val).strip() if (hdr in include or '*' in include or ('x-amz-*' in include and hdr.startswith('x-amz-') and not hdr == 'x-amz-client-context')): vals = cano_headers_dict.setdefault(hdr, []) vals.append(val) # Flatten cano_headers dict to string and generate signed_headers cano_headers = '' signed_headers_list = [] for hdr in sorted(cano_headers_dict): vals = cano_headers_dict[hdr] val = ','.join(sorted(vals)) cano_headers += '{}:{}\n'.format(hdr, val) signed_headers_list.append(hdr) signed_headers = ';'.join(signed_headers_list) return (cano_headers, signed_headers)
def get_canonical_headers(cls, req, include=None): """ Generate the Canonical Headers section of the Canonical Request. Return the Canonical Headers and the Signed Headers strs as a tuple (canonical_headers, signed_headers). req -- Requests PreparedRequest object include -- List of headers to include in the canonical and signed headers. It's primarily included to allow testing against specific examples from Amazon. If omitted or None it includes host, content-type and any header starting 'x-amz-' except for x-amz-client context, which appears to break mobile analytics auth if included. Except for the x-amz-client-context exclusion these defaults are per the AWS documentation. """ if include is None: include = cls.default_include_headers include = [x.lower() for x in include] headers = req.headers.copy() # Temporarily include the host header - AWS requires it to be included # in the signed headers, but Requests doesn't include it in a # PreparedRequest if 'host' not in headers: headers['host'] = urlparse(req.url).netloc.split(':')[0] # Aggregate for upper/lowercase header name collisions in header names, # AMZ requires values of colliding headers be concatenated into a # single header with lowercase name. 
Although this is not possible with # Requests, since it uses a case-insensitive dict to hold headers, this # is here just in case you duck type with a regular dict cano_headers_dict = {} for hdr, val in headers.items(): hdr = hdr.strip().lower() val = cls.amz_norm_whitespace(val).strip() if (hdr in include or '*' in include or ('x-amz-*' in include and hdr.startswith('x-amz-') and not hdr == 'x-amz-client-context')): vals = cano_headers_dict.setdefault(hdr, []) vals.append(val) # Flatten cano_headers dict to string and generate signed_headers cano_headers = '' signed_headers_list = [] for hdr in sorted(cano_headers_dict): vals = cano_headers_dict[hdr] val = ','.join(sorted(vals)) cano_headers += '{}:{}\n'.format(hdr, val) signed_headers_list.append(hdr) signed_headers = ';'.join(signed_headers_list) return (cano_headers, signed_headers)
[ "Generate", "the", "Canonical", "Headers", "section", "of", "the", "Canonical", "Request", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L523-L573
[ "def", "get_canonical_headers", "(", "cls", ",", "req", ",", "include", "=", "None", ")", ":", "if", "include", "is", "None", ":", "include", "=", "cls", ".", "default_include_headers", "include", "=", "[", "x", ".", "lower", "(", ")", "for", "x", "in", "include", "]", "headers", "=", "req", ".", "headers", ".", "copy", "(", ")", "# Temporarily include the host header - AWS requires it to be included", "# in the signed headers, but Requests doesn't include it in a", "# PreparedRequest", "if", "'host'", "not", "in", "headers", ":", "headers", "[", "'host'", "]", "=", "urlparse", "(", "req", ".", "url", ")", ".", "netloc", ".", "split", "(", "':'", ")", "[", "0", "]", "# Aggregate for upper/lowercase header name collisions in header names,", "# AMZ requires values of colliding headers be concatenated into a", "# single header with lowercase name. Although this is not possible with", "# Requests, since it uses a case-insensitive dict to hold headers, this", "# is here just in case you duck type with a regular dict", "cano_headers_dict", "=", "{", "}", "for", "hdr", ",", "val", "in", "headers", ".", "items", "(", ")", ":", "hdr", "=", "hdr", ".", "strip", "(", ")", ".", "lower", "(", ")", "val", "=", "cls", ".", "amz_norm_whitespace", "(", "val", ")", ".", "strip", "(", ")", "if", "(", "hdr", "in", "include", "or", "'*'", "in", "include", "or", "(", "'x-amz-*'", "in", "include", "and", "hdr", ".", "startswith", "(", "'x-amz-'", ")", "and", "not", "hdr", "==", "'x-amz-client-context'", ")", ")", ":", "vals", "=", "cano_headers_dict", ".", "setdefault", "(", "hdr", ",", "[", "]", ")", "vals", ".", "append", "(", "val", ")", "# Flatten cano_headers dict to string and generate signed_headers", "cano_headers", "=", "''", "signed_headers_list", "=", "[", "]", "for", "hdr", "in", "sorted", "(", "cano_headers_dict", ")", ":", "vals", "=", "cano_headers_dict", "[", "hdr", "]", "val", "=", "','", ".", "join", "(", "sorted", "(", "vals", ")", ")", "cano_headers", "+=", 
"'{}:{}\\n'", ".", "format", "(", "hdr", ",", "val", ")", "signed_headers_list", ".", "append", "(", "hdr", ")", "signed_headers", "=", "';'", ".", "join", "(", "signed_headers_list", ")", "return", "(", "cano_headers", ",", "signed_headers", ")" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.get_sig_string
Generate the AWS4 auth string to sign for the request. req -- Requests PreparedRequest object. This should already include an x-amz-date header. cano_req -- The Canonical Request, as returned by get_canonical_request()
requests_aws4auth/aws4auth.py
def get_sig_string(req, cano_req, scope): """ Generate the AWS4 auth string to sign for the request. req -- Requests PreparedRequest object. This should already include an x-amz-date header. cano_req -- The Canonical Request, as returned by get_canonical_request() """ amz_date = req.headers['x-amz-date'] hsh = hashlib.sha256(cano_req.encode()) sig_items = ['AWS4-HMAC-SHA256', amz_date, scope, hsh.hexdigest()] sig_string = '\n'.join(sig_items) return sig_string
def get_sig_string(req, cano_req, scope): """ Generate the AWS4 auth string to sign for the request. req -- Requests PreparedRequest object. This should already include an x-amz-date header. cano_req -- The Canonical Request, as returned by get_canonical_request() """ amz_date = req.headers['x-amz-date'] hsh = hashlib.sha256(cano_req.encode()) sig_items = ['AWS4-HMAC-SHA256', amz_date, scope, hsh.hexdigest()] sig_string = '\n'.join(sig_items) return sig_string
[ "Generate", "the", "AWS4", "auth", "string", "to", "sign", "for", "the", "request", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L576-L590
[ "def", "get_sig_string", "(", "req", ",", "cano_req", ",", "scope", ")", ":", "amz_date", "=", "req", ".", "headers", "[", "'x-amz-date'", "]", "hsh", "=", "hashlib", ".", "sha256", "(", "cano_req", ".", "encode", "(", ")", ")", "sig_items", "=", "[", "'AWS4-HMAC-SHA256'", ",", "amz_date", ",", "scope", ",", "hsh", ".", "hexdigest", "(", ")", "]", "sig_string", "=", "'\\n'", ".", "join", "(", "sig_items", ")", "return", "sig_string" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.amz_cano_path
Generate the canonical path as per AWS4 auth requirements. Not documented anywhere, determined from aws4_testsuite examples, problem reports and testing against the live services. path -- request path
requests_aws4auth/aws4auth.py
def amz_cano_path(self, path): """ Generate the canonical path as per AWS4 auth requirements. Not documented anywhere, determined from aws4_testsuite examples, problem reports and testing against the live services. path -- request path """ safe_chars = '/~' qs = '' fixed_path = path if '?' in fixed_path: fixed_path, qs = fixed_path.split('?', 1) fixed_path = posixpath.normpath(fixed_path) fixed_path = re.sub('/+', '/', fixed_path) if path.endswith('/') and not fixed_path.endswith('/'): fixed_path += '/' full_path = fixed_path # If Python 2, switch to working entirely in str as quote() has problems # with Unicode if PY2: full_path = full_path.encode('utf-8') safe_chars = safe_chars.encode('utf-8') qs = qs.encode('utf-8') # S3 seems to require unquoting first. 'host' service is used in # amz_testsuite tests if self.service in ['s3', 'host']: full_path = unquote(full_path) full_path = quote(full_path, safe=safe_chars) if qs: qm = b'?' if PY2 else '?' full_path = qm.join((full_path, qs)) if PY2: full_path = unicode(full_path) return full_path
def amz_cano_path(self, path): """ Generate the canonical path as per AWS4 auth requirements. Not documented anywhere, determined from aws4_testsuite examples, problem reports and testing against the live services. path -- request path """ safe_chars = '/~' qs = '' fixed_path = path if '?' in fixed_path: fixed_path, qs = fixed_path.split('?', 1) fixed_path = posixpath.normpath(fixed_path) fixed_path = re.sub('/+', '/', fixed_path) if path.endswith('/') and not fixed_path.endswith('/'): fixed_path += '/' full_path = fixed_path # If Python 2, switch to working entirely in str as quote() has problems # with Unicode if PY2: full_path = full_path.encode('utf-8') safe_chars = safe_chars.encode('utf-8') qs = qs.encode('utf-8') # S3 seems to require unquoting first. 'host' service is used in # amz_testsuite tests if self.service in ['s3', 'host']: full_path = unquote(full_path) full_path = quote(full_path, safe=safe_chars) if qs: qm = b'?' if PY2 else '?' full_path = qm.join((full_path, qs)) if PY2: full_path = unicode(full_path) return full_path
[ "Generate", "the", "canonical", "path", "as", "per", "AWS4", "auth", "requirements", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L592-L628
[ "def", "amz_cano_path", "(", "self", ",", "path", ")", ":", "safe_chars", "=", "'/~'", "qs", "=", "''", "fixed_path", "=", "path", "if", "'?'", "in", "fixed_path", ":", "fixed_path", ",", "qs", "=", "fixed_path", ".", "split", "(", "'?'", ",", "1", ")", "fixed_path", "=", "posixpath", ".", "normpath", "(", "fixed_path", ")", "fixed_path", "=", "re", ".", "sub", "(", "'/+'", ",", "'/'", ",", "fixed_path", ")", "if", "path", ".", "endswith", "(", "'/'", ")", "and", "not", "fixed_path", ".", "endswith", "(", "'/'", ")", ":", "fixed_path", "+=", "'/'", "full_path", "=", "fixed_path", "# If Python 2, switch to working entirely in str as quote() has problems", "# with Unicode", "if", "PY2", ":", "full_path", "=", "full_path", ".", "encode", "(", "'utf-8'", ")", "safe_chars", "=", "safe_chars", ".", "encode", "(", "'utf-8'", ")", "qs", "=", "qs", ".", "encode", "(", "'utf-8'", ")", "# S3 seems to require unquoting first. 'host' service is used in", "# amz_testsuite tests", "if", "self", ".", "service", "in", "[", "'s3'", ",", "'host'", "]", ":", "full_path", "=", "unquote", "(", "full_path", ")", "full_path", "=", "quote", "(", "full_path", ",", "safe", "=", "safe_chars", ")", "if", "qs", ":", "qm", "=", "b'?'", "if", "PY2", "else", "'?'", "full_path", "=", "qm", ".", "join", "(", "(", "full_path", ",", "qs", ")", ")", "if", "PY2", ":", "full_path", "=", "unicode", "(", "full_path", ")", "return", "full_path" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4Auth.amz_cano_querystring
Parse and format querystring as per AWS4 auth requirements. Perform percent quoting as needed. qs -- querystring
requests_aws4auth/aws4auth.py
def amz_cano_querystring(qs): """ Parse and format querystring as per AWS4 auth requirements. Perform percent quoting as needed. qs -- querystring """ safe_qs_amz_chars = '&=+' safe_qs_unresvd = '-_.~' # If Python 2, switch to working entirely in str # as quote() has problems with Unicode if PY2: qs = qs.encode('utf-8') safe_qs_amz_chars = safe_qs_amz_chars.encode() safe_qs_unresvd = safe_qs_unresvd.encode() qs = unquote(qs) space = b' ' if PY2 else ' ' qs = qs.split(space)[0] qs = quote(qs, safe=safe_qs_amz_chars) qs_items = {} for name, vals in parse_qs(qs, keep_blank_values=True).items(): name = quote(name, safe=safe_qs_unresvd) vals = [quote(val, safe=safe_qs_unresvd) for val in vals] qs_items[name] = vals qs_strings = [] for name, vals in qs_items.items(): for val in vals: qs_strings.append('='.join([name, val])) qs = '&'.join(sorted(qs_strings)) if PY2: qs = unicode(qs) return qs
def amz_cano_querystring(qs): """ Parse and format querystring as per AWS4 auth requirements. Perform percent quoting as needed. qs -- querystring """ safe_qs_amz_chars = '&=+' safe_qs_unresvd = '-_.~' # If Python 2, switch to working entirely in str # as quote() has problems with Unicode if PY2: qs = qs.encode('utf-8') safe_qs_amz_chars = safe_qs_amz_chars.encode() safe_qs_unresvd = safe_qs_unresvd.encode() qs = unquote(qs) space = b' ' if PY2 else ' ' qs = qs.split(space)[0] qs = quote(qs, safe=safe_qs_amz_chars) qs_items = {} for name, vals in parse_qs(qs, keep_blank_values=True).items(): name = quote(name, safe=safe_qs_unresvd) vals = [quote(val, safe=safe_qs_unresvd) for val in vals] qs_items[name] = vals qs_strings = [] for name, vals in qs_items.items(): for val in vals: qs_strings.append('='.join([name, val])) qs = '&'.join(sorted(qs_strings)) if PY2: qs = unicode(qs) return qs
[ "Parse", "and", "format", "querystring", "as", "per", "AWS4", "auth", "requirements", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4auth.py#L631-L664
[ "def", "amz_cano_querystring", "(", "qs", ")", ":", "safe_qs_amz_chars", "=", "'&=+'", "safe_qs_unresvd", "=", "'-_.~'", "# If Python 2, switch to working entirely in str", "# as quote() has problems with Unicode", "if", "PY2", ":", "qs", "=", "qs", ".", "encode", "(", "'utf-8'", ")", "safe_qs_amz_chars", "=", "safe_qs_amz_chars", ".", "encode", "(", ")", "safe_qs_unresvd", "=", "safe_qs_unresvd", ".", "encode", "(", ")", "qs", "=", "unquote", "(", "qs", ")", "space", "=", "b' '", "if", "PY2", "else", "' '", "qs", "=", "qs", ".", "split", "(", "space", ")", "[", "0", "]", "qs", "=", "quote", "(", "qs", ",", "safe", "=", "safe_qs_amz_chars", ")", "qs_items", "=", "{", "}", "for", "name", ",", "vals", "in", "parse_qs", "(", "qs", ",", "keep_blank_values", "=", "True", ")", ".", "items", "(", ")", ":", "name", "=", "quote", "(", "name", ",", "safe", "=", "safe_qs_unresvd", ")", "vals", "=", "[", "quote", "(", "val", ",", "safe", "=", "safe_qs_unresvd", ")", "for", "val", "in", "vals", "]", "qs_items", "[", "name", "]", "=", "vals", "qs_strings", "=", "[", "]", "for", "name", ",", "vals", "in", "qs_items", ".", "items", "(", ")", ":", "for", "val", "in", "vals", ":", "qs_strings", ".", "append", "(", "'='", ".", "join", "(", "[", "name", ",", "val", "]", ")", ")", "qs", "=", "'&'", ".", "join", "(", "sorted", "(", "qs_strings", ")", ")", "if", "PY2", ":", "qs", "=", "unicode", "(", "qs", ")", "return", "qs" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4SigningKey.generate_key
Generate the signing key string as bytes. If intermediate is set to True, returns a 4-tuple containing the key and the intermediate keys: ( signing_key, date_key, region_key, service_key ) The intermediate keys can be used for testing against examples from Amazon.
requests_aws4auth/aws4signingkey.py
def generate_key(cls, secret_key, region, service, date, intermediates=False): """ Generate the signing key string as bytes. If intermediate is set to True, returns a 4-tuple containing the key and the intermediate keys: ( signing_key, date_key, region_key, service_key ) The intermediate keys can be used for testing against examples from Amazon. """ init_key = ('AWS4' + secret_key).encode('utf-8') date_key = cls.sign_sha256(init_key, date) region_key = cls.sign_sha256(date_key, region) service_key = cls.sign_sha256(region_key, service) key = cls.sign_sha256(service_key, 'aws4_request') if intermediates: return (key, date_key, region_key, service_key) else: return key
def generate_key(cls, secret_key, region, service, date, intermediates=False): """ Generate the signing key string as bytes. If intermediate is set to True, returns a 4-tuple containing the key and the intermediate keys: ( signing_key, date_key, region_key, service_key ) The intermediate keys can be used for testing against examples from Amazon. """ init_key = ('AWS4' + secret_key).encode('utf-8') date_key = cls.sign_sha256(init_key, date) region_key = cls.sign_sha256(date_key, region) service_key = cls.sign_sha256(region_key, service) key = cls.sign_sha256(service_key, 'aws4_request') if intermediates: return (key, date_key, region_key, service_key) else: return key
[ "Generate", "the", "signing", "key", "string", "as", "bytes", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4signingkey.py#L100-L122
[ "def", "generate_key", "(", "cls", ",", "secret_key", ",", "region", ",", "service", ",", "date", ",", "intermediates", "=", "False", ")", ":", "init_key", "=", "(", "'AWS4'", "+", "secret_key", ")", ".", "encode", "(", "'utf-8'", ")", "date_key", "=", "cls", ".", "sign_sha256", "(", "init_key", ",", "date", ")", "region_key", "=", "cls", ".", "sign_sha256", "(", "date_key", ",", "region", ")", "service_key", "=", "cls", ".", "sign_sha256", "(", "region_key", ",", "service", ")", "key", "=", "cls", ".", "sign_sha256", "(", "service_key", ",", "'aws4_request'", ")", "if", "intermediates", ":", "return", "(", "key", ",", "date_key", ",", "region_key", ",", "service_key", ")", "else", ":", "return", "key" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
AWS4SigningKey.sign_sha256
Generate an SHA256 HMAC, encoding msg to UTF-8 if not already encoded. key -- signing key. bytes. msg -- message to sign. unicode or bytes.
requests_aws4auth/aws4signingkey.py
def sign_sha256(key, msg): """ Generate an SHA256 HMAC, encoding msg to UTF-8 if not already encoded. key -- signing key. bytes. msg -- message to sign. unicode or bytes. """ if isinstance(msg, text_type): msg = msg.encode('utf-8') return hmac.new(key, msg, hashlib.sha256).digest()
def sign_sha256(key, msg): """ Generate an SHA256 HMAC, encoding msg to UTF-8 if not already encoded. key -- signing key. bytes. msg -- message to sign. unicode or bytes. """ if isinstance(msg, text_type): msg = msg.encode('utf-8') return hmac.new(key, msg, hashlib.sha256).digest()
[ "Generate", "an", "SHA256", "HMAC", "encoding", "msg", "to", "UTF", "-", "8", "if", "not", "already", "encoded", "." ]
sam-washington/requests-aws4auth
python
https://github.com/sam-washington/requests-aws4auth/blob/1201e470c6d5847b7fe42e937a55755e1895e72c/requests_aws4auth/aws4signingkey.py#L125-L136
[ "def", "sign_sha256", "(", "key", ",", "msg", ")", ":", "if", "isinstance", "(", "msg", ",", "text_type", ")", ":", "msg", "=", "msg", ".", "encode", "(", "'utf-8'", ")", "return", "hmac", ".", "new", "(", "key", ",", "msg", ",", "hashlib", ".", "sha256", ")", ".", "digest", "(", ")" ]
1201e470c6d5847b7fe42e937a55755e1895e72c
valid
_format_datetime
Convert a datetime object into a valid STIX timestamp string. 1. Convert to timezone-aware 2. Convert to UTC 3. Format in ISO format 4. Ensure correct precision a. Add subsecond value if non-zero and precision not defined 5. Add "Z"
taxii2client/__init__.py
def _format_datetime(dttm): """Convert a datetime object into a valid STIX timestamp string. 1. Convert to timezone-aware 2. Convert to UTC 3. Format in ISO format 4. Ensure correct precision a. Add subsecond value if non-zero and precision not defined 5. Add "Z" """ if dttm.tzinfo is None or dttm.tzinfo.utcoffset(dttm) is None: # dttm is timezone-naive; assume UTC zoned = pytz.utc.localize(dttm) else: zoned = dttm.astimezone(pytz.utc) ts = zoned.strftime("%Y-%m-%dT%H:%M:%S") ms = zoned.strftime("%f") precision = getattr(dttm, "precision", None) if precision == "second": pass # Already precise to the second elif precision == "millisecond": ts = ts + "." + ms[:3] elif zoned.microsecond > 0: ts = ts + "." + ms.rstrip("0") return ts + "Z"
def _format_datetime(dttm): """Convert a datetime object into a valid STIX timestamp string. 1. Convert to timezone-aware 2. Convert to UTC 3. Format in ISO format 4. Ensure correct precision a. Add subsecond value if non-zero and precision not defined 5. Add "Z" """ if dttm.tzinfo is None or dttm.tzinfo.utcoffset(dttm) is None: # dttm is timezone-naive; assume UTC zoned = pytz.utc.localize(dttm) else: zoned = dttm.astimezone(pytz.utc) ts = zoned.strftime("%Y-%m-%dT%H:%M:%S") ms = zoned.strftime("%f") precision = getattr(dttm, "precision", None) if precision == "second": pass # Already precise to the second elif precision == "millisecond": ts = ts + "." + ms[:3] elif zoned.microsecond > 0: ts = ts + "." + ms.rstrip("0") return ts + "Z"
[ "Convert", "a", "datetime", "object", "into", "a", "valid", "STIX", "timestamp", "string", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L48-L74
[ "def", "_format_datetime", "(", "dttm", ")", ":", "if", "dttm", ".", "tzinfo", "is", "None", "or", "dttm", ".", "tzinfo", ".", "utcoffset", "(", "dttm", ")", "is", "None", ":", "# dttm is timezone-naive; assume UTC", "zoned", "=", "pytz", ".", "utc", ".", "localize", "(", "dttm", ")", "else", ":", "zoned", "=", "dttm", ".", "astimezone", "(", "pytz", ".", "utc", ")", "ts", "=", "zoned", ".", "strftime", "(", "\"%Y-%m-%dT%H:%M:%S\"", ")", "ms", "=", "zoned", ".", "strftime", "(", "\"%f\"", ")", "precision", "=", "getattr", "(", "dttm", ",", "\"precision\"", ",", "None", ")", "if", "precision", "==", "\"second\"", ":", "pass", "# Already precise to the second", "elif", "precision", "==", "\"millisecond\"", ":", "ts", "=", "ts", "+", "\".\"", "+", "ms", "[", ":", "3", "]", "elif", "zoned", ".", "microsecond", ">", "0", ":", "ts", "=", "ts", "+", "\".\"", "+", "ms", ".", "rstrip", "(", "\"0\"", ")", "return", "ts", "+", "\"Z\"" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
_ensure_datetime_to_string
If maybe_dttm is a datetime instance, convert to a STIX-compliant string representation. Otherwise return the value unchanged.
taxii2client/__init__.py
def _ensure_datetime_to_string(maybe_dttm): """If maybe_dttm is a datetime instance, convert to a STIX-compliant string representation. Otherwise return the value unchanged.""" if isinstance(maybe_dttm, datetime.datetime): maybe_dttm = _format_datetime(maybe_dttm) return maybe_dttm
def _ensure_datetime_to_string(maybe_dttm): """If maybe_dttm is a datetime instance, convert to a STIX-compliant string representation. Otherwise return the value unchanged.""" if isinstance(maybe_dttm, datetime.datetime): maybe_dttm = _format_datetime(maybe_dttm) return maybe_dttm
[ "If", "maybe_dttm", "is", "a", "datetime", "instance", "convert", "to", "a", "STIX", "-", "compliant", "string", "representation", ".", "Otherwise", "return", "the", "value", "unchanged", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L77-L82
[ "def", "_ensure_datetime_to_string", "(", "maybe_dttm", ")", ":", "if", "isinstance", "(", "maybe_dttm", ",", "datetime", ".", "datetime", ")", ":", "maybe_dttm", "=", "_format_datetime", "(", "maybe_dttm", ")", "return", "maybe_dttm" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
_filter_kwargs_to_query_params
Convert API keyword args to a mapping of URL query parameters. Except for "added_after", all keywords are mapped to match filters, i.e. to a query parameter of the form "match[<kwarg>]". "added_after" is left alone, since it's a special filter, as defined in the spec. Each value can be a single value or iterable of values. "version" and "added_after" get special treatment, since they are timestamp-valued: datetime.datetime instances are supported and automatically converted to STIX-compliant strings. Other than that, all values must be strings. None values, empty lists, etc are silently ignored. Args: filter_kwargs: The filter information, as a mapping. Returns: query_params (dict): The query parameter map, mapping strings to strings.
taxii2client/__init__.py
def _filter_kwargs_to_query_params(filter_kwargs): """ Convert API keyword args to a mapping of URL query parameters. Except for "added_after", all keywords are mapped to match filters, i.e. to a query parameter of the form "match[<kwarg>]". "added_after" is left alone, since it's a special filter, as defined in the spec. Each value can be a single value or iterable of values. "version" and "added_after" get special treatment, since they are timestamp-valued: datetime.datetime instances are supported and automatically converted to STIX-compliant strings. Other than that, all values must be strings. None values, empty lists, etc are silently ignored. Args: filter_kwargs: The filter information, as a mapping. Returns: query_params (dict): The query parameter map, mapping strings to strings. """ query_params = {} for kwarg, arglist in six.iteritems(filter_kwargs): # If user passes an empty list, None, etc, silently skip? if not arglist: continue # force iterability, for the sake of code uniformity if not hasattr(arglist, "__iter__") or \ isinstance(arglist, six.string_types): arglist = arglist, if kwarg == "version": query_params["match[version]"] = ",".join( _ensure_datetime_to_string(val) for val in arglist ) elif kwarg == "added_after": if len(arglist) > 1: raise InvalidArgumentsError("No more than one value for filter" " 'added_after' may be given") query_params["added_after"] = ",".join( _ensure_datetime_to_string(val) for val in arglist ) else: query_params["match[" + kwarg + "]"] = ",".join(arglist) return query_params
def _filter_kwargs_to_query_params(filter_kwargs): """ Convert API keyword args to a mapping of URL query parameters. Except for "added_after", all keywords are mapped to match filters, i.e. to a query parameter of the form "match[<kwarg>]". "added_after" is left alone, since it's a special filter, as defined in the spec. Each value can be a single value or iterable of values. "version" and "added_after" get special treatment, since they are timestamp-valued: datetime.datetime instances are supported and automatically converted to STIX-compliant strings. Other than that, all values must be strings. None values, empty lists, etc are silently ignored. Args: filter_kwargs: The filter information, as a mapping. Returns: query_params (dict): The query parameter map, mapping strings to strings. """ query_params = {} for kwarg, arglist in six.iteritems(filter_kwargs): # If user passes an empty list, None, etc, silently skip? if not arglist: continue # force iterability, for the sake of code uniformity if not hasattr(arglist, "__iter__") or \ isinstance(arglist, six.string_types): arglist = arglist, if kwarg == "version": query_params["match[version]"] = ",".join( _ensure_datetime_to_string(val) for val in arglist ) elif kwarg == "added_after": if len(arglist) > 1: raise InvalidArgumentsError("No more than one value for filter" " 'added_after' may be given") query_params["added_after"] = ",".join( _ensure_datetime_to_string(val) for val in arglist ) else: query_params["match[" + kwarg + "]"] = ",".join(arglist) return query_params
[ "Convert", "API", "keyword", "args", "to", "a", "mapping", "of", "URL", "query", "parameters", ".", "Except", "for", "added_after", "all", "keywords", "are", "mapped", "to", "match", "filters", "i", ".", "e", ".", "to", "a", "query", "parameter", "of", "the", "form", "match", "[", "<kwarg", ">", "]", ".", "added_after", "is", "left", "alone", "since", "it", "s", "a", "special", "filter", "as", "defined", "in", "the", "spec", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L85-L134
[ "def", "_filter_kwargs_to_query_params", "(", "filter_kwargs", ")", ":", "query_params", "=", "{", "}", "for", "kwarg", ",", "arglist", "in", "six", ".", "iteritems", "(", "filter_kwargs", ")", ":", "# If user passes an empty list, None, etc, silently skip?", "if", "not", "arglist", ":", "continue", "# force iterability, for the sake of code uniformity", "if", "not", "hasattr", "(", "arglist", ",", "\"__iter__\"", ")", "or", "isinstance", "(", "arglist", ",", "six", ".", "string_types", ")", ":", "arglist", "=", "arglist", ",", "if", "kwarg", "==", "\"version\"", ":", "query_params", "[", "\"match[version]\"", "]", "=", "\",\"", ".", "join", "(", "_ensure_datetime_to_string", "(", "val", ")", "for", "val", "in", "arglist", ")", "elif", "kwarg", "==", "\"added_after\"", ":", "if", "len", "(", "arglist", ")", ">", "1", ":", "raise", "InvalidArgumentsError", "(", "\"No more than one value for filter\"", "\" 'added_after' may be given\"", ")", "query_params", "[", "\"added_after\"", "]", "=", "\",\"", ".", "join", "(", "_ensure_datetime_to_string", "(", "val", ")", "for", "val", "in", "arglist", ")", "else", ":", "query_params", "[", "\"match[\"", "+", "kwarg", "+", "\"]\"", "]", "=", "\",\"", ".", "join", "(", "arglist", ")", "return", "query_params" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
_to_json
Factors out some JSON parse code with error handling, to hopefully improve error messages. :param resp: A "requests" library response :return: Parsed JSON. :raises: InvalidJSONError If JSON parsing failed.
taxii2client/__init__.py
def _to_json(resp): """ Factors out some JSON parse code with error handling, to hopefully improve error messages. :param resp: A "requests" library response :return: Parsed JSON. :raises: InvalidJSONError If JSON parsing failed. """ try: return resp.json() except ValueError as e: # Maybe better to report the original request URL? six.raise_from(InvalidJSONError( "Invalid JSON was received from " + resp.request.url ), e)
def _to_json(resp): """ Factors out some JSON parse code with error handling, to hopefully improve error messages. :param resp: A "requests" library response :return: Parsed JSON. :raises: InvalidJSONError If JSON parsing failed. """ try: return resp.json() except ValueError as e: # Maybe better to report the original request URL? six.raise_from(InvalidJSONError( "Invalid JSON was received from " + resp.request.url ), e)
[ "Factors", "out", "some", "JSON", "parse", "code", "with", "error", "handling", "to", "hopefully", "improve", "error", "messages", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L1023-L1038
[ "def", "_to_json", "(", "resp", ")", ":", "try", ":", "return", "resp", ".", "json", "(", ")", "except", "ValueError", "as", "e", ":", "# Maybe better to report the original request URL?", "six", ".", "raise_from", "(", "InvalidJSONError", "(", "\"Invalid JSON was received from \"", "+", "resp", ".", "request", ".", "url", ")", ",", "e", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Status.refresh
Updates Status information
taxii2client/__init__.py
def refresh(self, accept=MEDIA_TYPE_TAXII_V20): """Updates Status information""" response = self.__raw = self._conn.get(self.url, headers={"Accept": accept}) self._populate_fields(**response)
def refresh(self, accept=MEDIA_TYPE_TAXII_V20): """Updates Status information""" response = self.__raw = self._conn.get(self.url, headers={"Accept": accept}) self._populate_fields(**response)
[ "Updates", "Status", "information" ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L235-L239
[ "def", "refresh", "(", "self", ",", "accept", "=", "MEDIA_TYPE_TAXII_V20", ")", ":", "response", "=", "self", ".", "__raw", "=", "self", ".", "_conn", ".", "get", "(", "self", ".", "url", ",", "headers", "=", "{", "\"Accept\"", ":", "accept", "}", ")", "self", ".", "_populate_fields", "(", "*", "*", "response", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Status.wait_until_final
It will poll the URL to grab the latest status resource in a given timeout and time interval. Args: poll_interval (int): how often to poll the status service. timeout (int): how long to poll the URL until giving up. Use <= 0 to wait forever
taxii2client/__init__.py
def wait_until_final(self, poll_interval=1, timeout=60): """It will poll the URL to grab the latest status resource in a given timeout and time interval. Args: poll_interval (int): how often to poll the status service. timeout (int): how long to poll the URL until giving up. Use <= 0 to wait forever """ start_time = time.time() elapsed = 0 while (self.status != "complete" and (timeout <= 0 or elapsed < timeout)): time.sleep(poll_interval) self.refresh() elapsed = time.time() - start_time
def wait_until_final(self, poll_interval=1, timeout=60): """It will poll the URL to grab the latest status resource in a given timeout and time interval. Args: poll_interval (int): how often to poll the status service. timeout (int): how long to poll the URL until giving up. Use <= 0 to wait forever """ start_time = time.time() elapsed = 0 while (self.status != "complete" and (timeout <= 0 or elapsed < timeout)): time.sleep(poll_interval) self.refresh() elapsed = time.time() - start_time
[ "It", "will", "poll", "the", "URL", "to", "grab", "the", "latest", "status", "resource", "in", "a", "given", "timeout", "and", "time", "interval", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L241-L257
[ "def", "wait_until_final", "(", "self", ",", "poll_interval", "=", "1", ",", "timeout", "=", "60", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "elapsed", "=", "0", "while", "(", "self", ".", "status", "!=", "\"complete\"", "and", "(", "timeout", "<=", "0", "or", "elapsed", "<", "timeout", ")", ")", ":", "time", ".", "sleep", "(", "poll_interval", ")", "self", ".", "refresh", "(", ")", "elapsed", "=", "time", ".", "time", "(", ")", "-", "start_time" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Status._validate_status
Validates Status information. Raises errors for required properties.
taxii2client/__init__.py
def _validate_status(self): """Validates Status information. Raises errors for required properties.""" if not self.id: msg = "No 'id' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if not self.status: msg = "No 'status' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if self.total_count is None: msg = "No 'total_count' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if self.success_count is None: msg = "No 'success_count' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if self.failure_count is None: msg = "No 'failure_count' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if self.pending_count is None: msg = "No 'pending_count' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if len(self.successes) != self.success_count: msg = "Found successes={}, but success_count={} in status '{}'" raise ValidationError(msg.format(self.successes, self.success_count, self.id)) if len(self.pendings) != self.pending_count: msg = "Found pendings={}, but pending_count={} in status '{}'" raise ValidationError(msg.format(self.pendings, self.pending_count, self.id)) if len(self.failures) != self.failure_count: msg = "Found failures={}, but failure_count={} in status '{}'" raise ValidationError(msg.format(self.failures, self.failure_count, self.id)) if (self.success_count + self.pending_count + self.failure_count != self.total_count): msg = ("(success_count={} + pending_count={} + " "failure_count={}) != total_count={} in status '{}'") raise ValidationError(msg.format(self.success_count, self.pending_count, self.failure_count, self.total_count, self.id))
def _validate_status(self): """Validates Status information. Raises errors for required properties.""" if not self.id: msg = "No 'id' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if not self.status: msg = "No 'status' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if self.total_count is None: msg = "No 'total_count' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if self.success_count is None: msg = "No 'success_count' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if self.failure_count is None: msg = "No 'failure_count' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if self.pending_count is None: msg = "No 'pending_count' in Status for request '{}'" raise ValidationError(msg.format(self.url)) if len(self.successes) != self.success_count: msg = "Found successes={}, but success_count={} in status '{}'" raise ValidationError(msg.format(self.successes, self.success_count, self.id)) if len(self.pendings) != self.pending_count: msg = "Found pendings={}, but pending_count={} in status '{}'" raise ValidationError(msg.format(self.pendings, self.pending_count, self.id)) if len(self.failures) != self.failure_count: msg = "Found failures={}, but failure_count={} in status '{}'" raise ValidationError(msg.format(self.failures, self.failure_count, self.id)) if (self.success_count + self.pending_count + self.failure_count != self.total_count): msg = ("(success_count={} + pending_count={} + " "failure_count={}) != total_count={} in status '{}'") raise ValidationError(msg.format(self.success_count, self.pending_count, self.failure_count, self.total_count, self.id))
[ "Validates", "Status", "information", ".", "Raises", "errors", "for", "required", "properties", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L280-L333
[ "def", "_validate_status", "(", "self", ")", ":", "if", "not", "self", ".", "id", ":", "msg", "=", "\"No 'id' in Status for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "not", "self", ".", "status", ":", "msg", "=", "\"No 'status' in Status for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "self", ".", "total_count", "is", "None", ":", "msg", "=", "\"No 'total_count' in Status for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "self", ".", "success_count", "is", "None", ":", "msg", "=", "\"No 'success_count' in Status for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "self", ".", "failure_count", "is", "None", ":", "msg", "=", "\"No 'failure_count' in Status for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "self", ".", "pending_count", "is", "None", ":", "msg", "=", "\"No 'pending_count' in Status for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "len", "(", "self", ".", "successes", ")", "!=", "self", ".", "success_count", ":", "msg", "=", "\"Found successes={}, but success_count={} in status '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "successes", ",", "self", ".", "success_count", ",", "self", ".", "id", ")", ")", "if", "len", "(", "self", ".", "pendings", ")", "!=", "self", ".", "pending_count", ":", "msg", "=", "\"Found pendings={}, but pending_count={} in status '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "pendings", ",", "self", ".", "pending_count", ",", "self", ".", "id", ")", ")", "if", "len", "(", "self", ".", "failures", ")", "!=", "self", ".", "failure_count", ":", "msg", "=", 
"\"Found failures={}, but failure_count={} in status '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "failures", ",", "self", ".", "failure_count", ",", "self", ".", "id", ")", ")", "if", "(", "self", ".", "success_count", "+", "self", ".", "pending_count", "+", "self", ".", "failure_count", "!=", "self", ".", "total_count", ")", ":", "msg", "=", "(", "\"(success_count={} + pending_count={} + \"", "\"failure_count={}) != total_count={} in status '{}'\"", ")", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "success_count", ",", "self", ".", "pending_count", ",", "self", ".", "failure_count", ",", "self", ".", "total_count", ",", "self", ".", "id", ")", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Collection._validate_collection
Validates Collection information. Raises errors for required properties.
taxii2client/__init__.py
def _validate_collection(self): """Validates Collection information. Raises errors for required properties.""" if not self._id: msg = "No 'id' in Collection for request '{}'" raise ValidationError(msg.format(self.url)) if not self._title: msg = "No 'title' in Collection for request '{}'" raise ValidationError(msg.format(self.url)) if self._can_read is None: msg = "No 'can_read' in Collection for request '{}'" raise ValidationError(msg.format(self.url)) if self._can_write is None: msg = "No 'can_write' in Collection for request '{}'" raise ValidationError(msg.format(self.url)) if self._id not in self.url: msg = "The collection '{}' does not match the url for queries '{}'" raise ValidationError(msg.format(self._id, self.url))
def _validate_collection(self): """Validates Collection information. Raises errors for required properties.""" if not self._id: msg = "No 'id' in Collection for request '{}'" raise ValidationError(msg.format(self.url)) if not self._title: msg = "No 'title' in Collection for request '{}'" raise ValidationError(msg.format(self.url)) if self._can_read is None: msg = "No 'can_read' in Collection for request '{}'" raise ValidationError(msg.format(self.url)) if self._can_write is None: msg = "No 'can_write' in Collection for request '{}'" raise ValidationError(msg.format(self.url)) if self._id not in self.url: msg = "The collection '{}' does not match the url for queries '{}'" raise ValidationError(msg.format(self._id, self.url))
[ "Validates", "Collection", "information", ".", "Raises", "errors", "for", "required", "properties", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L455-L476
[ "def", "_validate_collection", "(", "self", ")", ":", "if", "not", "self", ".", "_id", ":", "msg", "=", "\"No 'id' in Collection for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "not", "self", ".", "_title", ":", "msg", "=", "\"No 'title' in Collection for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "self", ".", "_can_read", "is", "None", ":", "msg", "=", "\"No 'can_read' in Collection for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "self", ".", "_can_write", "is", "None", ":", "msg", "=", "\"No 'can_write' in Collection for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "self", ".", "_id", "not", "in", "self", ".", "url", ":", "msg", "=", "\"The collection '{}' does not match the url for queries '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "_id", ",", "self", ".", "url", ")", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Collection.get_objects
Implement the ``Get Objects`` endpoint (section 5.3)
taxii2client/__init__.py
def get_objects(self, accept=MEDIA_TYPE_STIX_V20, **filter_kwargs): """Implement the ``Get Objects`` endpoint (section 5.3)""" self._verify_can_read() query_params = _filter_kwargs_to_query_params(filter_kwargs) return self._conn.get(self.objects_url, headers={"Accept": accept}, params=query_params)
def get_objects(self, accept=MEDIA_TYPE_STIX_V20, **filter_kwargs): """Implement the ``Get Objects`` endpoint (section 5.3)""" self._verify_can_read() query_params = _filter_kwargs_to_query_params(filter_kwargs) return self._conn.get(self.objects_url, headers={"Accept": accept}, params=query_params)
[ "Implement", "the", "Get", "Objects", "endpoint", "(", "section", "5", ".", "3", ")" ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L499-L504
[ "def", "get_objects", "(", "self", ",", "accept", "=", "MEDIA_TYPE_STIX_V20", ",", "*", "*", "filter_kwargs", ")", ":", "self", ".", "_verify_can_read", "(", ")", "query_params", "=", "_filter_kwargs_to_query_params", "(", "filter_kwargs", ")", "return", "self", ".", "_conn", ".", "get", "(", "self", ".", "objects_url", ",", "headers", "=", "{", "\"Accept\"", ":", "accept", "}", ",", "params", "=", "query_params", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Collection.get_object
Implement the ``Get an Object`` endpoint (section 5.5)
taxii2client/__init__.py
def get_object(self, obj_id, version=None, accept=MEDIA_TYPE_STIX_V20): """Implement the ``Get an Object`` endpoint (section 5.5)""" self._verify_can_read() url = self.objects_url + str(obj_id) + "/" query_params = None if version: query_params = _filter_kwargs_to_query_params({"version": version}) return self._conn.get(url, headers={"Accept": accept}, params=query_params)
def get_object(self, obj_id, version=None, accept=MEDIA_TYPE_STIX_V20): """Implement the ``Get an Object`` endpoint (section 5.5)""" self._verify_can_read() url = self.objects_url + str(obj_id) + "/" query_params = None if version: query_params = _filter_kwargs_to_query_params({"version": version}) return self._conn.get(url, headers={"Accept": accept}, params=query_params)
[ "Implement", "the", "Get", "an", "Object", "endpoint", "(", "section", "5", ".", "5", ")" ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L506-L514
[ "def", "get_object", "(", "self", ",", "obj_id", ",", "version", "=", "None", ",", "accept", "=", "MEDIA_TYPE_STIX_V20", ")", ":", "self", ".", "_verify_can_read", "(", ")", "url", "=", "self", ".", "objects_url", "+", "str", "(", "obj_id", ")", "+", "\"/\"", "query_params", "=", "None", "if", "version", ":", "query_params", "=", "_filter_kwargs_to_query_params", "(", "{", "\"version\"", ":", "version", "}", ")", "return", "self", ".", "_conn", ".", "get", "(", "url", ",", "headers", "=", "{", "\"Accept\"", ":", "accept", "}", ",", "params", "=", "query_params", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Collection.add_objects
Implement the ``Add Objects`` endpoint (section 5.4) Add objects to the collection. This may be performed either synchronously or asynchronously. To add asynchronously, set wait_for_completion to False. If False, the latter two args are unused. If the caller wishes to monitor the status of the addition, it may do so in its own way. To add synchronously, set wait_for_completion to True, and optionally set the poll and timeout intervals. After initiating the addition, the caller will block, and the TAXII "status" service will be polled until the timeout expires, or the operation completes. Args: bundle: A STIX bundle with the objects to add (string, dict, binary) wait_for_completion (bool): Whether to wait for the add operation to complete before returning poll_interval (int): If waiting for completion, how often to poll the status service (seconds) timeout (int): If waiting for completion, how long to poll until giving up (seconds). Use <= 0 to wait forever accept (str): media type to include in the ``Accept:`` header. content_type (str): media type to include in the ``Content-Type:`` header. Returns: If ``wait_for_completion`` is False, a Status object corresponding to the initial status data returned from the service, is returned. The status may not yet be complete at this point. If ``wait_for_completion`` is True, a Status object corresponding to the completed operation is returned if it didn't time out; otherwise a Status object corresponding to the most recent data obtained before the timeout, is returned.
taxii2client/__init__.py
def add_objects(self, bundle, wait_for_completion=True, poll_interval=1, timeout=60, accept=MEDIA_TYPE_TAXII_V20, content_type=MEDIA_TYPE_STIX_V20): """Implement the ``Add Objects`` endpoint (section 5.4) Add objects to the collection. This may be performed either synchronously or asynchronously. To add asynchronously, set wait_for_completion to False. If False, the latter two args are unused. If the caller wishes to monitor the status of the addition, it may do so in its own way. To add synchronously, set wait_for_completion to True, and optionally set the poll and timeout intervals. After initiating the addition, the caller will block, and the TAXII "status" service will be polled until the timeout expires, or the operation completes. Args: bundle: A STIX bundle with the objects to add (string, dict, binary) wait_for_completion (bool): Whether to wait for the add operation to complete before returning poll_interval (int): If waiting for completion, how often to poll the status service (seconds) timeout (int): If waiting for completion, how long to poll until giving up (seconds). Use <= 0 to wait forever accept (str): media type to include in the ``Accept:`` header. content_type (str): media type to include in the ``Content-Type:`` header. Returns: If ``wait_for_completion`` is False, a Status object corresponding to the initial status data returned from the service, is returned. The status may not yet be complete at this point. If ``wait_for_completion`` is True, a Status object corresponding to the completed operation is returned if it didn't time out; otherwise a Status object corresponding to the most recent data obtained before the timeout, is returned. 
""" self._verify_can_write() headers = { "Accept": accept, "Content-Type": content_type, } if isinstance(bundle, dict): json_text = json.dumps(bundle, ensure_ascii=False) data = json_text.encode("utf-8") elif isinstance(bundle, six.text_type): data = bundle.encode("utf-8") elif isinstance(bundle, six.binary_type): data = bundle else: raise TypeError("Don't know how to handle type '{}'".format( type(bundle).__name__)) status_json = self._conn.post(self.objects_url, headers=headers, data=data) status_url = urlparse.urljoin( self.url, "../../status/{}".format(status_json["id"]) ) status = Status(url=status_url, conn=self._conn, status_info=status_json) if not wait_for_completion or status.status == "complete": return status status.wait_until_final(poll_interval, timeout) return status
def add_objects(self, bundle, wait_for_completion=True, poll_interval=1, timeout=60, accept=MEDIA_TYPE_TAXII_V20, content_type=MEDIA_TYPE_STIX_V20): """Implement the ``Add Objects`` endpoint (section 5.4) Add objects to the collection. This may be performed either synchronously or asynchronously. To add asynchronously, set wait_for_completion to False. If False, the latter two args are unused. If the caller wishes to monitor the status of the addition, it may do so in its own way. To add synchronously, set wait_for_completion to True, and optionally set the poll and timeout intervals. After initiating the addition, the caller will block, and the TAXII "status" service will be polled until the timeout expires, or the operation completes. Args: bundle: A STIX bundle with the objects to add (string, dict, binary) wait_for_completion (bool): Whether to wait for the add operation to complete before returning poll_interval (int): If waiting for completion, how often to poll the status service (seconds) timeout (int): If waiting for completion, how long to poll until giving up (seconds). Use <= 0 to wait forever accept (str): media type to include in the ``Accept:`` header. content_type (str): media type to include in the ``Content-Type:`` header. Returns: If ``wait_for_completion`` is False, a Status object corresponding to the initial status data returned from the service, is returned. The status may not yet be complete at this point. If ``wait_for_completion`` is True, a Status object corresponding to the completed operation is returned if it didn't time out; otherwise a Status object corresponding to the most recent data obtained before the timeout, is returned. 
""" self._verify_can_write() headers = { "Accept": accept, "Content-Type": content_type, } if isinstance(bundle, dict): json_text = json.dumps(bundle, ensure_ascii=False) data = json_text.encode("utf-8") elif isinstance(bundle, six.text_type): data = bundle.encode("utf-8") elif isinstance(bundle, six.binary_type): data = bundle else: raise TypeError("Don't know how to handle type '{}'".format( type(bundle).__name__)) status_json = self._conn.post(self.objects_url, headers=headers, data=data) status_url = urlparse.urljoin( self.url, "../../status/{}".format(status_json["id"]) ) status = Status(url=status_url, conn=self._conn, status_info=status_json) if not wait_for_completion or status.status == "complete": return status status.wait_until_final(poll_interval, timeout) return status
[ "Implement", "the", "Add", "Objects", "endpoint", "(", "section", "5", ".", "4", ")" ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L516-L591
[ "def", "add_objects", "(", "self", ",", "bundle", ",", "wait_for_completion", "=", "True", ",", "poll_interval", "=", "1", ",", "timeout", "=", "60", ",", "accept", "=", "MEDIA_TYPE_TAXII_V20", ",", "content_type", "=", "MEDIA_TYPE_STIX_V20", ")", ":", "self", ".", "_verify_can_write", "(", ")", "headers", "=", "{", "\"Accept\"", ":", "accept", ",", "\"Content-Type\"", ":", "content_type", ",", "}", "if", "isinstance", "(", "bundle", ",", "dict", ")", ":", "json_text", "=", "json", ".", "dumps", "(", "bundle", ",", "ensure_ascii", "=", "False", ")", "data", "=", "json_text", ".", "encode", "(", "\"utf-8\"", ")", "elif", "isinstance", "(", "bundle", ",", "six", ".", "text_type", ")", ":", "data", "=", "bundle", ".", "encode", "(", "\"utf-8\"", ")", "elif", "isinstance", "(", "bundle", ",", "six", ".", "binary_type", ")", ":", "data", "=", "bundle", "else", ":", "raise", "TypeError", "(", "\"Don't know how to handle type '{}'\"", ".", "format", "(", "type", "(", "bundle", ")", ".", "__name__", ")", ")", "status_json", "=", "self", ".", "_conn", ".", "post", "(", "self", ".", "objects_url", ",", "headers", "=", "headers", ",", "data", "=", "data", ")", "status_url", "=", "urlparse", ".", "urljoin", "(", "self", ".", "url", ",", "\"../../status/{}\"", ".", "format", "(", "status_json", "[", "\"id\"", "]", ")", ")", "status", "=", "Status", "(", "url", "=", "status_url", ",", "conn", "=", "self", ".", "_conn", ",", "status_info", "=", "status_json", ")", "if", "not", "wait_for_completion", "or", "status", ".", "status", "==", "\"complete\"", ":", "return", "status", "status", ".", "wait_until_final", "(", "poll_interval", ",", "timeout", ")", "return", "status" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Collection.get_manifest
Implement the ``Get Object Manifests`` endpoint (section 5.6).
taxii2client/__init__.py
def get_manifest(self, accept=MEDIA_TYPE_TAXII_V20, **filter_kwargs): """Implement the ``Get Object Manifests`` endpoint (section 5.6).""" self._verify_can_read() query_params = _filter_kwargs_to_query_params(filter_kwargs) return self._conn.get(self.url + "manifest/", headers={"Accept": accept}, params=query_params)
def get_manifest(self, accept=MEDIA_TYPE_TAXII_V20, **filter_kwargs): """Implement the ``Get Object Manifests`` endpoint (section 5.6).""" self._verify_can_read() query_params = _filter_kwargs_to_query_params(filter_kwargs) return self._conn.get(self.url + "manifest/", headers={"Accept": accept}, params=query_params)
[ "Implement", "the", "Get", "Object", "Manifests", "endpoint", "(", "section", "5", ".", "6", ")", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L593-L599
[ "def", "get_manifest", "(", "self", ",", "accept", "=", "MEDIA_TYPE_TAXII_V20", ",", "*", "*", "filter_kwargs", ")", ":", "self", ".", "_verify_can_read", "(", ")", "query_params", "=", "_filter_kwargs_to_query_params", "(", "filter_kwargs", ")", "return", "self", ".", "_conn", ".", "get", "(", "self", ".", "url", "+", "\"manifest/\"", ",", "headers", "=", "{", "\"Accept\"", ":", "accept", "}", ",", "params", "=", "query_params", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
ApiRoot._validate_api_root
Validates API Root information. Raises errors for required properties.
taxii2client/__init__.py
def _validate_api_root(self): """Validates API Root information. Raises errors for required properties.""" if not self._title: msg = "No 'title' in API Root for request '{}'" raise ValidationError(msg.format(self.url)) if not self._versions: msg = "No 'versions' in API Root for request '{}'" raise ValidationError(msg.format(self.url)) if self._max_content_length is None: msg = "No 'max_content_length' in API Root for request '{}'" raise ValidationError(msg.format(self.url))
def _validate_api_root(self): """Validates API Root information. Raises errors for required properties.""" if not self._title: msg = "No 'title' in API Root for request '{}'" raise ValidationError(msg.format(self.url)) if not self._versions: msg = "No 'versions' in API Root for request '{}'" raise ValidationError(msg.format(self.url)) if self._max_content_length is None: msg = "No 'max_content_length' in API Root for request '{}'" raise ValidationError(msg.format(self.url))
[ "Validates", "API", "Root", "information", ".", "Raises", "errors", "for", "required", "properties", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L681-L694
[ "def", "_validate_api_root", "(", "self", ")", ":", "if", "not", "self", ".", "_title", ":", "msg", "=", "\"No 'title' in API Root for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "not", "self", ".", "_versions", ":", "msg", "=", "\"No 'versions' in API Root for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")", "if", "self", ".", "_max_content_length", "is", "None", ":", "msg", "=", "\"No 'max_content_length' in API Root for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
ApiRoot.refresh
Update the API Root's information and list of Collections
taxii2client/__init__.py
def refresh(self, accept=MEDIA_TYPE_TAXII_V20): """Update the API Root's information and list of Collections""" self.refresh_information(accept) self.refresh_collections(accept)
def refresh(self, accept=MEDIA_TYPE_TAXII_V20): """Update the API Root's information and list of Collections""" self.refresh_information(accept) self.refresh_collections(accept)
[ "Update", "the", "API", "Root", "s", "information", "and", "list", "of", "Collections" ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L708-L711
[ "def", "refresh", "(", "self", ",", "accept", "=", "MEDIA_TYPE_TAXII_V20", ")", ":", "self", ".", "refresh_information", "(", "accept", ")", "self", ".", "refresh_collections", "(", "accept", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
ApiRoot.refresh_information
Update the properties of this API Root. This invokes the ``Get API Root Information`` endpoint.
taxii2client/__init__.py
def refresh_information(self, accept=MEDIA_TYPE_TAXII_V20): """Update the properties of this API Root. This invokes the ``Get API Root Information`` endpoint. """ response = self.__raw = self._conn.get(self.url, headers={"Accept": accept}) self._populate_fields(**response) self._loaded_information = True
def refresh_information(self, accept=MEDIA_TYPE_TAXII_V20): """Update the properties of this API Root. This invokes the ``Get API Root Information`` endpoint. """ response = self.__raw = self._conn.get(self.url, headers={"Accept": accept}) self._populate_fields(**response) self._loaded_information = True
[ "Update", "the", "properties", "of", "this", "API", "Root", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L713-L721
[ "def", "refresh_information", "(", "self", ",", "accept", "=", "MEDIA_TYPE_TAXII_V20", ")", ":", "response", "=", "self", ".", "__raw", "=", "self", ".", "_conn", ".", "get", "(", "self", ".", "url", ",", "headers", "=", "{", "\"Accept\"", ":", "accept", "}", ")", "self", ".", "_populate_fields", "(", "*", "*", "response", ")", "self", ".", "_loaded_information", "=", "True" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
ApiRoot.refresh_collections
Update the list of Collections contained by this API Root. This invokes the ``Get Collections`` endpoint.
taxii2client/__init__.py
def refresh_collections(self, accept=MEDIA_TYPE_TAXII_V20): """Update the list of Collections contained by this API Root. This invokes the ``Get Collections`` endpoint. """ url = self.url + "collections/" response = self._conn.get(url, headers={"Accept": accept}) self._collections = [] for item in response.get("collections", []): # optional collection_url = url + item["id"] + "/" collection = Collection(collection_url, conn=self._conn, collection_info=item) self._collections.append(collection) self._loaded_collections = True
def refresh_collections(self, accept=MEDIA_TYPE_TAXII_V20): """Update the list of Collections contained by this API Root. This invokes the ``Get Collections`` endpoint. """ url = self.url + "collections/" response = self._conn.get(url, headers={"Accept": accept}) self._collections = [] for item in response.get("collections", []): # optional collection_url = url + item["id"] + "/" collection = Collection(collection_url, conn=self._conn, collection_info=item) self._collections.append(collection) self._loaded_collections = True
[ "Update", "the", "list", "of", "Collections", "contained", "by", "this", "API", "Root", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L723-L738
[ "def", "refresh_collections", "(", "self", ",", "accept", "=", "MEDIA_TYPE_TAXII_V20", ")", ":", "url", "=", "self", ".", "url", "+", "\"collections/\"", "response", "=", "self", ".", "_conn", ".", "get", "(", "url", ",", "headers", "=", "{", "\"Accept\"", ":", "accept", "}", ")", "self", ".", "_collections", "=", "[", "]", "for", "item", "in", "response", ".", "get", "(", "\"collections\"", ",", "[", "]", ")", ":", "# optional", "collection_url", "=", "url", "+", "item", "[", "\"id\"", "]", "+", "\"/\"", "collection", "=", "Collection", "(", "collection_url", ",", "conn", "=", "self", ".", "_conn", ",", "collection_info", "=", "item", ")", "self", ".", "_collections", ".", "append", "(", "collection", ")", "self", ".", "_loaded_collections", "=", "True" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Server._validate_server
Validates server information. Raises errors for required properties.
taxii2client/__init__.py
def _validate_server(self): """Validates server information. Raises errors for required properties. """ if not self._title: msg = "No 'title' in Server Discovery for request '{}'" raise ValidationError(msg.format(self.url))
def _validate_server(self): """Validates server information. Raises errors for required properties. """ if not self._title: msg = "No 'title' in Server Discovery for request '{}'" raise ValidationError(msg.format(self.url))
[ "Validates", "server", "information", ".", "Raises", "errors", "for", "required", "properties", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L826-L831
[ "def", "_validate_server", "(", "self", ")", ":", "if", "not", "self", ".", "_title", ":", "msg", "=", "\"No 'title' in Server Discovery for request '{}'\"", "raise", "ValidationError", "(", "msg", ".", "format", "(", "self", ".", "url", ")", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
Server.refresh
Update the Server information and list of API Roots
taxii2client/__init__.py
def refresh(self): """Update the Server information and list of API Roots""" response = self.__raw = self._conn.get(self.url) self._populate_fields(**response) self._loaded = True
def refresh(self): """Update the Server information and list of API Roots""" response = self.__raw = self._conn.get(self.url) self._populate_fields(**response) self._loaded = True
[ "Update", "the", "Server", "information", "and", "list", "of", "API", "Roots" ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L856-L860
[ "def", "refresh", "(", "self", ")", ":", "response", "=", "self", ".", "__raw", "=", "self", ".", "_conn", ".", "get", "(", "self", ".", "url", ")", "self", ".", "_populate_fields", "(", "*", "*", "response", ")", "self", ".", "_loaded", "=", "True" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
_HTTPConnection.valid_content_type
Check that the server is returning a valid Content-Type Args: content_type (str): ``Content-Type:`` header value accept (str): media type to include in the ``Accept:`` header.
taxii2client/__init__.py
def valid_content_type(self, content_type, accept): """Check that the server is returning a valid Content-Type Args: content_type (str): ``Content-Type:`` header value accept (str): media type to include in the ``Accept:`` header. """ accept_tokens = accept.replace(' ', '').split(';') content_type_tokens = content_type.replace(' ', '').split(';') return ( all(elem in content_type_tokens for elem in accept_tokens) and (content_type_tokens[0] == 'application/vnd.oasis.taxii+json' or content_type_tokens[0] == 'application/vnd.oasis.stix+json') )
def valid_content_type(self, content_type, accept): """Check that the server is returning a valid Content-Type Args: content_type (str): ``Content-Type:`` header value accept (str): media type to include in the ``Accept:`` header. """ accept_tokens = accept.replace(' ', '').split(';') content_type_tokens = content_type.replace(' ', '').split(';') return ( all(elem in content_type_tokens for elem in accept_tokens) and (content_type_tokens[0] == 'application/vnd.oasis.taxii+json' or content_type_tokens[0] == 'application/vnd.oasis.stix+json') )
[ "Check", "that", "the", "server", "is", "returning", "a", "valid", "Content", "-", "Type" ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L902-L917
[ "def", "valid_content_type", "(", "self", ",", "content_type", ",", "accept", ")", ":", "accept_tokens", "=", "accept", ".", "replace", "(", "' '", ",", "''", ")", ".", "split", "(", "';'", ")", "content_type_tokens", "=", "content_type", ".", "replace", "(", "' '", ",", "''", ")", ".", "split", "(", "';'", ")", "return", "(", "all", "(", "elem", "in", "content_type_tokens", "for", "elem", "in", "accept_tokens", ")", "and", "(", "content_type_tokens", "[", "0", "]", "==", "'application/vnd.oasis.taxii+json'", "or", "content_type_tokens", "[", "0", "]", "==", "'application/vnd.oasis.stix+json'", ")", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
_HTTPConnection.get
Perform an HTTP GET, using the saved requests.Session and auth info. If "Accept" isn't one of the given headers, a default TAXII mime type is used. Regardless, the response type is checked against the accept header value, and an exception is raised if they don't match. Args: url (str): URL to retrieve headers (dict): Any other headers to be added to the request. params: dictionary or bytes to be sent in the query string for the request. (optional)
taxii2client/__init__.py
def get(self, url, headers=None, params=None): """Perform an HTTP GET, using the saved requests.Session and auth info. If "Accept" isn't one of the given headers, a default TAXII mime type is used. Regardless, the response type is checked against the accept header value, and an exception is raised if they don't match. Args: url (str): URL to retrieve headers (dict): Any other headers to be added to the request. params: dictionary or bytes to be sent in the query string for the request. (optional) """ merged_headers = self._merge_headers(headers) if "Accept" not in merged_headers: merged_headers["Accept"] = MEDIA_TYPE_TAXII_V20 accept = merged_headers["Accept"] resp = self.session.get(url, headers=merged_headers, params=params) resp.raise_for_status() content_type = resp.headers["Content-Type"] if not self.valid_content_type(content_type=content_type, accept=accept): msg = "Unexpected Response. Got Content-Type: '{}' for Accept: '{}'" raise TAXIIServiceException(msg.format(content_type, accept)) return _to_json(resp)
def get(self, url, headers=None, params=None): """Perform an HTTP GET, using the saved requests.Session and auth info. If "Accept" isn't one of the given headers, a default TAXII mime type is used. Regardless, the response type is checked against the accept header value, and an exception is raised if they don't match. Args: url (str): URL to retrieve headers (dict): Any other headers to be added to the request. params: dictionary or bytes to be sent in the query string for the request. (optional) """ merged_headers = self._merge_headers(headers) if "Accept" not in merged_headers: merged_headers["Accept"] = MEDIA_TYPE_TAXII_V20 accept = merged_headers["Accept"] resp = self.session.get(url, headers=merged_headers, params=params) resp.raise_for_status() content_type = resp.headers["Content-Type"] if not self.valid_content_type(content_type=content_type, accept=accept): msg = "Unexpected Response. Got Content-Type: '{}' for Accept: '{}'" raise TAXIIServiceException(msg.format(content_type, accept)) return _to_json(resp)
[ "Perform", "an", "HTTP", "GET", "using", "the", "saved", "requests", ".", "Session", "and", "auth", "info", ".", "If", "Accept", "isn", "t", "one", "of", "the", "given", "headers", "a", "default", "TAXII", "mime", "type", "is", "used", ".", "Regardless", "the", "response", "type", "is", "checked", "against", "the", "accept", "header", "value", "and", "an", "exception", "is", "raised", "if", "they", "don", "t", "match", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L919-L949
[ "def", "get", "(", "self", ",", "url", ",", "headers", "=", "None", ",", "params", "=", "None", ")", ":", "merged_headers", "=", "self", ".", "_merge_headers", "(", "headers", ")", "if", "\"Accept\"", "not", "in", "merged_headers", ":", "merged_headers", "[", "\"Accept\"", "]", "=", "MEDIA_TYPE_TAXII_V20", "accept", "=", "merged_headers", "[", "\"Accept\"", "]", "resp", "=", "self", ".", "session", ".", "get", "(", "url", ",", "headers", "=", "merged_headers", ",", "params", "=", "params", ")", "resp", ".", "raise_for_status", "(", ")", "content_type", "=", "resp", ".", "headers", "[", "\"Content-Type\"", "]", "if", "not", "self", ".", "valid_content_type", "(", "content_type", "=", "content_type", ",", "accept", "=", "accept", ")", ":", "msg", "=", "\"Unexpected Response. Got Content-Type: '{}' for Accept: '{}'\"", "raise", "TAXIIServiceException", "(", "msg", ".", "format", "(", "content_type", ",", "accept", ")", ")", "return", "_to_json", "(", "resp", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
_HTTPConnection.post
Send a JSON POST request with the given request headers, additional URL query parameters, and the given JSON in the request body. The extra query parameters are merged with any which already exist in the URL. The 'json' and 'data' parameters may not both be given. Args: url (str): URL to retrieve headers (dict): Any other headers to be added to the request. params: dictionary or bytes to be sent in the query string for the request. (optional) json: json to send in the body of the Request. This must be a JSON-serializable object. (optional) data: raw request body data. May be a dictionary, list of tuples, bytes, or file-like object to send in the body of the Request. (optional)
taxii2client/__init__.py
def post(self, url, headers=None, params=None, **kwargs): """Send a JSON POST request with the given request headers, additional URL query parameters, and the given JSON in the request body. The extra query parameters are merged with any which already exist in the URL. The 'json' and 'data' parameters may not both be given. Args: url (str): URL to retrieve headers (dict): Any other headers to be added to the request. params: dictionary or bytes to be sent in the query string for the request. (optional) json: json to send in the body of the Request. This must be a JSON-serializable object. (optional) data: raw request body data. May be a dictionary, list of tuples, bytes, or file-like object to send in the body of the Request. (optional) """ if len(kwargs) > 1: raise InvalidArgumentsError("Too many extra args ({} > 1)".format( len(kwargs))) if kwargs: kwarg = next(iter(kwargs)) if kwarg not in ("json", "data"): raise InvalidArgumentsError("Invalid kwarg: " + kwarg) resp = self.session.post(url, headers=headers, params=params, **kwargs) resp.raise_for_status() return _to_json(resp)
def post(self, url, headers=None, params=None, **kwargs): """Send a JSON POST request with the given request headers, additional URL query parameters, and the given JSON in the request body. The extra query parameters are merged with any which already exist in the URL. The 'json' and 'data' parameters may not both be given. Args: url (str): URL to retrieve headers (dict): Any other headers to be added to the request. params: dictionary or bytes to be sent in the query string for the request. (optional) json: json to send in the body of the Request. This must be a JSON-serializable object. (optional) data: raw request body data. May be a dictionary, list of tuples, bytes, or file-like object to send in the body of the Request. (optional) """ if len(kwargs) > 1: raise InvalidArgumentsError("Too many extra args ({} > 1)".format( len(kwargs))) if kwargs: kwarg = next(iter(kwargs)) if kwarg not in ("json", "data"): raise InvalidArgumentsError("Invalid kwarg: " + kwarg) resp = self.session.post(url, headers=headers, params=params, **kwargs) resp.raise_for_status() return _to_json(resp)
[ "Send", "a", "JSON", "POST", "request", "with", "the", "given", "request", "headers", "additional", "URL", "query", "parameters", "and", "the", "given", "JSON", "in", "the", "request", "body", ".", "The", "extra", "query", "parameters", "are", "merged", "with", "any", "which", "already", "exist", "in", "the", "URL", ".", "The", "json", "and", "data", "parameters", "may", "not", "both", "be", "given", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L951-L980
[ "def", "post", "(", "self", ",", "url", ",", "headers", "=", "None", ",", "params", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "kwargs", ")", ">", "1", ":", "raise", "InvalidArgumentsError", "(", "\"Too many extra args ({} > 1)\"", ".", "format", "(", "len", "(", "kwargs", ")", ")", ")", "if", "kwargs", ":", "kwarg", "=", "next", "(", "iter", "(", "kwargs", ")", ")", "if", "kwarg", "not", "in", "(", "\"json\"", ",", "\"data\"", ")", ":", "raise", "InvalidArgumentsError", "(", "\"Invalid kwarg: \"", "+", "kwarg", ")", "resp", "=", "self", ".", "session", ".", "post", "(", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "*", "*", "kwargs", ")", "resp", ".", "raise_for_status", "(", ")", "return", "_to_json", "(", "resp", ")" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
_HTTPConnection._merge_headers
Merge headers from different sources together. Headers passed to the post/get methods have highest priority, then headers associated with the connection object itself have next priority. :param call_specific_headers: A header dict from the get/post call, or None (the default for those methods). :return: A key-case-insensitive MutableMapping object which contains the merged headers. (This doesn't actually return a dict.)
taxii2client/__init__.py
def _merge_headers(self, call_specific_headers): """ Merge headers from different sources together. Headers passed to the post/get methods have highest priority, then headers associated with the connection object itself have next priority. :param call_specific_headers: A header dict from the get/post call, or None (the default for those methods). :return: A key-case-insensitive MutableMapping object which contains the merged headers. (This doesn't actually return a dict.) """ # A case-insensitive mapping is necessary here so that there is # predictable behavior. If a plain dict were used, you'd get keys in # the merged dict which differ only in case. The requests library # would merge them internally, and it would be unpredictable which key # is chosen for the final set of headers. Another possible approach # would be to upper/lower-case everything, but this seemed easier. On # the other hand, I don't know if CaseInsensitiveDict is public API...? # First establish defaults merged_headers = requests.structures.CaseInsensitiveDict({ "User-Agent": self.user_agent }) # Then overlay with specifics from post/get methods if call_specific_headers: merged_headers.update(call_specific_headers) # Special "User-Agent" header check, to ensure one is always sent. # The call-specific overlay could have null'd out that header. if not merged_headers.get("User-Agent"): merged_headers["User-Agent"] = self.user_agent return merged_headers
def _merge_headers(self, call_specific_headers): """ Merge headers from different sources together. Headers passed to the post/get methods have highest priority, then headers associated with the connection object itself have next priority. :param call_specific_headers: A header dict from the get/post call, or None (the default for those methods). :return: A key-case-insensitive MutableMapping object which contains the merged headers. (This doesn't actually return a dict.) """ # A case-insensitive mapping is necessary here so that there is # predictable behavior. If a plain dict were used, you'd get keys in # the merged dict which differ only in case. The requests library # would merge them internally, and it would be unpredictable which key # is chosen for the final set of headers. Another possible approach # would be to upper/lower-case everything, but this seemed easier. On # the other hand, I don't know if CaseInsensitiveDict is public API...? # First establish defaults merged_headers = requests.structures.CaseInsensitiveDict({ "User-Agent": self.user_agent }) # Then overlay with specifics from post/get methods if call_specific_headers: merged_headers.update(call_specific_headers) # Special "User-Agent" header check, to ensure one is always sent. # The call-specific overlay could have null'd out that header. if not merged_headers.get("User-Agent"): merged_headers["User-Agent"] = self.user_agent return merged_headers
[ "Merge", "headers", "from", "different", "sources", "together", ".", "Headers", "passed", "to", "the", "post", "/", "get", "methods", "have", "highest", "priority", "then", "headers", "associated", "with", "the", "connection", "object", "itself", "have", "next", "priority", "." ]
oasis-open/cti-taxii-client
python
https://github.com/oasis-open/cti-taxii-client/blob/b4c037fb61d8b8892af34423e2c67c81218d6f8e/taxii2client/__init__.py#L986-L1020
[ "def", "_merge_headers", "(", "self", ",", "call_specific_headers", ")", ":", "# A case-insensitive mapping is necessary here so that there is", "# predictable behavior. If a plain dict were used, you'd get keys in", "# the merged dict which differ only in case. The requests library", "# would merge them internally, and it would be unpredictable which key", "# is chosen for the final set of headers. Another possible approach", "# would be to upper/lower-case everything, but this seemed easier. On", "# the other hand, I don't know if CaseInsensitiveDict is public API...?", "# First establish defaults", "merged_headers", "=", "requests", ".", "structures", ".", "CaseInsensitiveDict", "(", "{", "\"User-Agent\"", ":", "self", ".", "user_agent", "}", ")", "# Then overlay with specifics from post/get methods", "if", "call_specific_headers", ":", "merged_headers", ".", "update", "(", "call_specific_headers", ")", "# Special \"User-Agent\" header check, to ensure one is always sent.", "# The call-specific overlay could have null'd out that header.", "if", "not", "merged_headers", ".", "get", "(", "\"User-Agent\"", ")", ":", "merged_headers", "[", "\"User-Agent\"", "]", "=", "self", ".", "user_agent", "return", "merged_headers" ]
b4c037fb61d8b8892af34423e2c67c81218d6f8e
valid
total_memory
Returns the the amount of memory available for use. The memory is obtained from MemTotal entry in /proc/meminfo. Notes ===== This function is not very useful and not very portable.
sharedmem/sharedmem.py
def total_memory(): """ Returns the the amount of memory available for use. The memory is obtained from MemTotal entry in /proc/meminfo. Notes ===== This function is not very useful and not very portable. """ with file('/proc/meminfo', 'r') as f: for line in f: words = line.split() if words[0].upper() == 'MEMTOTAL:': return int(words[1]) * 1024 raise IOError('MemTotal unknown')
def total_memory(): """ Returns the the amount of memory available for use. The memory is obtained from MemTotal entry in /proc/meminfo. Notes ===== This function is not very useful and not very portable. """ with file('/proc/meminfo', 'r') as f: for line in f: words = line.split() if words[0].upper() == 'MEMTOTAL:': return int(words[1]) * 1024 raise IOError('MemTotal unknown')
[ "Returns", "the", "the", "amount", "of", "memory", "available", "for", "use", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L186-L201
[ "def", "total_memory", "(", ")", ":", "with", "file", "(", "'/proc/meminfo'", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "words", "=", "line", ".", "split", "(", ")", "if", "words", "[", "0", "]", ".", "upper", "(", ")", "==", "'MEMTOTAL:'", ":", "return", "int", "(", "words", "[", "1", "]", ")", "*", "1024", "raise", "IOError", "(", "'MemTotal unknown'", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
cpu_count
Returns the default number of slave processes to be spawned. The default value is the number of physical cpu cores seen by python. :code:`OMP_NUM_THREADS` environment variable overrides it. On PBS/torque systems if OMP_NUM_THREADS is empty, we try to use the value of :code:`PBS_NUM_PPN` variable. Notes ----- On some machines the physical number of cores does not equal the number of cpus shall be used. PSC Blacklight for example.
sharedmem/sharedmem.py
def cpu_count(): """ Returns the default number of slave processes to be spawned. The default value is the number of physical cpu cores seen by python. :code:`OMP_NUM_THREADS` environment variable overrides it. On PBS/torque systems if OMP_NUM_THREADS is empty, we try to use the value of :code:`PBS_NUM_PPN` variable. Notes ----- On some machines the physical number of cores does not equal the number of cpus shall be used. PSC Blacklight for example. """ num = os.getenv("OMP_NUM_THREADS") if num is None: num = os.getenv("PBS_NUM_PPN") try: return int(num) except: return multiprocessing.cpu_count()
def cpu_count(): """ Returns the default number of slave processes to be spawned. The default value is the number of physical cpu cores seen by python. :code:`OMP_NUM_THREADS` environment variable overrides it. On PBS/torque systems if OMP_NUM_THREADS is empty, we try to use the value of :code:`PBS_NUM_PPN` variable. Notes ----- On some machines the physical number of cores does not equal the number of cpus shall be used. PSC Blacklight for example. """ num = os.getenv("OMP_NUM_THREADS") if num is None: num = os.getenv("PBS_NUM_PPN") try: return int(num) except: return multiprocessing.cpu_count()
[ "Returns", "the", "default", "number", "of", "slave", "processes", "to", "be", "spawned", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L203-L224
[ "def", "cpu_count", "(", ")", ":", "num", "=", "os", ".", "getenv", "(", "\"OMP_NUM_THREADS\"", ")", "if", "num", "is", "None", ":", "num", "=", "os", ".", "getenv", "(", "\"PBS_NUM_PPN\"", ")", "try", ":", "return", "int", "(", "num", ")", "except", ":", "return", "multiprocessing", ".", "cpu_count", "(", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
empty_like
Create a shared memory array from the shape of array.
sharedmem/sharedmem.py
def empty_like(array, dtype=None): """ Create a shared memory array from the shape of array. """ array = numpy.asarray(array) if dtype is None: dtype = array.dtype return anonymousmemmap(array.shape, dtype)
def empty_like(array, dtype=None): """ Create a shared memory array from the shape of array. """ array = numpy.asarray(array) if dtype is None: dtype = array.dtype return anonymousmemmap(array.shape, dtype)
[ "Create", "a", "shared", "memory", "array", "from", "the", "shape", "of", "array", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L785-L791
[ "def", "empty_like", "(", "array", ",", "dtype", "=", "None", ")", ":", "array", "=", "numpy", ".", "asarray", "(", "array", ")", "if", "dtype", "is", "None", ":", "dtype", "=", "array", ".", "dtype", "return", "anonymousmemmap", "(", "array", ".", "shape", ",", "dtype", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
full_like
Create a shared memory array with the same shape and type as a given array, filled with `value`.
sharedmem/sharedmem.py
def full_like(array, value, dtype=None): """ Create a shared memory array with the same shape and type as a given array, filled with `value`. """ shared = empty_like(array, dtype) shared[:] = value return shared
def full_like(array, value, dtype=None): """ Create a shared memory array with the same shape and type as a given array, filled with `value`. """ shared = empty_like(array, dtype) shared[:] = value return shared
[ "Create", "a", "shared", "memory", "array", "with", "the", "same", "shape", "and", "type", "as", "a", "given", "array", "filled", "with", "value", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L798-L803
[ "def", "full_like", "(", "array", ",", "value", ",", "dtype", "=", "None", ")", ":", "shared", "=", "empty_like", "(", "array", ",", "dtype", ")", "shared", "[", ":", "]", "=", "value", "return", "shared" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
full
Create a shared memory array of given shape and type, filled with `value`.
sharedmem/sharedmem.py
def full(shape, value, dtype='f8'): """ Create a shared memory array of given shape and type, filled with `value`. """ shared = empty(shape, dtype) shared[:] = value return shared
def full(shape, value, dtype='f8'): """ Create a shared memory array of given shape and type, filled with `value`. """ shared = empty(shape, dtype) shared[:] = value return shared
[ "Create", "a", "shared", "memory", "array", "of", "given", "shape", "and", "type", "filled", "with", "value", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L805-L810
[ "def", "full", "(", "shape", ",", "value", ",", "dtype", "=", "'f8'", ")", ":", "shared", "=", "empty", "(", "shape", ",", "dtype", ")", "shared", "[", ":", "]", "=", "value", "return", "shared" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
copy
Copy an array to the shared memory. Notes ----- copy is not always necessary because the private memory is always copy-on-write. Use :code:`a = copy(a)` to immediately dereference the old 'a' on private memory
sharedmem/sharedmem.py
def copy(a): """ Copy an array to the shared memory. Notes ----- copy is not always necessary because the private memory is always copy-on-write. Use :code:`a = copy(a)` to immediately dereference the old 'a' on private memory """ shared = anonymousmemmap(a.shape, dtype=a.dtype) shared[:] = a[:] return shared
def copy(a): """ Copy an array to the shared memory. Notes ----- copy is not always necessary because the private memory is always copy-on-write. Use :code:`a = copy(a)` to immediately dereference the old 'a' on private memory """ shared = anonymousmemmap(a.shape, dtype=a.dtype) shared[:] = a[:] return shared
[ "Copy", "an", "array", "to", "the", "shared", "memory", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L812-L823
[ "def", "copy", "(", "a", ")", ":", "shared", "=", "anonymousmemmap", "(", "a", ".", "shape", ",", "dtype", "=", "a", ".", "dtype", ")", "shared", "[", ":", "]", "=", "a", "[", ":", "]", "return", "shared" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
ProcessGroup.get
Protected get. Get an item from Q. Will block. but if the process group has errors, raise an StopProcessGroup exception. A slave process will terminate upon StopProcessGroup. The master process shall read the error from the process group.
sharedmem/sharedmem.py
def get(self, Q): """ Protected get. Get an item from Q. Will block. but if the process group has errors, raise an StopProcessGroup exception. A slave process will terminate upon StopProcessGroup. The master process shall read the error from the process group. """ while self.Errors.empty(): try: return Q.get(timeout=1) except queue.Empty: # check if the process group is dead if not self.is_alive(): # todo : can be graceful, in which # case the last item shall have been # flushed to Q. try: return Q.get(timeout=0) except queue.Empty: raise StopProcessGroup else: continue else: raise StopProcessGroup
def get(self, Q): """ Protected get. Get an item from Q. Will block. but if the process group has errors, raise an StopProcessGroup exception. A slave process will terminate upon StopProcessGroup. The master process shall read the error from the process group. """ while self.Errors.empty(): try: return Q.get(timeout=1) except queue.Empty: # check if the process group is dead if not self.is_alive(): # todo : can be graceful, in which # case the last item shall have been # flushed to Q. try: return Q.get(timeout=0) except queue.Empty: raise StopProcessGroup else: continue else: raise StopProcessGroup
[ "Protected", "get", ".", "Get", "an", "item", "from", "Q", ".", "Will", "block", ".", "but", "if", "the", "process", "group", "has", "errors", "raise", "an", "StopProcessGroup", "exception", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L404-L429
[ "def", "get", "(", "self", ",", "Q", ")", ":", "while", "self", ".", "Errors", ".", "empty", "(", ")", ":", "try", ":", "return", "Q", ".", "get", "(", "timeout", "=", "1", ")", "except", "queue", ".", "Empty", ":", "# check if the process group is dead", "if", "not", "self", ".", "is_alive", "(", ")", ":", "# todo : can be graceful, in which", "# case the last item shall have been", "# flushed to Q.", "try", ":", "return", "Q", ".", "get", "(", "timeout", "=", "0", ")", "except", "queue", ".", "Empty", ":", "raise", "StopProcessGroup", "else", ":", "continue", "else", ":", "raise", "StopProcessGroup" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
background.wait
Wait and join the child process. The return value of the function call is returned. If any exception occurred it is wrapped and raised.
sharedmem/sharedmem.py
def wait(self): """ Wait and join the child process. The return value of the function call is returned. If any exception occurred it is wrapped and raised. """ e, r = self.result.get() self.slave.join() self.slave = None self.result = None if isinstance(e, Exception): raise SlaveException(e, r) return r
def wait(self): """ Wait and join the child process. The return value of the function call is returned. If any exception occurred it is wrapped and raised. """ e, r = self.result.get() self.slave.join() self.slave = None self.result = None if isinstance(e, Exception): raise SlaveException(e, r) return r
[ "Wait", "and", "join", "the", "child", "process", ".", "The", "return", "value", "of", "the", "function", "call", "is", "returned", ".", "If", "any", "exception", "occurred", "it", "is", "wrapped", "and", "raised", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L546-L557
[ "def", "wait", "(", "self", ")", ":", "e", ",", "r", "=", "self", ".", "result", ".", "get", "(", ")", "self", ".", "slave", ".", "join", "(", ")", "self", ".", "slave", "=", "None", "self", ".", "result", "=", "None", "if", "isinstance", "(", "e", ",", "Exception", ")", ":", "raise", "SlaveException", "(", "e", ",", "r", ")", "return", "r" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
MapReduce.map
Map-reduce with multile processes. Apply func to each item on the sequence, in parallel. As the results are collected, reduce is called on the result. The reduced result is returned as a list. Parameters ---------- func : callable The function to call. It must accept the same number of arguments as the length of an item in the sequence. .. warning:: func is not supposed to use exceptions for flow control. In non-debug mode all exceptions will be wrapped into a :py:class:`SlaveException`. sequence : list or array_like The sequence of arguments to be applied to func. reduce : callable, optional Apply an reduction operation on the return values of func. If func returns a tuple, they are treated as positional arguments of reduce. star : boolean if True, the items in sequence are treated as positional arguments of reduce. minlength: integer Minimal length of `sequence` to start parallel processing. if len(sequence) < minlength, fall back to sequential processing. This can be used to avoid the overhead of starting the worker processes when there is little work. Returns ------- results : list The list of reduced results from the map operation, in the order of the arguments of sequence. Raises ------ SlaveException If any of the slave process encounters an exception. Inspect :py:attr:`SlaveException.reason` for the underlying exception.
sharedmem/sharedmem.py
def map(self, func, sequence, reduce=None, star=False, minlength=0): """ Map-reduce with multile processes. Apply func to each item on the sequence, in parallel. As the results are collected, reduce is called on the result. The reduced result is returned as a list. Parameters ---------- func : callable The function to call. It must accept the same number of arguments as the length of an item in the sequence. .. warning:: func is not supposed to use exceptions for flow control. In non-debug mode all exceptions will be wrapped into a :py:class:`SlaveException`. sequence : list or array_like The sequence of arguments to be applied to func. reduce : callable, optional Apply an reduction operation on the return values of func. If func returns a tuple, they are treated as positional arguments of reduce. star : boolean if True, the items in sequence are treated as positional arguments of reduce. minlength: integer Minimal length of `sequence` to start parallel processing. if len(sequence) < minlength, fall back to sequential processing. This can be used to avoid the overhead of starting the worker processes when there is little work. Returns ------- results : list The list of reduced results from the map operation, in the order of the arguments of sequence. Raises ------ SlaveException If any of the slave process encounters an exception. Inspect :py:attr:`SlaveException.reason` for the underlying exception. 
""" def realreduce(r): if reduce: if isinstance(r, tuple): return reduce(*r) else: return reduce(r) return r def realfunc(i): if star: return func(*i) else: return func(i) if len(sequence) <= 0 or self.np == 0 or get_debug(): # Do this in serial self.local = lambda : None self.local.rank = 0 rt = [realreduce(realfunc(i)) for i in sequence] self.local = None return rt # never use more than len(sequence) processes np = min([self.np, len(sequence)]) Q = self.backend.QueueFactory(64) R = self.backend.QueueFactory(64) self.ordered.reset() pg = ProcessGroup(main=self._main, np=np, backend=self.backend, args=(Q, R, sequence, realfunc)) pg.start() L = [] N = [] def feeder(pg, Q, N): # will fail silently if any error occurs. j = 0 try: for i, work in enumerate(sequence): if not hasattr(sequence, '__getitem__'): pg.put(Q, (i, work)) else: pg.put(Q, (i, )) j = j + 1 N.append(j) for i in range(np): pg.put(Q, None) except StopProcessGroup: return finally: pass feeder = threading.Thread(None, feeder, args=(pg, Q, N)) feeder.start() # we run fetcher on main thread to catch exceptions # raised by reduce count = 0 try: while True: try: capsule = pg.get(R) except queue.Empty: continue except StopProcessGroup: raise pg.get_exception() capsule = capsule[0], realreduce(capsule[1]) heapq.heappush(L, capsule) count = count + 1 if len(N) > 0 and count == N[0]: # if finished feeding see if all # results have been obtained break rt = [] # R.close() # R.join_thread() while len(L) > 0: rt.append(heapq.heappop(L)[1]) pg.join() feeder.join() assert N[0] == len(rt) return rt except BaseException as e: pg.killall() pg.join() feeder.join() raise
def map(self, func, sequence, reduce=None, star=False, minlength=0): """ Map-reduce with multile processes. Apply func to each item on the sequence, in parallel. As the results are collected, reduce is called on the result. The reduced result is returned as a list. Parameters ---------- func : callable The function to call. It must accept the same number of arguments as the length of an item in the sequence. .. warning:: func is not supposed to use exceptions for flow control. In non-debug mode all exceptions will be wrapped into a :py:class:`SlaveException`. sequence : list or array_like The sequence of arguments to be applied to func. reduce : callable, optional Apply an reduction operation on the return values of func. If func returns a tuple, they are treated as positional arguments of reduce. star : boolean if True, the items in sequence are treated as positional arguments of reduce. minlength: integer Minimal length of `sequence` to start parallel processing. if len(sequence) < minlength, fall back to sequential processing. This can be used to avoid the overhead of starting the worker processes when there is little work. Returns ------- results : list The list of reduced results from the map operation, in the order of the arguments of sequence. Raises ------ SlaveException If any of the slave process encounters an exception. Inspect :py:attr:`SlaveException.reason` for the underlying exception. 
""" def realreduce(r): if reduce: if isinstance(r, tuple): return reduce(*r) else: return reduce(r) return r def realfunc(i): if star: return func(*i) else: return func(i) if len(sequence) <= 0 or self.np == 0 or get_debug(): # Do this in serial self.local = lambda : None self.local.rank = 0 rt = [realreduce(realfunc(i)) for i in sequence] self.local = None return rt # never use more than len(sequence) processes np = min([self.np, len(sequence)]) Q = self.backend.QueueFactory(64) R = self.backend.QueueFactory(64) self.ordered.reset() pg = ProcessGroup(main=self._main, np=np, backend=self.backend, args=(Q, R, sequence, realfunc)) pg.start() L = [] N = [] def feeder(pg, Q, N): # will fail silently if any error occurs. j = 0 try: for i, work in enumerate(sequence): if not hasattr(sequence, '__getitem__'): pg.put(Q, (i, work)) else: pg.put(Q, (i, )) j = j + 1 N.append(j) for i in range(np): pg.put(Q, None) except StopProcessGroup: return finally: pass feeder = threading.Thread(None, feeder, args=(pg, Q, N)) feeder.start() # we run fetcher on main thread to catch exceptions # raised by reduce count = 0 try: while True: try: capsule = pg.get(R) except queue.Empty: continue except StopProcessGroup: raise pg.get_exception() capsule = capsule[0], realreduce(capsule[1]) heapq.heappush(L, capsule) count = count + 1 if len(N) > 0 and count == N[0]: # if finished feeding see if all # results have been obtained break rt = [] # R.close() # R.join_thread() while len(L) > 0: rt.append(heapq.heappop(L)[1]) pg.join() feeder.join() assert N[0] == len(rt) return rt except BaseException as e: pg.killall() pg.join() feeder.join() raise
[ "Map", "-", "reduce", "with", "multile", "processes", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/sharedmem.py#L643-L782
[ "def", "map", "(", "self", ",", "func", ",", "sequence", ",", "reduce", "=", "None", ",", "star", "=", "False", ",", "minlength", "=", "0", ")", ":", "def", "realreduce", "(", "r", ")", ":", "if", "reduce", ":", "if", "isinstance", "(", "r", ",", "tuple", ")", ":", "return", "reduce", "(", "*", "r", ")", "else", ":", "return", "reduce", "(", "r", ")", "return", "r", "def", "realfunc", "(", "i", ")", ":", "if", "star", ":", "return", "func", "(", "*", "i", ")", "else", ":", "return", "func", "(", "i", ")", "if", "len", "(", "sequence", ")", "<=", "0", "or", "self", ".", "np", "==", "0", "or", "get_debug", "(", ")", ":", "# Do this in serial", "self", ".", "local", "=", "lambda", ":", "None", "self", ".", "local", ".", "rank", "=", "0", "rt", "=", "[", "realreduce", "(", "realfunc", "(", "i", ")", ")", "for", "i", "in", "sequence", "]", "self", ".", "local", "=", "None", "return", "rt", "# never use more than len(sequence) processes", "np", "=", "min", "(", "[", "self", ".", "np", ",", "len", "(", "sequence", ")", "]", ")", "Q", "=", "self", ".", "backend", ".", "QueueFactory", "(", "64", ")", "R", "=", "self", ".", "backend", ".", "QueueFactory", "(", "64", ")", "self", ".", "ordered", ".", "reset", "(", ")", "pg", "=", "ProcessGroup", "(", "main", "=", "self", ".", "_main", ",", "np", "=", "np", ",", "backend", "=", "self", ".", "backend", ",", "args", "=", "(", "Q", ",", "R", ",", "sequence", ",", "realfunc", ")", ")", "pg", ".", "start", "(", ")", "L", "=", "[", "]", "N", "=", "[", "]", "def", "feeder", "(", "pg", ",", "Q", ",", "N", ")", ":", "# will fail silently if any error occurs.", "j", "=", "0", "try", ":", "for", "i", ",", "work", "in", "enumerate", "(", "sequence", ")", ":", "if", "not", "hasattr", "(", "sequence", ",", "'__getitem__'", ")", ":", "pg", ".", "put", "(", "Q", ",", "(", "i", ",", "work", ")", ")", "else", ":", "pg", ".", "put", "(", "Q", ",", "(", "i", ",", ")", ")", "j", "=", "j", "+", "1", "N", ".", "append", "(", "j", ")", "for", "i", 
"in", "range", "(", "np", ")", ":", "pg", ".", "put", "(", "Q", ",", "None", ")", "except", "StopProcessGroup", ":", "return", "finally", ":", "pass", "feeder", "=", "threading", ".", "Thread", "(", "None", ",", "feeder", ",", "args", "=", "(", "pg", ",", "Q", ",", "N", ")", ")", "feeder", ".", "start", "(", ")", "# we run fetcher on main thread to catch exceptions", "# raised by reduce ", "count", "=", "0", "try", ":", "while", "True", ":", "try", ":", "capsule", "=", "pg", ".", "get", "(", "R", ")", "except", "queue", ".", "Empty", ":", "continue", "except", "StopProcessGroup", ":", "raise", "pg", ".", "get_exception", "(", ")", "capsule", "=", "capsule", "[", "0", "]", ",", "realreduce", "(", "capsule", "[", "1", "]", ")", "heapq", ".", "heappush", "(", "L", ",", "capsule", ")", "count", "=", "count", "+", "1", "if", "len", "(", "N", ")", ">", "0", "and", "count", "==", "N", "[", "0", "]", ":", "# if finished feeding see if all", "# results have been obtained", "break", "rt", "=", "[", "]", "# R.close()", "# R.join_thread()", "while", "len", "(", "L", ")", ">", "0", ":", "rt", ".", "append", "(", "heapq", ".", "heappop", "(", "L", ")", "[", "1", "]", ")", "pg", ".", "join", "(", ")", "feeder", ".", "join", "(", ")", "assert", "N", "[", "0", "]", "==", "len", "(", "rt", ")", "return", "rt", "except", "BaseException", "as", "e", ":", "pg", ".", "killall", "(", ")", "pg", ".", "join", "(", ")", "feeder", ".", "join", "(", ")", "raise" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
savetxt2
format of table header: # ID [type]:name(index) .... * number of items user's header is not prefixed by comment_character name of nested dtype elements are split by .
contrib/savetxt.py
def savetxt2(fname, X, delimiter=' ', newline='\n', comment_character='#', header='', save_dtype=False, fmt={}): """ format of table header: # ID [type]:name(index) .... * number of items user's header is not prefixed by comment_character name of nested dtype elements are split by . """ prefixfmt = {} for key in fmt: prefixfmt[key] = fmt[key] olddtype = X.dtype newdtype = flatten_dtype(numpy.dtype([('', (X.dtype, X.shape[1:]))])) X = X.view(dtype=newdtype) dtype = X.dtype X = numpy.atleast_1d(X.squeeze()) header2 = _mkheader(dtype) fmtstr = _mkfmtstr(dtype, prefixfmt, delimiter, _default_fmt) if hasattr(fname, 'write'): fh = fname cleanup = lambda : None else: fh = file(fname, 'w+') cleanup = lambda : fh.close() try: fh.write (header) if header[:-1] != newline: fh.write(newline) fh.write (comment_character) fh.write ('!') fh.write (header2) fh.write (delimiter) fh.write ('*%d' % len(X)) fh.write(newline) if save_dtype: fh.write (comment_character) fh.write ('?') fh.write (base64.b64encode(pickle.dumps(olddtype))) fh.write (newline) for row in X: fh.write(fmtstr % tuple(row)) fh.write(newline) if hasattr(fh, 'flush'): fh.flush() finally: cleanup()
def savetxt2(fname, X, delimiter=' ', newline='\n', comment_character='#', header='', save_dtype=False, fmt={}): """ format of table header: # ID [type]:name(index) .... * number of items user's header is not prefixed by comment_character name of nested dtype elements are split by . """ prefixfmt = {} for key in fmt: prefixfmt[key] = fmt[key] olddtype = X.dtype newdtype = flatten_dtype(numpy.dtype([('', (X.dtype, X.shape[1:]))])) X = X.view(dtype=newdtype) dtype = X.dtype X = numpy.atleast_1d(X.squeeze()) header2 = _mkheader(dtype) fmtstr = _mkfmtstr(dtype, prefixfmt, delimiter, _default_fmt) if hasattr(fname, 'write'): fh = fname cleanup = lambda : None else: fh = file(fname, 'w+') cleanup = lambda : fh.close() try: fh.write (header) if header[:-1] != newline: fh.write(newline) fh.write (comment_character) fh.write ('!') fh.write (header2) fh.write (delimiter) fh.write ('*%d' % len(X)) fh.write(newline) if save_dtype: fh.write (comment_character) fh.write ('?') fh.write (base64.b64encode(pickle.dumps(olddtype))) fh.write (newline) for row in X: fh.write(fmtstr % tuple(row)) fh.write(newline) if hasattr(fh, 'flush'): fh.flush() finally: cleanup()
[ "format", "of", "table", "header", ":" ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/contrib/savetxt.py#L14-L64
[ "def", "savetxt2", "(", "fname", ",", "X", ",", "delimiter", "=", "' '", ",", "newline", "=", "'\\n'", ",", "comment_character", "=", "'#'", ",", "header", "=", "''", ",", "save_dtype", "=", "False", ",", "fmt", "=", "{", "}", ")", ":", "prefixfmt", "=", "{", "}", "for", "key", "in", "fmt", ":", "prefixfmt", "[", "key", "]", "=", "fmt", "[", "key", "]", "olddtype", "=", "X", ".", "dtype", "newdtype", "=", "flatten_dtype", "(", "numpy", ".", "dtype", "(", "[", "(", "''", ",", "(", "X", ".", "dtype", ",", "X", ".", "shape", "[", "1", ":", "]", ")", ")", "]", ")", ")", "X", "=", "X", ".", "view", "(", "dtype", "=", "newdtype", ")", "dtype", "=", "X", ".", "dtype", "X", "=", "numpy", ".", "atleast_1d", "(", "X", ".", "squeeze", "(", ")", ")", "header2", "=", "_mkheader", "(", "dtype", ")", "fmtstr", "=", "_mkfmtstr", "(", "dtype", ",", "prefixfmt", ",", "delimiter", ",", "_default_fmt", ")", "if", "hasattr", "(", "fname", ",", "'write'", ")", ":", "fh", "=", "fname", "cleanup", "=", "lambda", ":", "None", "else", ":", "fh", "=", "file", "(", "fname", ",", "'w+'", ")", "cleanup", "=", "lambda", ":", "fh", ".", "close", "(", ")", "try", ":", "fh", ".", "write", "(", "header", ")", "if", "header", "[", ":", "-", "1", "]", "!=", "newline", ":", "fh", ".", "write", "(", "newline", ")", "fh", ".", "write", "(", "comment_character", ")", "fh", ".", "write", "(", "'!'", ")", "fh", ".", "write", "(", "header2", ")", "fh", ".", "write", "(", "delimiter", ")", "fh", ".", "write", "(", "'*%d'", "%", "len", "(", "X", ")", ")", "fh", ".", "write", "(", "newline", ")", "if", "save_dtype", ":", "fh", ".", "write", "(", "comment_character", ")", "fh", ".", "write", "(", "'?'", ")", "fh", ".", "write", "(", "base64", ".", "b64encode", "(", "pickle", ".", "dumps", "(", "olddtype", ")", ")", ")", "fh", ".", "write", "(", "newline", ")", "for", "row", "in", "X", ":", "fh", ".", "write", "(", "fmtstr", "%", "tuple", "(", "row", ")", ")", "fh", ".", "write", "(", "newline", ")", "if", 
"hasattr", "(", "fh", ",", "'flush'", ")", ":", "fh", ".", "flush", "(", ")", "finally", ":", "cleanup", "(", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
loadtxt2
Known issues delimiter and newline is not respected. string quotation with space is broken.
contrib/savetxt.py
def loadtxt2(fname, dtype=None, delimiter=' ', newline='\n', comment_character='#', skiplines=0): """ Known issues delimiter and newline is not respected. string quotation with space is broken. """ dtypert = [None, None, None] def preparedtype(dtype): dtypert[0] = dtype flatten = flatten_dtype(dtype) dtypert[1] = flatten dtypert[2] = numpy.dtype([('a', (numpy.int8, flatten.itemsize))]) buf = numpy.empty((), dtype=dtypert[1]) converters = [_default_conv[flatten[name].char] for name in flatten.names] return buf, converters, flatten.names def fileiter(fh): converters = [] buf = None if dtype is not None: buf, converters, names = preparedtype(dtype) yield None for lineno, line in enumerate(fh): if lineno < skiplines: continue if line[0] in comment_character: if buf is None and line[1] == '?': ddtype = pickle.loads(base64.b64decode(line[2:])) buf, converters, names = preparedtype(ddtype) yield None continue for word, c, name in zip(line.split(), converters, names): buf[name] = c(word) buf2 = buf.copy().view(dtype=dtypert[2]) yield buf2 if isinstance(fname, basestring): fh = file(fh, 'r') cleanup = lambda : fh.close() else: fh = iter(fname) cleanup = lambda : None try: i = fileiter(fh) i.next() return numpy.fromiter(i, dtype=dtypert[2]).view(dtype=dtypert[0]) finally: cleanup()
def loadtxt2(fname, dtype=None, delimiter=' ', newline='\n', comment_character='#', skiplines=0): """ Known issues delimiter and newline is not respected. string quotation with space is broken. """ dtypert = [None, None, None] def preparedtype(dtype): dtypert[0] = dtype flatten = flatten_dtype(dtype) dtypert[1] = flatten dtypert[2] = numpy.dtype([('a', (numpy.int8, flatten.itemsize))]) buf = numpy.empty((), dtype=dtypert[1]) converters = [_default_conv[flatten[name].char] for name in flatten.names] return buf, converters, flatten.names def fileiter(fh): converters = [] buf = None if dtype is not None: buf, converters, names = preparedtype(dtype) yield None for lineno, line in enumerate(fh): if lineno < skiplines: continue if line[0] in comment_character: if buf is None and line[1] == '?': ddtype = pickle.loads(base64.b64decode(line[2:])) buf, converters, names = preparedtype(ddtype) yield None continue for word, c, name in zip(line.split(), converters, names): buf[name] = c(word) buf2 = buf.copy().view(dtype=dtypert[2]) yield buf2 if isinstance(fname, basestring): fh = file(fh, 'r') cleanup = lambda : fh.close() else: fh = iter(fname) cleanup = lambda : None try: i = fileiter(fh) i.next() return numpy.fromiter(i, dtype=dtypert[2]).view(dtype=dtypert[0]) finally: cleanup()
[ "Known", "issues", "delimiter", "and", "newline", "is", "not", "respected", ".", "string", "quotation", "with", "space", "is", "broken", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/contrib/savetxt.py#L66-L114
[ "def", "loadtxt2", "(", "fname", ",", "dtype", "=", "None", ",", "delimiter", "=", "' '", ",", "newline", "=", "'\\n'", ",", "comment_character", "=", "'#'", ",", "skiplines", "=", "0", ")", ":", "dtypert", "=", "[", "None", ",", "None", ",", "None", "]", "def", "preparedtype", "(", "dtype", ")", ":", "dtypert", "[", "0", "]", "=", "dtype", "flatten", "=", "flatten_dtype", "(", "dtype", ")", "dtypert", "[", "1", "]", "=", "flatten", "dtypert", "[", "2", "]", "=", "numpy", ".", "dtype", "(", "[", "(", "'a'", ",", "(", "numpy", ".", "int8", ",", "flatten", ".", "itemsize", ")", ")", "]", ")", "buf", "=", "numpy", ".", "empty", "(", "(", ")", ",", "dtype", "=", "dtypert", "[", "1", "]", ")", "converters", "=", "[", "_default_conv", "[", "flatten", "[", "name", "]", ".", "char", "]", "for", "name", "in", "flatten", ".", "names", "]", "return", "buf", ",", "converters", ",", "flatten", ".", "names", "def", "fileiter", "(", "fh", ")", ":", "converters", "=", "[", "]", "buf", "=", "None", "if", "dtype", "is", "not", "None", ":", "buf", ",", "converters", ",", "names", "=", "preparedtype", "(", "dtype", ")", "yield", "None", "for", "lineno", ",", "line", "in", "enumerate", "(", "fh", ")", ":", "if", "lineno", "<", "skiplines", ":", "continue", "if", "line", "[", "0", "]", "in", "comment_character", ":", "if", "buf", "is", "None", "and", "line", "[", "1", "]", "==", "'?'", ":", "ddtype", "=", "pickle", ".", "loads", "(", "base64", ".", "b64decode", "(", "line", "[", "2", ":", "]", ")", ")", "buf", ",", "converters", ",", "names", "=", "preparedtype", "(", "ddtype", ")", "yield", "None", "continue", "for", "word", ",", "c", ",", "name", "in", "zip", "(", "line", ".", "split", "(", ")", ",", "converters", ",", "names", ")", ":", "buf", "[", "name", "]", "=", "c", "(", "word", ")", "buf2", "=", "buf", ".", "copy", "(", ")", ".", "view", "(", "dtype", "=", "dtypert", "[", "2", "]", ")", "yield", "buf2", "if", "isinstance", "(", "fname", ",", "basestring", ")", ":", "fh", "=", 
"file", "(", "fh", ",", "'r'", ")", "cleanup", "=", "lambda", ":", "fh", ".", "close", "(", ")", "else", ":", "fh", "=", "iter", "(", "fname", ")", "cleanup", "=", "lambda", ":", "None", "try", ":", "i", "=", "fileiter", "(", "fh", ")", "i", ".", "next", "(", ")", "return", "numpy", ".", "fromiter", "(", "i", ",", "dtype", "=", "dtypert", "[", "2", "]", ")", ".", "view", "(", "dtype", "=", "dtypert", "[", "0", "]", ")", "finally", ":", "cleanup", "(", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
flatten_dtype
Unpack a structured data-type.
contrib/savetxt.py
def flatten_dtype(dtype, _next=None): """ Unpack a structured data-type. """ types = [] if _next is None: _next = [0, ''] primary = True else: primary = False prefix = _next[1] if dtype.names is None: for i in numpy.ndindex(dtype.shape): if dtype.base == dtype: types.append(('%s%s' % (prefix, simplerepr(i)), dtype)) _next[0] += 1 else: _next[1] = '%s%s' % (prefix, simplerepr(i)) types.extend(flatten_dtype(dtype.base, _next)) else: for field in dtype.names: typ_fields = dtype.fields[field] if len(prefix) > 0: _next[1] = prefix + '.' + field else: _next[1] = '' + field flat_dt = flatten_dtype(typ_fields[0], _next) types.extend(flat_dt) _next[1] = prefix if primary: return numpy.dtype(types) else: return types
def flatten_dtype(dtype, _next=None): """ Unpack a structured data-type. """ types = [] if _next is None: _next = [0, ''] primary = True else: primary = False prefix = _next[1] if dtype.names is None: for i in numpy.ndindex(dtype.shape): if dtype.base == dtype: types.append(('%s%s' % (prefix, simplerepr(i)), dtype)) _next[0] += 1 else: _next[1] = '%s%s' % (prefix, simplerepr(i)) types.extend(flatten_dtype(dtype.base, _next)) else: for field in dtype.names: typ_fields = dtype.fields[field] if len(prefix) > 0: _next[1] = prefix + '.' + field else: _next[1] = '' + field flat_dt = flatten_dtype(typ_fields[0], _next) types.extend(flat_dt) _next[1] = prefix if primary: return numpy.dtype(types) else: return types
[ "Unpack", "a", "structured", "data", "-", "type", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/contrib/savetxt.py#L199-L232
[ "def", "flatten_dtype", "(", "dtype", ",", "_next", "=", "None", ")", ":", "types", "=", "[", "]", "if", "_next", "is", "None", ":", "_next", "=", "[", "0", ",", "''", "]", "primary", "=", "True", "else", ":", "primary", "=", "False", "prefix", "=", "_next", "[", "1", "]", "if", "dtype", ".", "names", "is", "None", ":", "for", "i", "in", "numpy", ".", "ndindex", "(", "dtype", ".", "shape", ")", ":", "if", "dtype", ".", "base", "==", "dtype", ":", "types", ".", "append", "(", "(", "'%s%s'", "%", "(", "prefix", ",", "simplerepr", "(", "i", ")", ")", ",", "dtype", ")", ")", "_next", "[", "0", "]", "+=", "1", "else", ":", "_next", "[", "1", "]", "=", "'%s%s'", "%", "(", "prefix", ",", "simplerepr", "(", "i", ")", ")", "types", ".", "extend", "(", "flatten_dtype", "(", "dtype", ".", "base", ",", "_next", ")", ")", "else", ":", "for", "field", "in", "dtype", ".", "names", ":", "typ_fields", "=", "dtype", ".", "fields", "[", "field", "]", "if", "len", "(", "prefix", ")", ">", "0", ":", "_next", "[", "1", "]", "=", "prefix", "+", "'.'", "+", "field", "else", ":", "_next", "[", "1", "]", "=", "''", "+", "field", "flat_dt", "=", "flatten_dtype", "(", "typ_fields", "[", "0", "]", ",", "_next", ")", "types", ".", "extend", "(", "flat_dt", ")", "_next", "[", "1", "]", "=", "prefix", "if", "primary", ":", "return", "numpy", ".", "dtype", "(", "types", ")", "else", ":", "return", "types" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
MetaOrdered
meta class for Ordered construct.
sharedmem/parallel.py
def MetaOrdered(parallel, done, turnstile): """meta class for Ordered construct.""" class Ordered: def __init__(self, iterref): if parallel.master: done[...] = 0 self.iterref = iterref parallel.barrier() @classmethod def abort(self): turnstile.release() def __enter__(self): while self.iterref != done: pass turnstile.acquire() return self def __exit__(self, *args): done[...] += 1 turnstile.release() return Ordered
def MetaOrdered(parallel, done, turnstile): """meta class for Ordered construct.""" class Ordered: def __init__(self, iterref): if parallel.master: done[...] = 0 self.iterref = iterref parallel.barrier() @classmethod def abort(self): turnstile.release() def __enter__(self): while self.iterref != done: pass turnstile.acquire() return self def __exit__(self, *args): done[...] += 1 turnstile.release() return Ordered
[ "meta", "class", "for", "Ordered", "construct", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/parallel.py#L508-L529
[ "def", "MetaOrdered", "(", "parallel", ",", "done", ",", "turnstile", ")", ":", "class", "Ordered", ":", "def", "__init__", "(", "self", ",", "iterref", ")", ":", "if", "parallel", ".", "master", ":", "done", "[", "...", "]", "=", "0", "self", ".", "iterref", "=", "iterref", "parallel", ".", "barrier", "(", ")", "@", "classmethod", "def", "abort", "(", "self", ")", ":", "turnstile", ".", "release", "(", ")", "def", "__enter__", "(", "self", ")", ":", "while", "self", ".", "iterref", "!=", "done", ":", "pass", "turnstile", ".", "acquire", "(", ")", "return", "self", "def", "__exit__", "(", "self", ",", "*", "args", ")", ":", "done", "[", "...", "]", "+=", "1", "turnstile", ".", "release", "(", ")", "return", "Ordered" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
SlaveMonitor.kill_all
kill all slaves and reap the monitor
sharedmem/parallel.py
def kill_all(self): """kill all slaves and reap the monitor """ for pid in self.children: try: os.kill(pid, signal.SIGTRAP) except OSError: continue self.join()
def kill_all(self): """kill all slaves and reap the monitor """ for pid in self.children: try: os.kill(pid, signal.SIGTRAP) except OSError: continue self.join()
[ "kill", "all", "slaves", "and", "reap", "the", "monitor" ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/parallel.py#L171-L178
[ "def", "kill_all", "(", "self", ")", ":", "for", "pid", "in", "self", ".", "children", ":", "try", ":", "os", ".", "kill", "(", "pid", ",", "signal", ".", "SIGTRAP", ")", "except", "OSError", ":", "continue", "self", ".", "join", "(", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
ErrorMonitor.join
master only
sharedmem/parallel.py
def join(self): """ master only """ try: self.pipe.put('Q') self.thread.join() except: pass finally: self.thread = None
def join(self): """ master only """ try: self.pipe.put('Q') self.thread.join() except: pass finally: self.thread = None
[ "master", "only" ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/parallel.py#L218-L226
[ "def", "join", "(", "self", ")", ":", "try", ":", "self", ".", "pipe", ".", "put", "(", "'Q'", ")", "self", ".", "thread", ".", "join", "(", ")", "except", ":", "pass", "finally", ":", "self", ".", "thread", "=", "None" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
ErrorMonitor.slaveraise
slave only
sharedmem/parallel.py
def slaveraise(self, type, error, traceback): """ slave only """ message = 'E' * 1 + pickle.dumps((type, ''.join(tb.format_exception(type, error, traceback)))) if self.pipe is not None: self.pipe.put(message)
def slaveraise(self, type, error, traceback): """ slave only """ message = 'E' * 1 + pickle.dumps((type, ''.join(tb.format_exception(type, error, traceback)))) if self.pipe is not None: self.pipe.put(message)
[ "slave", "only" ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/parallel.py#L228-L233
[ "def", "slaveraise", "(", "self", ",", "type", ",", "error", ",", "traceback", ")", ":", "message", "=", "'E'", "*", "1", "+", "pickle", ".", "dumps", "(", "(", "type", ",", "''", ".", "join", "(", "tb", ".", "format_exception", "(", "type", ",", "error", ",", "traceback", ")", ")", ")", ")", "if", "self", ".", "pipe", "is", "not", "None", ":", "self", ".", "pipe", ".", "put", "(", "message", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
Parallel.forloop
schedule can be (sch, chunk) or sch; sch is 'static', 'dynamic' or 'guided'. chunk defaults to 1 if ordered, create an ordred
sharedmem/parallel.py
def forloop(self, range, ordered=False, schedule=('static', 1)): """ schedule can be (sch, chunk) or sch; sch is 'static', 'dynamic' or 'guided'. chunk defaults to 1 if ordered, create an ordred """ if isinstance(schedule, tuple): schedule, chunk = schedule else: chunk = None if schedule == 'static': return self._StaticForLoop(range, ordered, chunk) elif schedule == 'dynamic': return self._DynamicForLoop(range, ordered, chunk, guided=False) elif schedule == 'guided': return self._DynamicForLoop(range, ordered, chunk, guided=True) else: raise "schedule unknown"
def forloop(self, range, ordered=False, schedule=('static', 1)): """ schedule can be (sch, chunk) or sch; sch is 'static', 'dynamic' or 'guided'. chunk defaults to 1 if ordered, create an ordred """ if isinstance(schedule, tuple): schedule, chunk = schedule else: chunk = None if schedule == 'static': return self._StaticForLoop(range, ordered, chunk) elif schedule == 'dynamic': return self._DynamicForLoop(range, ordered, chunk, guided=False) elif schedule == 'guided': return self._DynamicForLoop(range, ordered, chunk, guided=True) else: raise "schedule unknown"
[ "schedule", "can", "be", "(", "sch", "chunk", ")", "or", "sch", ";", "sch", "is", "static", "dynamic", "or", "guided", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/parallel.py#L354-L375
[ "def", "forloop", "(", "self", ",", "range", ",", "ordered", "=", "False", ",", "schedule", "=", "(", "'static'", ",", "1", ")", ")", ":", "if", "isinstance", "(", "schedule", ",", "tuple", ")", ":", "schedule", ",", "chunk", "=", "schedule", "else", ":", "chunk", "=", "None", "if", "schedule", "==", "'static'", ":", "return", "self", ".", "_StaticForLoop", "(", "range", ",", "ordered", ",", "chunk", ")", "elif", "schedule", "==", "'dynamic'", ":", "return", "self", ".", "_DynamicForLoop", "(", "range", ",", "ordered", ",", "chunk", ",", "guided", "=", "False", ")", "elif", "schedule", "==", "'guided'", ":", "return", "self", ".", "_DynamicForLoop", "(", "range", ",", "ordered", ",", "chunk", ",", "guided", "=", "True", ")", "else", ":", "raise", "\"schedule unknown\"" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
Barrier.abort
ensure the master exit from Barrier
sharedmem/parallel.py
def abort(self): """ ensure the master exit from Barrier """ self.mutex.release() self.turnstile.release() self.mutex.release() self.turnstile2.release()
def abort(self): """ ensure the master exit from Barrier """ self.mutex.release() self.turnstile.release() self.mutex.release() self.turnstile2.release()
[ "ensure", "the", "master", "exit", "from", "Barrier" ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/sharedmem/parallel.py#L387-L392
[ "def", "abort", "(", "self", ")", ":", "self", ".", "mutex", ".", "release", "(", ")", "self", ".", "turnstile", ".", "release", "(", ")", "self", ".", "mutex", ".", "release", "(", ")", "self", ".", "turnstile2", ".", "release", "(", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
MultiPartStream.read
return at most n array items, move the cursor.
contrib/multipartstream.py
def read(self, n): """ return at most n array items, move the cursor. """ while len(self.pool) < n: self.cur = self.files.next() self.pool = numpy.append(self.pool, self.fetch(self.cur), axis=0) rt = self.pool[:n] if n == len(self.pool): self.pool = self.fetch(None) else: self.pool = self.pool[n:] return rt
def read(self, n): """ return at most n array items, move the cursor. """ while len(self.pool) < n: self.cur = self.files.next() self.pool = numpy.append(self.pool, self.fetch(self.cur), axis=0) rt = self.pool[:n] if n == len(self.pool): self.pool = self.fetch(None) else: self.pool = self.pool[n:] return rt
[ "return", "at", "most", "n", "array", "items", "move", "the", "cursor", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/contrib/multipartstream.py#L31-L44
[ "def", "read", "(", "self", ",", "n", ")", ":", "while", "len", "(", "self", ".", "pool", ")", "<", "n", ":", "self", ".", "cur", "=", "self", ".", "files", ".", "next", "(", ")", "self", ".", "pool", "=", "numpy", ".", "append", "(", "self", ".", "pool", ",", "self", ".", "fetch", "(", "self", ".", "cur", ")", ",", "axis", "=", "0", ")", "rt", "=", "self", ".", "pool", "[", ":", "n", "]", "if", "n", "==", "len", "(", "self", ".", "pool", ")", ":", "self", ".", "pool", "=", "self", ".", "fetch", "(", "None", ")", "else", ":", "self", ".", "pool", "=", "self", ".", "pool", "[", "n", ":", "]", "return", "rt" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
pufunc.call
axis is the axis to chop it off. if self.altreduce is set, the results will be reduced with altreduce and returned otherwise will be saved to out, then return out.
contrib/array.py
def call(self, args, axis=0, out=None, chunksize=1024 * 1024, **kwargs): """ axis is the axis to chop it off. if self.altreduce is set, the results will be reduced with altreduce and returned otherwise will be saved to out, then return out. """ if self.altreduce is not None: ret = [None] else: if out is None : if self.outdtype is not None: dtype = self.outdtype else: try: dtype = numpy.result_type(*[args[i] for i in self.ins] * 2) except: dtype = None out = sharedmem.empty( numpy.broadcast(*[args[i] for i in self.ins] * 2).shape, dtype=dtype) if axis != 0: for i in self.ins: args[i] = numpy.rollaxis(args[i], axis) out = numpy.rollaxis(out, axis) size = numpy.max([len(args[i]) for i in self.ins]) with sharedmem.MapReduce() as pool: def work(i): sl = slice(i, i+chunksize) myargs = args[:] for j in self.ins: try: tmp = myargs[j][sl] a, b, c = sl.indices(len(args[j])) myargs[j] = tmp except Exception as e: print tmp print j, e pass if b == a: return None rt = self.ufunc(*myargs, **kwargs) if self.altreduce is not None: return rt else: out[sl] = rt def reduce(rt): if self.altreduce is None: return if ret[0] is None: ret[0] = rt elif rt is not None: ret[0] = self.altreduce(ret[0], rt) pool.map(work, range(0, size, chunksize), reduce=reduce) if self.altreduce is None: if axis != 0: out = numpy.rollaxis(out, 0, axis + 1) return out else: return ret[0]
def call(self, args, axis=0, out=None, chunksize=1024 * 1024, **kwargs): """ axis is the axis to chop it off. if self.altreduce is set, the results will be reduced with altreduce and returned otherwise will be saved to out, then return out. """ if self.altreduce is not None: ret = [None] else: if out is None : if self.outdtype is not None: dtype = self.outdtype else: try: dtype = numpy.result_type(*[args[i] for i in self.ins] * 2) except: dtype = None out = sharedmem.empty( numpy.broadcast(*[args[i] for i in self.ins] * 2).shape, dtype=dtype) if axis != 0: for i in self.ins: args[i] = numpy.rollaxis(args[i], axis) out = numpy.rollaxis(out, axis) size = numpy.max([len(args[i]) for i in self.ins]) with sharedmem.MapReduce() as pool: def work(i): sl = slice(i, i+chunksize) myargs = args[:] for j in self.ins: try: tmp = myargs[j][sl] a, b, c = sl.indices(len(args[j])) myargs[j] = tmp except Exception as e: print tmp print j, e pass if b == a: return None rt = self.ufunc(*myargs, **kwargs) if self.altreduce is not None: return rt else: out[sl] = rt def reduce(rt): if self.altreduce is None: return if ret[0] is None: ret[0] = rt elif rt is not None: ret[0] = self.altreduce(ret[0], rt) pool.map(work, range(0, size, chunksize), reduce=reduce) if self.altreduce is None: if axis != 0: out = numpy.rollaxis(out, 0, axis + 1) return out else: return ret[0]
[ "axis", "is", "the", "axis", "to", "chop", "it", "off", ".", "if", "self", ".", "altreduce", "is", "set", "the", "results", "will", "be", "reduced", "with", "altreduce", "and", "returned", "otherwise", "will", "be", "saved", "to", "out", "then", "return", "out", "." ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/contrib/array.py#L80-L139
[ "def", "call", "(", "self", ",", "args", ",", "axis", "=", "0", ",", "out", "=", "None", ",", "chunksize", "=", "1024", "*", "1024", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "altreduce", "is", "not", "None", ":", "ret", "=", "[", "None", "]", "else", ":", "if", "out", "is", "None", ":", "if", "self", ".", "outdtype", "is", "not", "None", ":", "dtype", "=", "self", ".", "outdtype", "else", ":", "try", ":", "dtype", "=", "numpy", ".", "result_type", "(", "*", "[", "args", "[", "i", "]", "for", "i", "in", "self", ".", "ins", "]", "*", "2", ")", "except", ":", "dtype", "=", "None", "out", "=", "sharedmem", ".", "empty", "(", "numpy", ".", "broadcast", "(", "*", "[", "args", "[", "i", "]", "for", "i", "in", "self", ".", "ins", "]", "*", "2", ")", ".", "shape", ",", "dtype", "=", "dtype", ")", "if", "axis", "!=", "0", ":", "for", "i", "in", "self", ".", "ins", ":", "args", "[", "i", "]", "=", "numpy", ".", "rollaxis", "(", "args", "[", "i", "]", ",", "axis", ")", "out", "=", "numpy", ".", "rollaxis", "(", "out", ",", "axis", ")", "size", "=", "numpy", ".", "max", "(", "[", "len", "(", "args", "[", "i", "]", ")", "for", "i", "in", "self", ".", "ins", "]", ")", "with", "sharedmem", ".", "MapReduce", "(", ")", "as", "pool", ":", "def", "work", "(", "i", ")", ":", "sl", "=", "slice", "(", "i", ",", "i", "+", "chunksize", ")", "myargs", "=", "args", "[", ":", "]", "for", "j", "in", "self", ".", "ins", ":", "try", ":", "tmp", "=", "myargs", "[", "j", "]", "[", "sl", "]", "a", ",", "b", ",", "c", "=", "sl", ".", "indices", "(", "len", "(", "args", "[", "j", "]", ")", ")", "myargs", "[", "j", "]", "=", "tmp", "except", "Exception", "as", "e", ":", "print", "tmp", "print", "j", ",", "e", "pass", "if", "b", "==", "a", ":", "return", "None", "rt", "=", "self", ".", "ufunc", "(", "*", "myargs", ",", "*", "*", "kwargs", ")", "if", "self", ".", "altreduce", "is", "not", "None", ":", "return", "rt", "else", ":", "out", "[", "sl", "]", "=", "rt", "def", "reduce", "(", 
"rt", ")", ":", "if", "self", ".", "altreduce", "is", "None", ":", "return", "if", "ret", "[", "0", "]", "is", "None", ":", "ret", "[", "0", "]", "=", "rt", "elif", "rt", "is", "not", "None", ":", "ret", "[", "0", "]", "=", "self", ".", "altreduce", "(", "ret", "[", "0", "]", ",", "rt", ")", "pool", ".", "map", "(", "work", ",", "range", "(", "0", ",", "size", ",", "chunksize", ")", ",", "reduce", "=", "reduce", ")", "if", "self", ".", "altreduce", "is", "None", ":", "if", "axis", "!=", "0", ":", "out", "=", "numpy", ".", "rollaxis", "(", "out", ",", "0", ",", "axis", "+", "1", ")", "return", "out", "else", ":", "return", "ret", "[", "0", "]" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
packarray.adapt
adapt source to a packarray according to the layout of template
contrib/array.py
def adapt(cls, source, template): """ adapt source to a packarray according to the layout of template """ if not isinstance(template, packarray): raise TypeError('template must be a packarray') return cls(source, template.start, template.end)
def adapt(cls, source, template): """ adapt source to a packarray according to the layout of template """ if not isinstance(template, packarray): raise TypeError('template must be a packarray') return cls(source, template.start, template.end)
[ "adapt", "source", "to", "a", "packarray", "according", "to", "the", "layout", "of", "template" ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/contrib/array.py#L201-L205
[ "def", "adapt", "(", "cls", ",", "source", ",", "template", ")", ":", "if", "not", "isinstance", "(", "template", ",", "packarray", ")", ":", "raise", "TypeError", "(", "'template must be a packarray'", ")", "return", "cls", "(", "source", ",", "template", ".", "start", ",", "template", ".", "end", ")" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
argsort
parallel argsort, like numpy.argsort use sizeof(intp) * len(data) as scratch space use baseargsort for serial sort ind = baseargsort(data) use argmerge to merge def argmerge(data, A, B, out): ensure data[out] is sorted and out[:] = A join B TODO: shall try to use the inplace merge mentioned in http://keithschwarz.com/interesting/code/?dir=inplace-merge.
contrib/sort.py
def argsort(data, out=None, chunksize=None, baseargsort=None, argmerge=None, np=None): """ parallel argsort, like numpy.argsort use sizeof(intp) * len(data) as scratch space use baseargsort for serial sort ind = baseargsort(data) use argmerge to merge def argmerge(data, A, B, out): ensure data[out] is sorted and out[:] = A join B TODO: shall try to use the inplace merge mentioned in http://keithschwarz.com/interesting/code/?dir=inplace-merge. """ if baseargsort is None: baseargsort = lambda x:x.argsort() if argmerge is None: argmerge = default_argmerge if chunksize is None: chunksize = 1024 * 1024 * 16 if out is None: arg1 = numpy.empty(len(data), dtype='intp') out = arg1 else: assert out.dtype == numpy.dtype('intp') assert len(out) == len(data) arg1 = out if np is None: np = sharedmem.cpu_count() if np <= 1 or len(data) < chunksize: out[:] = baseargsort(data) return out CHK = [slice(i, i + chunksize) for i in range(0, len(data), chunksize)] DUMMY = slice(len(data), len(data)) if len(CHK) % 2: CHK.append(DUMMY) with sharedmem.TPool() as pool: def work(i): C = CHK[i] start, stop, step = C.indices(len(data)) arg1[C] = baseargsort(data[C]) arg1[C] += start pool.map(work, range(len(CHK))) arg2 = numpy.empty_like(arg1) flip = 0 while len(CHK) > 1: with sharedmem.TPool() as pool: def work(i): C1 = CHK[i] C2 = CHK[i+1] start1, stop1, step1 = C1.indices(len(data)) start2, stop2, step2 = C2.indices(len(data)) # print 'argmerge', start1, stop1, start2, stop2 assert start2 == stop1 argmerge(data, arg1[C1], arg1[C2], arg2[start1:stop2]) return slice(start1, stop2) CHK = pool.map(work, range(0, len(CHK), 2)) arg1, arg2 = arg2, arg1 flip = flip + 1 if len(CHK) == 1: break if len(CHK) % 2: CHK.append(DUMMY) if flip % 2 != 0: # only even flips out ends up pointing to arg2 and needs to be # copied out[:] = arg1 return out
def argsort(data, out=None, chunksize=None, baseargsort=None, argmerge=None, np=None): """ parallel argsort, like numpy.argsort use sizeof(intp) * len(data) as scratch space use baseargsort for serial sort ind = baseargsort(data) use argmerge to merge def argmerge(data, A, B, out): ensure data[out] is sorted and out[:] = A join B TODO: shall try to use the inplace merge mentioned in http://keithschwarz.com/interesting/code/?dir=inplace-merge. """ if baseargsort is None: baseargsort = lambda x:x.argsort() if argmerge is None: argmerge = default_argmerge if chunksize is None: chunksize = 1024 * 1024 * 16 if out is None: arg1 = numpy.empty(len(data), dtype='intp') out = arg1 else: assert out.dtype == numpy.dtype('intp') assert len(out) == len(data) arg1 = out if np is None: np = sharedmem.cpu_count() if np <= 1 or len(data) < chunksize: out[:] = baseargsort(data) return out CHK = [slice(i, i + chunksize) for i in range(0, len(data), chunksize)] DUMMY = slice(len(data), len(data)) if len(CHK) % 2: CHK.append(DUMMY) with sharedmem.TPool() as pool: def work(i): C = CHK[i] start, stop, step = C.indices(len(data)) arg1[C] = baseargsort(data[C]) arg1[C] += start pool.map(work, range(len(CHK))) arg2 = numpy.empty_like(arg1) flip = 0 while len(CHK) > 1: with sharedmem.TPool() as pool: def work(i): C1 = CHK[i] C2 = CHK[i+1] start1, stop1, step1 = C1.indices(len(data)) start2, stop2, step2 = C2.indices(len(data)) # print 'argmerge', start1, stop1, start2, stop2 assert start2 == stop1 argmerge(data, arg1[C1], arg1[C2], arg2[start1:stop2]) return slice(start1, stop2) CHK = pool.map(work, range(0, len(CHK), 2)) arg1, arg2 = arg2, arg1 flip = flip + 1 if len(CHK) == 1: break if len(CHK) % 2: CHK.append(DUMMY) if flip % 2 != 0: # only even flips out ends up pointing to arg2 and needs to be # copied out[:] = arg1 return out
[ "parallel", "argsort", "like", "numpy", ".", "argsort" ]
rainwoodman/sharedmem
python
https://github.com/rainwoodman/sharedmem/blob/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a/contrib/sort.py#L27-L104
[ "def", "argsort", "(", "data", ",", "out", "=", "None", ",", "chunksize", "=", "None", ",", "baseargsort", "=", "None", ",", "argmerge", "=", "None", ",", "np", "=", "None", ")", ":", "if", "baseargsort", "is", "None", ":", "baseargsort", "=", "lambda", "x", ":", "x", ".", "argsort", "(", ")", "if", "argmerge", "is", "None", ":", "argmerge", "=", "default_argmerge", "if", "chunksize", "is", "None", ":", "chunksize", "=", "1024", "*", "1024", "*", "16", "if", "out", "is", "None", ":", "arg1", "=", "numpy", ".", "empty", "(", "len", "(", "data", ")", ",", "dtype", "=", "'intp'", ")", "out", "=", "arg1", "else", ":", "assert", "out", ".", "dtype", "==", "numpy", ".", "dtype", "(", "'intp'", ")", "assert", "len", "(", "out", ")", "==", "len", "(", "data", ")", "arg1", "=", "out", "if", "np", "is", "None", ":", "np", "=", "sharedmem", ".", "cpu_count", "(", ")", "if", "np", "<=", "1", "or", "len", "(", "data", ")", "<", "chunksize", ":", "out", "[", ":", "]", "=", "baseargsort", "(", "data", ")", "return", "out", "CHK", "=", "[", "slice", "(", "i", ",", "i", "+", "chunksize", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "data", ")", ",", "chunksize", ")", "]", "DUMMY", "=", "slice", "(", "len", "(", "data", ")", ",", "len", "(", "data", ")", ")", "if", "len", "(", "CHK", ")", "%", "2", ":", "CHK", ".", "append", "(", "DUMMY", ")", "with", "sharedmem", ".", "TPool", "(", ")", "as", "pool", ":", "def", "work", "(", "i", ")", ":", "C", "=", "CHK", "[", "i", "]", "start", ",", "stop", ",", "step", "=", "C", ".", "indices", "(", "len", "(", "data", ")", ")", "arg1", "[", "C", "]", "=", "baseargsort", "(", "data", "[", "C", "]", ")", "arg1", "[", "C", "]", "+=", "start", "pool", ".", "map", "(", "work", ",", "range", "(", "len", "(", "CHK", ")", ")", ")", "arg2", "=", "numpy", ".", "empty_like", "(", "arg1", ")", "flip", "=", "0", "while", "len", "(", "CHK", ")", ">", "1", ":", "with", "sharedmem", ".", "TPool", "(", ")", "as", "pool", ":", "def", "work", "(", 
"i", ")", ":", "C1", "=", "CHK", "[", "i", "]", "C2", "=", "CHK", "[", "i", "+", "1", "]", "start1", ",", "stop1", ",", "step1", "=", "C1", ".", "indices", "(", "len", "(", "data", ")", ")", "start2", ",", "stop2", ",", "step2", "=", "C2", ".", "indices", "(", "len", "(", "data", ")", ")", "# print 'argmerge', start1, stop1, start2, stop2", "assert", "start2", "==", "stop1", "argmerge", "(", "data", ",", "arg1", "[", "C1", "]", ",", "arg1", "[", "C2", "]", ",", "arg2", "[", "start1", ":", "stop2", "]", ")", "return", "slice", "(", "start1", ",", "stop2", ")", "CHK", "=", "pool", ".", "map", "(", "work", ",", "range", "(", "0", ",", "len", "(", "CHK", ")", ",", "2", ")", ")", "arg1", ",", "arg2", "=", "arg2", ",", "arg1", "flip", "=", "flip", "+", "1", "if", "len", "(", "CHK", ")", "==", "1", ":", "break", "if", "len", "(", "CHK", ")", "%", "2", ":", "CHK", ".", "append", "(", "DUMMY", ")", "if", "flip", "%", "2", "!=", "0", ":", "# only even flips out ends up pointing to arg2 and needs to be", "# copied", "out", "[", ":", "]", "=", "arg1", "return", "out" ]
b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a
valid
day_of_week
Return a random (abbreviated if `abbr`) day of week name.
forgery_py/forgery/date.py
def day_of_week(abbr=False): """Return a random (abbreviated if `abbr`) day of week name.""" if abbr: return random.choice(DAYS_ABBR) else: return random.choice(DAYS)
def day_of_week(abbr=False): """Return a random (abbreviated if `abbr`) day of week name.""" if abbr: return random.choice(DAYS_ABBR) else: return random.choice(DAYS)
[ "Return", "a", "random", "(", "abbreviated", "if", "abbr", ")", "day", "of", "week", "name", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/date.py#L55-L60
[ "def", "day_of_week", "(", "abbr", "=", "False", ")", ":", "if", "abbr", ":", "return", "random", ".", "choice", "(", "DAYS_ABBR", ")", "else", ":", "return", "random", ".", "choice", "(", "DAYS", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
month
Return a random (abbreviated if `abbr`) month name or month number if `numerical`.
forgery_py/forgery/date.py
def month(abbr=False, numerical=False): """Return a random (abbreviated if `abbr`) month name or month number if `numerical`. """ if numerical: return random.randint(1, 12) else: if abbr: return random.choice(MONTHS_ABBR) else: return random.choice(MONTHS)
def month(abbr=False, numerical=False): """Return a random (abbreviated if `abbr`) month name or month number if `numerical`. """ if numerical: return random.randint(1, 12) else: if abbr: return random.choice(MONTHS_ABBR) else: return random.choice(MONTHS)
[ "Return", "a", "random", "(", "abbreviated", "if", "abbr", ")", "month", "name", "or", "month", "number", "if", "numerical", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/date.py#L63-L73
[ "def", "month", "(", "abbr", "=", "False", ",", "numerical", "=", "False", ")", ":", "if", "numerical", ":", "return", "random", ".", "randint", "(", "1", ",", "12", ")", "else", ":", "if", "abbr", ":", "return", "random", ".", "choice", "(", "MONTHS_ABBR", ")", "else", ":", "return", "random", ".", "choice", "(", "MONTHS", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
year
Return a random year.
forgery_py/forgery/date.py
def year(past=False, min_delta=0, max_delta=20): """Return a random year.""" return dt.date.today().year + _delta(past, min_delta, max_delta)
def year(past=False, min_delta=0, max_delta=20): """Return a random year.""" return dt.date.today().year + _delta(past, min_delta, max_delta)
[ "Return", "a", "random", "year", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/date.py#L85-L87
[ "def", "year", "(", "past", "=", "False", ",", "min_delta", "=", "0", ",", "max_delta", "=", "20", ")", ":", "return", "dt", ".", "date", ".", "today", "(", ")", ".", "year", "+", "_delta", "(", "past", ",", "min_delta", ",", "max_delta", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
date
Return a random `dt.date` object. Delta args are days.
forgery_py/forgery/date.py
def date(past=False, min_delta=0, max_delta=20): """Return a random `dt.date` object. Delta args are days.""" timedelta = dt.timedelta(days=_delta(past, min_delta, max_delta)) return dt.date.today() + timedelta
def date(past=False, min_delta=0, max_delta=20): """Return a random `dt.date` object. Delta args are days.""" timedelta = dt.timedelta(days=_delta(past, min_delta, max_delta)) return dt.date.today() + timedelta
[ "Return", "a", "random", "dt", ".", "date", "object", ".", "Delta", "args", "are", "days", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/date.py#L95-L98
[ "def", "date", "(", "past", "=", "False", ",", "min_delta", "=", "0", ",", "max_delta", "=", "20", ")", ":", "timedelta", "=", "dt", ".", "timedelta", "(", "days", "=", "_delta", "(", "past", ",", "min_delta", ",", "max_delta", ")", ")", "return", "dt", ".", "date", ".", "today", "(", ")", "+", "timedelta" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
get_dictionary
Load a dictionary file ``dict_name`` (if it's not cached) and return its contents as an array of strings.
forgery_py/dictionaries_loader.py
def get_dictionary(dict_name): """ Load a dictionary file ``dict_name`` (if it's not cached) and return its contents as an array of strings. """ global dictionaries_cache if dict_name not in dictionaries_cache: try: dictionary_file = codecs.open( join(DICTIONARIES_PATH, dict_name), 'r', 'utf-8' ) except IOError: None else: dictionaries_cache[dict_name] = dictionary_file.readlines() dictionary_file.close() return dictionaries_cache[dict_name]
def get_dictionary(dict_name): """ Load a dictionary file ``dict_name`` (if it's not cached) and return its contents as an array of strings. """ global dictionaries_cache if dict_name not in dictionaries_cache: try: dictionary_file = codecs.open( join(DICTIONARIES_PATH, dict_name), 'r', 'utf-8' ) except IOError: None else: dictionaries_cache[dict_name] = dictionary_file.readlines() dictionary_file.close() return dictionaries_cache[dict_name]
[ "Load", "a", "dictionary", "file", "dict_name", "(", "if", "it", "s", "not", "cached", ")", "and", "return", "its", "contents", "as", "an", "array", "of", "strings", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/dictionaries_loader.py#L32-L50
[ "def", "get_dictionary", "(", "dict_name", ")", ":", "global", "dictionaries_cache", "if", "dict_name", "not", "in", "dictionaries_cache", ":", "try", ":", "dictionary_file", "=", "codecs", ".", "open", "(", "join", "(", "DICTIONARIES_PATH", ",", "dict_name", ")", ",", "'r'", ",", "'utf-8'", ")", "except", "IOError", ":", "None", "else", ":", "dictionaries_cache", "[", "dict_name", "]", "=", "dictionary_file", ".", "readlines", "(", ")", "dictionary_file", ".", "close", "(", ")", "return", "dictionaries_cache", "[", "dict_name", "]" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
check_digit
Return a check digit of the given credit card number. Check digit calculated using Luhn algorithm ("modulus 10") See: http://www.darkcoding.net/credit-card/luhn-formula/
forgery_py/forgery/credit_card.py
def check_digit(num): """Return a check digit of the given credit card number. Check digit calculated using Luhn algorithm ("modulus 10") See: http://www.darkcoding.net/credit-card/luhn-formula/ """ sum = 0 # drop last digit, then reverse the number digits = str(num)[:-1][::-1] for i, n in enumerate(digits): # select all digits at odd positions starting from 1 if (i + 1) % 2 != 0: digit = int(n) * 2 if digit > 9: sum += (digit - 9) else: sum += digit else: sum += int(n) return ((divmod(sum, 10)[0] + 1) * 10 - sum) % 10
def check_digit(num): """Return a check digit of the given credit card number. Check digit calculated using Luhn algorithm ("modulus 10") See: http://www.darkcoding.net/credit-card/luhn-formula/ """ sum = 0 # drop last digit, then reverse the number digits = str(num)[:-1][::-1] for i, n in enumerate(digits): # select all digits at odd positions starting from 1 if (i + 1) % 2 != 0: digit = int(n) * 2 if digit > 9: sum += (digit - 9) else: sum += digit else: sum += int(n) return ((divmod(sum, 10)[0] + 1) * 10 - sum) % 10
[ "Return", "a", "check", "digit", "of", "the", "given", "credit", "card", "number", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/credit_card.py#L48-L70
[ "def", "check_digit", "(", "num", ")", ":", "sum", "=", "0", "# drop last digit, then reverse the number", "digits", "=", "str", "(", "num", ")", "[", ":", "-", "1", "]", "[", ":", ":", "-", "1", "]", "for", "i", ",", "n", "in", "enumerate", "(", "digits", ")", ":", "# select all digits at odd positions starting from 1", "if", "(", "i", "+", "1", ")", "%", "2", "!=", "0", ":", "digit", "=", "int", "(", "n", ")", "*", "2", "if", "digit", ">", "9", ":", "sum", "+=", "(", "digit", "-", "9", ")", "else", ":", "sum", "+=", "digit", "else", ":", "sum", "+=", "int", "(", "n", ")", "return", "(", "(", "divmod", "(", "sum", ",", "10", ")", "[", "0", "]", "+", "1", ")", "*", "10", "-", "sum", ")", "%", "10" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
number
Return a random credit card number. :param type: credit card type. Defaults to a random selection. :param length: length of the credit card number. Defaults to the length for the selected card type. :param prefixes: allowed prefixes for the card number. Defaults to prefixes for the selected card type. :return: credit card randomly generated number (int)
forgery_py/forgery/credit_card.py
def number(type=None, length=None, prefixes=None): """ Return a random credit card number. :param type: credit card type. Defaults to a random selection. :param length: length of the credit card number. Defaults to the length for the selected card type. :param prefixes: allowed prefixes for the card number. Defaults to prefixes for the selected card type. :return: credit card randomly generated number (int) """ # select credit card type if type and type in CARDS: card = type else: card = random.choice(list(CARDS.keys())) # select a credit card number's prefix if not prefixes: prefixes = CARDS[card]['prefixes'] prefix = random.choice(prefixes) # select length of the credit card number, if it's not set if not length: length = CARDS[card]['length'] # generate all digits but the last one result = str(prefix) for d in range(length - len(str(prefix))): result += str(basic.number()) last_digit = check_digit(int(result)) return int(result[:-1] + str(last_digit))
def number(type=None, length=None, prefixes=None): """ Return a random credit card number. :param type: credit card type. Defaults to a random selection. :param length: length of the credit card number. Defaults to the length for the selected card type. :param prefixes: allowed prefixes for the card number. Defaults to prefixes for the selected card type. :return: credit card randomly generated number (int) """ # select credit card type if type and type in CARDS: card = type else: card = random.choice(list(CARDS.keys())) # select a credit card number's prefix if not prefixes: prefixes = CARDS[card]['prefixes'] prefix = random.choice(prefixes) # select length of the credit card number, if it's not set if not length: length = CARDS[card]['length'] # generate all digits but the last one result = str(prefix) for d in range(length - len(str(prefix))): result += str(basic.number()) last_digit = check_digit(int(result)) return int(result[:-1] + str(last_digit))
[ "Return", "a", "random", "credit", "card", "number", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/credit_card.py#L73-L107
[ "def", "number", "(", "type", "=", "None", ",", "length", "=", "None", ",", "prefixes", "=", "None", ")", ":", "# select credit card type", "if", "type", "and", "type", "in", "CARDS", ":", "card", "=", "type", "else", ":", "card", "=", "random", ".", "choice", "(", "list", "(", "CARDS", ".", "keys", "(", ")", ")", ")", "# select a credit card number's prefix", "if", "not", "prefixes", ":", "prefixes", "=", "CARDS", "[", "card", "]", "[", "'prefixes'", "]", "prefix", "=", "random", ".", "choice", "(", "prefixes", ")", "# select length of the credit card number, if it's not set", "if", "not", "length", ":", "length", "=", "CARDS", "[", "card", "]", "[", "'length'", "]", "# generate all digits but the last one", "result", "=", "str", "(", "prefix", ")", "for", "d", "in", "range", "(", "length", "-", "len", "(", "str", "(", "prefix", ")", ")", ")", ":", "result", "+=", "str", "(", "basic", ".", "number", "(", ")", ")", "last_digit", "=", "check_digit", "(", "int", "(", "result", ")", ")", "return", "int", "(", "result", "[", ":", "-", "1", "]", "+", "str", "(", "last_digit", ")", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
street_number
Return a random street number.
forgery_py/forgery/address.py
def street_number(): """Return a random street number.""" length = int(random.choice(string.digits[1:6])) return ''.join(random.sample(string.digits, length))
def street_number(): """Return a random street number.""" length = int(random.choice(string.digits[1:6])) return ''.join(random.sample(string.digits, length))
[ "Return", "a", "random", "street", "number", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/address.py#L45-L48
[ "def", "street_number", "(", ")", ":", "length", "=", "int", "(", "random", ".", "choice", "(", "string", ".", "digits", "[", "1", ":", "6", "]", ")", ")", "return", "''", ".", "join", "(", "random", ".", "sample", "(", "string", ".", "digits", ",", "length", ")", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
zip_code
Return a random ZIP code, either in `#####` or `#####-####` format.
forgery_py/forgery/address.py
def zip_code(): """Return a random ZIP code, either in `#####` or `#####-####` format.""" format = '#####' if random.random() >= 0.5: format = '#####-####' result = '' for item in format: if item == '#': result += str(random.randint(0, 9)) else: result += item return result
def zip_code(): """Return a random ZIP code, either in `#####` or `#####-####` format.""" format = '#####' if random.random() >= 0.5: format = '#####-####' result = '' for item in format: if item == '#': result += str(random.randint(0, 9)) else: result += item return result
[ "Return", "a", "random", "ZIP", "code", "either", "in", "#####", "or", "#####", "-", "####", "format", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/address.py#L90-L103
[ "def", "zip_code", "(", ")", ":", "format", "=", "'#####'", "if", "random", ".", "random", "(", ")", ">=", "0.5", ":", "format", "=", "'#####-####'", "result", "=", "''", "for", "item", "in", "format", ":", "if", "item", "==", "'#'", ":", "result", "+=", "str", "(", "random", ".", "randint", "(", "0", ",", "9", ")", ")", "else", ":", "result", "+=", "item", "return", "result" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
phone
Return a random phone number in `#-(###)###-####` format.
forgery_py/forgery/address.py
def phone(): """Return a random phone number in `#-(###)###-####` format.""" format = '#-(###)###-####' result = '' for item in format: if item == '#': result += str(random.randint(0, 9)) else: result += item return result
def phone(): """Return a random phone number in `#-(###)###-####` format.""" format = '#-(###)###-####' result = '' for item in format: if item == '#': result += str(random.randint(0, 9)) else: result += item return result
[ "Return", "a", "random", "phone", "number", "in", "#", "-", "(", "###", ")", "###", "-", "####", "format", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/address.py#L106-L117
[ "def", "phone", "(", ")", ":", "format", "=", "'#-(###)###-####'", "result", "=", "''", "for", "item", "in", "format", ":", "if", "item", "==", "'#'", ":", "result", "+=", "str", "(", "random", ".", "randint", "(", "0", ",", "9", ")", ")", "else", ":", "result", "+=", "item", "return", "result" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
job_title
Return a random job title.
forgery_py/forgery/name.py
def job_title(): """Return a random job title.""" result = random.choice(get_dictionary('job_titles')).strip() result = result.replace('#{N}', job_title_suffix()) return result
def job_title(): """Return a random job title.""" result = random.choice(get_dictionary('job_titles')).strip() result = result.replace('#{N}', job_title_suffix()) return result
[ "Return", "a", "random", "job", "title", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/name.py#L74-L78
[ "def", "job_title", "(", ")", ":", "result", "=", "random", ".", "choice", "(", "get_dictionary", "(", "'job_titles'", ")", ")", ".", "strip", "(", ")", "result", "=", "result", ".", "replace", "(", "'#{N}'", ",", "job_title_suffix", "(", ")", ")", "return", "result" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
body
Return a random email text.
forgery_py/forgery/email.py
def body(quantity=2, separator='\n\n', wrap_start='', wrap_end='', html=False, sentences_quantity=3, as_list=False): """Return a random email text.""" return lorem_ipsum.paragraphs(quantity=quantity, separator=separator, wrap_start=wrap_start, wrap_end=wrap_end, html=html, sentences_quantity=sentences_quantity, as_list=as_list)
def body(quantity=2, separator='\n\n', wrap_start='', wrap_end='', html=False, sentences_quantity=3, as_list=False): """Return a random email text.""" return lorem_ipsum.paragraphs(quantity=quantity, separator=separator, wrap_start=wrap_start, wrap_end=wrap_end, html=html, sentences_quantity=sentences_quantity, as_list=as_list)
[ "Return", "a", "random", "email", "text", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/email.py#L36-L43
[ "def", "body", "(", "quantity", "=", "2", ",", "separator", "=", "'\\n\\n'", ",", "wrap_start", "=", "''", ",", "wrap_end", "=", "''", ",", "html", "=", "False", ",", "sentences_quantity", "=", "3", ",", "as_list", "=", "False", ")", ":", "return", "lorem_ipsum", ".", "paragraphs", "(", "quantity", "=", "quantity", ",", "separator", "=", "separator", ",", "wrap_start", "=", "wrap_start", ",", "wrap_end", "=", "wrap_end", ",", "html", "=", "html", ",", "sentences_quantity", "=", "sentences_quantity", ",", "as_list", "=", "as_list", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
money
Return a str of decimal with two digits after a decimal mark.
forgery_py/forgery/monetary.py
def money(min=0, max=10): """Return a str of decimal with two digits after a decimal mark.""" value = random.choice(range(min * 100, max * 100)) return "%1.2f" % (float(value) / 100)
def money(min=0, max=10): """Return a str of decimal with two digits after a decimal mark.""" value = random.choice(range(min * 100, max * 100)) return "%1.2f" % (float(value) / 100)
[ "Return", "a", "str", "of", "decimal", "with", "two", "digits", "after", "a", "decimal", "mark", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/monetary.py#L33-L36
[ "def", "money", "(", "min", "=", "0", ",", "max", "=", "10", ")", ":", "value", "=", "random", ".", "choice", "(", "range", "(", "min", "*", "100", ",", "max", "*", "100", ")", ")", "return", "\"%1.2f\"", "%", "(", "float", "(", "value", ")", "/", "100", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
words
Return random words.
forgery_py/forgery/lorem_ipsum.py
def words(quantity=10, as_list=False): """Return random words.""" global _words if not _words: _words = ' '.join(get_dictionary('lorem_ipsum')).lower().\ replace('\n', '') _words = re.sub(r'\.|,|;/', '', _words) _words = _words.split(' ') result = random.sample(_words, quantity) if as_list: return result else: return ' '.join(result)
def words(quantity=10, as_list=False): """Return random words.""" global _words if not _words: _words = ' '.join(get_dictionary('lorem_ipsum')).lower().\ replace('\n', '') _words = re.sub(r'\.|,|;/', '', _words) _words = _words.split(' ') result = random.sample(_words, quantity) if as_list: return result else: return ' '.join(result)
[ "Return", "random", "words", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/lorem_ipsum.py#L47-L62
[ "def", "words", "(", "quantity", "=", "10", ",", "as_list", "=", "False", ")", ":", "global", "_words", "if", "not", "_words", ":", "_words", "=", "' '", ".", "join", "(", "get_dictionary", "(", "'lorem_ipsum'", ")", ")", ".", "lower", "(", ")", ".", "replace", "(", "'\\n'", ",", "''", ")", "_words", "=", "re", ".", "sub", "(", "r'\\.|,|;/'", ",", "''", ",", "_words", ")", "_words", "=", "_words", ".", "split", "(", "' '", ")", "result", "=", "random", ".", "sample", "(", "_words", ",", "quantity", ")", "if", "as_list", ":", "return", "result", "else", ":", "return", "' '", ".", "join", "(", "result", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450
valid
title
Return a random sentence to be used as e.g. an e-mail subject.
forgery_py/forgery/lorem_ipsum.py
def title(words_quantity=4): """Return a random sentence to be used as e.g. an e-mail subject.""" result = words(quantity=words_quantity) result += random.choice('?.!') return result.capitalize()
def title(words_quantity=4): """Return a random sentence to be used as e.g. an e-mail subject.""" result = words(quantity=words_quantity) result += random.choice('?.!') return result.capitalize()
[ "Return", "a", "random", "sentence", "to", "be", "used", "as", "e", ".", "g", ".", "an", "e", "-", "mail", "subject", "." ]
pilosus/ForgeryPy3
python
https://github.com/pilosus/ForgeryPy3/blob/e15f2e59538deb4cbfceaac314f5ea897f2d5450/forgery_py/forgery/lorem_ipsum.py#L65-L69
[ "def", "title", "(", "words_quantity", "=", "4", ")", ":", "result", "=", "words", "(", "quantity", "=", "words_quantity", ")", "result", "+=", "random", ".", "choice", "(", "'?.!'", ")", "return", "result", ".", "capitalize", "(", ")" ]
e15f2e59538deb4cbfceaac314f5ea897f2d5450