Dataset columns (per-example fields):

| Column | Type | Lengths / values |
| --- | --- | --- |
| partition | stringclasses | 3 values |
| func_name | stringlengths | 1–134 |
| docstring | stringlengths | 1–46.9k |
| path | stringlengths | 4–223 |
| original_string | stringlengths | 75–104k |
| code | stringlengths | 75–104k |
| docstring_tokens | listlengths | 1–1.97k |
| repo | stringlengths | 7–55 |
| language | stringclasses | 1 value |
| url | stringlengths | 87–315 |
| code_tokens | listlengths | 19–28.4k |
| sha | stringlengths | 40–40 |

Sample rows (partition: valid) follow, one record per function.

---

partition: valid
func_name: MochadDriver._x10_command
docstring: Real implementation
path: x10_any/__init__.py
code:

```python
def _x10_command(self, house_code, unit_number, state):
    """Real implementation"""
    # log = log or default_logger
    log = default_logger
    if state.startswith('xdim') or state.startswith('dim') or state.startswith('bright'):
        raise NotImplementedError('xdim/dim/bright %r' % ((house_code, unit_num, state), ))
    if unit_number is not None:
        house_and_unit = '%s%d' % (house_code, unit_number)
    else:
        raise NotImplementedError('mochad all ON/OFF %r' % ((house_code, unit_number, state), ))
        house_and_unit = house_code
    house_and_unit = to_bytes(house_and_unit)
    # TODO normalize/validate state
    state = to_bytes(state)
    mochad_cmd = self.default_type + b' ' + house_and_unit + b' ' + state + b'\n'  # byte concat works with older Python 3.4
    log.debug('mochad send: %r', mochad_cmd)
    mochad_host, mochad_port = self.device_address
    result = netcat(mochad_host, mochad_port, mochad_cmd)
    log.debug('mochad received: %r', result)
```

repo: clach04/x10_any
language: python
url: https://github.com/clach04/x10_any/blob/5b90a543b127ab9e6112fd547929b5ef4b8f0cbc/x10_any/__init__.py#L241-L262
sha: 5b90a543b127ab9e6112fd547929b5ef4b8f0cbc
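
For orientation, here is a minimal standalone sketch of the byte string `_x10_command` assembles before handing it to `netcat()`. The `b'pl'` command type and the local `to_bytes` helper are assumptions for illustration; in `x10_any` the type comes from `self.default_type` and `to_bytes` is a module-level helper.

```python
def to_bytes(value):
    # stand-in for the x10_any helper: encode str to bytes, pass bytes through
    return value.encode('utf-8') if isinstance(value, str) else value

default_type = b'pl'  # assumed: mochad's powerline command type
house_code, unit_number, state = 'A', 1, 'on'

house_and_unit = to_bytes('%s%d' % (house_code, unit_number))
mochad_cmd = default_type + b' ' + house_and_unit + b' ' + to_bytes(state) + b'\n'
print(mochad_cmd)  # b'pl A1 on\n' -- the line written to the mochad TCP socket
```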

---

partition: valid
func_name: FirecrackerDriver._x10_command
docstring: Real implementation
path: x10_any/__init__.py
code:

```python
def _x10_command(self, house_code, unit_number, state):
    """Real implementation"""
    # log = log or default_logger
    log = default_logger

    # FIXME move these functions?
    def scale_255_to_8(x):
        """Scale x from 0..255 to 0..7
        0 is considered OFF
        8 is considered fully on
        """
        factor = x / 255.0
        return 8 - int(abs(round(8 * factor)))

    def scale_31_to_8(x):
        """Scale x from 0..31 to 0..7
        0 is considered OFF
        8 is considered fully on
        """
        factor = x / 31.0
        return 8 - int(abs(round(8 * factor)))

    serial_port_name = self.device_address
    house_code = normalize_housecode(house_code)
    if unit_number is not None:
        unit_number = normalize_unitnumber(unit_number)
    else:
        # command is intended for the entire house code, not a single unit number
        if firecracker:
            log.error('using python-x10-firecracker-interface NO support for all ON/OFF')
    # TODO normalize/validate state, sort of implemented below

    if firecracker:
        log.debug('firecracker send: %r', (serial_port_name, house_code, unit_number, state))
        firecracker.send_command(serial_port_name, house_code, unit_number, state)
    else:
        if unit_number is not None:
            if state.startswith('xdim') or state.startswith('dim') or state.startswith('bright'):
                dim_count = int(state.split()[-1])
                if state.startswith('xdim'):
                    dim_count = scale_255_to_8(dim_count)
                else:
                    # assumed dim or bright
                    dim_count = scale_31_to_8(dim_count)
                dim_str = ', %s dim' % (house_code, )
                dim_list = []
                for _ in range(dim_count):
                    dim_list.append(dim_str)
                dim_str = ''.join(dim_list)
                if dim_count == 0:
                    # No dim
                    x10_command_str = '%s%s %s' % (house_code, unit_number, 'on')
                else:
                    # If lamp is already dimmed, need to turn it off and then back on
                    x10_command_str = '%s%s %s, %s%s %s%s' % (house_code, unit_number, 'off', house_code, unit_number, 'on', dim_str)
            else:
                x10_command_str = '%s%s %s' % (house_code, unit_number, state)
        else:
            # Assume a command for house not a specific unit
            state = x10_mapping[state]
            x10_command_str = '%s %s' % (house_code, state)
        log.debug('x10_command_str send: %r', x10_command_str)
        x10.sendCommands(serial_port_name, x10_command_str)
```

repo: clach04/x10_any
language: python
url: https://github.com/clach04/x10_any/blob/5b90a543b127ab9e6112fd547929b5ef4b8f0cbc/x10_any/__init__.py#L290-L354
sha: 5b90a543b127ab9e6112fd547929b5ef4b8f0cbc
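
The two nested scaling helpers are the core of the dim/bright handling above: their return value is used as the number of `dim` commands appended after an `on`, so a higher brightness input yields fewer dims. A standalone copy, just to show the mapping they produce:

```python
def scale_255_to_8(x):
    # copied from FirecrackerDriver._x10_command; returns a dim count, 0 dims == fully on
    factor = x / 255.0
    return 8 - int(abs(round(8 * factor)))

def scale_31_to_8(x):
    # same idea for the 0..31 dim/bright range
    factor = x / 31.0
    return 8 - int(abs(round(8 * factor)))

print([scale_255_to_8(x) for x in (0, 127, 255)])  # [8, 4, 0]
print([scale_31_to_8(x) for x in (0, 15, 31)])     # [8, 4, 0]
```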

---

partition: valid
func_name: get_parser
docstring: Generate an appropriate parser. :returns: an argument parser :rtype: `ArgumentParser`
path: check.py
code:

```python
def get_parser():
    """
    Generate an appropriate parser.

    :returns: an argument parser
    :rtype: `ArgumentParser`
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "package",
        choices=arg_map.keys(),
        help="designates the package to test")
    parser.add_argument("--ignore", help="ignore these files")
    return parser
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/check.py#L19-L32
sha: 81366049671f79116bbb81c97bf621800a2f6315

---

partition: valid
func_name: get_command
docstring: Get the pylint command for these arguments. :param `Namespace` namespace: the namespace
path: check.py
code:

```python
def get_command(namespace):
    """
    Get the pylint command for these arguments.

    :param `Namespace` namespace: the namespace
    """
    cmd = ["pylint", namespace.package] + arg_map[namespace.package]
    if namespace.ignore:
        cmd.append("--ignore=%s" % namespace.ignore)
    return cmd
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/check.py#L35-L44
sha: 81366049671f79116bbb81c97bf621800a2f6315
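
Taken together, `get_parser` and `get_command` turn a command line into a pylint invocation. A hedged usage sketch; `arg_map` is a module-level dict in `check.py` that is not shown above, so the entries below are placeholders:

```python
import argparse

# placeholder for check.py's real arg_map (package name -> extra pylint args)
arg_map = {"src/into_dbus_python": [], "tests": ["--disable=missing-docstring"]}

def get_command(namespace):
    # same body as the get_command record above, repeated to keep this runnable
    cmd = ["pylint", namespace.package] + arg_map[namespace.package]
    if namespace.ignore:
        cmd.append("--ignore=%s" % namespace.ignore)
    return cmd

namespace = argparse.Namespace(package="tests", ignore="__init__.py")
print(get_command(namespace))
# ['pylint', 'tests', '--disable=missing-docstring', '--ignore=__init__.py']
```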

---

partition: valid
func_name: _wrapper
docstring: Wraps a generated function so that it catches all Type- and ValueErrors and raises IntoDPValueErrors. :param func: the transforming function
path: src/into_dbus_python/_xformer.py
code:

```python
def _wrapper(func):
    """
    Wraps a generated function so that it catches all Type- and
    ValueErrors and raises IntoDPValueErrors.

    :param func: the transforming function
    """

    @functools.wraps(func)
    def the_func(expr):
        """
        The actual function.

        :param object expr: the expression to be xformed to dbus-python types
        """
        try:
            return func(expr)
        except (TypeError, ValueError) as err:
            raise IntoDPValueError(expr, "expr", "could not be transformed") \
                from err

    return the_func
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_xformer.py#L27-L48
sha: 81366049671f79116bbb81c97bf621800a2f6315
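
The practical effect of `_wrapper` is that callers of the public transformation functions see a single exception type. A sketch, assuming `into-dbus-python` and `dbus-python` are installed and that `IntoDPValueError` is exported at package level:

```python
from into_dbus_python import xformer, IntoDPValueError  # assumed package-level exports

try:
    # 'q' expects something convertible to a 16-bit unsigned integer
    xformer("q")(["not a number"])
except IntoDPValueError as err:
    # the underlying TypeError/ValueError is re-raised as IntoDPValueError by _wrapper
    print("rejected:", err)
```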

---

partition: valid
func_name: xformers
docstring: Get the list of xformer functions for the given signature. :param str sig: a signature :returns: a list of xformer functions for the given signature. :rtype: list of tuple of a function * str Each function catches all TypeErrors it encounters and raises corresponding IntoDPValueError exceptions.
path: src/into_dbus_python/_xformer.py
code:

```python
def xformers(sig):
    """
    Get the list of xformer functions for the given signature.

    :param str sig: a signature
    :returns: a list of xformer functions for the given signature.
    :rtype: list of tuple of a function * str

    Each function catches all TypeErrors it encounters and raises
    corresponding IntoDPValueError exceptions.
    """
    return \
        [(_wrapper(f), l) for (f, l) in \
            _XFORMER.PARSER.parseString(sig, parseAll=True)]
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_xformer.py#L283-L296
sha: 81366049671f79116bbb81c97bf621800a2f6315
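
A brief sketch of what `xformers` returns, assuming the package is installed: one `(function, signature-fragment)` pair per complete type in the signature.

```python
from into_dbus_python import xformers

pairs = xformers("sa{sv}q")
# each pair carries the transformer and the complete-type fragment it handles
print([fragment for (_func, fragment) in pairs])  # ['s', 'a{sv}', 'q']
```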

---

partition: valid
func_name: xformer
docstring: Returns a transformer function for the given signature. :param str signature: a dbus signature :returns: a function to transform a list of objects to inhabit the signature :rtype: (list of object) -> (list of object)
path: src/into_dbus_python/_xformer.py
code:

```python
def xformer(signature):
    """
    Returns a transformer function for the given signature.

    :param str signature: a dbus signature
    :returns: a function to transform a list of objects to inhabit the signature
    :rtype: (list of object) -> (list of object)
    """

    funcs = [f for (f, _) in xformers(signature)]

    def the_func(objects):
        """
        Returns the a list of objects, transformed.

        :param objects: a list of objects
        :type objects: list of object

        :returns: transformed objects
        :rtype: list of object (in dbus types)
        """
        if len(objects) != len(funcs):
            raise IntoDPValueError(
                objects,
                "objects",
                "must have exactly %u items, has %u" % \
                    (len(funcs), len(objects))
            )
        return [x for (x, _) in (f(a) for (f, a) in zip(funcs, objects))]

    return the_func
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_xformer.py#L299-L329
sha: 81366049671f79116bbb81c97bf621800a2f6315
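
A usage sketch for `xformer`, assuming `dbus-python` and `into-dbus-python` are installed; the output shown in the comment is approximate:

```python
from into_dbus_python import xformer

# one transformer per complete type: 's' and the struct '(bq)'
func = xformer("s(bq)")
result = func(["a string", (True, 3)])
print(result)
# roughly: [dbus.String('a string'),
#           dbus.Struct((dbus.Boolean(True), dbus.UInt16(3)), signature='bq')]
```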

---

partition: valid
func_name: _ToDbusXformer._variant_levels
docstring: Gets the level for the variant. :param int level: the current variant level :param int variant: the value for this level if variant :returns: a level for the object and one for the function :rtype: int * int
path: src/into_dbus_python/_xformer.py
code:

```python
def _variant_levels(level, variant):
    """
    Gets the level for the variant.

    :param int level: the current variant level
    :param int variant: the value for this level if variant

    :returns: a level for the object and one for the function
    :rtype: int * int
    """
    return (level + variant, level + variant) \
        if variant != 0 else (variant, level)
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_xformer.py#L65-L76
sha: 81366049671f79116bbb81c97bf621800a2f6315
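
Since `_variant_levels` is a pure static helper, its two cases can be shown standalone (body copied verbatim from the record above):

```python
def _variant_levels(level, variant):
    return (level + variant, level + variant) \
        if variant != 0 else (variant, level)

print(_variant_levels(0, 0))  # (0, 0) -- not inside a variant
print(_variant_levels(0, 1))  # (1, 1) -- value sits directly inside one variant
print(_variant_levels(1, 2))  # (3, 3) -- nesting adds the levels together
```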

---

partition: valid
func_name: _ToDbusXformer._handle_variant
docstring: Generate the correct function for a variant signature. :returns: function that returns an appropriate value :rtype: ((str * object) or list)-> object
path: src/into_dbus_python/_xformer.py
code:

```python
def _handle_variant(self):
    """
    Generate the correct function for a variant signature.

    :returns: function that returns an appropriate value
    :rtype: ((str * object) or list)-> object
    """

    def the_func(a_tuple, variant=0):
        """
        Function for generating a variant value from a tuple.

        :param a_tuple: the parts of the variant
        :type a_tuple: (str * object) or list
        :param int variant: object's variant index
        :returns: a value of the correct type with correct variant level
        :rtype: object * int
        """
        # pylint: disable=unused-argument
        (signature, an_obj) = a_tuple
        (func, sig) = self.COMPLETE.parseString(signature)[0]
        assert sig == signature
        (xformed, _) = func(an_obj, variant=variant + 1)
        return (xformed, xformed.variant_level)

    return (the_func, 'v')
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_xformer.py#L78-L103
sha: 81366049671f79116bbb81c97bf621800a2f6315
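
At the public API level a variant is supplied as a `(signature, value)` pair, which is exactly the tuple `the_func` above destructures. A sketch, assuming the packages are installed:

```python
from into_dbus_python import xformer

# the 'v' handler parses the inner signature and bumps the variant level by one
(value,) = xformer("v")([("s", "a string")])
print(repr(value))           # roughly: dbus.String('a string', variant_level=1)
print(value.variant_level)   # 1
```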

---

partition: valid
func_name: _ToDbusXformer._handle_array
docstring: Generate the correct function for an array signature. :param toks: the list of parsed tokens :returns: function that returns an Array or Dictionary value :rtype: ((or list dict) -> ((or Array Dictionary) * int)) * str
path: src/into_dbus_python/_xformer.py
code:

```python
def _handle_array(toks):
    """
    Generate the correct function for an array signature.

    :param toks: the list of parsed tokens
    :returns: function that returns an Array or Dictionary value
    :rtype: ((or list dict) -> ((or Array Dictionary) * int)) * str
    """

    if len(toks) == 5 and toks[1] == '{' and toks[4] == '}':
        subtree = toks[2:4]
        signature = ''.join(s for (_, s) in subtree)
        [key_func, value_func] = [f for (f, _) in subtree]

        def the_dict_func(a_dict, variant=0):
            """
            Function for generating a Dictionary from a dict.

            :param a_dict: the dictionary to transform
            :type a_dict: dict of (`a * `b)
            :param int variant: variant level

            :returns: a dbus dictionary of transformed values and level
            :rtype: Dictionary * int
            """
            elements = \
                [(key_func(x), value_func(y)) for (x, y) in a_dict.items()]
            level = 0 if elements == [] \
                else max(max(x, y) for ((_, x), (_, y)) in elements)
            (obj_level, func_level) = \
                _ToDbusXformer._variant_levels(level, variant)
            return (dbus.types.Dictionary(
                ((x, y) for ((x, _), (y, _)) in elements),
                signature=signature,
                variant_level=obj_level), func_level)

        return (the_dict_func, 'a{' + signature + '}')

    if len(toks) == 2:
        (func, sig) = toks[1]

        def the_array_func(a_list, variant=0):
            """
            Function for generating an Array from a list.

            :param a_list: the list to transform
            :type a_list: list of `a
            :param int variant: variant level of the value
            :returns: a dbus Array of transformed values and variant level
            :rtype: Array * int
            """
            if isinstance(a_list, dict):
                raise IntoDPValueError(a_list, "a_list",
                                       "is a dict, must be an array")
            elements = [func(x) for x in a_list]
            level = 0 if elements == [] else max(x for (_, x) in elements)
            (obj_level, func_level) = \
                _ToDbusXformer._variant_levels(level, variant)
            return (dbus.types.Array(
                (x for (x, _) in elements),
                signature=sig,
                variant_level=obj_level), func_level)

        return (the_array_func, 'a' + sig)

    raise IntoDPValueError(toks, "toks", "unexpected tokens")
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_xformer.py#L106-L173
sha: 81366049671f79116bbb81c97bf621800a2f6315
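
The two branches above correspond to dict-entry arrays (`a{..}`) and plain arrays. A sketch of both through the public `xformer`, assuming the packages are installed; the comments show approximate results:

```python
from into_dbus_python import xformer

(array,) = xformer("aq")([[1, 2, 3]])
print(repr(array))  # roughly: dbus.Array([dbus.UInt16(1), dbus.UInt16(2), dbus.UInt16(3)], signature='q')

(table,) = xformer("a{sv}")([{"answer": ("q", 42)}])
print(repr(table))  # roughly: dbus.Dictionary({dbus.String('answer'): dbus.UInt16(42, variant_level=1)}, signature='sv')
```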

---

partition: valid
func_name: _ToDbusXformer._handle_struct
docstring: Generate the correct function for a struct signature. :param toks: the list of parsed tokens :returns: function that returns an Array or Dictionary value :rtype: ((list or tuple) -> (Struct * int)) * str
path: src/into_dbus_python/_xformer.py
code:

```python
def _handle_struct(toks):
    """
    Generate the correct function for a struct signature.

    :param toks: the list of parsed tokens
    :returns: function that returns an Array or Dictionary value
    :rtype: ((list or tuple) -> (Struct * int)) * str
    """
    subtrees = toks[1:-1]
    signature = ''.join(s for (_, s) in subtrees)
    funcs = [f for (f, _) in subtrees]

    def the_func(a_list, variant=0):
        """
        Function for generating a Struct from a list.

        :param a_list: the list to transform
        :type a_list: list or tuple
        :param int variant: variant index
        :returns: a dbus Struct of transformed values and variant level
        :rtype: Struct * int
        :raises IntoDPValueError:
        """
        if isinstance(a_list, dict):
            raise IntoDPValueError(a_list, "a_list",
                                   "must be a simple sequence, is a dict")
        if len(a_list) != len(funcs):
            raise IntoDPValueError(
                a_list,
                "a_list",
                "must have exactly %u items, has %u" % \
                    (len(funcs), len(a_list))
            )
        elements = [f(x) for (f, x) in zip(funcs, a_list)]
        level = 0 if elements == [] else max(x for (_, x) in elements)
        (obj_level, func_level) = \
            _ToDbusXformer._variant_levels(level, variant)
        return (dbus.types.Struct(
            (x for (x, _) in elements),
            signature=signature,
            variant_level=obj_level), func_level)

    return (the_func, '(' + signature + ')')
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_xformer.py#L176-L218
sha: 81366049671f79116bbb81c97bf621800a2f6315

---

partition: valid
func_name: _ToDbusXformer._handle_base_case
docstring: Handle a base case. :param type klass: the class constructor :param str symbol: the type code
path: src/into_dbus_python/_xformer.py
code:

```python
def _handle_base_case(klass, symbol):
    """
    Handle a base case.

    :param type klass: the class constructor
    :param str symbol: the type code
    """

    def the_func(value, variant=0):
        """
        Base case.

        :param int variant: variant level for this object
        :returns: a tuple of a dbus object and the variant level
        :rtype: dbus object * int
        """
        (obj_level, func_level) = _ToDbusXformer._variant_levels(
            0, variant)
        return (klass(value, variant_level=obj_level), func_level)

    return lambda: (the_func, symbol)
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_xformer.py#L221-L241
sha: 81366049671f79116bbb81c97bf621800a2f6315

---

partition: valid
func_name: signature
docstring: Get the signature of a dbus object. :param dbus_object: the object :type dbus_object: a dbus object :param bool unpack: if True, unpack from enclosing variant type :returns: the corresponding signature :rtype: str
path: src/into_dbus_python/_signature.py
code:

```python
def signature(dbus_object, unpack=False):
    """
    Get the signature of a dbus object.

    :param dbus_object: the object
    :type dbus_object: a dbus object
    :param bool unpack: if True, unpack from enclosing variant type
    :returns: the corresponding signature
    :rtype: str
    """
    # pylint: disable=too-many-return-statements
    # pylint: disable=too-many-branches

    if dbus_object.variant_level != 0 and not unpack:
        return 'v'

    if isinstance(dbus_object, dbus.Array):
        sigs = frozenset(signature(x) for x in dbus_object)
        len_sigs = len(sigs)
        if len_sigs > 1:  # pragma: no cover
            raise IntoDPValueError(dbus_object, "dbus_object",
                                   "has bad signature")
        if len_sigs == 0:
            return 'a' + dbus_object.signature
        return 'a' + [x for x in sigs][0]

    if isinstance(dbus_object, dbus.Struct):
        sigs = (signature(x) for x in dbus_object)
        return '(' + "".join(x for x in sigs) + ')'

    if isinstance(dbus_object, dbus.Dictionary):
        key_sigs = frozenset(signature(x) for x in dbus_object.keys())
        value_sigs = frozenset(signature(x) for x in dbus_object.values())
        len_key_sigs = len(key_sigs)
        len_value_sigs = len(value_sigs)
        if len_key_sigs != len_value_sigs:  # pragma: no cover
            raise IntoDPValueError(dbus_object, "dbus_object",
                                   "has bad signature")
        if len_key_sigs > 1:  # pragma: no cover
            raise IntoDPValueError(dbus_object, "dbus_object",
                                   "has bad signature")
        if len_key_sigs == 0:
            return 'a{' + dbus_object.signature + '}'
        return 'a{' + [x for x in key_sigs][0] + [x for x in value_sigs][0] + '}'

    if isinstance(dbus_object, dbus.Boolean):
        return 'b'
    if isinstance(dbus_object, dbus.Byte):
        return 'y'
    if isinstance(dbus_object, dbus.Double):
        return 'd'
    if isinstance(dbus_object, dbus.Int16):
        return 'n'
    if isinstance(dbus_object, dbus.Int32):
        return 'i'
    if isinstance(dbus_object, dbus.Int64):
        return 'x'
    if isinstance(dbus_object, dbus.ObjectPath):
        return 'o'
    if isinstance(dbus_object, dbus.Signature):
        return 'g'
    if isinstance(dbus_object, dbus.String):
        return 's'
    if isinstance(dbus_object, dbus.UInt16):
        return 'q'
    if isinstance(dbus_object, dbus.UInt32):
        return 'u'
    if isinstance(dbus_object, dbus.UInt64):
        return 't'
    if isinstance(dbus_object, dbus.types.UnixFd):  # pragma: no cover
        return 'h'

    raise IntoDPValueError(dbus_object, "dbus_object", "has no signature")
```

repo: stratis-storage/into-dbus-python
language: python
url: https://github.com/stratis-storage/into-dbus-python/blob/81366049671f79116bbb81c97bf621800a2f6315/src/into_dbus_python/_signature.py#L23-L116
sha: 81366049671f79116bbb81c97bf621800a2f6315
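
A usage sketch for `signature`, assuming `dbus-python` and `into-dbus-python` are installed:

```python
import dbus
from into_dbus_python import signature

print(signature(dbus.UInt32(0)))                                  # 'u'
print(signature(dbus.Array([dbus.Byte(1)], signature='y')))       # 'ay'
print(signature(dbus.String('x', variant_level=1)))               # 'v'  (variant wrapper wins)
print(signature(dbus.String('x', variant_level=1), unpack=True))  # 's'  (unpacked from the variant)
```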

---

partition: valid
func_name: plot_indices
docstring: Plot multi-index set :param mis: Multi-index set :type mis: Iterable of SparseIndices :param dims: Which dimensions to use for plotting :type dims: List of integers. :param weights: Weights associated with each multi-index :type weights: Dictionary :param quantiles: Number of groups plotted in different colors :type quantiles: Integer>=1 or list of colors TODO: exchange index_labels and dims, exchange quantiles and dims
path: swutil/plots.py
code:

```python
def plot_indices(mis, dims=None, weights=None, groups=1, legend=True, index_labels=None,
                 colors=None, axis_labels=None, size_exponent=0.1, ax=None):
    '''
    Plot multi-index set

    :param mis: Multi-index set
    :type mis: Iterable of SparseIndices
    :param dims: Which dimensions to use for plotting
    :type dims: List of integers.
    :param weights: Weights associated with each multi-index
    :type weights: Dictionary
    :param quantiles: Number of groups plotted in different colors
    :type quantiles: Integer>=1 or list of colors

    TODO: exchange index_labels and dims, exchange quantiles and dims
    '''
    if weights is None:
        weights = {mi: 1 for mi in mis}
    if Function.valid(weights):
        weights = {mi: weights(mi) for mi in mis}
    values = list(weights.values())
    if Integer.valid(groups):
        N_g = groups
        groups = [[mi for mi in mis
                   if (weights[mi] > np.percentile(values, 100/groups*g) or g == 0)
                   and weights[mi] <= np.percentile(values, 100/groups*(g+1))]
                  for g in range(N_g)]
        group_names = ['{:.0f} -- {:.0f} percentile'.format(100/N_g*(N_g-i-1), 100/N_g*(N_g-i))
                       for i in reversed(range(N_g))]
    else:
        if Function.valid(groups):
            groups = {mi: groups(mi) for mi in mis}
        group_names = unique(list(groups.values()))
        groups = [[mi for mi in mis if groups[mi] == name] for name in group_names]
        N_g = len(group_names)
    if colors is None:
        colors = matplotlib.cm.rainbow(np.linspace(0, 1, N_g))  # @UndefinedVariable
    if Dict.valid(mis):
        if index_labels is None or weights is None:
            temp = list(mis.keys())
            if (List | Tuple).valid(temp[0]):
                if not (index_labels is None and weights is None):
                    raise ValueError('mis cannot be dictionary with tuple entries if both index_labels and weights are specified separately')
                weights = {mi: mis[mi][0] for mi in mis}
                index_labels = {mi: mis[mi][1] for mi in mis}
            else:
                if weights is None:
                    weights = mis
                else:
                    index_labels = mis
            mis = temp
        else:
            raise ValueError('mis cannot be dictionary if index_labels are specified separately')
    if dims is None:
        try:
            dims = len(mis[0])
        except TypeError:
            dims = sorted(list(set.union(*(set(mi.active_dims()) for mi in mis))))
    if len(dims) > 3:
        raise ValueError('Cannot plot in more than three dimensions.')
    if len(dims) < 1:
        warnings.warn('Sure you don\'t want to plot anything?')
        return
    if ax is None:
        fig = plt.figure()  # Creates new figure, because adding onto old axes doesn't work if they were created without 3d
        if len(dims) == 3:
            ax = fig.gca(projection='3d')
        else:
            ax = fig.gca()
    size_function = lambda mi: sum([weights[mi2] for mi2 in mis if mi.equal_mod(mi2, lambda dim: dim not in dims)])
    sizes = {mi: np.power(size_function(mi), size_exponent) for mi in mis}
    for i, plot_indices in enumerate(groups):
        X = np.array([mi[dims[0]] for mi in plot_indices])
        if len(dims) > 1:
            Y = np.array([mi[dims[1]] for mi in plot_indices])
        else:
            Y = np.array([0 for mi in plot_indices])
        if len(dims) > 2:
            Z = np.array([mi[dims[2]] for mi in plot_indices])
        else:
            Z = np.array([0 for mi in plot_indices])
        sizes_plot = np.array([sizes[mi] for mi in plot_indices])
        if weights:
            if len(dims) == 3:
                ax.scatter(X, Y, Z, s=50 * sizes_plot / max(sizes.values()), color=colors[i], alpha=1)
            else:
                ax.scatter(X, Y, s=50 * sizes_plot / max(sizes.values()), color=colors[i], alpha=1)
        else:
            if len(dims) == 3:
                ax.scatter(X, Y, Z, color=colors[i], alpha=1)
            else:
                ax.scatter(X, Y, color=colors[i], alpha=1)
    if True:
        if len(dims) == 3:
            axs = 'xyz'
        else:
            axs = 'xy'
        extents = np.array([getattr(ax, 'get_{}lim'.format(dim))() for dim in axs])
        sz = extents[:, 1] - extents[:, 0]
        maxsize = max(abs(sz))
        for dim in axs:
            getattr(ax, 'set_{}lim'.format(dim))(0, maxsize)
    if axis_labels is not None:
        ax.set_xlabel(axis_labels[0])
        if len(dims) > 1:
            ax.set_ylabel(axis_labels[1])
        if len(dims) > 1:
            ax.set_zlabel(axis_labels[2])
    else:
        ax.set_xlabel('$k_' + str(dims[0]) + '$', size=20)
        if len(dims) > 1:
            ax.set_ylabel('$k_' + str(dims[1]) + '$', size=20)
        if len(dims) > 2:
            ax.set_zlabel('$k_' + str(dims[2]) + '$', size=20)
    plt.grid()
    x_coordinates = [mi[dims[0]] for mi in mis]
    xticks = list(range(min(x_coordinates), max(x_coordinates) + 1))
    ax.set_xticks(xticks)
    if len(dims) > 1:
        y_coordinates = [mi[dims[1]] for mi in mis]
        ax.set_yticks(list(range(min(y_coordinates), max(y_coordinates) + 1)))
    if len(dims) > 2:
        z_coordinates = [mi[dims[2]] for mi in mis]
        ax.set_zticks(list(range(min(z_coordinates), max(z_coordinates) + 1)))
    if index_labels:
        for mi in index_labels:
            ax.annotate('{:.3g}'.format(index_labels[mi]), xy=(mi[0], mi[1]))
    if legend and len(group_names) > 1:
        ax.legend([patches.Patch(color=color) for color in np.flipud(colors)], group_names)
    return ax
```

soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/plots.py#L67-L191
[ "def", "plot_indices", "(", "mis", ",", "dims", "=", "None", ",", "weights", "=", "None", ",", "groups", "=", "1", ",", "legend", "=", "True", ",", "index_labels", "=", "None", ",", "colors", "=", "None", ",", "axis_labels", "=", "None", ",", "size_exponent", "=", "0.1", ",", "ax", "=", "None", ")", ":", "if", "weights", "is", "None", ":", "weights", "=", "{", "mi", ":", "1", "for", "mi", "in", "mis", "}", "if", "Function", ".", "valid", "(", "weights", ")", ":", "weights", "=", "{", "mi", ":", "weights", "(", "mi", ")", "for", "mi", "in", "mis", "}", "values", "=", "list", "(", "weights", ".", "values", "(", ")", ")", "if", "Integer", ".", "valid", "(", "groups", ")", ":", "N_g", "=", "groups", "groups", "=", "[", "[", "mi", "for", "mi", "in", "mis", "if", "(", "weights", "[", "mi", "]", ">", "np", ".", "percentile", "(", "values", ",", "100", "/", "groups", "*", "g", ")", "or", "g", "==", "0", ")", "and", "weights", "[", "mi", "]", "<=", "np", ".", "percentile", "(", "values", ",", "100", "/", "groups", "*", "(", "g", "+", "1", ")", ")", "]", "for", "g", "in", "range", "(", "N_g", ")", "]", "group_names", "=", "[", "'{:.0f} -- {:.0f} percentile'", ".", "format", "(", "100", "/", "N_g", "*", "(", "N_g", "-", "i", "-", "1", ")", ",", "100", "/", "N_g", "*", "(", "N_g", "-", "i", ")", ")", "for", "i", "in", "reversed", "(", "range", "(", "N_g", ")", ")", "]", "else", ":", "if", "Function", ".", "valid", "(", "groups", ")", ":", "groups", "=", "{", "mi", ":", "groups", "(", "mi", ")", "for", "mi", "in", "mis", "}", "group_names", "=", "unique", "(", "list", "(", "groups", ".", "values", "(", ")", ")", ")", "groups", "=", "[", "[", "mi", "for", "mi", "in", "mis", "if", "groups", "[", "mi", "]", "==", "name", "]", "for", "name", "in", "group_names", "]", "N_g", "=", "len", "(", "group_names", ")", "if", "colors", "is", "None", ":", "colors", "=", "matplotlib", ".", "cm", ".", "rainbow", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "N_g", ")", ")", "# @UndefinedVariable", "if", "Dict", ".", "valid", "(", "mis", ")", ":", "if", "index_labels", "is", "None", "or", "weights", "is", "None", ":", "temp", "=", "list", "(", "mis", ".", "keys", "(", ")", ")", "if", "(", "List", "|", "Tuple", ")", ".", "valid", "(", "temp", "[", "0", "]", ")", ":", "if", "not", "(", "index_labels", "is", "None", "and", "weights", "is", "None", ")", ":", "raise", "ValueError", "(", "'mis cannot be dictionary with tuple entries if both index_labels and weights are specified separately'", ")", "weights", "=", "{", "mi", ":", "mis", "[", "mi", "]", "[", "0", "]", "for", "mi", "in", "mis", "}", "index_labels", "=", "{", "mi", ":", "mis", "[", "mi", "]", "[", "1", "]", "for", "mi", "in", "mis", "}", "else", ":", "if", "weights", "is", "None", ":", "weights", "=", "mis", "else", ":", "index_labels", "=", "mis", "mis", "=", "temp", "else", ":", "raise", "ValueError", "(", "'mis cannot be dictionary if index_labels are specified separately'", ")", "if", "dims", "is", "None", ":", "try", ":", "dims", "=", "len", "(", "mis", "[", "0", "]", ")", "except", "TypeError", ":", "dims", "=", "sorted", "(", "list", "(", "set", ".", "union", "(", "*", "(", "set", "(", "mi", ".", "active_dims", "(", ")", ")", "for", "mi", "in", "mis", ")", ")", ")", ")", "if", "len", "(", "dims", ")", ">", "3", ":", "raise", "ValueError", "(", "'Cannot plot in more than three dimensions.'", ")", "if", "len", "(", "dims", ")", "<", "1", ":", "warnings", ".", "warn", "(", "'Sure you don\\'t want to plot anything?'", ")", "return", "if", "ax", "is", 
"None", ":", "fig", "=", "plt", ".", "figure", "(", ")", "# Creates new figure, because adding onto old axes doesn't work if they were created without 3d", "if", "len", "(", "dims", ")", "==", "3", ":", "ax", "=", "fig", ".", "gca", "(", "projection", "=", "'3d'", ")", "else", ":", "ax", "=", "fig", ".", "gca", "(", ")", "size_function", "=", "lambda", "mi", ":", "sum", "(", "[", "weights", "[", "mi2", "]", "for", "mi2", "in", "mis", "if", "mi", ".", "equal_mod", "(", "mi2", ",", "lambda", "dim", ":", "dim", "not", "in", "dims", ")", "]", ")", "sizes", "=", "{", "mi", ":", "np", ".", "power", "(", "size_function", "(", "mi", ")", ",", "size_exponent", ")", "for", "mi", "in", "mis", "}", "for", "i", ",", "plot_indices", "in", "enumerate", "(", "groups", ")", ":", "X", "=", "np", ".", "array", "(", "[", "mi", "[", "dims", "[", "0", "]", "]", "for", "mi", "in", "plot_indices", "]", ")", "if", "len", "(", "dims", ")", ">", "1", ":", "Y", "=", "np", ".", "array", "(", "[", "mi", "[", "dims", "[", "1", "]", "]", "for", "mi", "in", "plot_indices", "]", ")", "else", ":", "Y", "=", "np", ".", "array", "(", "[", "0", "for", "mi", "in", "plot_indices", "]", ")", "if", "len", "(", "dims", ")", ">", "2", ":", "Z", "=", "np", ".", "array", "(", "[", "mi", "[", "dims", "[", "2", "]", "]", "for", "mi", "in", "plot_indices", "]", ")", "else", ":", "Z", "=", "np", ".", "array", "(", "[", "0", "for", "mi", "in", "plot_indices", "]", ")", "sizes_plot", "=", "np", ".", "array", "(", "[", "sizes", "[", "mi", "]", "for", "mi", "in", "plot_indices", "]", ")", "if", "weights", ":", "if", "len", "(", "dims", ")", "==", "3", ":", "ax", ".", "scatter", "(", "X", ",", "Y", ",", "Z", ",", "s", "=", "50", "*", "sizes_plot", "/", "max", "(", "sizes", ".", "values", "(", ")", ")", ",", "color", "=", "colors", "[", "i", "]", ",", "alpha", "=", "1", ")", "else", ":", "ax", ".", "scatter", "(", "X", ",", "Y", ",", "s", "=", "50", "*", "sizes_plot", "/", "max", "(", "sizes", ".", "values", "(", ")", ")", ",", "color", "=", "colors", "[", "i", "]", ",", "alpha", "=", "1", ")", "else", ":", "if", "len", "(", "dims", ")", "==", "3", ":", "ax", ".", "scatter", "(", "X", ",", "Y", ",", "Z", ",", "color", "=", "colors", "[", "i", "]", ",", "alpha", "=", "1", ")", "else", ":", "ax", ".", "scatter", "(", "X", ",", "Y", ",", "color", "=", "colors", "[", "i", "]", ",", "alpha", "=", "1", ")", "if", "True", ":", "if", "len", "(", "dims", ")", "==", "3", ":", "axs", "=", "'xyz'", "else", ":", "axs", "=", "'xy'", "extents", "=", "np", ".", "array", "(", "[", "getattr", "(", "ax", ",", "'get_{}lim'", ".", "format", "(", "dim", ")", ")", "(", ")", "for", "dim", "in", "axs", "]", ")", "sz", "=", "extents", "[", ":", ",", "1", "]", "-", "extents", "[", ":", ",", "0", "]", "maxsize", "=", "max", "(", "abs", "(", "sz", ")", ")", "for", "dim", "in", "axs", ":", "getattr", "(", "ax", ",", "'set_{}lim'", ".", "format", "(", "dim", ")", ")", "(", "0", ",", "maxsize", ")", "if", "axis_labels", "is", "not", "None", ":", "ax", ".", "set_xlabel", "(", "axis_labels", "[", "0", "]", ")", "if", "len", "(", "dims", ")", ">", "1", ":", "ax", ".", "set_ylabel", "(", "axis_labels", "[", "1", "]", ")", "if", "len", "(", "dims", ")", ">", "1", ":", "ax", ".", "set_zlabel", "(", "axis_labels", "[", "2", "]", ")", "else", ":", "ax", ".", "set_xlabel", "(", "'$k_'", "+", "str", "(", "dims", "[", "0", "]", ")", "+", "'$'", ",", "size", "=", "20", ")", "if", "len", "(", "dims", ")", ">", "1", ":", "ax", ".", "set_ylabel", "(", "'$k_'", "+", "str", "(", "dims", "[", 
"1", "]", ")", "+", "'$'", ",", "size", "=", "20", ")", "if", "len", "(", "dims", ")", ">", "2", ":", "ax", ".", "set_zlabel", "(", "'$k_'", "+", "str", "(", "dims", "[", "2", "]", ")", "+", "'$'", ",", "size", "=", "20", ")", "plt", ".", "grid", "(", ")", "x_coordinates", "=", "[", "mi", "[", "dims", "[", "0", "]", "]", "for", "mi", "in", "mis", "]", "xticks", "=", "list", "(", "range", "(", "min", "(", "x_coordinates", ")", ",", "max", "(", "x_coordinates", ")", "+", "1", ")", ")", "ax", ".", "set_xticks", "(", "xticks", ")", "if", "len", "(", "dims", ")", ">", "1", ":", "y_coordinates", "=", "[", "mi", "[", "dims", "[", "1", "]", "]", "for", "mi", "in", "mis", "]", "ax", ".", "set_yticks", "(", "list", "(", "range", "(", "min", "(", "y_coordinates", ")", ",", "max", "(", "y_coordinates", ")", "+", "1", ")", ")", ")", "if", "len", "(", "dims", ")", ">", "2", ":", "z_coordinates", "=", "[", "mi", "[", "dims", "[", "2", "]", "]", "for", "mi", "in", "mis", "]", "ax", ".", "set_zticks", "(", "list", "(", "range", "(", "min", "(", "z_coordinates", ")", ",", "max", "(", "z_coordinates", ")", "+", "1", ")", ")", ")", "if", "index_labels", ":", "for", "mi", "in", "index_labels", ":", "ax", ".", "annotate", "(", "'{:.3g}'", ".", "format", "(", "index_labels", "[", "mi", "]", ")", ",", "xy", "=", "(", "mi", "[", "0", "]", ",", "mi", "[", "1", "]", ")", ")", "if", "legend", "and", "len", "(", "group_names", ")", ">", "1", ":", "ax", ".", "legend", "(", "[", "patches", ".", "Patch", "(", "color", "=", "color", ")", "for", "color", "in", "np", ".", "flipud", "(", "colors", ")", "]", ",", "group_names", ")", "return", "ax" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
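A minimal usage sketch for plot_indices, assuming the function is importable as swutil.plots.plot_indices (inferred from the record's path and repo fields). The real call expects SparseIndices; the ToyIndex class below is a hypothetical stand-in that only emulates the indexing and equal_mod interface the function uses, where equal_mod is assumed to mean "equal in every dimension for which the predicate is False".

import matplotlib.pyplot as plt
from swutil.plots import plot_indices  # import path inferred from the path field above

class ToyIndex(tuple):
    # hypothetical stand-in for the package's SparseIndex type
    def active_dims(self):
        return [d for d, v in enumerate(self) if v != 0]
    def equal_mod(self, other, mod):
        return all(v == w for d, (v, w) in enumerate(zip(self, other)) if not mod(d))

mis = [ToyIndex(t) for t in [(0, 0), (1, 0), (0, 1), (1, 1), (2, 0), (0, 2)]]
# weight each index by 2^-|mi|; groups=2 splits the indices into two percentile groups with different colors
ax = plot_indices(mis, dims=[0, 1], weights=lambda mi: 2.0 ** (-sum(mi)), groups=2)
plt.show()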
valid
ezplot
Plot polynomial approximation. :param vectorized: `f` can handle an array of inputs
swutil/plots.py
def ezplot(f,xlim,ylim=None,ax = None,vectorized=True,N=None,contour = False,args=None,kwargs=None,dry_run=False,show=None,include_endpoints=False): ''' Plot polynomial approximation. :param vectorized: `f` can handle an array of inputs ''' kwargs = kwargs or {} args = args or [] d = 1 if ylim is None else 2 if ax is None: fig = plt.figure() show = show if show is not None else True ax = fig.gca() if (d==1 or contour) else fig.gca(projection='3d') if d == 1: if N is None: N = 200 if include_endpoints: X = np.linspace(xlim[0],xlim[1],N) else: L = xlim[1] - xlim[0] X = np.linspace(xlim[0] + L / N, xlim[1] - L / N, N) X = X.reshape((-1, 1)) if vectorized: Z = f(X) else: Z = np.array([f(x) for x in X]) if not dry_run: C = ax.plot(X, Z,*args,**kwargs) elif d == 2: if N is None: N = 30 T = np.zeros((N, 2)) if include_endpoints: T[:,0]=np.linspace(xlim[0],xlim[1],N) T[:,1]=np.linspace(ylim[0],ylim[1],N) else: L = xlim[1] - xlim[0] T[:, 0] = np.linspace(xlim[0] + L / N, xlim[1] - L / N, N) L = ylim[1] - ylim[0] T[:, 1] = np.linspace(ylim[0] + L / N, ylim[1] - L / N, N) X, Y = meshgrid(T[:, 0], T[:, 1]) Z = grid_evaluation(X, Y, f,vectorized=vectorized) if contour: if not dry_run: # C = ax.contour(X,Y,Z,levels = np.array([0.001,1000]),colors=['red','blue']) N=200 colors=np.concatenate((np.ones((N,1)),np.tile(np.linspace(1,0,N).reshape(-1,1),(1,2))),axis=1) colors = [ [1,1,1],*colors,[1,0,0]] print('max',np.max(Z[:])) C = ax.contourf(X,Y,Z,levels = [-np.inf,*np.linspace(-20,20,N),np.inf],colors=colors) else: if not dry_run: C = ax.plot_surface(X, Y, Z)#cmap=cm.coolwarm, # C = ax.plot_wireframe(X, Y, Z, rcount=30,ccount=30) if show: plt.show() return ax,C,Z
def ezplot(f,xlim,ylim=None,ax = None,vectorized=True,N=None,contour = False,args=None,kwargs=None,dry_run=False,show=None,include_endpoints=False): ''' Plot polynomial approximation. :param vectorized: `f` can handle an array of inputs ''' kwargs = kwargs or {} args = args or [] d = 1 if ylim is None else 2 if ax is None: fig = plt.figure() show = show if show is not None else True ax = fig.gca() if (d==1 or contour) else fig.gca(projection='3d') if d == 1: if N is None: N = 200 if include_endpoints: X = np.linspace(xlim[0],xlim[1],N) else: L = xlim[1] - xlim[0] X = np.linspace(xlim[0] + L / N, xlim[1] - L / N, N) X = X.reshape((-1, 1)) if vectorized: Z = f(X) else: Z = np.array([f(x) for x in X]) if not dry_run: C = ax.plot(X, Z,*args,**kwargs) elif d == 2: if N is None: N = 30 T = np.zeros((N, 2)) if include_endpoints: T[:,0]=np.linspace(xlim[0],xlim[1],N) T[:,1]=np.linspace(ylim[0],ylim[1],N) else: L = xlim[1] - xlim[0] T[:, 0] = np.linspace(xlim[0] + L / N, xlim[1] - L / N, N) L = ylim[1] - ylim[0] T[:, 1] = np.linspace(ylim[0] + L / N, ylim[1] - L / N, N) X, Y = meshgrid(T[:, 0], T[:, 1]) Z = grid_evaluation(X, Y, f,vectorized=vectorized) if contour: if not dry_run: # C = ax.contour(X,Y,Z,levels = np.array([0.001,1000]),colors=['red','blue']) N=200 colors=np.concatenate((np.ones((N,1)),np.tile(np.linspace(1,0,N).reshape(-1,1),(1,2))),axis=1) colors = [ [1,1,1],*colors,[1,0,0]] print('max',np.max(Z[:])) C = ax.contourf(X,Y,Z,levels = [-np.inf,*np.linspace(-20,20,N),np.inf],colors=colors) else: if not dry_run: C = ax.plot_surface(X, Y, Z)#cmap=cm.coolwarm, # C = ax.plot_wireframe(X, Y, Z, rcount=30,ccount=30) if show: plt.show() return ax,C,Z
[ "Plot", "polynomial", "approximation", ".", ":", "param", "vectorized", ":", "f", "can", "handle", "an", "array", "of", "inputs" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/plots.py#L194-L250
[ "def", "ezplot", "(", "f", ",", "xlim", ",", "ylim", "=", "None", ",", "ax", "=", "None", ",", "vectorized", "=", "True", ",", "N", "=", "None", ",", "contour", "=", "False", ",", "args", "=", "None", ",", "kwargs", "=", "None", ",", "dry_run", "=", "False", ",", "show", "=", "None", ",", "include_endpoints", "=", "False", ")", ":", "kwargs", "=", "kwargs", "or", "{", "}", "args", "=", "args", "or", "[", "]", "d", "=", "1", "if", "ylim", "is", "None", "else", "2", "if", "ax", "is", "None", ":", "fig", "=", "plt", ".", "figure", "(", ")", "show", "=", "show", "if", "show", "is", "not", "None", "else", "True", "ax", "=", "fig", ".", "gca", "(", ")", "if", "(", "d", "==", "1", "or", "contour", ")", "else", "fig", ".", "gca", "(", "projection", "=", "'3d'", ")", "if", "d", "==", "1", ":", "if", "N", "is", "None", ":", "N", "=", "200", "if", "include_endpoints", ":", "X", "=", "np", ".", "linspace", "(", "xlim", "[", "0", "]", ",", "xlim", "[", "1", "]", ",", "N", ")", "else", ":", "L", "=", "xlim", "[", "1", "]", "-", "xlim", "[", "0", "]", "X", "=", "np", ".", "linspace", "(", "xlim", "[", "0", "]", "+", "L", "/", "N", ",", "xlim", "[", "1", "]", "-", "L", "/", "N", ",", "N", ")", "X", "=", "X", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", "if", "vectorized", ":", "Z", "=", "f", "(", "X", ")", "else", ":", "Z", "=", "np", ".", "array", "(", "[", "f", "(", "x", ")", "for", "x", "in", "X", "]", ")", "if", "not", "dry_run", ":", "C", "=", "ax", ".", "plot", "(", "X", ",", "Z", ",", "*", "args", ",", "*", "*", "kwargs", ")", "elif", "d", "==", "2", ":", "if", "N", "is", "None", ":", "N", "=", "30", "T", "=", "np", ".", "zeros", "(", "(", "N", ",", "2", ")", ")", "if", "include_endpoints", ":", "T", "[", ":", ",", "0", "]", "=", "np", ".", "linspace", "(", "xlim", "[", "0", "]", ",", "xlim", "[", "1", "]", ",", "N", ")", "T", "[", ":", ",", "1", "]", "=", "np", ".", "linspace", "(", "ylim", "[", "0", "]", ",", "ylim", "[", "1", "]", ",", "N", ")", "else", ":", "L", "=", "xlim", "[", "1", "]", "-", "xlim", "[", "0", "]", "T", "[", ":", ",", "0", "]", "=", "np", ".", "linspace", "(", "xlim", "[", "0", "]", "+", "L", "/", "N", ",", "xlim", "[", "1", "]", "-", "L", "/", "N", ",", "N", ")", "L", "=", "ylim", "[", "1", "]", "-", "ylim", "[", "0", "]", "T", "[", ":", ",", "1", "]", "=", "np", ".", "linspace", "(", "ylim", "[", "0", "]", "+", "L", "/", "N", ",", "ylim", "[", "1", "]", "-", "L", "/", "N", ",", "N", ")", "X", ",", "Y", "=", "meshgrid", "(", "T", "[", ":", ",", "0", "]", ",", "T", "[", ":", ",", "1", "]", ")", "Z", "=", "grid_evaluation", "(", "X", ",", "Y", ",", "f", ",", "vectorized", "=", "vectorized", ")", "if", "contour", ":", "if", "not", "dry_run", ":", "# C = ax.contour(X,Y,Z,levels = np.array([0.001,1000]),colors=['red','blue'])", "N", "=", "200", "colors", "=", "np", ".", "concatenate", "(", "(", "np", ".", "ones", "(", "(", "N", ",", "1", ")", ")", ",", "np", ".", "tile", "(", "np", ".", "linspace", "(", "1", ",", "0", ",", "N", ")", ".", "reshape", "(", "-", "1", ",", "1", ")", ",", "(", "1", ",", "2", ")", ")", ")", ",", "axis", "=", "1", ")", "colors", "=", "[", "[", "1", ",", "1", ",", "1", "]", ",", "*", "colors", ",", "[", "1", ",", "0", ",", "0", "]", "]", "print", "(", "'max'", ",", "np", ".", "max", "(", "Z", "[", ":", "]", ")", ")", "C", "=", "ax", ".", "contourf", "(", "X", ",", "Y", ",", "Z", ",", "levels", "=", "[", "-", "np", ".", "inf", ",", "*", "np", ".", "linspace", "(", "-", "20", ",", "20", ",", "N", ")", ",", "np", ".", "inf", "]", ",", "colors", 
"=", "colors", ")", "else", ":", "if", "not", "dry_run", ":", "C", "=", "ax", ".", "plot_surface", "(", "X", ",", "Y", ",", "Z", ")", "#cmap=cm.coolwarm, ", "# C = ax.plot_wireframe(X, Y, Z, rcount=30,ccount=30)", "if", "show", ":", "plt", ".", "show", "(", ")", "return", "ax", ",", "C", ",", "Z" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
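A usage sketch for ezplot under stated assumptions: the import path swutil.plots is inferred from the path field, and grid_evaluation (used internally in the 2D case) is assumed to hand f an (n, 2) array of grid points when vectorized=True.

import numpy as np
import matplotlib.pyplot as plt
from swutil.plots import ezplot  # import path inferred from the path field above

# 1D: f receives an (N, 1) array because vectorized defaults to True
ax, C, Z = ezplot(np.sin, xlim=(0, 2 * np.pi), show=False)

# 2D surface plot on a caller-supplied 3D axis (assumed point layout: one row per grid point)
f = lambda P: np.exp(-P[:, 0] ** 2 - P[:, 1] ** 2)
ax3d = plt.figure().add_subplot(projection='3d')
ezplot(f, xlim=(-2, 2), ylim=(-2, 2), ax=ax3d)
plt.show()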
valid
plot3D
Surface plot. Generate X and Y using, for example X,Y = np.mgrid[0:1:50j, 0:1:50j] or X,Y= np.meshgrid([0,1,2],[1,2,3]). :param X: 2D-Array of x-coordinates :param Y: 2D-Array of y-coordinates :param Z: 2D-Array of z-coordinates
swutil/plots.py
def plot3D(X, Y, Z):
    '''
    Surface plot.

    Generate X and Y using, for example
        X,Y = np.mgrid[0:1:50j, 0:1:50j]
    or
        X,Y= np.meshgrid([0,1,2],[1,2,3]).

    :param X: 2D-Array of x-coordinates
    :param Y: 2D-Array of y-coordinates
    :param Z: 2D-Array of z-coordinates
    '''
    fig = plt.figure()
    ax = Axes3D(fig)
    light = LightSource(90, 90)
    illuminated_surface = light.shade(Z, cmap=cm.coolwarm)  # @UndefinedVariable
    Xmin = np.amin(X)
    Xmax = np.amax(X)
    Ymin = np.amin(Y)
    Ymax = np.amax(Y)
    Zmin = np.amin(Z)
    Zmax = np.amax(Z)
    ax.contourf(X, Y, Z, zdir='x', offset=Xmin - 0.1 * (Xmax - Xmin), cmap=cm.coolwarm, alpha=1)  # @UndefinedVariable
    ax.contourf(X, Y, Z, zdir='y', offset=Ymax + 0.1 * (Ymax - Ymin), cmap=cm.coolwarm, alpha=1)  # @UndefinedVariable
    ax.contourf(X, Y, Z, zdir='z', offset=Zmin - 0.1 * (Zmax - Zmin), cmap=cm.coolwarm, alpha=1)  # @UndefinedVariable
    ax.plot_surface(X, Y, Z, cstride=5, rstride=5, facecolors=illuminated_surface, alpha=0.5)
    plt.show()
def plot3D(X, Y, Z):
    '''
    Surface plot.

    Generate X and Y using, for example
        X,Y = np.mgrid[0:1:50j, 0:1:50j]
    or
        X,Y= np.meshgrid([0,1,2],[1,2,3]).

    :param X: 2D-Array of x-coordinates
    :param Y: 2D-Array of y-coordinates
    :param Z: 2D-Array of z-coordinates
    '''
    fig = plt.figure()
    ax = Axes3D(fig)
    light = LightSource(90, 90)
    illuminated_surface = light.shade(Z, cmap=cm.coolwarm)  # @UndefinedVariable
    Xmin = np.amin(X)
    Xmax = np.amax(X)
    Ymin = np.amin(Y)
    Ymax = np.amax(Y)
    Zmin = np.amin(Z)
    Zmax = np.amax(Z)
    ax.contourf(X, Y, Z, zdir='x', offset=Xmin - 0.1 * (Xmax - Xmin), cmap=cm.coolwarm, alpha=1)  # @UndefinedVariable
    ax.contourf(X, Y, Z, zdir='y', offset=Ymax + 0.1 * (Ymax - Ymin), cmap=cm.coolwarm, alpha=1)  # @UndefinedVariable
    ax.contourf(X, Y, Z, zdir='z', offset=Zmin - 0.1 * (Zmax - Zmin), cmap=cm.coolwarm, alpha=1)  # @UndefinedVariable
    ax.plot_surface(X, Y, Z, cstride=5, rstride=5, facecolors=illuminated_surface, alpha=0.5)
    plt.show()
[ "Surface", "plot", ".", "Generate", "X", "and", "Y", "using", "for", "example", "X", "Y", "=", "np", ".", "mgrid", "[", "0", ":", "1", ":", "50j", "0", ":", "1", ":", "50j", "]", "or", "X", "Y", "=", "np", ".", "meshgrid", "(", "[", "0", "1", "2", "]", "[", "1", "2", "3", "]", ")", ".", ":", "param", "X", ":", "2D", "-", "Array", "of", "x", "-", "coordinates", ":", "param", "Y", ":", "2D", "-", "Array", "of", "y", "-", "coordinates", ":", "param", "Z", ":", "2D", "-", "Array", "of", "z", "-", "coordinates" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/plots.py#L252-L279
[ "def", "plot3D", "(", "X", ",", "Y", ",", "Z", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax", "=", "Axes3D", "(", "fig", ")", "light", "=", "LightSource", "(", "90", ",", "90", ")", "illuminated_surface", "=", "light", ".", "shade", "(", "Z", ",", "cmap", "=", "cm", ".", "coolwarm", ")", "# @UndefinedVariable", "Xmin", "=", "np", ".", "amin", "(", "X", ")", "Xmax", "=", "np", ".", "amax", "(", "X", ")", "Ymin", "=", "np", ".", "amin", "(", "Y", ")", "Ymax", "=", "np", ".", "amax", "(", "Y", ")", "Zmin", "=", "np", ".", "amin", "(", "Z", ")", "Zmax", "=", "np", ".", "amax", "(", "Z", ")", "ax", ".", "contourf", "(", "X", ",", "Y", ",", "Z", ",", "zdir", "=", "'x'", ",", "offset", "=", "Xmin", "-", "0.1", "*", "(", "Xmax", "-", "Xmin", ")", ",", "cmap", "=", "cm", ".", "coolwarm", ",", "alpha", "=", "1", ")", "# @UndefinedVariable", "ax", ".", "contourf", "(", "X", ",", "Y", ",", "Z", ",", "zdir", "=", "'y'", ",", "offset", "=", "Ymax", "+", "0.1", "*", "(", "Ymax", "-", "Ymin", ")", ",", "cmap", "=", "cm", ".", "coolwarm", ",", "alpha", "=", "1", ")", "# @UndefinedVariable", "ax", ".", "contourf", "(", "X", ",", "Y", ",", "Z", ",", "zdir", "=", "'z'", ",", "offset", "=", "Zmin", "-", "0.1", "*", "(", "Zmax", "-", "Zmin", ")", ",", "cmap", "=", "cm", ".", "coolwarm", ",", "alpha", "=", "1", ")", "# @UndefinedVariable", "ax", ".", "plot_surface", "(", "X", ",", "Y", ",", "Z", ",", "cstride", "=", "5", ",", "rstride", "=", "5", ",", "facecolors", "=", "illuminated_surface", ",", "alpha", "=", "0.5", ")", "plt", ".", "show", "(", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
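A short usage sketch for plot3D, following the grid construction suggested in its docstring; the import path is inferred from the path field above.

import numpy as np
from swutil.plots import plot3D  # import path inferred from the path field above

X, Y = np.mgrid[0:1:50j, 0:1:50j]
Z = np.sin(2 * np.pi * X) * np.cos(2 * np.pi * Y)
plot3D(X, Y, Z)  # draws the shaded surface plus projected contours and calls plt.show() itself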
valid
plot_convergence
Show loglog or semilogy convergence plot. Specify :code:`reference` if exact limit is known. Otherwise limit is taken to be last entry of :code:`values`. Distance to limit is computed as RMSE (or analogous p-norm if p is specified) Specify either :code:`plot_rate`(pass number or 'fit') or :code:`expect_residuals` and :code:`expect_times` to add a second plot with the expected convergence. :param times: Runtimes :type times: List of positive numbers :param values: Outputs :type values: List of arrays :param reference: Exact solution, or 'self' if not available :type reference: Array or 'self' :param convergence_type: Convergence type :type convergence_type: 'algebraic' or 'exponential' :param expect_residuals: Expected residuals :type expect_residuals: List of positive numbers :param expect_times: Expected runtimes :type expect_times: List of positive numbers :param plot_rate: Expected convergence order :type plot_rate: Real or 'fit' :param preasymptotics: Ignore initial entries for rate fitting :type preasymptotics: Boolean :param stagnation: Ignore final entries from rate fitting :type stagnation: Boolean :param marker: Marker for data points :type marker: Matplotlib marker string :return: fitted convergence order
swutil/plots.py
def plot_convergence(times, values, name=None, title=None, reference='self', convergence_type='algebraic', expect_residuals=None, expect_times=None, plot_rate='fit', base = np.exp(0),xlabel = 'x', p=2, preasymptotics=True, stagnation=False, marker='.', legend='lower left',relative = False,ax = None): ''' Show loglog or semilogy convergence plot. Specify :code:`reference` if exact limit is known. Otherwise limit is taken to be last entry of :code:`values`. Distance to limit is computed as RMSE (or analogous p-norm if p is specified) Specify either :code:`plot_rate`(pass number or 'fit') or :code:`expect_residuals` and :code:`expect_times` to add a second plot with the expected convergence. :param times: Runtimes :type times: List of positive numbers :param values: Outputs :type values: List of arrays :param reference: Exact solution, or 'self' if not available :type reference: Array or 'self' :param convergence_type: Convergence type :type convergence_type: 'algebraic' or 'exponential' :param expect_residuals: Expected residuals :type expect_residuals: List of positive numbers :param expect_times: Expected runtimes :type expect_times: List of positive numbers :param plot_rate: Expected convergence order :type plot_rate: Real or 'fit' :param preasymptotics: Ignore initial entries for rate fitting :type preasymptotics: Boolean :param stagnation: Ignore final entries from rate fitting :type stagnation: Boolean :param marker: Marker for data points :type marker: Matplotlib marker string :return: fitted convergence order ''' name = name or '' self_reference = (isinstance(reference,str) and reference=='self') #reference == 'self' complains when reference is a numpy array ax = ax or plt.gca() color = next(ax._get_lines.prop_cycler)['color'] ax.tick_params(labeltop=False, labelright=True, right=True, which='both') ax.yaxis.grid(which="minor", linestyle='-', alpha=0.5) ax.yaxis.grid(which="major", linestyle='-', alpha=0.6) c_ticks = 3 ACCEPT_MISFIT = 0.1 values, times = np.squeeze(values), np.squeeze(times) assert(times.ndim == 1) assert(len(times) == len(values)) sorting = np.argsort(times) times = times[sorting] values = values[sorting] if plot_rate == True: plot_rate = 'fit' if plot_rate !='fit': plot_rate = plot_rate*np.log(base)#Convert to a rate w.r.t. exp if self_reference: if len(times) <= 2: raise ValueError('Too few data points') limit = values[-1] limit_time = times[-1] times = times[0:-1] values = values[0:-1] else: limit = np.squeeze(reference) limit_time = np.Inf residuals = np.zeros(len(times)) N = limit.size for L in range(len(times)): if p < np.Inf: residuals[L] = np.power(np.sum(np.power(np.abs(values[L] - limit), p) / N), 1. 
/ p) # else: residuals[L] = np.amax(np.abs(values[L] - limit)) if relative: if p<np.Inf: residuals /= np.power(np.sum(np.power(np.abs(limit),p)/N),1./p) else: residuals /= np.amax(np.abs(limit)) try: remove = np.isnan(times) | np.isinf(times) | np.isnan(residuals) | np.isinf(residuals) | (residuals == 0) | ((times == 0) & (convergence_type == 'algebraic')) except TypeError: print(times,residuals) times = times[~remove] if sum(~remove) < (2 if self_reference else 1): raise ValueError('Too few valid data points') residuals = residuals[~remove] if convergence_type == 'algebraic': x = np.log(times) limit_x = np.log(limit_time) else: x = times limit_x = limit_time #min_x = min(x) max_x = max(x) y = np.log(residuals) try: rate, offset, min_x_fit, max_x_fit = _fit_rate(x, y, stagnation, preasymptotics, limit_x, have_rate=False if (plot_rate == 'fit' or plot_rate is None) else plot_rate) except FitError as e: warnings.warn(str(e)) plot_rate = False rate = None if self_reference: if rate >= 0: warnings.warn('No sign of convergence') else: real_rate = _real_rate(rate, l_bound=min_x_fit, r_bound=max_x_fit, reference_x=limit_x) if (real_rate is None or abs((real_rate - rate) / rate) >= ACCEPT_MISFIT): warnings.warn(('Self-convergence strongly affects plot and would yield misleading fit.') + (' Estimated true rate: {}.'.format(real_rate) if real_rate else '') + (' Fitted rate: {}.'.format(rate) if rate else '')) if plot_rate: name += 'Fitted rate: ' if plot_rate == 'fit' else 'Plotted rate: ' if convergence_type == 'algebraic': name+='{:.2g})'.format(rate) else: base_rate = rate/np.log(base) base_rate_str = f'{base_rate:.2g}' if base_rate_str=='-1': base_rate_str='-' if base_rate_str =='1': base_rate_str = '' name+=f'${base}^{{{base_rate_str}{xlabel}}}$' if convergence_type == 'algebraic': X = np.linspace(np.exp(min_x_fit), np.exp(max_x_fit), c_ticks) ax.loglog(X, np.exp(offset) * X ** rate, '--', color=color) else: X = np.linspace(min_x_fit, max_x_fit, c_ticks) ax.semilogy(X, np.exp(offset + rate * X), '--', color=color) max_x_data = max_x keep_1 = (x <= max_x_data) if convergence_type == 'algebraic': ax.loglog(np.array(times)[keep_1], np.array(residuals)[keep_1], label=name, marker=marker, color=color) ax.loglog(np.array(times), np.array(residuals), marker=marker, color=color, alpha=0.5) else: ax.semilogy(np.array(times)[keep_1], np.array(residuals)[keep_1], label=name, marker=marker, color=color) ax.semilogy(np.array(times), np.array(residuals), marker=marker, color=color, alpha=0.5) if expect_times is not None and expect_residuals is not None: ax.loglog(expect_times, expect_residuals, '--', marker=marker, color=color) if name: ax.legend(loc=legend) if title: ax.set_title(title) return rate
def plot_convergence(times, values, name=None, title=None, reference='self', convergence_type='algebraic', expect_residuals=None, expect_times=None, plot_rate='fit', base = np.exp(0),xlabel = 'x', p=2, preasymptotics=True, stagnation=False, marker='.', legend='lower left',relative = False,ax = None): ''' Show loglog or semilogy convergence plot. Specify :code:`reference` if exact limit is known. Otherwise limit is taken to be last entry of :code:`values`. Distance to limit is computed as RMSE (or analogous p-norm if p is specified) Specify either :code:`plot_rate`(pass number or 'fit') or :code:`expect_residuals` and :code:`expect_times` to add a second plot with the expected convergence. :param times: Runtimes :type times: List of positive numbers :param values: Outputs :type values: List of arrays :param reference: Exact solution, or 'self' if not available :type reference: Array or 'self' :param convergence_type: Convergence type :type convergence_type: 'algebraic' or 'exponential' :param expect_residuals: Expected residuals :type expect_residuals: List of positive numbers :param expect_times: Expected runtimes :type expect_times: List of positive numbers :param plot_rate: Expected convergence order :type plot_rate: Real or 'fit' :param preasymptotics: Ignore initial entries for rate fitting :type preasymptotics: Boolean :param stagnation: Ignore final entries from rate fitting :type stagnation: Boolean :param marker: Marker for data points :type marker: Matplotlib marker string :return: fitted convergence order ''' name = name or '' self_reference = (isinstance(reference,str) and reference=='self') #reference == 'self' complains when reference is a numpy array ax = ax or plt.gca() color = next(ax._get_lines.prop_cycler)['color'] ax.tick_params(labeltop=False, labelright=True, right=True, which='both') ax.yaxis.grid(which="minor", linestyle='-', alpha=0.5) ax.yaxis.grid(which="major", linestyle='-', alpha=0.6) c_ticks = 3 ACCEPT_MISFIT = 0.1 values, times = np.squeeze(values), np.squeeze(times) assert(times.ndim == 1) assert(len(times) == len(values)) sorting = np.argsort(times) times = times[sorting] values = values[sorting] if plot_rate == True: plot_rate = 'fit' if plot_rate !='fit': plot_rate = plot_rate*np.log(base)#Convert to a rate w.r.t. exp if self_reference: if len(times) <= 2: raise ValueError('Too few data points') limit = values[-1] limit_time = times[-1] times = times[0:-1] values = values[0:-1] else: limit = np.squeeze(reference) limit_time = np.Inf residuals = np.zeros(len(times)) N = limit.size for L in range(len(times)): if p < np.Inf: residuals[L] = np.power(np.sum(np.power(np.abs(values[L] - limit), p) / N), 1. 
/ p) # else: residuals[L] = np.amax(np.abs(values[L] - limit)) if relative: if p<np.Inf: residuals /= np.power(np.sum(np.power(np.abs(limit),p)/N),1./p) else: residuals /= np.amax(np.abs(limit)) try: remove = np.isnan(times) | np.isinf(times) | np.isnan(residuals) | np.isinf(residuals) | (residuals == 0) | ((times == 0) & (convergence_type == 'algebraic')) except TypeError: print(times,residuals) times = times[~remove] if sum(~remove) < (2 if self_reference else 1): raise ValueError('Too few valid data points') residuals = residuals[~remove] if convergence_type == 'algebraic': x = np.log(times) limit_x = np.log(limit_time) else: x = times limit_x = limit_time #min_x = min(x) max_x = max(x) y = np.log(residuals) try: rate, offset, min_x_fit, max_x_fit = _fit_rate(x, y, stagnation, preasymptotics, limit_x, have_rate=False if (plot_rate == 'fit' or plot_rate is None) else plot_rate) except FitError as e: warnings.warn(str(e)) plot_rate = False rate = None if self_reference: if rate >= 0: warnings.warn('No sign of convergence') else: real_rate = _real_rate(rate, l_bound=min_x_fit, r_bound=max_x_fit, reference_x=limit_x) if (real_rate is None or abs((real_rate - rate) / rate) >= ACCEPT_MISFIT): warnings.warn(('Self-convergence strongly affects plot and would yield misleading fit.') + (' Estimated true rate: {}.'.format(real_rate) if real_rate else '') + (' Fitted rate: {}.'.format(rate) if rate else '')) if plot_rate: name += 'Fitted rate: ' if plot_rate == 'fit' else 'Plotted rate: ' if convergence_type == 'algebraic': name+='{:.2g})'.format(rate) else: base_rate = rate/np.log(base) base_rate_str = f'{base_rate:.2g}' if base_rate_str=='-1': base_rate_str='-' if base_rate_str =='1': base_rate_str = '' name+=f'${base}^{{{base_rate_str}{xlabel}}}$' if convergence_type == 'algebraic': X = np.linspace(np.exp(min_x_fit), np.exp(max_x_fit), c_ticks) ax.loglog(X, np.exp(offset) * X ** rate, '--', color=color) else: X = np.linspace(min_x_fit, max_x_fit, c_ticks) ax.semilogy(X, np.exp(offset + rate * X), '--', color=color) max_x_data = max_x keep_1 = (x <= max_x_data) if convergence_type == 'algebraic': ax.loglog(np.array(times)[keep_1], np.array(residuals)[keep_1], label=name, marker=marker, color=color) ax.loglog(np.array(times), np.array(residuals), marker=marker, color=color, alpha=0.5) else: ax.semilogy(np.array(times)[keep_1], np.array(residuals)[keep_1], label=name, marker=marker, color=color) ax.semilogy(np.array(times), np.array(residuals), marker=marker, color=color, alpha=0.5) if expect_times is not None and expect_residuals is not None: ax.loglog(expect_times, expect_residuals, '--', marker=marker, color=color) if name: ax.legend(loc=legend) if title: ax.set_title(title) return rate
[ "Show", "loglog", "or", "semilogy", "convergence", "plot", ".", "Specify", ":", "code", ":", "reference", "if", "exact", "limit", "is", "known", ".", "Otherwise", "limit", "is", "taken", "to", "be", "last", "entry", "of", ":", "code", ":", "values", ".", "Distance", "to", "limit", "is", "computed", "as", "RMSE", "(", "or", "analogous", "p", "-", "norm", "if", "p", "is", "specified", ")", "Specify", "either", ":", "code", ":", "plot_rate", "(", "pass", "number", "or", "fit", ")", "or", ":", "code", ":", "expect_residuals", "and", ":", "code", ":", "expect_times", "to", "add", "a", "second", "plot", "with", "the", "expected", "convergence", ".", ":", "param", "times", ":", "Runtimes", ":", "type", "times", ":", "List", "of", "positive", "numbers", ":", "param", "values", ":", "Outputs", ":", "type", "values", ":", "List", "of", "arrays", ":", "param", "reference", ":", "Exact", "solution", "or", "self", "if", "not", "available", ":", "type", "reference", ":", "Array", "or", "self", ":", "param", "convergence_type", ":", "Convergence", "type", ":", "type", "convergence_type", ":", "algebraic", "or", "exponential", ":", "param", "expect_residuals", ":", "Expected", "residuals", ":", "type", "expect_residuals", ":", "List", "of", "positive", "numbers", ":", "param", "expect_times", ":", "Expected", "runtimes", ":", "type", "expect_times", ":", "List", "of", "positive", "numbers", ":", "param", "plot_rate", ":", "Expected", "convergence", "order", ":", "type", "plot_rate", ":", "Real", "or", "fit", ":", "param", "preasymptotics", ":", "Ignore", "initial", "entries", "for", "rate", "fitting", ":", "type", "preasymptotics", ":", "Boolean", ":", "param", "stagnation", ":", "Ignore", "final", "entries", "from", "rate", "fitting", ":", "type", "stagnation", ":", "Boolean", ":", "param", "marker", ":", "Marker", "for", "data", "points", ":", "type", "marker", ":", "Matplotlib", "marker", "string", ":", "return", ":", "fitted", "convergence", "order" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/plots.py#L286-L428
[ "def", "plot_convergence", "(", "times", ",", "values", ",", "name", "=", "None", ",", "title", "=", "None", ",", "reference", "=", "'self'", ",", "convergence_type", "=", "'algebraic'", ",", "expect_residuals", "=", "None", ",", "expect_times", "=", "None", ",", "plot_rate", "=", "'fit'", ",", "base", "=", "np", ".", "exp", "(", "0", ")", ",", "xlabel", "=", "'x'", ",", "p", "=", "2", ",", "preasymptotics", "=", "True", ",", "stagnation", "=", "False", ",", "marker", "=", "'.'", ",", "legend", "=", "'lower left'", ",", "relative", "=", "False", ",", "ax", "=", "None", ")", ":", "name", "=", "name", "or", "''", "self_reference", "=", "(", "isinstance", "(", "reference", ",", "str", ")", "and", "reference", "==", "'self'", ")", "#reference == 'self' complains when reference is a numpy array", "ax", "=", "ax", "or", "plt", ".", "gca", "(", ")", "color", "=", "next", "(", "ax", ".", "_get_lines", ".", "prop_cycler", ")", "[", "'color'", "]", "ax", ".", "tick_params", "(", "labeltop", "=", "False", ",", "labelright", "=", "True", ",", "right", "=", "True", ",", "which", "=", "'both'", ")", "ax", ".", "yaxis", ".", "grid", "(", "which", "=", "\"minor\"", ",", "linestyle", "=", "'-'", ",", "alpha", "=", "0.5", ")", "ax", ".", "yaxis", ".", "grid", "(", "which", "=", "\"major\"", ",", "linestyle", "=", "'-'", ",", "alpha", "=", "0.6", ")", "c_ticks", "=", "3", "ACCEPT_MISFIT", "=", "0.1", "values", ",", "times", "=", "np", ".", "squeeze", "(", "values", ")", ",", "np", ".", "squeeze", "(", "times", ")", "assert", "(", "times", ".", "ndim", "==", "1", ")", "assert", "(", "len", "(", "times", ")", "==", "len", "(", "values", ")", ")", "sorting", "=", "np", ".", "argsort", "(", "times", ")", "times", "=", "times", "[", "sorting", "]", "values", "=", "values", "[", "sorting", "]", "if", "plot_rate", "==", "True", ":", "plot_rate", "=", "'fit'", "if", "plot_rate", "!=", "'fit'", ":", "plot_rate", "=", "plot_rate", "*", "np", ".", "log", "(", "base", ")", "#Convert to a rate w.r.t. 
exp", "if", "self_reference", ":", "if", "len", "(", "times", ")", "<=", "2", ":", "raise", "ValueError", "(", "'Too few data points'", ")", "limit", "=", "values", "[", "-", "1", "]", "limit_time", "=", "times", "[", "-", "1", "]", "times", "=", "times", "[", "0", ":", "-", "1", "]", "values", "=", "values", "[", "0", ":", "-", "1", "]", "else", ":", "limit", "=", "np", ".", "squeeze", "(", "reference", ")", "limit_time", "=", "np", ".", "Inf", "residuals", "=", "np", ".", "zeros", "(", "len", "(", "times", ")", ")", "N", "=", "limit", ".", "size", "for", "L", "in", "range", "(", "len", "(", "times", ")", ")", ":", "if", "p", "<", "np", ".", "Inf", ":", "residuals", "[", "L", "]", "=", "np", ".", "power", "(", "np", ".", "sum", "(", "np", ".", "power", "(", "np", ".", "abs", "(", "values", "[", "L", "]", "-", "limit", ")", ",", "p", ")", "/", "N", ")", ",", "1.", "/", "p", ")", "#", "else", ":", "residuals", "[", "L", "]", "=", "np", ".", "amax", "(", "np", ".", "abs", "(", "values", "[", "L", "]", "-", "limit", ")", ")", "if", "relative", ":", "if", "p", "<", "np", ".", "Inf", ":", "residuals", "/=", "np", ".", "power", "(", "np", ".", "sum", "(", "np", ".", "power", "(", "np", ".", "abs", "(", "limit", ")", ",", "p", ")", "/", "N", ")", ",", "1.", "/", "p", ")", "else", ":", "residuals", "/=", "np", ".", "amax", "(", "np", ".", "abs", "(", "limit", ")", ")", "try", ":", "remove", "=", "np", ".", "isnan", "(", "times", ")", "|", "np", ".", "isinf", "(", "times", ")", "|", "np", ".", "isnan", "(", "residuals", ")", "|", "np", ".", "isinf", "(", "residuals", ")", "|", "(", "residuals", "==", "0", ")", "|", "(", "(", "times", "==", "0", ")", "&", "(", "convergence_type", "==", "'algebraic'", ")", ")", "except", "TypeError", ":", "print", "(", "times", ",", "residuals", ")", "times", "=", "times", "[", "~", "remove", "]", "if", "sum", "(", "~", "remove", ")", "<", "(", "2", "if", "self_reference", "else", "1", ")", ":", "raise", "ValueError", "(", "'Too few valid data points'", ")", "residuals", "=", "residuals", "[", "~", "remove", "]", "if", "convergence_type", "==", "'algebraic'", ":", "x", "=", "np", ".", "log", "(", "times", ")", "limit_x", "=", "np", ".", "log", "(", "limit_time", ")", "else", ":", "x", "=", "times", "limit_x", "=", "limit_time", "#min_x = min(x)", "max_x", "=", "max", "(", "x", ")", "y", "=", "np", ".", "log", "(", "residuals", ")", "try", ":", "rate", ",", "offset", ",", "min_x_fit", ",", "max_x_fit", "=", "_fit_rate", "(", "x", ",", "y", ",", "stagnation", ",", "preasymptotics", ",", "limit_x", ",", "have_rate", "=", "False", "if", "(", "plot_rate", "==", "'fit'", "or", "plot_rate", "is", "None", ")", "else", "plot_rate", ")", "except", "FitError", "as", "e", ":", "warnings", ".", "warn", "(", "str", "(", "e", ")", ")", "plot_rate", "=", "False", "rate", "=", "None", "if", "self_reference", ":", "if", "rate", ">=", "0", ":", "warnings", ".", "warn", "(", "'No sign of convergence'", ")", "else", ":", "real_rate", "=", "_real_rate", "(", "rate", ",", "l_bound", "=", "min_x_fit", ",", "r_bound", "=", "max_x_fit", ",", "reference_x", "=", "limit_x", ")", "if", "(", "real_rate", "is", "None", "or", "abs", "(", "(", "real_rate", "-", "rate", ")", "/", "rate", ")", ">=", "ACCEPT_MISFIT", ")", ":", "warnings", ".", "warn", "(", "(", "'Self-convergence strongly affects plot and would yield misleading fit.'", ")", "+", "(", "' Estimated true rate: {}.'", ".", "format", "(", "real_rate", ")", "if", "real_rate", "else", "''", ")", "+", "(", "' Fitted rate: {}.'", ".", "format", 
"(", "rate", ")", "if", "rate", "else", "''", ")", ")", "if", "plot_rate", ":", "name", "+=", "'Fitted rate: '", "if", "plot_rate", "==", "'fit'", "else", "'Plotted rate: '", "if", "convergence_type", "==", "'algebraic'", ":", "name", "+=", "'{:.2g})'", ".", "format", "(", "rate", ")", "else", ":", "base_rate", "=", "rate", "/", "np", ".", "log", "(", "base", ")", "base_rate_str", "=", "f'{base_rate:.2g}'", "if", "base_rate_str", "==", "'-1'", ":", "base_rate_str", "=", "'-'", "if", "base_rate_str", "==", "'1'", ":", "base_rate_str", "=", "''", "name", "+=", "f'${base}^{{{base_rate_str}{xlabel}}}$'", "if", "convergence_type", "==", "'algebraic'", ":", "X", "=", "np", ".", "linspace", "(", "np", ".", "exp", "(", "min_x_fit", ")", ",", "np", ".", "exp", "(", "max_x_fit", ")", ",", "c_ticks", ")", "ax", ".", "loglog", "(", "X", ",", "np", ".", "exp", "(", "offset", ")", "*", "X", "**", "rate", ",", "'--'", ",", "color", "=", "color", ")", "else", ":", "X", "=", "np", ".", "linspace", "(", "min_x_fit", ",", "max_x_fit", ",", "c_ticks", ")", "ax", ".", "semilogy", "(", "X", ",", "np", ".", "exp", "(", "offset", "+", "rate", "*", "X", ")", ",", "'--'", ",", "color", "=", "color", ")", "max_x_data", "=", "max_x", "keep_1", "=", "(", "x", "<=", "max_x_data", ")", "if", "convergence_type", "==", "'algebraic'", ":", "ax", ".", "loglog", "(", "np", ".", "array", "(", "times", ")", "[", "keep_1", "]", ",", "np", ".", "array", "(", "residuals", ")", "[", "keep_1", "]", ",", "label", "=", "name", ",", "marker", "=", "marker", ",", "color", "=", "color", ")", "ax", ".", "loglog", "(", "np", ".", "array", "(", "times", ")", ",", "np", ".", "array", "(", "residuals", ")", ",", "marker", "=", "marker", ",", "color", "=", "color", ",", "alpha", "=", "0.5", ")", "else", ":", "ax", ".", "semilogy", "(", "np", ".", "array", "(", "times", ")", "[", "keep_1", "]", ",", "np", ".", "array", "(", "residuals", ")", "[", "keep_1", "]", ",", "label", "=", "name", ",", "marker", "=", "marker", ",", "color", "=", "color", ")", "ax", ".", "semilogy", "(", "np", ".", "array", "(", "times", ")", ",", "np", ".", "array", "(", "residuals", ")", ",", "marker", "=", "marker", ",", "color", "=", "color", ",", "alpha", "=", "0.5", ")", "if", "expect_times", "is", "not", "None", "and", "expect_residuals", "is", "not", "None", ":", "ax", ".", "loglog", "(", "expect_times", ",", "expect_residuals", ",", "'--'", ",", "marker", "=", "marker", ",", "color", "=", "color", ")", "if", "name", ":", "ax", ".", "legend", "(", "loc", "=", "legend", ")", "if", "title", ":", "ax", ".", "set_title", "(", "title", ")", "return", "rate" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
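A self-contained sketch of plot_convergence on synthetic data with a known algebraic rate; the import path is inferred from the path field above.

import numpy as np
import matplotlib.pyplot as plt
from swutil.plots import plot_convergence  # import path inferred from the path field above

times = np.array([1., 2., 4., 8., 16., 32.])
exact = np.array([1.0])
values = [exact + t ** -2.0 for t in times]   # error decays like t^(-2)
rate = plot_convergence(times, values, reference=exact, plot_rate='fit')
print(rate)  # fitted rate, expected to be close to -2
plt.show()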
valid
lower
Enforces lower case options and option values where appropriate
swutil/config.py
def lower(option,value):
    '''
    Enforces lower case options and option values where appropriate
    '''
    if type(option) is str:
        option=option.lower()
    if type(value) is str:
        value=value.lower()
    return (option,value)
def lower(option,value):
    '''
    Enforces lower case options and option values where appropriate
    '''
    if type(option) is str:
        option=option.lower()
    if type(value) is str:
        value=value.lower()
    return (option,value)
[ "Enforces", "lower", "case", "options", "and", "option", "values", "where", "appropriate" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/config.py#L21-L29
[ "def", "lower", "(", "option", ",", "value", ")", ":", "if", "type", "(", "option", ")", "is", "str", ":", "option", "=", "option", ".", "lower", "(", ")", "if", "type", "(", "value", ")", "is", "str", ":", "value", "=", "value", ".", "lower", "(", ")", "return", "(", "option", ",", "value", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
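A quick check of lower as an (option, value) post-processor, assuming it is importable as swutil.config.lower (module path inferred from the path field above).

from swutil.config import lower

print(lower('MaxIter', 'Fast'))  # ('maxiter', 'fast')
print(lower('tol', 1e-8))        # ('tol', 1e-08) -- non-string values pass through unchanged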
valid
to_float
Converts string values to floats when appropriate
swutil/config.py
def to_float(option,value):
    '''
    Converts string values to floats when appropriate
    '''
    if type(value) is str:
        try:
            value=float(value)
        except ValueError:
            pass
    return (option,value)
def to_float(option,value):
    '''
    Converts string values to floats when appropriate
    '''
    if type(value) is str:
        try:
            value=float(value)
        except ValueError:
            pass
    return (option,value)
[ "Converts", "string", "values", "to", "floats", "when", "appropriate" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/config.py#L41-L50
[ "def", "to_float", "(", "option", ",", "value", ")", ":", "if", "type", "(", "value", ")", "is", "str", ":", "try", ":", "value", "=", "float", "(", "value", ")", "except", "ValueError", ":", "pass", "return", "(", "option", ",", "value", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
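A quick check of to_float, with the same import assumption as above (module swutil.config inferred from the path field).

from swutil.config import to_float

print(to_float('tol', '1e-3'))   # ('tol', 0.001)
print(to_float('name', 'fast'))  # ('name', 'fast') -- strings that are not numbers are left alone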
valid
to_bool
Converts string values to booleans when appropriate
swutil/config.py
def to_bool(option,value):
    '''
    Converts string values to booleans when appropriate
    '''
    if type(value) is str:
        if value.lower() == 'true':
            value=True
        elif value.lower() == 'false':
            value=False
    return (option,value)
def to_bool(option,value):
    '''
    Converts string values to booleans when appropriate
    '''
    if type(value) is str:
        if value.lower() == 'true':
            value=True
        elif value.lower() == 'false':
            value=False
    return (option,value)
[ "Converts", "string", "values", "to", "booleans", "when", "appropriate" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/config.py#L52-L61
[ "def", "to_bool", "(", "option", ",", "value", ")", ":", "if", "type", "(", "value", ")", "is", "str", ":", "if", "value", ".", "lower", "(", ")", "==", "'true'", ":", "value", "=", "True", "elif", "value", ".", "lower", "(", ")", "==", "'false'", ":", "value", "=", "False", "return", "(", "option", ",", "value", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
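A quick check of to_bool, with the same import assumption as above.

from swutil.config import to_bool

print(to_bool('verbose', 'True'))   # ('verbose', True)
print(to_bool('verbose', 'false'))  # ('verbose', False)
print(to_bool('verbose', 1))        # ('verbose', 1) -- non-strings pass through unchanged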
valid
Config.fork
Create fork and store it in current instance
swutil/config.py
def fork(self,name):
        '''
        Create fork and store it in current instance
        '''
        fork=deepcopy(self)
        self[name]=fork
        return fork
def fork(self,name):
        '''
        Create fork and store it in current instance
        '''
        fork=deepcopy(self)
        self[name]=fork
        return fork
[ "Create", "fork", "and", "store", "it", "in", "current", "instance" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/config.py#L136-L142
[ "def", "fork", "(", "self", ",", "name", ")", ":", "fork", "=", "deepcopy", "(", "self", ")", "self", "[", "name", "]", "=", "fork", "return", "fork" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
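To illustrate what fork does without guessing the real Config constructor, here is a hypothetical dict-based stand-in with the same method body: the fork is a deep copy that is both returned and stored under the given key.

from copy import deepcopy

class ToyConfig(dict):  # hypothetical stand-in for swutil.config.Config
    def fork(self, name):
        fork = deepcopy(self)
        self[name] = fork
        return fork

cfg = ToyConfig(tol=1e-3)
solver = cfg.fork('solver')              # solver is the object stored under cfg['solver']
solver['tol'] = 1e-6                     # changes the fork only
print(cfg['tol'], cfg['solver']['tol'])  # 0.001 1e-06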
valid
smart_range
smart_range(1,3,9)==[1,3,5,7,9]
swutil/aux.py
def smart_range(*args): ''' smart_range(1,3,9)==[1,3,5,7,9] ''' if len(args)==1:#String string_input = True string = args[0].replace(' ','') original_args=string.split(',') args = [] for arg in original_args: try: args.append(ast.literal_eval(arg)) except (ValueError,SyntaxError): try:# Maybe an arithmetic expression? args.append(eval(arg,{'__builtins__':{}})) except (NameError,SyntaxError):#Input was actually meant to be a string, e.g. smart_range('a,...,z'), or input was interval type, e.g. smart_range('[1,3]/10') args.append(arg) else: string_input = False arg_start = args[0] if len(args)>2: arg_step = args[1] if len(args)>3: raise ValueError('At most 3 arguments: start, step, stop') else: arg_step = None arg_end = args[-1] if String.valid(arg_start) and len(arg_start)==1:#Character range_type = 'char' elif all(Integer.valid(arg) for arg in args): range_type = 'integer' else: if string_input and original_args[0][0] in ['(','[']: range_type = 'linspace' else: range_type = 'float' if range_type == 'char': start = ord(arg_start) step = (ord(arg_step)- start) if arg_step else 1 end = ord(arg_end) out = [chr(i) for i in range(start,end+step,step)] if np.sign(step)*(ord(out[-1])-end)>0: del out[-1] return out elif range_type == 'integer': if string_input: if len(args)==2 and all('**' in oa for oa in original_args):#Attempt geometric progresesion bases,exponents = zip(*[oa.split('**') for oa in original_args]) if len(set(bases))==1:#Keep attempting geometric progression return [int(bases[0])**exponent for exponent in smart_range(','.join(exponents))] start = arg_start step = (arg_step - arg_start) if arg_step is not None else 1 end = arg_end out = list(range(start,end+step,step)) if np.sign(step)*(out[-1]-end)>0: del out[-1] return out elif range_type == 'float': if len(args)==2 and all('**' in oa for oa in original_args):#Attempt geometric progresesion bases,exponents = zip(*[oa.split('**') for oa in original_args]) if len(set(bases))==1:#Keep attempting geometric progression return [float(bases[0])**exponent for exponent in smart_range(','.join(exponents)) ] if len(args) == 2: raise ValueError() start = arg_start step = arg_step - arg_start end = arg_end out = list(np.arange(start,end+1e-12*step,step)) return out elif range_type == 'linspace': lopen,start = (original_args[0][0]=='('),float(original_args[0][1:]) end,N = original_args[1].split('/') end,ropen = float(end[:-1]),(end[-1]==')') N = ast.literal_eval(N)+lopen +ropen points = np.linspace(start,end,num=N) return points[lopen:len(points)-ropen]
def smart_range(*args): ''' smart_range(1,3,9)==[1,3,5,7,9] ''' if len(args)==1:#String string_input = True string = args[0].replace(' ','') original_args=string.split(',') args = [] for arg in original_args: try: args.append(ast.literal_eval(arg)) except (ValueError,SyntaxError): try:# Maybe an arithmetic expression? args.append(eval(arg,{'__builtins__':{}})) except (NameError,SyntaxError):#Input was actually meant to be a string, e.g. smart_range('a,...,z'), or input was interval type, e.g. smart_range('[1,3]/10') args.append(arg) else: string_input = False arg_start = args[0] if len(args)>2: arg_step = args[1] if len(args)>3: raise ValueError('At most 3 arguments: start, step, stop') else: arg_step = None arg_end = args[-1] if String.valid(arg_start) and len(arg_start)==1:#Character range_type = 'char' elif all(Integer.valid(arg) for arg in args): range_type = 'integer' else: if string_input and original_args[0][0] in ['(','[']: range_type = 'linspace' else: range_type = 'float' if range_type == 'char': start = ord(arg_start) step = (ord(arg_step)- start) if arg_step else 1 end = ord(arg_end) out = [chr(i) for i in range(start,end+step,step)] if np.sign(step)*(ord(out[-1])-end)>0: del out[-1] return out elif range_type == 'integer': if string_input: if len(args)==2 and all('**' in oa for oa in original_args):#Attempt geometric progresesion bases,exponents = zip(*[oa.split('**') for oa in original_args]) if len(set(bases))==1:#Keep attempting geometric progression return [int(bases[0])**exponent for exponent in smart_range(','.join(exponents))] start = arg_start step = (arg_step - arg_start) if arg_step is not None else 1 end = arg_end out = list(range(start,end+step,step)) if np.sign(step)*(out[-1]-end)>0: del out[-1] return out elif range_type == 'float': if len(args)==2 and all('**' in oa for oa in original_args):#Attempt geometric progresesion bases,exponents = zip(*[oa.split('**') for oa in original_args]) if len(set(bases))==1:#Keep attempting geometric progression return [float(bases[0])**exponent for exponent in smart_range(','.join(exponents)) ] if len(args) == 2: raise ValueError() start = arg_start step = arg_step - arg_start end = arg_end out = list(np.arange(start,end+1e-12*step,step)) return out elif range_type == 'linspace': lopen,start = (original_args[0][0]=='('),float(original_args[0][1:]) end,N = original_args[1].split('/') end,ropen = float(end[:-1]),(end[-1]==')') N = ast.literal_eval(N)+lopen +ropen points = np.linspace(start,end,num=N) return points[lopen:len(points)-ropen]
[ "smart_range", "(", "1", "3", "9", ")", "==", "[", "1", "3", "5", "7", "9", "]" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/aux.py#L21-L97
[ "def", "smart_range", "(", "*", "args", ")", ":", "if", "len", "(", "args", ")", "==", "1", ":", "#String", "string_input", "=", "True", "string", "=", "args", "[", "0", "]", ".", "replace", "(", "' '", ",", "''", ")", "original_args", "=", "string", ".", "split", "(", "','", ")", "args", "=", "[", "]", "for", "arg", "in", "original_args", ":", "try", ":", "args", ".", "append", "(", "ast", ".", "literal_eval", "(", "arg", ")", ")", "except", "(", "ValueError", ",", "SyntaxError", ")", ":", "try", ":", "# Maybe an arithmetic expression?", "args", ".", "append", "(", "eval", "(", "arg", ",", "{", "'__builtins__'", ":", "{", "}", "}", ")", ")", "except", "(", "NameError", ",", "SyntaxError", ")", ":", "#Input was actually meant to be a string, e.g. smart_range('a,...,z'), or input was interval type, e.g. smart_range('[1,3]/10')", "args", ".", "append", "(", "arg", ")", "else", ":", "string_input", "=", "False", "arg_start", "=", "args", "[", "0", "]", "if", "len", "(", "args", ")", ">", "2", ":", "arg_step", "=", "args", "[", "1", "]", "if", "len", "(", "args", ")", ">", "3", ":", "raise", "ValueError", "(", "'At most 3 arguments: start, step, stop'", ")", "else", ":", "arg_step", "=", "None", "arg_end", "=", "args", "[", "-", "1", "]", "if", "String", ".", "valid", "(", "arg_start", ")", "and", "len", "(", "arg_start", ")", "==", "1", ":", "#Character", "range_type", "=", "'char'", "elif", "all", "(", "Integer", ".", "valid", "(", "arg", ")", "for", "arg", "in", "args", ")", ":", "range_type", "=", "'integer'", "else", ":", "if", "string_input", "and", "original_args", "[", "0", "]", "[", "0", "]", "in", "[", "'('", ",", "'['", "]", ":", "range_type", "=", "'linspace'", "else", ":", "range_type", "=", "'float'", "if", "range_type", "==", "'char'", ":", "start", "=", "ord", "(", "arg_start", ")", "step", "=", "(", "ord", "(", "arg_step", ")", "-", "start", ")", "if", "arg_step", "else", "1", "end", "=", "ord", "(", "arg_end", ")", "out", "=", "[", "chr", "(", "i", ")", "for", "i", "in", "range", "(", "start", ",", "end", "+", "step", ",", "step", ")", "]", "if", "np", ".", "sign", "(", "step", ")", "*", "(", "ord", "(", "out", "[", "-", "1", "]", ")", "-", "end", ")", ">", "0", ":", "del", "out", "[", "-", "1", "]", "return", "out", "elif", "range_type", "==", "'integer'", ":", "if", "string_input", ":", "if", "len", "(", "args", ")", "==", "2", "and", "all", "(", "'**'", "in", "oa", "for", "oa", "in", "original_args", ")", ":", "#Attempt geometric progresesion", "bases", ",", "exponents", "=", "zip", "(", "*", "[", "oa", ".", "split", "(", "'**'", ")", "for", "oa", "in", "original_args", "]", ")", "if", "len", "(", "set", "(", "bases", ")", ")", "==", "1", ":", "#Keep attempting geometric progression", "return", "[", "int", "(", "bases", "[", "0", "]", ")", "**", "exponent", "for", "exponent", "in", "smart_range", "(", "','", ".", "join", "(", "exponents", ")", ")", "]", "start", "=", "arg_start", "step", "=", "(", "arg_step", "-", "arg_start", ")", "if", "arg_step", "is", "not", "None", "else", "1", "end", "=", "arg_end", "out", "=", "list", "(", "range", "(", "start", ",", "end", "+", "step", ",", "step", ")", ")", "if", "np", ".", "sign", "(", "step", ")", "*", "(", "out", "[", "-", "1", "]", "-", "end", ")", ">", "0", ":", "del", "out", "[", "-", "1", "]", "return", "out", "elif", "range_type", "==", "'float'", ":", "if", "len", "(", "args", ")", "==", "2", "and", "all", "(", "'**'", "in", "oa", "for", "oa", "in", "original_args", ")", ":", "#Attempt geometric progresesion", "bases", 
",", "exponents", "=", "zip", "(", "*", "[", "oa", ".", "split", "(", "'**'", ")", "for", "oa", "in", "original_args", "]", ")", "if", "len", "(", "set", "(", "bases", ")", ")", "==", "1", ":", "#Keep attempting geometric progression", "return", "[", "float", "(", "bases", "[", "0", "]", ")", "**", "exponent", "for", "exponent", "in", "smart_range", "(", "','", ".", "join", "(", "exponents", ")", ")", "]", "if", "len", "(", "args", ")", "==", "2", ":", "raise", "ValueError", "(", ")", "start", "=", "arg_start", "step", "=", "arg_step", "-", "arg_start", "end", "=", "arg_end", "out", "=", "list", "(", "np", ".", "arange", "(", "start", ",", "end", "+", "1e-12", "*", "step", ",", "step", ")", ")", "return", "out", "elif", "range_type", "==", "'linspace'", ":", "lopen", ",", "start", "=", "(", "original_args", "[", "0", "]", "[", "0", "]", "==", "'('", ")", ",", "float", "(", "original_args", "[", "0", "]", "[", "1", ":", "]", ")", "end", ",", "N", "=", "original_args", "[", "1", "]", ".", "split", "(", "'/'", ")", "end", ",", "ropen", "=", "float", "(", "end", "[", ":", "-", "1", "]", ")", ",", "(", "end", "[", "-", "1", "]", "==", "')'", ")", "N", "=", "ast", ".", "literal_eval", "(", "N", ")", "+", "lopen", "+", "ropen", "points", "=", "np", ".", "linspace", "(", "start", ",", "end", ",", "num", "=", "N", ")", "return", "points", "[", "lopen", ":", "len", "(", "points", ")", "-", "ropen", "]" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
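A minimal usage sketch for the smart_range record whose token list appears above; only the tail of that record is visible in this section, so the call convention and the import path are inferred from the tokens and should be treated as assumptions:

    from swutil.aux import smart_range  # import path assumed, not shown in this excerpt
    smart_range(1, 3, 9)        # integer progression with step 3 - 1 = 2 -> [1, 3, 5, 7, 9]
    smart_range('a', 'c', 'i')  # character progression -> ['a', 'c', 'e', 'g', 'i']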
valid
ld_to_dl
Convert list of dictionaries to dictionary of lists
swutil/aux.py
def ld_to_dl(ld): ''' Convert list of dictionaries to dictionary of lists ''' if ld: keys = list(ld[0]) dl = {key:[d[key] for d in ld] for key in keys} return dl else: return {}
def ld_to_dl(ld): ''' Convert list of dictionaries to dictionary of lists ''' if ld: keys = list(ld[0]) dl = {key:[d[key] for d in ld] for key in keys} return dl else: return {}
[ "Convert", "list", "of", "dictionaries", "to", "dictionary", "of", "lists" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/aux.py#L99-L108
[ "def", "ld_to_dl", "(", "ld", ")", ":", "if", "ld", ":", "keys", "=", "list", "(", "ld", "[", "0", "]", ")", "dl", "=", "{", "key", ":", "[", "d", "[", "key", "]", "for", "d", "in", "ld", "]", "for", "key", "in", "keys", "}", "return", "dl", "else", ":", "return", "{", "}" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
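A minimal usage sketch for the ld_to_dl record above, with the import path taken from the record's path field (swutil/aux.py):

    from swutil.aux import ld_to_dl
    ld_to_dl([{'a': 1, 'b': 2}, {'a': 3, 'b': 4}])  # -> {'a': [1, 3], 'b': [2, 4]}
    ld_to_dl([])                                    # -> {} (empty input is handled explicitly)

The keys are taken from the first dictionary, so every later dictionary must contain at least those keys.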
valid
chain
Concatenate functions
swutil/aux.py
def chain(*fs): ''' Concatenate functions ''' def chained(x): for f in reversed(fs): if f: x=f(x) return x return chained
def chain(*fs): ''' Concatenate functions ''' def chained(x): for f in reversed(fs): if f: x=f(x) return x return chained
[ "Concatenate", "functions" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/aux.py#L116-L125
[ "def", "chain", "(", "*", "fs", ")", ":", "def", "chained", "(", "x", ")", ":", "for", "f", "in", "reversed", "(", "fs", ")", ":", "if", "f", ":", "x", "=", "f", "(", "x", ")", "return", "x", "return", "chained" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
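A minimal usage sketch for the chain record above; the functions are applied right to left, and falsy entries such as None are skipped:

    from swutil.aux import chain
    double_then_add_one = chain(lambda x: x + 1, lambda x: 2 * x)
    double_then_add_one(3)  # -> 7, i.e. (2 * 3) + 1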
valid
split_list
Subdivide list into N lists
swutil/aux.py
def split_list(l,N): ''' Subdivide list into N lists ''' npmode = isinstance(l,np.ndarray) if npmode: l=list(l) g=np.concatenate((np.array([0]),np.cumsum(split_integer(len(l),length=N)))) s=[l[g[i]:g[i+1]] for i in range(N)] if npmode: s=[np.array(sl) for sl in s] return s
def split_list(l,N): ''' Subdivide list into N lists ''' npmode = isinstance(l,np.ndarray) if npmode: l=list(l) g=np.concatenate((np.array([0]),np.cumsum(split_integer(len(l),length=N)))) s=[l[g[i]:g[i+1]] for i in range(N)] if npmode: s=[np.array(sl) for sl in s] return s
[ "Subdivide", "list", "into", "N", "lists" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/aux.py#L158-L169
[ "def", "split_list", "(", "l", ",", "N", ")", ":", "npmode", "=", "isinstance", "(", "l", ",", "np", ".", "ndarray", ")", "if", "npmode", ":", "l", "=", "list", "(", "l", ")", "g", "=", "np", ".", "concatenate", "(", "(", "np", ".", "array", "(", "[", "0", "]", ")", ",", "np", ".", "cumsum", "(", "split_integer", "(", "len", "(", "l", ")", ",", "length", "=", "N", ")", ")", ")", ")", "s", "=", "[", "l", "[", "g", "[", "i", "]", ":", "g", "[", "i", "+", "1", "]", "]", "for", "i", "in", "range", "(", "N", ")", "]", "if", "npmode", ":", "s", "=", "[", "np", ".", "array", "(", "sl", ")", "for", "sl", "in", "s", "]", "return", "s" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
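A minimal usage sketch for the split_list record above; the exact chunk lengths come from split_integer, which is not shown in this excerpt, so the concrete output below is an assumption (consecutive chunks of near-equal length covering the list in order):

    import numpy as np
    from swutil.aux import split_list
    split_list([1, 2, 3, 4, 5], 2)  # two consecutive chunks, e.g. [[1, 2], [3, 4, 5]]
    split_list(np.arange(6), 3)     # numpy input comes back as a list of numpy arrays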
valid
random_word
Creates random lowercase words from dictionary or by alternating vowels and consonants The second method chooses from 85**length words. The dictionary method chooses from 3000--12000 words for 3<=length<=12 (though this of course depends on the available dictionary) :param length: word length :param dictionary: Try reading from dictionary, else fall back to artificial words
swutil/aux.py
def random_word(length,dictionary = False):#may return offensive words if dictionary = True ''' Creates random lowercase words from dictionary or by alternating vowels and consonants The second method chooses from 85**length words. The dictionary method chooses from 3000--12000 words for 3<=length<=12 (though this of course depends on the available dictionary) :param length: word length :param dictionary: Try reading from dictionary, else fall back to artificial words ''' if dictionary: try: with open('/usr/share/dict/words') as fp: words = [word.lower()[:-1] for word in fp.readlines() if re.match('[A-Za-z0-9]{}$'.format('{'+str(length)+'}'),word)] return random.choice(words) except FileNotFoundError: pass vowels = list('aeiou') consonants = list('bcdfghklmnprstvwz') pairs = [(random.choice(consonants),random.choice(vowels)) for _ in range(length//2+1)] return ''.join([l for p in pairs for l in p])[:length]
def random_word(length,dictionary = False):#may return offensive words if dictionary = True ''' Creates random lowercase words from dictionary or by alternating vowels and consonants The second method chooses from 85**length words. The dictionary method chooses from 3000--12000 words for 3<=length<=12 (though this of course depends on the available dictionary) :param length: word length :param dictionary: Try reading from dictionary, else fall back to artificial words ''' if dictionary: try: with open('/usr/share/dict/words') as fp: words = [word.lower()[:-1] for word in fp.readlines() if re.match('[A-Za-z0-9]{}$'.format('{'+str(length)+'}'),word)] return random.choice(words) except FileNotFoundError: pass vowels = list('aeiou') consonants = list('bcdfghklmnprstvwz') pairs = [(random.choice(consonants),random.choice(vowels)) for _ in range(length//2+1)] return ''.join([l for p in pairs for l in p])[:length]
[ "Creates", "random", "lowercase", "words", "from", "dictionary", "or", "by", "alternating", "vowels", "and", "consonants", "The", "second", "method", "chooses", "from", "85", "**", "length", "words", ".", "The", "dictionary", "method", "chooses", "from", "3000", "--", "12000", "words", "for", "3<", "=", "length<", "=", "12", "(", "though", "this", "of", "course", "depends", "on", "the", "available", "dictionary", ")", ":", "param", "length", ":", "word", "length", ":", "param", "dictionary", ":", "Try", "reading", "from", "dictionary", "else", "fall", "back", "to", "artificial", "words" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/aux.py#L177-L198
[ "def", "random_word", "(", "length", ",", "dictionary", "=", "False", ")", ":", "#may return offensive words if dictionary = True", "if", "dictionary", ":", "try", ":", "with", "open", "(", "'/usr/share/dict/words'", ")", "as", "fp", ":", "words", "=", "[", "word", ".", "lower", "(", ")", "[", ":", "-", "1", "]", "for", "word", "in", "fp", ".", "readlines", "(", ")", "if", "re", ".", "match", "(", "'[A-Za-z0-9]{}$'", ".", "format", "(", "'{'", "+", "str", "(", "length", ")", "+", "'}'", ")", ",", "word", ")", "]", "return", "random", ".", "choice", "(", "words", ")", "except", "FileNotFoundError", ":", "pass", "vowels", "=", "list", "(", "'aeiou'", ")", "consonants", "=", "list", "(", "'bcdfghklmnprstvwz'", ")", "pairs", "=", "[", "(", "random", ".", "choice", "(", "consonants", ")", ",", "random", ".", "choice", "(", "vowels", ")", ")", "for", "_", "in", "range", "(", "length", "//", "2", "+", "1", ")", "]", "return", "''", ".", "join", "(", "[", "l", "for", "p", "in", "pairs", "for", "l", "in", "p", "]", ")", "[", ":", "length", "]" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
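A minimal usage sketch for the random_word record above:

    from swutil.aux import random_word
    random_word(6)                   # e.g. 'zopake': alternating consonants and vowels
    random_word(6, dictionary=True)  # a 6-letter entry from /usr/share/dict/words if that file exists,
                                     # otherwise it falls back to the artificial scheme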
valid
string_from_seconds
Converts seconds into elapsed time string of form (X days(s)?,)? HH:MM:SS.YY
swutil/aux.py
def string_from_seconds(seconds): ''' Converts seconds into elapsed time string of form (X days(s)?,)? HH:MM:SS.YY ''' td = str(timedelta(seconds = seconds)) parts = td.split('.') if len(parts) == 1: td = td+'.00' elif len(parts) == 2: td = '.'.join([parts[0],parts[1][:2]]) return td
def string_from_seconds(seconds): ''' Converts seconds into elapsed time string of form (X days(s)?,)? HH:MM:SS.YY ''' td = str(timedelta(seconds = seconds)) parts = td.split('.') if len(parts) == 1: td = td+'.00' elif len(parts) == 2: td = '.'.join([parts[0],parts[1][:2]]) return td
[ "Converts", "seconds", "into", "elapsed", "time", "string", "of", "form", "(", "X", "days", "(", "s", ")", "?", ")", "?", "HH", ":", "MM", ":", "SS", ".", "YY" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/aux.py#L200-L213
[ "def", "string_from_seconds", "(", "seconds", ")", ":", "td", "=", "str", "(", "timedelta", "(", "seconds", "=", "seconds", ")", ")", "parts", "=", "td", ".", "split", "(", "'.'", ")", "if", "len", "(", "parts", ")", "==", "1", ":", "td", "=", "td", "+", "'.00'", "elif", "len", "(", "parts", ")", "==", "2", ":", "td", "=", "'.'", ".", "join", "(", "[", "parts", "[", "0", "]", ",", "parts", "[", "1", "]", "[", ":", "2", "]", "]", ")", "return", "td" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
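A minimal usage sketch for the string_from_seconds record above:

    from swutil.aux import string_from_seconds
    string_from_seconds(3661.5)  # -> '1:01:01.50'
    string_from_seconds(45)      # -> '0:00:45.00'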
valid
input_with_prefill
https://stackoverflow.com/questions/8505163/is-it-possible-to-prefill-a-input-in-python-3s-command-line-interface
swutil/aux.py
def input_with_prefill(prompt, text): ''' https://stackoverflow.com/questions/8505163/is-it-possible-to-prefill-a-input-in-python-3s-command-line-interface ''' def hook(): readline.insert_text(text) readline.redisplay() try: readline.set_pre_input_hook(hook) except Exception: pass result = input(prompt) try: readline.set_pre_input_hook() except Exception: pass return result
def input_with_prefill(prompt, text): ''' https://stackoverflow.com/questions/8505163/is-it-possible-to-prefill-a-input-in-python-3s-command-line-interface ''' def hook(): readline.insert_text(text) readline.redisplay() try: readline.set_pre_input_hook(hook) except Exception: pass result = input(prompt) try: readline.set_pre_input_hook() except Exception: pass return result
[ "https", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "8505163", "/", "is", "-", "it", "-", "possible", "-", "to", "-", "prefill", "-", "a", "-", "input", "-", "in", "-", "python", "-", "3s", "-", "command", "-", "line", "-", "interface" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/aux.py#L215-L231
[ "def", "input_with_prefill", "(", "prompt", ",", "text", ")", ":", "def", "hook", "(", ")", ":", "readline", ".", "insert_text", "(", "text", ")", "readline", ".", "redisplay", "(", ")", "try", ":", "readline", ".", "set_pre_input_hook", "(", "hook", ")", "except", "Exception", ":", "pass", "result", "=", "input", "(", "prompt", ")", "try", ":", "readline", ".", "set_pre_input_hook", "(", ")", "except", "Exception", ":", "pass", "return", "result" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
EasyHPC
:param n_tasks: How many tasks does the decorated function handle? :param n_results: If the decorated function handles many tasks at once, are the results reduced (n_results = 'one') or not (as many results as tasks)? :param reduce: Function that reduces multiple outputs to a single output :param splitjob: Function that converts an input (to the decorated function) that represents one large job to two smaller jobs NOTE: don't turn this into a class, you'll run into strange pickling errors
swutil/hpc.py
def EasyHPC(backend:In('MP', 'MPI')|Function='MP', n_tasks:In('implicitly many', 'many', 'one', 'count')='one',#Count is special case of implicitly many where it is already known how to split jobs n_results:In('many', 'one')='one', aux_output:Bool=True, # Parellelize only first entry of n_results is tuple reduce:Function=None, split_job=NotPassed, parallel = True,#If false, use the wrapper functionality of EasyHPC but don't actually use multiprocessing method = None, pool = None ): ''' :param n_tasks: How many tasks does the decorated function handle? :param n_results: If the decorated function handles many tasks at once, are the results reduced (n_results = 'one') or not (as many results as tasks)? :param reduce: Function that reduces multiple outputs to a single output :param splitjob: Function that converts an input (to the decorated function) that represents one large job to two smaller jobs NOTE: don't turn this into a class, you'll run into strange pickling errors ''' self = argparse.Namespace() direct_call = (~String&Function).valid(backend) if direct_call: f = backend backend = 'MP' if backend == 'MPI': self.processor = _MPI_processor self.finalizer = _MPI_finalizer if backend == 'MP': self.processor = _MP_processor self.finalizer = None self.info = argparse.Namespace() self.info.n_tasks = n_tasks self.info.n_results = n_results self.info.parallel = parallel self.info.reduce = reduce self.info.wrap_MPI = False self.info.aux_output = aux_output self.info.method = method self.info.pool = pool or Pool() self.info.split_job = split_job if self.info.n_tasks == 'implicitly many': if self.info.n_results == 'many': raise ValueError('Do not know how to handle functions that handle implicitly many tasks and return multiple results') if NotPassed(self.info.split_job): raise ValueError('Functions handling implicitly many tasks must specify how to split a job using `split_job`') if direct_call: def _lam(*args,**kwargs): return _MultiProcessorWrapper_call(args,kwargs,f,self.processor,self.finalizer,self.info) return _lam return lambda f: _easy_hpc_call(f,self)
def EasyHPC(backend:In('MP', 'MPI')|Function='MP', n_tasks:In('implicitly many', 'many', 'one', 'count')='one',#Count is special case of implicitly many where it is already known how to split jobs n_results:In('many', 'one')='one', aux_output:Bool=True, # Parellelize only first entry of n_results is tuple reduce:Function=None, split_job=NotPassed, parallel = True,#If false, use the wrapper functionality of EasyHPC but don't actually use multiprocessing method = None, pool = None ): ''' :param n_tasks: How many tasks does the decorated function handle? :param n_results: If the decorated function handles many tasks at once, are the results reduced (n_results = 'one') or not (as many results as tasks)? :param reduce: Function that reduces multiple outputs to a single output :param splitjob: Function that converts an input (to the decorated function) that represents one large job to two smaller jobs NOTE: don't turn this into a class, you'll run into strange pickling errors ''' self = argparse.Namespace() direct_call = (~String&Function).valid(backend) if direct_call: f = backend backend = 'MP' if backend == 'MPI': self.processor = _MPI_processor self.finalizer = _MPI_finalizer if backend == 'MP': self.processor = _MP_processor self.finalizer = None self.info = argparse.Namespace() self.info.n_tasks = n_tasks self.info.n_results = n_results self.info.parallel = parallel self.info.reduce = reduce self.info.wrap_MPI = False self.info.aux_output = aux_output self.info.method = method self.info.pool = pool or Pool() self.info.split_job = split_job if self.info.n_tasks == 'implicitly many': if self.info.n_results == 'many': raise ValueError('Do not know how to handle functions that handle implicitly many tasks and return multiple results') if NotPassed(self.info.split_job): raise ValueError('Functions handling implicitly many tasks must specify how to split a job using `split_job`') if direct_call: def _lam(*args,**kwargs): return _MultiProcessorWrapper_call(args,kwargs,f,self.processor,self.finalizer,self.info) return _lam return lambda f: _easy_hpc_call(f,self)
[ ":", "param", "n_tasks", ":", "How", "many", "tasks", "does", "the", "decorated", "function", "handle?", ":", "param", "n_results", ":", "If", "the", "decorated", "function", "handles", "many", "tasks", "at", "once", "are", "the", "results", "reduced", "(", "n_results", "=", "one", ")", "or", "not", "(", "as", "many", "results", "as", "tasks", ")", "?", ":", "param", "reduce", ":", "Function", "that", "reduces", "multiple", "outputs", "to", "a", "single", "output", ":", "param", "splitjob", ":", "Function", "that", "converts", "an", "input", "(", "to", "the", "decorated", "function", ")", "that", "represents", "one", "large", "job", "to", "two", "smaller", "jobs" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/hpc.py#L33-L81
[ "def", "EasyHPC", "(", "backend", ":", "In", "(", "'MP'", ",", "'MPI'", ")", "|", "Function", "=", "'MP'", ",", "n_tasks", ":", "In", "(", "'implicitly many'", ",", "'many'", ",", "'one'", ",", "'count'", ")", "=", "'one'", ",", "#Count is special case of implicitly many where it is already known how to split jobs ", "n_results", ":", "In", "(", "'many'", ",", "'one'", ")", "=", "'one'", ",", "aux_output", ":", "Bool", "=", "True", ",", "# Parellelize only first entry of n_results is tuple", "reduce", ":", "Function", "=", "None", ",", "split_job", "=", "NotPassed", ",", "parallel", "=", "True", ",", "#If false, use the wrapper functionality of EasyHPC but don't actually use multiprocessing", "method", "=", "None", ",", "pool", "=", "None", ")", ":", "self", "=", "argparse", ".", "Namespace", "(", ")", "direct_call", "=", "(", "~", "String", "&", "Function", ")", ".", "valid", "(", "backend", ")", "if", "direct_call", ":", "f", "=", "backend", "backend", "=", "'MP'", "if", "backend", "==", "'MPI'", ":", "self", ".", "processor", "=", "_MPI_processor", "self", ".", "finalizer", "=", "_MPI_finalizer", "if", "backend", "==", "'MP'", ":", "self", ".", "processor", "=", "_MP_processor", "self", ".", "finalizer", "=", "None", "self", ".", "info", "=", "argparse", ".", "Namespace", "(", ")", "self", ".", "info", ".", "n_tasks", "=", "n_tasks", "self", ".", "info", ".", "n_results", "=", "n_results", "self", ".", "info", ".", "parallel", "=", "parallel", "self", ".", "info", ".", "reduce", "=", "reduce", "self", ".", "info", ".", "wrap_MPI", "=", "False", "self", ".", "info", ".", "aux_output", "=", "aux_output", "self", ".", "info", ".", "method", "=", "method", "self", ".", "info", ".", "pool", "=", "pool", "or", "Pool", "(", ")", "self", ".", "info", ".", "split_job", "=", "split_job", "if", "self", ".", "info", ".", "n_tasks", "==", "'implicitly many'", ":", "if", "self", ".", "info", ".", "n_results", "==", "'many'", ":", "raise", "ValueError", "(", "'Do not know how to handle functions that handle implicitly many tasks and return multiple results'", ")", "if", "NotPassed", "(", "self", ".", "info", ".", "split_job", ")", ":", "raise", "ValueError", "(", "'Functions handling implicitly many tasks must specify how to split a job using `split_job`'", ")", "if", "direct_call", ":", "def", "_lam", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_MultiProcessorWrapper_call", "(", "args", ",", "kwargs", ",", "f", ",", "self", ".", "processor", ",", "self", ".", "finalizer", ",", "self", ".", "info", ")", "return", "_lam", "return", "lambda", "f", ":", "_easy_hpc_call", "(", "f", ",", "self", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
path_from_keywords
turns keyword pairs into path or filename if `into=='path'`, then keywords are separted by underscores, else keywords are used to create a directory hierarchy
swutil/files.py
def path_from_keywords(keywords,into='path'): ''' turns keyword pairs into path or filename if `into=='path'`, then keywords are separted by underscores, else keywords are used to create a directory hierarchy ''' subdirs = [] def prepare_string(s): s = str(s) s = re.sub('[][{},*"'+f"'{os.sep}]",'_',s)#replace characters that make bash life difficult by underscore if into=='file': s = s.replace('_', ' ')#Remove underscore because they will be used as separator if ' ' in s: s = s.title() s = s.replace(' ','') return s if isinstance(keywords,set): keywords_list = sorted(keywords) for property in keywords_list: subdirs.append(prepare_string(property)) else: keywords_list = sorted(keywords.items()) for property,value in keywords_list: # @reservedassignment if Bool.valid(value): subdirs.append(('' if value else ('not_' if into=='path' else 'not'))+prepare_string(property)) #elif String.valid(value): # subdirs.append(prepare_string(value)) elif (Float|Integer).valid(value): subdirs.append('{}{}'.format(prepare_string(property),prepare_string(value))) else: subdirs.append('{}{}{}'.format(prepare_string(property),'_' if into == 'path' else '',prepare_string(value))) if into == 'path': out = os.path.join(*subdirs) else: out = '_'.join(subdirs) return out
def path_from_keywords(keywords,into='path'): ''' turns keyword pairs into path or filename if `into=='path'`, then keywords are separted by underscores, else keywords are used to create a directory hierarchy ''' subdirs = [] def prepare_string(s): s = str(s) s = re.sub('[][{},*"'+f"'{os.sep}]",'_',s)#replace characters that make bash life difficult by underscore if into=='file': s = s.replace('_', ' ')#Remove underscore because they will be used as separator if ' ' in s: s = s.title() s = s.replace(' ','') return s if isinstance(keywords,set): keywords_list = sorted(keywords) for property in keywords_list: subdirs.append(prepare_string(property)) else: keywords_list = sorted(keywords.items()) for property,value in keywords_list: # @reservedassignment if Bool.valid(value): subdirs.append(('' if value else ('not_' if into=='path' else 'not'))+prepare_string(property)) #elif String.valid(value): # subdirs.append(prepare_string(value)) elif (Float|Integer).valid(value): subdirs.append('{}{}'.format(prepare_string(property),prepare_string(value))) else: subdirs.append('{}{}{}'.format(prepare_string(property),'_' if into == 'path' else '',prepare_string(value))) if into == 'path': out = os.path.join(*subdirs) else: out = '_'.join(subdirs) return out
[ "turns", "keyword", "pairs", "into", "path", "or", "filename", "if", "into", "==", "path", "then", "keywords", "are", "separted", "by", "underscores", "else", "keywords", "are", "used", "to", "create", "a", "directory", "hierarchy" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/files.py#L6-L41
[ "def", "path_from_keywords", "(", "keywords", ",", "into", "=", "'path'", ")", ":", "subdirs", "=", "[", "]", "def", "prepare_string", "(", "s", ")", ":", "s", "=", "str", "(", "s", ")", "s", "=", "re", ".", "sub", "(", "'[][{},*\"'", "+", "f\"'{os.sep}]\"", ",", "'_'", ",", "s", ")", "#replace characters that make bash life difficult by underscore ", "if", "into", "==", "'file'", ":", "s", "=", "s", ".", "replace", "(", "'_'", ",", "' '", ")", "#Remove underscore because they will be used as separator", "if", "' '", "in", "s", ":", "s", "=", "s", ".", "title", "(", ")", "s", "=", "s", ".", "replace", "(", "' '", ",", "''", ")", "return", "s", "if", "isinstance", "(", "keywords", ",", "set", ")", ":", "keywords_list", "=", "sorted", "(", "keywords", ")", "for", "property", "in", "keywords_list", ":", "subdirs", ".", "append", "(", "prepare_string", "(", "property", ")", ")", "else", ":", "keywords_list", "=", "sorted", "(", "keywords", ".", "items", "(", ")", ")", "for", "property", ",", "value", "in", "keywords_list", ":", "# @reservedassignment", "if", "Bool", ".", "valid", "(", "value", ")", ":", "subdirs", ".", "append", "(", "(", "''", "if", "value", "else", "(", "'not_'", "if", "into", "==", "'path'", "else", "'not'", ")", ")", "+", "prepare_string", "(", "property", ")", ")", "#elif String.valid(value):", "# subdirs.append(prepare_string(value))", "elif", "(", "Float", "|", "Integer", ")", ".", "valid", "(", "value", ")", ":", "subdirs", ".", "append", "(", "'{}{}'", ".", "format", "(", "prepare_string", "(", "property", ")", ",", "prepare_string", "(", "value", ")", ")", ")", "else", ":", "subdirs", ".", "append", "(", "'{}{}{}'", ".", "format", "(", "prepare_string", "(", "property", ")", ",", "'_'", "if", "into", "==", "'path'", "else", "''", ",", "prepare_string", "(", "value", ")", ")", ")", "if", "into", "==", "'path'", ":", "out", "=", "os", ".", "path", ".", "join", "(", "*", "subdirs", ")", "else", ":", "out", "=", "'_'", ".", "join", "(", "subdirs", ")", "return", "out" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
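A minimal usage sketch for the path_from_keywords record above; the exact strings depend on the Bool/Float/Integer validators from swutil, which are not shown here, so the outputs are indicative only:

    from swutil.files import path_from_keywords
    path_from_keywords({'deep': True, 'model': 'cnn'}, into='path')  # e.g. 'deep/model_cnn' on POSIX
    path_from_keywords({'deep': True, 'model': 'cnn'}, into='file')  # e.g. 'deep_modelcnn'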
valid
find_files
https://stackoverflow.com/questions/1724693/find-a-file-in-python WARNING: pattern is by default matched to entire path not to file names
swutil/files.py
def find_files(pattern, path=None,match_name=False): ''' https://stackoverflow.com/questions/1724693/find-a-file-in-python WARNING: pattern is by default matched to entire path not to file names ''' if not path: path = os.getcwd() result = [] for root, __, files in os.walk(path): for name in files: if fnmatch.fnmatch(name if match_name else os.path.join(root,name),pattern): result.append(os.path.join(root, name)) return result
def find_files(pattern, path=None,match_name=False): ''' https://stackoverflow.com/questions/1724693/find-a-file-in-python WARNING: pattern is by default matched to entire path not to file names ''' if not path: path = os.getcwd() result = [] for root, __, files in os.walk(path): for name in files: if fnmatch.fnmatch(name if match_name else os.path.join(root,name),pattern): result.append(os.path.join(root, name)) return result
[ "https", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "1724693", "/", "find", "-", "a", "-", "file", "-", "in", "-", "python" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/files.py#L55-L68
[ "def", "find_files", "(", "pattern", ",", "path", "=", "None", ",", "match_name", "=", "False", ")", ":", "if", "not", "path", ":", "path", "=", "os", ".", "getcwd", "(", ")", "result", "=", "[", "]", "for", "root", ",", "__", ",", "files", "in", "os", ".", "walk", "(", "path", ")", ":", "for", "name", "in", "files", ":", "if", "fnmatch", ".", "fnmatch", "(", "name", "if", "match_name", "else", "os", ".", "path", ".", "join", "(", "root", ",", "name", ")", ",", "pattern", ")", ":", "result", ".", "append", "(", "os", ".", "path", ".", "join", "(", "root", ",", "name", ")", ")", "return", "result" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
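A minimal usage sketch for the find_files record above:

    from swutil.files import find_files
    find_files('*.py', path='.', match_name=True)  # all Python files below the current directory
    find_files('*tests*')                          # by default the pattern is matched against the full path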
valid
find_directories
WARNING: pattern is matched to entire path, not directory names, unless match_name = True
swutil/files.py
def find_directories(pattern, path=None,match_name=False): ''' WARNING: pattern is matched to entire path, not directory names, unless match_name = True ''' if not path: path = os.getcwd() result = [] for root, __, __ in os.walk(path): match_against = os.path.basename(root) if match_name else root try: does_match = pattern.match(match_against) except AttributeError: does_match = fnmatch.fnmatch(match_against,pattern) if does_match: result.append(root) return result
def find_directories(pattern, path=None,match_name=False): ''' WARNING: pattern is matched to entire path, not directory names, unless match_name = True ''' if not path: path = os.getcwd() result = [] for root, __, __ in os.walk(path): match_against = os.path.basename(root) if match_name else root try: does_match = pattern.match(match_against) except AttributeError: does_match = fnmatch.fnmatch(match_against,pattern) if does_match: result.append(root) return result
[ "WARNING", ":", "pattern", "is", "matched", "to", "entire", "path", "not", "directory", "names", "unless", "match_name", "=", "True" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/files.py#L70-L86
[ "def", "find_directories", "(", "pattern", ",", "path", "=", "None", ",", "match_name", "=", "False", ")", ":", "if", "not", "path", ":", "path", "=", "os", ".", "getcwd", "(", ")", "result", "=", "[", "]", "for", "root", ",", "__", ",", "__", "in", "os", ".", "walk", "(", "path", ")", ":", "match_against", "=", "os", ".", "path", ".", "basename", "(", "root", ")", "if", "match_name", "else", "root", "try", ":", "does_match", "=", "pattern", ".", "match", "(", "match_against", ")", "except", "AttributeError", ":", "does_match", "=", "fnmatch", ".", "fnmatch", "(", "match_against", ",", "pattern", ")", "if", "does_match", ":", "result", ".", "append", "(", "root", ")", "return", "result" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
zip_dir
https://stackoverflow.com/questions/1855095/how-to-create-a-zip-archive-of-a-directory
swutil/files.py
def zip_dir(zip_name, source_dir,rename_source_dir=False): ''' https://stackoverflow.com/questions/1855095/how-to-create-a-zip-archive-of-a-directory ''' src_path = Path(source_dir).expanduser().resolve() with ZipFile(zip_name, 'w', ZIP_DEFLATED) as zf: for file in src_path.rglob('*'): path_in_zip = str(file.relative_to(src_path.parent)) if rename_source_dir != False: _,tail = path_in_zip.split(os.sep,1) path_in_zip=os.sep.join([rename_source_dir,tail]) zf.write(str(file.resolve()), path_in_zip)
def zip_dir(zip_name, source_dir,rename_source_dir=False): ''' https://stackoverflow.com/questions/1855095/how-to-create-a-zip-archive-of-a-directory ''' src_path = Path(source_dir).expanduser().resolve() with ZipFile(zip_name, 'w', ZIP_DEFLATED) as zf: for file in src_path.rglob('*'): path_in_zip = str(file.relative_to(src_path.parent)) if rename_source_dir != False: _,tail = path_in_zip.split(os.sep,1) path_in_zip=os.sep.join([rename_source_dir,tail]) zf.write(str(file.resolve()), path_in_zip)
[ "https", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "1855095", "/", "how", "-", "to", "-", "create", "-", "a", "-", "zip", "-", "archive", "-", "of", "-", "a", "-", "directory" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/files.py#L91-L102
[ "def", "zip_dir", "(", "zip_name", ",", "source_dir", ",", "rename_source_dir", "=", "False", ")", ":", "src_path", "=", "Path", "(", "source_dir", ")", ".", "expanduser", "(", ")", ".", "resolve", "(", ")", "with", "ZipFile", "(", "zip_name", ",", "'w'", ",", "ZIP_DEFLATED", ")", "as", "zf", ":", "for", "file", "in", "src_path", ".", "rglob", "(", "'*'", ")", ":", "path_in_zip", "=", "str", "(", "file", ".", "relative_to", "(", "src_path", ".", "parent", ")", ")", "if", "rename_source_dir", "!=", "False", ":", "_", ",", "tail", "=", "path_in_zip", ".", "split", "(", "os", ".", "sep", ",", "1", ")", "path_in_zip", "=", "os", ".", "sep", ".", "join", "(", "[", "rename_source_dir", ",", "tail", "]", ")", "zf", ".", "write", "(", "str", "(", "file", ".", "resolve", "(", ")", ")", ",", "path_in_zip", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
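A minimal usage sketch for the zip_dir record above; the archive and directory names are placeholders:

    from swutil.files import zip_dir
    zip_dir('project.zip', '~/my_project')                             # entries stored under 'my_project/...'
    zip_dir('project_v1.zip', '~/my_project', rename_source_dir='v1')  # same content stored under 'v1/...'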
valid
integral
Turns an array A of length N (the function values in N points) and an array dF of length N-1 (the masses of the N-1 intervals) into an array of length N (the integral \int A dF at N points, with first entry 0) :param A: Integrand (optional, default ones, length N) :param dF: Integrator (optional, default ones, length N-1) :param F: Alternative to dF (optional, length N) :param trapez: Use trapezoidal rule (else left point)
swutil/np_tools.py
def integral(A=None,dF=None,F=None,axis = 0,trapez = False,cumulative = False): ''' Turns an array A of length N (the function values in N points) and an array dF of length N-1 (the masses of the N-1 intervals) into an array of length N (the integral \int A dF at N points, with first entry 0) :param A: Integrand (optional, default ones, length N) :param dF: Integrator (optional, default ones, length N-1) :param F: Alternative to dF (optional, length N) :param trapez: Use trapezoidal rule (else left point) ''' ndim = max(v.ndim for v in (A,dF,F) if v is not None) def broadcast(x): new_shape = [1]*ndim new_shape[axis] = -1 return np.reshape(x,new_shape) if F is not None: assert(dF is None) if F.ndim<ndim: F = broadcast(F) N = F.shape[axis] dF = F.take(indices = range(1,N),axis = axis)-F.take(indices = range(N-1),axis = axis) elif dF is not None: if dF.ndim<ndim: dF = broadcast(dF) N = dF.shape[axis]+1 else: if A.ndim<ndim: A = broadcast(A) N = A.shape[axis] if A is not None: if trapez: midA = (A.take(indices = range(1,N),axis = axis)+A.take(indices = range(N-1),axis = axis))/2 else: midA = A.take(indices=range(N-1),axis=axis) if dF is not None: dY = midA*dF else: dY = midA else: dY = dF pad_shape = list(dY.shape) pad_shape[axis] = 1 pad = np.zeros(pad_shape) if cumulative: return np.concatenate((pad,np.cumsum(dY,axis = axis)),axis = axis) else: return np.sum(dY,axis = axis)
def integral(A=None,dF=None,F=None,axis = 0,trapez = False,cumulative = False): ''' Turns an array A of length N (the function values in N points) and an array dF of length N-1 (the masses of the N-1 intervals) into an array of length N (the integral \int A dF at N points, with first entry 0) :param A: Integrand (optional, default ones, length N) :param dF: Integrator (optional, default ones, length N-1) :param F: Alternative to dF (optional, length N) :param trapez: Use trapezoidal rule (else left point) ''' ndim = max(v.ndim for v in (A,dF,F) if v is not None) def broadcast(x): new_shape = [1]*ndim new_shape[axis] = -1 return np.reshape(x,new_shape) if F is not None: assert(dF is None) if F.ndim<ndim: F = broadcast(F) N = F.shape[axis] dF = F.take(indices = range(1,N),axis = axis)-F.take(indices = range(N-1),axis = axis) elif dF is not None: if dF.ndim<ndim: dF = broadcast(dF) N = dF.shape[axis]+1 else: if A.ndim<ndim: A = broadcast(A) N = A.shape[axis] if A is not None: if trapez: midA = (A.take(indices = range(1,N),axis = axis)+A.take(indices = range(N-1),axis = axis))/2 else: midA = A.take(indices=range(N-1),axis=axis) if dF is not None: dY = midA*dF else: dY = midA else: dY = dF pad_shape = list(dY.shape) pad_shape[axis] = 1 pad = np.zeros(pad_shape) if cumulative: return np.concatenate((pad,np.cumsum(dY,axis = axis)),axis = axis) else: return np.sum(dY,axis = axis)
[ "Turns", "an", "array", "A", "of", "length", "N", "(", "the", "function", "values", "in", "N", "points", ")", "and", "an", "array", "dF", "of", "length", "N", "-", "1", "(", "the", "masses", "of", "the", "N", "-", "1", "intervals", ")", "into", "an", "array", "of", "length", "N", "(", "the", "integral", "\\", "int", "A", "dF", "at", "N", "points", "with", "first", "entry", "0", ")", ":", "param", "A", ":", "Integrand", "(", "optional", "default", "ones", "length", "N", ")", ":", "param", "dF", ":", "Integrator", "(", "optional", "default", "ones", "length", "N", "-", "1", ")", ":", "param", "F", ":", "Alternative", "to", "dF", "(", "optional", "length", "N", ")", ":", "param", "trapez", ":", "Use", "trapezoidal", "rule", "(", "else", "left", "point", ")" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/np_tools.py#L109-L156
[ "def", "integral", "(", "A", "=", "None", ",", "dF", "=", "None", ",", "F", "=", "None", ",", "axis", "=", "0", ",", "trapez", "=", "False", ",", "cumulative", "=", "False", ")", ":", "ndim", "=", "max", "(", "v", ".", "ndim", "for", "v", "in", "(", "A", ",", "dF", ",", "F", ")", "if", "v", "is", "not", "None", ")", "def", "broadcast", "(", "x", ")", ":", "new_shape", "=", "[", "1", "]", "*", "ndim", "new_shape", "[", "axis", "]", "=", "-", "1", "return", "np", ".", "reshape", "(", "x", ",", "new_shape", ")", "if", "F", "is", "not", "None", ":", "assert", "(", "dF", "is", "None", ")", "if", "F", ".", "ndim", "<", "ndim", ":", "F", "=", "broadcast", "(", "F", ")", "N", "=", "F", ".", "shape", "[", "axis", "]", "dF", "=", "F", ".", "take", "(", "indices", "=", "range", "(", "1", ",", "N", ")", ",", "axis", "=", "axis", ")", "-", "F", ".", "take", "(", "indices", "=", "range", "(", "N", "-", "1", ")", ",", "axis", "=", "axis", ")", "elif", "dF", "is", "not", "None", ":", "if", "dF", ".", "ndim", "<", "ndim", ":", "dF", "=", "broadcast", "(", "dF", ")", "N", "=", "dF", ".", "shape", "[", "axis", "]", "+", "1", "else", ":", "if", "A", ".", "ndim", "<", "ndim", ":", "A", "=", "broadcast", "(", "A", ")", "N", "=", "A", ".", "shape", "[", "axis", "]", "if", "A", "is", "not", "None", ":", "if", "trapez", ":", "midA", "=", "(", "A", ".", "take", "(", "indices", "=", "range", "(", "1", ",", "N", ")", ",", "axis", "=", "axis", ")", "+", "A", ".", "take", "(", "indices", "=", "range", "(", "N", "-", "1", ")", ",", "axis", "=", "axis", ")", ")", "/", "2", "else", ":", "midA", "=", "A", ".", "take", "(", "indices", "=", "range", "(", "N", "-", "1", ")", ",", "axis", "=", "axis", ")", "if", "dF", "is", "not", "None", ":", "dY", "=", "midA", "*", "dF", "else", ":", "dY", "=", "midA", "else", ":", "dY", "=", "dF", "pad_shape", "=", "list", "(", "dY", ".", "shape", ")", "pad_shape", "[", "axis", "]", "=", "1", "pad", "=", "np", ".", "zeros", "(", "pad_shape", ")", "if", "cumulative", ":", "return", "np", ".", "concatenate", "(", "(", "pad", ",", "np", ".", "cumsum", "(", "dY", ",", "axis", "=", "axis", ")", ")", ",", "axis", "=", "axis", ")", "else", ":", "return", "np", ".", "sum", "(", "dY", ",", "axis", "=", "axis", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
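A minimal usage sketch for the integral record above, integrating a constant integrand against a uniform integrator:

    import numpy as np
    from swutil.np_tools import integral
    F = np.linspace(0, 1, 11)            # cumulative integrator: 10 intervals of mass 0.1 each
    A = np.ones(11)                      # integrand values at the same 11 points
    integral(A=A, F=F)                   # -> 1.0 up to floating point (left-point rule)
    integral(A=A, F=F, cumulative=True)  # -> length-11 array [0.0, 0.1, ..., 1.0]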
valid
toeplitz_multiplication
Multiply Toeplitz matrix with first row a and first column b with vector v Normal matrix multiplication would require storage and runtime O(n^2); embedding into a circulant matrix and using FFT yields O(log(n)n)
swutil/np_tools.py
def toeplitz_multiplication(a,b,v): ''' Multiply Toeplitz matrix with first row a and first column b with vector v Normal matrix multiplication would require storage and runtime O(n^2); embedding into a circulant matrix and using FFT yields O(log(n)n) ''' a = np.reshape(a,(-1)) b = np.reshape(b,(-1)) n = len(a) c = np.concatenate((a[[0]],b[1:],np.zeros(1),a[-1:0:-1])) p = ifft(fft(c)*fft(v.T,n=2*n)).T#fft autopads input with zeros if n is supplied if np.all(np.isreal(a)) and np.all(np.isreal(b)) and np.all(np.isreal(v)): return np.real(p[:n]) else: return p[:n]
def toeplitz_multiplication(a,b,v): ''' Multiply Toeplitz matrix with first row a and first column b with vector v Normal matrix multiplication would require storage and runtime O(n^2); embedding into a circulant matrix and using FFT yields O(log(n)n) ''' a = np.reshape(a,(-1)) b = np.reshape(b,(-1)) n = len(a) c = np.concatenate((a[[0]],b[1:],np.zeros(1),a[-1:0:-1])) p = ifft(fft(c)*fft(v.T,n=2*n)).T#fft autopads input with zeros if n is supplied if np.all(np.isreal(a)) and np.all(np.isreal(b)) and np.all(np.isreal(v)): return np.real(p[:n]) else: return p[:n]
[ "Multiply", "Toeplitz", "matrix", "with", "first", "row", "a", "and", "first", "column", "b", "with", "vector", "v", "Normal", "matrix", "multiplication", "would", "require", "storage", "and", "runtime", "O", "(", "n^2", ")", ";", "embedding", "into", "a", "circulant", "matrix", "and", "using", "FFT", "yields", "O", "(", "log", "(", "n", ")", "n", ")" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/np_tools.py#L158-L173
[ "def", "toeplitz_multiplication", "(", "a", ",", "b", ",", "v", ")", ":", "a", "=", "np", ".", "reshape", "(", "a", ",", "(", "-", "1", ")", ")", "b", "=", "np", ".", "reshape", "(", "b", ",", "(", "-", "1", ")", ")", "n", "=", "len", "(", "a", ")", "c", "=", "np", ".", "concatenate", "(", "(", "a", "[", "[", "0", "]", "]", ",", "b", "[", "1", ":", "]", ",", "np", ".", "zeros", "(", "1", ")", ",", "a", "[", "-", "1", ":", "0", ":", "-", "1", "]", ")", ")", "p", "=", "ifft", "(", "fft", "(", "c", ")", "*", "fft", "(", "v", ".", "T", ",", "n", "=", "2", "*", "n", ")", ")", ".", "T", "#fft autopads input with zeros if n is supplied", "if", "np", ".", "all", "(", "np", ".", "isreal", "(", "a", ")", ")", "and", "np", ".", "all", "(", "np", ".", "isreal", "(", "b", ")", ")", "and", "np", ".", "all", "(", "np", ".", "isreal", "(", "v", ")", ")", ":", "return", "np", ".", "real", "(", "p", "[", ":", "n", "]", ")", "else", ":", "return", "p", "[", ":", "n", "]" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
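A minimal usage sketch for the toeplitz_multiplication record above; a, b and v are small enough to check the result against the explicit Toeplitz matrix:

    import numpy as np
    from swutil.np_tools import toeplitz_multiplication
    a = np.array([1., 2., 3.])  # first row:    T = [[1, 2, 3],
    b = np.array([1., 4., 5.])  # first column:      [4, 1, 2],
    v = np.array([1., 1., 1.])  #                    [5, 4, 1]]
    toeplitz_multiplication(a, b, v)  # -> approximately [6., 7., 10.], i.e. T @ v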
valid
grid_evaluation
Evaluate function on given grid and return values in grid format Assume X and Y are 2-dimensional arrays containing x and y coordinates, respectively, of a two-dimensional grid, and f is a function that takes 1-d arrays with two entries. This function evaluates f on the grid points described by X and Y and returns another 2-dimensional array of the shape of X and Y that contains the values of f. :param X: 2-dimensional array of x-coordinates :param Y: 2-dimensional array of y-coordinates :param f: function to be evaluated on grid :param vectorized: `f` can handle arrays of inputs :return: 2-dimensional array of values of f
swutil/np_tools.py
def grid_evaluation(X, Y, f,vectorized=True): ''' Evaluate function on given grid and return values in grid format Assume X and Y are 2-dimensional arrays containing x and y coordinates, respectively, of a two-dimensional grid, and f is a function that takes 1-d arrays with two entries. This function evaluates f on the grid points described by X and Y and returns another 2-dimensional array of the shape of X and Y that contains the values of f. :param X: 2-dimensional array of x-coordinates :param Y: 2-dimensional array of y-coordinates :param f: function to be evaluated on grid :param vectorized: `f` can handle arrays of inputs :return: 2-dimensional array of values of f ''' XX = np.reshape(np.concatenate([X[..., None], Y[..., None]], axis=2), (X.size, 2), order='C') if vectorized: ZZ = f(XX) else: ZZ = np.array([f(x) for x in XX]) return np.reshape(ZZ, X.shape, order='C')
def grid_evaluation(X, Y, f,vectorized=True): ''' Evaluate function on given grid and return values in grid format Assume X and Y are 2-dimensional arrays containing x and y coordinates, respectively, of a two-dimensional grid, and f is a function that takes 1-d arrays with two entries. This function evaluates f on the grid points described by X and Y and returns another 2-dimensional array of the shape of X and Y that contains the values of f. :param X: 2-dimensional array of x-coordinates :param Y: 2-dimensional array of y-coordinates :param f: function to be evaluated on grid :param vectorized: `f` can handle arrays of inputs :return: 2-dimensional array of values of f ''' XX = np.reshape(np.concatenate([X[..., None], Y[..., None]], axis=2), (X.size, 2), order='C') if vectorized: ZZ = f(XX) else: ZZ = np.array([f(x) for x in XX]) return np.reshape(ZZ, X.shape, order='C')
[ "Evaluate", "function", "on", "given", "grid", "and", "return", "values", "in", "grid", "format", "Assume", "X", "and", "Y", "are", "2", "-", "dimensional", "arrays", "containing", "x", "and", "y", "coordinates", "respectively", "of", "a", "two", "-", "dimensional", "grid", "and", "f", "is", "a", "function", "that", "takes", "1", "-", "d", "arrays", "with", "two", "entries", ".", "This", "function", "evaluates", "f", "on", "the", "grid", "points", "described", "by", "X", "and", "Y", "and", "returns", "another", "2", "-", "dimensional", "array", "of", "the", "shape", "of", "X", "and", "Y", "that", "contains", "the", "values", "of", "f", "." ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/np_tools.py#L175-L196
[ "def", "grid_evaluation", "(", "X", ",", "Y", ",", "f", ",", "vectorized", "=", "True", ")", ":", "XX", "=", "np", ".", "reshape", "(", "np", ".", "concatenate", "(", "[", "X", "[", "...", ",", "None", "]", ",", "Y", "[", "...", ",", "None", "]", "]", ",", "axis", "=", "2", ")", ",", "(", "X", ".", "size", ",", "2", ")", ",", "order", "=", "'C'", ")", "if", "vectorized", ":", "ZZ", "=", "f", "(", "XX", ")", "else", ":", "ZZ", "=", "np", ".", "array", "(", "[", "f", "(", "x", ")", "for", "x", "in", "XX", "]", ")", "return", "np", ".", "reshape", "(", "ZZ", ",", "X", ".", "shape", ",", "order", "=", "'C'", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
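A minimal usage sketch for the grid_evaluation record above, with a vectorized function that accepts an (N, 2) array of points:

    import numpy as np
    from swutil.np_tools import grid_evaluation
    X, Y = np.meshgrid(np.linspace(0, 1, 50), np.linspace(0, 1, 50))
    f = lambda points: points[:, 0]**2 + points[:, 1]**2
    Z = grid_evaluation(X, Y, f)  # Z has the same shape as X and Y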
valid
orthonormal_complement_basis
Return orthonormal basis of complement of vector. :param v: 1-dimensional numpy array :return: Matrix whose .dot() computes coefficients w.r.t. an orthonormal basis of the complement of v (i.e. whose row vectors form an orthonormal basis of the complement of v)
swutil/np_tools.py
def orthonormal_complement_basis(v:NDim(1)): ''' Return orthonormal basis of complement of vector. :param v: 1-dimensional numpy array :return: Matrix whose .dot() computes coefficients w.r.t. an orthonormal basis of the complement of v (i.e. whose row vectors form an orthonormal basis of the complement of v) ''' _, _, V = np.linalg.svd(np.array([v])) return V[1:]
def orthonormal_complement_basis(v:NDim(1)): ''' Return orthonormal basis of complement of vector. :param v: 1-dimensional numpy array :return: Matrix whose .dot() computes coefficients w.r.t. an orthonormal basis of the complement of v (i.e. whose row vectors form an orthonormal basis of the complement of v) ''' _, _, V = np.linalg.svd(np.array([v])) return V[1:]
[ "Return", "orthonormal", "basis", "of", "complement", "of", "vector", ".", ":", "param", "v", ":", "1", "-", "dimensional", "numpy", "array", ":", "return", ":", "Matrix", "whose", ".", "dot", "()", "computes", "coefficients", "w", ".", "r", ".", "t", ".", "an", "orthonormal", "basis", "of", "the", "complement", "of", "v", "(", "i", ".", "e", ".", "whose", "row", "vectors", "form", "an", "orthonormal", "basis", "of", "the", "complement", "of", "v", ")" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/np_tools.py#L201-L210
[ "def", "orthonormal_complement_basis", "(", "v", ":", "NDim", "(", "1", ")", ")", ":", "_", ",", "_", ",", "V", "=", "np", ".", "linalg", ".", "svd", "(", "np", ".", "array", "(", "[", "v", "]", ")", ")", "return", "V", "[", "1", ":", "]" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
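A minimal usage sketch for the orthonormal_complement_basis record above:

    import numpy as np
    from swutil.np_tools import orthonormal_complement_basis
    v = np.array([1.0, 1.0, 0.0])
    B = orthonormal_complement_basis(v)  # shape (2, 3); rows are orthonormal and orthogonal to v
    np.allclose(B @ v, 0)                # -> True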
valid
weighted_median
Returns element such that sum of weights below and above are (roughly) equal :param values: Values whose median is sought :type values: List of reals :param weights: Weights of each value :type weights: List of positive reals :return: value of weighted median :rtype: Real
swutil/np_tools.py
def weighted_median(values, weights): ''' Returns element such that sum of weights below and above are (roughly) equal :param values: Values whose median is sought :type values: List of reals :param weights: Weights of each value :type weights: List of positive reals :return: value of weighted median :rtype: Real ''' if len(values) == 1: return values[0] if len(values) == 0: raise ValueError('Cannot take median of empty list') values = [float(value) for value in values] indices_sorted = np.argsort(values) values = [values[ind] for ind in indices_sorted] weights = [weights[ind] for ind in indices_sorted] total_weight = sum(weights) below_weight = 0 i = -1 while below_weight < total_weight / 2: i += 1 below_weight += weights[i] return values[i]
def weighted_median(values, weights): ''' Returns element such that sum of weights below and above are (roughly) equal :param values: Values whose median is sought :type values: List of reals :param weights: Weights of each value :type weights: List of positive reals :return: value of weighted median :rtype: Real ''' if len(values) == 1: return values[0] if len(values) == 0: raise ValueError('Cannot take median of empty list') values = [float(value) for value in values] indices_sorted = np.argsort(values) values = [values[ind] for ind in indices_sorted] weights = [weights[ind] for ind in indices_sorted] total_weight = sum(weights) below_weight = 0 i = -1 while below_weight < total_weight / 2: i += 1 below_weight += weights[i] return values[i]
[ "Returns", "element", "such", "that", "sum", "of", "weights", "below", "and", "above", "are", "(", "roughly", ")", "equal", ":", "param", "values", ":", "Values", "whose", "median", "is", "sought", ":", "type", "values", ":", "List", "of", "reals", ":", "param", "weights", ":", "Weights", "of", "each", "value", ":", "type", "weights", ":", "List", "of", "positive", "reals", ":", "return", ":", "value", "of", "weighted", "median", ":", "rtype", ":", "Real" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/np_tools.py#L212-L237
[ "def", "weighted_median", "(", "values", ",", "weights", ")", ":", "if", "len", "(", "values", ")", "==", "1", ":", "return", "values", "[", "0", "]", "if", "len", "(", "values", ")", "==", "0", ":", "raise", "ValueError", "(", "'Cannot take median of empty list'", ")", "values", "=", "[", "float", "(", "value", ")", "for", "value", "in", "values", "]", "indices_sorted", "=", "np", ".", "argsort", "(", "values", ")", "values", "=", "[", "values", "[", "ind", "]", "for", "ind", "in", "indices_sorted", "]", "weights", "=", "[", "weights", "[", "ind", "]", "for", "ind", "in", "indices_sorted", "]", "total_weight", "=", "sum", "(", "weights", ")", "below_weight", "=", "0", "i", "=", "-", "1", "while", "below_weight", "<", "total_weight", "/", "2", ":", "i", "+=", "1", "below_weight", "+=", "weights", "[", "i", "]", "return", "values", "[", "i", "]" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
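A minimal usage sketch for the weighted_median record above:

    from swutil.np_tools import weighted_median
    weighted_median([3, 1, 2], [1, 1, 1])  # -> 2.0 (reduces to the ordinary median for equal weights)
    weighted_median([1, 2, 3], [1, 1, 3])  # -> 3.0 (the weight on 3 outweighs the rest)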
valid
log_calls
Decorator that logs function calls in their self.log
swutil/decorators.py
def log_calls(function): ''' Decorator that logs function calls in their self.log ''' def wrapper(self,*args,**kwargs): self.log.log(group=function.__name__,message='Enter') function(self,*args,**kwargs) self.log.log(group=function.__name__,message='Exit') return wrapper
def log_calls(function): ''' Decorator that logs function calls in their self.log ''' def wrapper(self,*args,**kwargs): self.log.log(group=function.__name__,message='Enter') function(self,*args,**kwargs) self.log.log(group=function.__name__,message='Exit') return wrapper
[ "Decorator", "that", "logs", "function", "calls", "in", "their", "self", ".", "log" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/decorators.py#L8-L16
[ "def", "log_calls", "(", "function", ")", ":", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "log", ".", "log", "(", "group", "=", "function", ".", "__name__", ",", "message", "=", "'Enter'", ")", "function", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "log", ".", "log", "(", "group", "=", "function", ".", "__name__", ",", "message", "=", "'Exit'", ")", "return", "wrapper" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
add_runtime
Decorator that adds a runtime profile object to the output
swutil/decorators.py
def add_runtime(function): ''' Decorator that adds a runtime profile object to the output ''' def wrapper(*args,**kwargs): pr=cProfile.Profile() pr.enable() output = function(*args,**kwargs) pr.disable() return pr,output return wrapper
def add_runtime(function): ''' Decorator that adds a runtime profile object to the output ''' def wrapper(*args,**kwargs): pr=cProfile.Profile() pr.enable() output = function(*args,**kwargs) pr.disable() return pr,output return wrapper
[ "Decorator", "that", "adds", "a", "runtime", "profile", "object", "to", "the", "output" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/decorators.py#L18-L28
[ "def", "add_runtime", "(", "function", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "pr", "=", "cProfile", ".", "Profile", "(", ")", "pr", ".", "enable", "(", ")", "output", "=", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "pr", ".", "disable", "(", ")", "return", "pr", ",", "output", "return", "wrapper" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
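A minimal usage sketch for the add_runtime record above; the decorated call returns the cProfile.Profile object alongside the original result:

    from swutil.decorators import add_runtime

    @add_runtime
    def work(n):
        return sum(i * i for i in range(n))

    profile, result = work(10**5)
    profile.print_stats()  # standard cProfile output for the call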
valid
print_memory
Decorator that prints memory information at each call of the function
swutil/decorators.py
def print_memory(function): ''' Decorator that prints memory information at each call of the function ''' import memory_profiler def wrapper(*args,**kwargs): m = StringIO() temp_func = memory_profiler.profile(func = function,stream=m,precision=4) output = temp_func(*args,**kwargs) print(m.getvalue()) m.close() return output return wrapper
def print_memory(function): ''' Decorator that prints memory information at each call of the function ''' import memory_profiler def wrapper(*args,**kwargs): m = StringIO() temp_func = memory_profiler.profile(func = function,stream=m,precision=4) output = temp_func(*args,**kwargs) print(m.getvalue()) m.close() return output return wrapper
[ "Decorator", "that", "prints", "memory", "information", "at", "each", "call", "of", "the", "function" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/decorators.py#L30-L42
[ "def", "print_memory", "(", "function", ")", ":", "import", "memory_profiler", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "m", "=", "StringIO", "(", ")", "temp_func", "=", "memory_profiler", ".", "profile", "(", "func", "=", "function", ",", "stream", "=", "m", ",", "precision", "=", "4", ")", "output", "=", "temp_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "print", "(", "m", ".", "getvalue", "(", ")", ")", "m", ".", "close", "(", ")", "return", "output", "return", "wrapper" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
print_profile
Decorator that prints memory and runtime information at each call of the function
swutil/decorators.py
def print_profile(function): ''' Decorator that prints memory and runtime information at each call of the function ''' import memory_profiler def wrapper(*args,**kwargs): m=StringIO() pr=cProfile.Profile() pr.enable() temp_func = memory_profiler.profile(func=function,stream=m,precision=4) output = temp_func(*args,**kwargs) print(m.getvalue()) pr.disable() ps = pstats.Stats(pr) ps.sort_stats('cumulative').print_stats('(?!.*memory_profiler.*)(^.*$)',20) m.close() return output return wrapper
def print_profile(function): ''' Decorator that prints memory and runtime information at each call of the function ''' import memory_profiler def wrapper(*args,**kwargs): m=StringIO() pr=cProfile.Profile() pr.enable() temp_func = memory_profiler.profile(func=function,stream=m,precision=4) output = temp_func(*args,**kwargs) print(m.getvalue()) pr.disable() ps = pstats.Stats(pr) ps.sort_stats('cumulative').print_stats('(?!.*memory_profiler.*)(^.*$)',20) m.close() return output return wrapper
[ "Decorator", "that", "prints", "memory", "and", "runtime", "information", "at", "each", "call", "of", "the", "function" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/decorators.py#L44-L61
[ "def", "print_profile", "(", "function", ")", ":", "import", "memory_profiler", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "m", "=", "StringIO", "(", ")", "pr", "=", "cProfile", ".", "Profile", "(", ")", "pr", ".", "enable", "(", ")", "temp_func", "=", "memory_profiler", ".", "profile", "(", "func", "=", "function", ",", "stream", "=", "m", ",", "precision", "=", "4", ")", "output", "=", "temp_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "print", "(", "m", ".", "getvalue", "(", ")", ")", "pr", ".", "disable", "(", ")", "ps", "=", "pstats", ".", "Stats", "(", "pr", ")", "ps", ".", "sort_stats", "(", "'cumulative'", ")", ".", "print_stats", "(", "'(?!.*memory_profiler.*)(^.*$)'", ",", "20", ")", "m", ".", "close", "(", ")", "return", "output", "return", "wrapper" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
declaration
Declare abstract function. Requires function to be empty except for docstring describing semantics. To apply function, first argument must come with implementation of semantics.
swutil/decorators.py
def declaration(function): ''' Declare abstract function. Requires function to be empty except for docstring describing semantics. To apply function, first argument must come with implementation of semantics. ''' function,name=_strip_function(function) if not function.__code__.co_code in [empty_function.__code__.co_code, doc_string_only_function.__code__.co_code]: raise ValueError('Declaration requires empty function definition') def not_implemented_function(*args,**kwargs): raise ValueError('Argument \'{}\' did not specify how \'{}\' should act on it'.format(args[0],name)) not_implemented_function.__qualname__=not_implemented_function.__name__ return default(not_implemented_function,name=name)
def declaration(function): ''' Declare abstract function. Requires function to be empty except for docstring describing semantics. To apply function, first argument must come with implementation of semantics. ''' function,name=_strip_function(function) if not function.__code__.co_code in [empty_function.__code__.co_code, doc_string_only_function.__code__.co_code]: raise ValueError('Declaration requires empty function definition') def not_implemented_function(*args,**kwargs): raise ValueError('Argument \'{}\' did not specify how \'{}\' should act on it'.format(args[0],name)) not_implemented_function.__qualname__=not_implemented_function.__name__ return default(not_implemented_function,name=name)
[ "Declare", "abstract", "function", ".", "Requires", "function", "to", "be", "empty", "except", "for", "docstring", "describing", "semantics", ".", "To", "apply", "function", "first", "argument", "must", "come", "with", "implementation", "of", "semantics", "." ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/decorators.py#L107-L120
[ "def", "declaration", "(", "function", ")", ":", "function", ",", "name", "=", "_strip_function", "(", "function", ")", "if", "not", "function", ".", "__code__", ".", "co_code", "in", "[", "empty_function", ".", "__code__", ".", "co_code", ",", "doc_string_only_function", ".", "__code__", ".", "co_code", "]", ":", "raise", "ValueError", "(", "'Declaration requires empty function definition'", ")", "def", "not_implemented_function", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "raise", "ValueError", "(", "'Argument \\'{}\\' did not specify how \\'{}\\' should act on it'", ".", "format", "(", "args", "[", "0", "]", ",", "name", ")", ")", "not_implemented_function", ".", "__qualname__", "=", "not_implemented_function", ".", "__name__", "return", "default", "(", "not_implemented_function", ",", "name", "=", "name", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
print_runtime
Decorator that prints running time information at each call of the function
swutil/decorators.py
def print_runtime(function): ''' Decorator that prints running time information at each call of the function ''' def wrapper(*args,**kwargs): pr=cProfile.Profile() pr.enable() output = function(*args,**kwargs) pr.disable() ps = pstats.Stats(pr) ps.sort_stats('tot').print_stats(20) return output return wrapper
def print_runtime(function): ''' Decorator that prints running time information at each call of the function ''' def wrapper(*args,**kwargs): pr=cProfile.Profile() pr.enable() output = function(*args,**kwargs) pr.disable() ps = pstats.Stats(pr) ps.sort_stats('tot').print_stats(20) return output return wrapper
[ "Decorator", "that", "prints", "running", "time", "information", "at", "each", "call", "of", "the", "function" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/decorators.py#L162-L174
[ "def", "print_runtime", "(", "function", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "pr", "=", "cProfile", ".", "Profile", "(", ")", "pr", ".", "enable", "(", ")", "output", "=", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "pr", ".", "disable", "(", ")", "ps", "=", "pstats", ".", "Stats", "(", "pr", ")", "ps", ".", "sort_stats", "(", "'tot'", ")", ".", "print_stats", "(", "20", ")", "return", "output", "return", "wrapper" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
print_peak_memory
Print peak memory usage (in MB) of a function call :param func: Function to be called :param stream: Stream to write peak memory usage (defaults to stdout) https://stackoverflow.com/questions/9850995/tracking-maximum-memory-usage-by-a-python-function
swutil/decorators.py
def print_peak_memory(func,stream = None): """ Print peak memory usage (in MB) of a function call :param func: Function to be called :param stream: Stream to write peak memory usage (defaults to stdout) https://stackoverflow.com/questions/9850995/tracking-maximum-memory-usage-by-a-python-function """ import time import psutil import os memory_denominator=1024**2 memory_usage_refresh=0.05 def wrapper(*args,**kwargs): from multiprocessing.pool import ThreadPool pool = ThreadPool(processes=1) process = psutil.Process(os.getpid()) start_mem = process.memory_info().rss delta_mem = 0 max_memory = 0 async_result = pool.apply_async(func, args,kwargs) # do some other stuff in the main process while(not async_result.ready()): current_mem = process.memory_info().rss delta_mem = current_mem - start_mem if delta_mem > max_memory: max_memory = delta_mem # Check to see if the library call is complete time.sleep(memory_usage_refresh) return_val = async_result.get() # get the return value from your function. max_memory /= memory_denominator if stream is not None: stream.write(str(max_memory)) return return_val return wrapper
def print_peak_memory(func,stream = None): """ Print peak memory usage (in MB) of a function call :param func: Function to be called :param stream: Stream to write peak memory usage (defaults to stdout) https://stackoverflow.com/questions/9850995/tracking-maximum-memory-usage-by-a-python-function """ import time import psutil import os memory_denominator=1024**2 memory_usage_refresh=0.05 def wrapper(*args,**kwargs): from multiprocessing.pool import ThreadPool pool = ThreadPool(processes=1) process = psutil.Process(os.getpid()) start_mem = process.memory_info().rss delta_mem = 0 max_memory = 0 async_result = pool.apply_async(func, args,kwargs) # do some other stuff in the main process while(not async_result.ready()): current_mem = process.memory_info().rss delta_mem = current_mem - start_mem if delta_mem > max_memory: max_memory = delta_mem # Check to see if the library call is complete time.sleep(memory_usage_refresh) return_val = async_result.get() # get the return value from your function. max_memory /= memory_denominator if stream is not None: stream.write(str(max_memory)) return return_val return wrapper
[ "Print", "peak", "memory", "usage", "(", "in", "MB", ")", "of", "a", "function", "call", ":", "param", "func", ":", "Function", "to", "be", "called", ":", "param", "stream", ":", "Stream", "to", "write", "peak", "memory", "usage", "(", "defaults", "to", "stdout", ")", "https", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "9850995", "/", "tracking", "-", "maximum", "-", "memory", "-", "usage", "-", "by", "-", "a", "-", "python", "-", "function" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/decorators.py#L176-L212
[ "def", "print_peak_memory", "(", "func", ",", "stream", "=", "None", ")", ":", "import", "time", "import", "psutil", "import", "os", "memory_denominator", "=", "1024", "**", "2", "memory_usage_refresh", "=", "0.05", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", "multiprocessing", ".", "pool", "import", "ThreadPool", "pool", "=", "ThreadPool", "(", "processes", "=", "1", ")", "process", "=", "psutil", ".", "Process", "(", "os", ".", "getpid", "(", ")", ")", "start_mem", "=", "process", ".", "memory_info", "(", ")", ".", "rss", "delta_mem", "=", "0", "max_memory", "=", "0", "async_result", "=", "pool", ".", "apply_async", "(", "func", ",", "args", ",", "kwargs", ")", "# do some other stuff in the main process", "while", "(", "not", "async_result", ".", "ready", "(", ")", ")", ":", "current_mem", "=", "process", ".", "memory_info", "(", ")", ".", "rss", "delta_mem", "=", "current_mem", "-", "start_mem", "if", "delta_mem", ">", "max_memory", ":", "max_memory", "=", "delta_mem", "# Check to see if the library call is complete", "time", ".", "sleep", "(", "memory_usage_refresh", ")", "return_val", "=", "async_result", ".", "get", "(", ")", "# get the return value from your function.", "max_memory", "/=", "memory_denominator", "if", "stream", "is", "not", "None", ":", "stream", ".", "write", "(", "str", "(", "max_memory", ")", ")", "return", "return_val", "return", "wrapper" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
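Usage note (editorial): a hedged sketch of calling print_peak_memory from the record above. Despite the docstring, the implementation only writes the peak usage when a stream is passed explicitly, so one is supplied here; allocate is a hypothetical example function, and psutil must be installed.

    import sys
    from swutil.decorators import print_peak_memory

    def allocate(n):
        return [0.0] * n

    measured = print_peak_memory(allocate, stream=sys.stdout)
    data = measured(5000000)  # writes the peak extra memory (in MB) to stdout and returns the list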
valid
validate
Make sure `arg` adheres to specification :param arg: Anything :param spec: Specification :type spec: Specification :return: Validated object
swutil/validation.py
def validate(arg, spec): ''' Make sure `arg` adheres to specification :param arg: Anything :param spec: Specification :type spec: Specification :return: Validated object ''' rejection_subreason = None if spec is None: return arg try: return spec._validate(arg) except Exception as e: rejection_subreason = e try: lenience = spec.lenience except AttributeError: pass else: for level in range(1, lenience + 1): temp = None try: temp = spec.forgive(arg=arg, level=level) except Exception: pass # Forgiving might fail, it is very hard to predict what happens when you do stuff to things that aren't what you think if temp is not None and temp is not arg: arg = temp try: return spec._validate(arg) except Exception as e: rejection_subreason = e rejection_reason = '`{}` was rejected by `{}`.'.format(arg, spec) rejection_subreason = ' ({}: {})'.format(rejection_subreason.__class__.__name__, rejection_subreason) if rejection_subreason is not None else '' raise ValidationError(rejection_reason + rejection_subreason)
def validate(arg, spec): ''' Make sure `arg` adheres to specification :param arg: Anything :param spec: Specification :type spec: Specification :return: Validated object ''' rejection_subreason = None if spec is None: return arg try: return spec._validate(arg) except Exception as e: rejection_subreason = e try: lenience = spec.lenience except AttributeError: pass else: for level in range(1, lenience + 1): temp = None try: temp = spec.forgive(arg=arg, level=level) except Exception: pass # Forgiving might fail, it is very hard to predict what happens when you do stuff to things that aren't what you think if temp is not None and temp is not arg: arg = temp try: return spec._validate(arg) except Exception as e: rejection_subreason = e rejection_reason = '`{}` was rejected by `{}`.'.format(arg, spec) rejection_subreason = ' ({}: {})'.format(rejection_subreason.__class__.__name__, rejection_subreason) if rejection_subreason is not None else '' raise ValidationError(rejection_reason + rejection_subreason)
[ "Make", "sure", "arg", "adheres", "to", "specification", ":", "param", "arg", ":", "Anything", ":", "param", "spec", ":", "Specification", ":", "type", "spec", ":", "Specification", ":", "return", ":", "Validated", "object" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/validation.py#L127-L163
[ "def", "validate", "(", "arg", ",", "spec", ")", ":", "rejection_subreason", "=", "None", "if", "spec", "is", "None", ":", "return", "arg", "try", ":", "return", "spec", ".", "_validate", "(", "arg", ")", "except", "Exception", "as", "e", ":", "rejection_subreason", "=", "e", "try", ":", "lenience", "=", "spec", ".", "lenience", "except", "AttributeError", ":", "pass", "else", ":", "for", "level", "in", "range", "(", "1", ",", "lenience", "+", "1", ")", ":", "temp", "=", "None", "try", ":", "temp", "=", "spec", ".", "forgive", "(", "arg", "=", "arg", ",", "level", "=", "level", ")", "except", "Exception", ":", "pass", "# Forgiving might fail, it is very hard to predict what happens when you do stuff to things that aren't what you think", "if", "temp", "is", "not", "None", "and", "temp", "is", "not", "arg", ":", "arg", "=", "temp", "try", ":", "return", "spec", ".", "_validate", "(", "arg", ")", "except", "Exception", "as", "e", ":", "rejection_subreason", "=", "e", "rejection_reason", "=", "'`{}` was rejected by `{}`.'", ".", "format", "(", "arg", ",", "spec", ")", "rejection_subreason", "=", "' ({}: {})'", ".", "format", "(", "rejection_subreason", ".", "__class__", ".", "__name__", ",", "rejection_subreason", ")", "if", "rejection_subreason", "is", "not", "None", "else", "''", "raise", "ValidationError", "(", "rejection_reason", "+", "rejection_subreason", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
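Usage note (editorial): a minimal sketch of the validate function above with a hand-rolled specification object. PositiveInt is a hypothetical stand-in for the Specification classes the library actually provides (their names are not shown in this record), and it is assumed that ValidationError is importable from the same module, as the code above suggests.

    from swutil.validation import validate, ValidationError

    class PositiveInt:
        # duck-typed specification: only _validate is required by the code above
        def _validate(self, arg):
            if isinstance(arg, int) and arg > 0:
                return arg
            raise ValueError('not a positive integer')

    validate(3, PositiveInt())       # returns 3
    try:
        validate(-1, PositiveInt())  # rejected; no lenience/forgive defined here
    except ValidationError as e:
        print(e)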
valid
_validate_many
Similar to validate but validates multiple objects at once, each with their own specification. Fill objects that were specified but not provided with NotPassed or default values Apply `value_condition` to object dictionary as a whole
swutil/validation.py
def _validate_many(args, specs, defaults,passed_conditions,value_conditions, allow_unknowns,unknowns_spec): ''' Similar to validate but validates multiple objects at once, each with their own specification. Fill objects that were specified but not provided with NotPassed or default values Apply `value_condition` to object dictionary as a whole ''' validated_args = builtins.dict() passed_but_not_specified = set(args.keys()) - set(specs.keys()) if passed_but_not_specified: if not allow_unknowns: raise ValueError(('Arguments {} were passed but not specified (use ' + '`allow_unknowns=True` to avoid this error)'.format(passed_but_not_specified))) else: for arg in passed_but_not_specified: if unknowns_spec is not None: specs[arg] = unknowns_spec if passed_conditions: validate(args, Dict(passed_conditions=passed_conditions)) for arg in specs: if (not arg in args) or NotPassed(args[arg]): if arg in defaults: if isinstance(defaults[arg],DefaultGenerator): validated_args[arg] = defaults[arg]() else: validated_args[arg] = defaults[arg] else: validated_args[arg] = NotPassed else:#Default values and NotPassed values are not validated. Former has advantage that default values need to be `correct` without validation and thus encourage the user to pass stuff that doesn't need validation, and is therefore faster validated_args[arg] = validate(args[arg], specs[arg]) if value_conditions: validated_args = validate(validated_args, value_conditions) return validated_args
def _validate_many(args, specs, defaults,passed_conditions,value_conditions, allow_unknowns,unknowns_spec): ''' Similar to validate but validates multiple objects at once, each with their own specification. Fill objects that were specified but not provided with NotPassed or default values Apply `value_condition` to object dictionary as a whole ''' validated_args = builtins.dict() passed_but_not_specified = set(args.keys()) - set(specs.keys()) if passed_but_not_specified: if not allow_unknowns: raise ValueError(('Arguments {} were passed but not specified (use ' + '`allow_unknowns=True` to avoid this error)'.format(passed_but_not_specified))) else: for arg in passed_but_not_specified: if unknowns_spec is not None: specs[arg] = unknowns_spec if passed_conditions: validate(args, Dict(passed_conditions=passed_conditions)) for arg in specs: if (not arg in args) or NotPassed(args[arg]): if arg in defaults: if isinstance(defaults[arg],DefaultGenerator): validated_args[arg] = defaults[arg]() else: validated_args[arg] = defaults[arg] else: validated_args[arg] = NotPassed else:#Default values and NotPassed values are not validated. Former has advantage that default values need to be `correct` without validation and thus encourage the user to pass stuff that doesn't need validation, and is therefore faster validated_args[arg] = validate(args[arg], specs[arg]) if value_conditions: validated_args = validate(validated_args, value_conditions) return validated_args
[ "Similar", "to", "validate", "but", "validates", "multiple", "objects", "at", "once", "each", "with", "their", "own", "specification", ".", "Fill", "objects", "that", "were", "specified", "but", "not", "provided", "with", "NotPassed", "or", "default", "values", "Apply", "value_condition", "to", "object", "dictionary", "as", "a", "whole" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/validation.py#L165-L198
[ "def", "_validate_many", "(", "args", ",", "specs", ",", "defaults", ",", "passed_conditions", ",", "value_conditions", ",", "allow_unknowns", ",", "unknowns_spec", ")", ":", "validated_args", "=", "builtins", ".", "dict", "(", ")", "passed_but_not_specified", "=", "set", "(", "args", ".", "keys", "(", ")", ")", "-", "set", "(", "specs", ".", "keys", "(", ")", ")", "if", "passed_but_not_specified", ":", "if", "not", "allow_unknowns", ":", "raise", "ValueError", "(", "(", "'Arguments {} were passed but not specified (use '", "+", "'`allow_unknowns=True` to avoid this error)'", ".", "format", "(", "passed_but_not_specified", ")", ")", ")", "else", ":", "for", "arg", "in", "passed_but_not_specified", ":", "if", "unknowns_spec", "is", "not", "None", ":", "specs", "[", "arg", "]", "=", "unknowns_spec", "if", "passed_conditions", ":", "validate", "(", "args", ",", "Dict", "(", "passed_conditions", "=", "passed_conditions", ")", ")", "for", "arg", "in", "specs", ":", "if", "(", "not", "arg", "in", "args", ")", "or", "NotPassed", "(", "args", "[", "arg", "]", ")", ":", "if", "arg", "in", "defaults", ":", "if", "isinstance", "(", "defaults", "[", "arg", "]", ",", "DefaultGenerator", ")", ":", "validated_args", "[", "arg", "]", "=", "defaults", "[", "arg", "]", "(", ")", "else", ":", "validated_args", "[", "arg", "]", "=", "defaults", "[", "arg", "]", "else", ":", "validated_args", "[", "arg", "]", "=", "NotPassed", "else", ":", "#Default values and NotPassed values are not validated. Former has advantage that default values need to be `correct` without validation and thus encourage the user to pass stuff that doesn't need validation, and is therefore faster", "validated_args", "[", "arg", "]", "=", "validate", "(", "args", "[", "arg", "]", ",", "specs", "[", "arg", "]", ")", "if", "value_conditions", ":", "validated_args", "=", "validate", "(", "validated_args", ",", "value_conditions", ")", "return", "validated_args" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
black_scholes
Return M Euler-Maruyama sample paths with N time steps of S_t, where dS_t = S_t*r*dt+S_t*sigma*dW_t S(0)=S0 :rtype: M x N x d array
swutil/stochastic_processes.py
def black_scholes(times,r,sigma,S0,d,M,dW=None): ''' Return M Euler-Maruyama sample paths with N time steps of S_t, where dS_t = S_t*r*dt+S_t*sigma*dW_t S(0)=S0 :rtype: M x N x d array ''' N=len(times) times = times.flatten() p0 = np.log(S0) if dW is None: dW=np.sqrt(times[1:]-times[:-1])[None,:,None]*np.random.normal(size=(M,N-1,d)) if np.squeeze(sigma).ndim<=1: dF = sigma*dW ito_correction = np.squeeze(sigma**2/2) else: dF = np.einsum('ij,...j',sigma,dW) ito_correction = np.sum(sigma**2,1)/2 drift = (r-ito_correction)*times[None,:,None] diffusion = integral(dF=dF,axis=1,cumulative = True) return np.exp(p0 + drift + diffusion)
def black_scholes(times,r,sigma,S0,d,M,dW=None): ''' Return M Euler-Maruyama sample paths with N time steps of S_t, where dS_t = S_t*r*dt+S_t*sigma*dW_t S(0)=S0 :rtype: M x N x d array ''' N=len(times) times = times.flatten() p0 = np.log(S0) if dW is None: dW=np.sqrt(times[1:]-times[:-1])[None,:,None]*np.random.normal(size=(M,N-1,d)) if np.squeeze(sigma).ndim<=1: dF = sigma*dW ito_correction = np.squeeze(sigma**2/2) else: dF = np.einsum('ij,...j',sigma,dW) ito_correction = np.sum(sigma**2,1)/2 drift = (r-ito_correction)*times[None,:,None] diffusion = integral(dF=dF,axis=1,cumulative = True) return np.exp(p0 + drift + diffusion)
[ "Return", "M", "Euler", "-", "Maruyama", "sample", "paths", "with", "N", "time", "steps", "of", "S_t", "where", "dS_t", "=", "S_t", "*", "r", "*", "dt", "+", "S_t", "*", "sigma", "*", "dW_t", "S", "(", "0", ")", "=", "S0", ":", "rtype", ":", "M", "x", "N", "x", "d", "array" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/stochastic_processes.py#L10-L31
[ "def", "black_scholes", "(", "times", ",", "r", ",", "sigma", ",", "S0", ",", "d", ",", "M", ",", "dW", "=", "None", ")", ":", "N", "=", "len", "(", "times", ")", "times", "=", "times", ".", "flatten", "(", ")", "p0", "=", "np", ".", "log", "(", "S0", ")", "if", "dW", "is", "None", ":", "dW", "=", "np", ".", "sqrt", "(", "times", "[", "1", ":", "]", "-", "times", "[", ":", "-", "1", "]", ")", "[", "None", ",", ":", ",", "None", "]", "*", "np", ".", "random", ".", "normal", "(", "size", "=", "(", "M", ",", "N", "-", "1", ",", "d", ")", ")", "if", "np", ".", "squeeze", "(", "sigma", ")", ".", "ndim", "<=", "1", ":", "dF", "=", "sigma", "*", "dW", "ito_correction", "=", "np", ".", "squeeze", "(", "sigma", "**", "2", "/", "2", ")", "else", ":", "dF", "=", "np", ".", "einsum", "(", "'ij,...j'", ",", "sigma", ",", "dW", ")", "ito_correction", "=", "np", ".", "sum", "(", "sigma", "**", "2", ",", "1", ")", "/", "2", "drift", "=", "(", "r", "-", "ito_correction", ")", "*", "times", "[", "None", ",", ":", ",", "None", "]", "diffusion", "=", "integral", "(", "dF", "=", "dF", ",", "axis", "=", "1", ",", "cumulative", "=", "True", ")", "return", "np", ".", "exp", "(", "p0", "+", "drift", "+", "diffusion", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
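Usage note (editorial): a hedged sketch of sampling Black-Scholes paths with the function above; the parameter values are arbitrary and the expected output shape follows the docstring (M x N x d).

    import numpy as np
    from swutil.stochastic_processes import black_scholes

    times = np.linspace(0, 1, 251)   # N = 251 time points on [0, 1]
    paths = black_scholes(times, r=0.02, sigma=0.2, S0=100.0, d=1, M=1000)
    print(paths.shape)               # expected: (1000, 251, 1)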
valid
heston
Return M Euler-Maruyama sample paths with N time steps of (S_t,v_t), where (S_t,v_t) follows the Heston model of mathematical finance :rtype: M x N x d array
swutil/stochastic_processes.py
def heston(times,mu,rho,kappa,theta,xi,S0,nu0,d,M,nu_1d=True): ''' Return M Euler-Maruyama sample paths with N time steps of (S_t,v_t), where (S_t,v_t) follows the Heston model of mathematical finance :rtype: M x N x d array ''' d_nu = 1 if nu_1d else d nu = np.zeros((M,len(times),d_nu)) S = np.zeros((M,len(times),d)) nu[:,0,:] = nu0 S[:,0,:] = S0 if 2*kappa*theta<=xi**2: raise ValueError('Feller condition not satisfied') test = np.std(np.diff(times.flatten())) if test>1e-12: raise ValueError dt = times[1]-times[0] N = len(times) if d == 1: if np.array(rho).size ==1: rho = np.array([[1,rho],[rho,1]]) chol = np.linalg.cholesky(rho) dW = np.sqrt(dt)*np.einsum('ij,...j',chol,np.random.normal(size=(M,N-1,d+d_nu))) for i in range(1,N): dt = times[i]-times[i-1] nu[:,i,:] = np.abs(nu[:,i-1,:] + kappa*(theta-nu[:,i-1,:])*dt+xi*np.sqrt(nu[:,i-1,:])*dW[:,i-1,d:]) S = S0*np.exp(integral(np.sqrt(nu),dF = dW[:,:,:d],axis=1,cumulative = True)+integral(mu - 0.5*nu,F = times,axis=1,trapez=False,cumulative = True)) return np.concatenate((S,nu),axis=-1)
def heston(times,mu,rho,kappa,theta,xi,S0,nu0,d,M,nu_1d=True): ''' Return M Euler-Maruyama sample paths with N time steps of (S_t,v_t), where (S_t,v_t) follows the Heston model of mathematical finance :rtype: M x N x d array ''' d_nu = 1 if nu_1d else d nu = np.zeros((M,len(times),d_nu)) S = np.zeros((M,len(times),d)) nu[:,0,:] = nu0 S[:,0,:] = S0 if 2*kappa*theta<=xi**2: raise ValueError('Feller condition not satisfied') test = np.std(np.diff(times.flatten())) if test>1e-12: raise ValueError dt = times[1]-times[0] N = len(times) if d == 1: if np.array(rho).size ==1: rho = np.array([[1,rho],[rho,1]]) chol = np.linalg.cholesky(rho) dW = np.sqrt(dt)*np.einsum('ij,...j',chol,np.random.normal(size=(M,N-1,d+d_nu))) for i in range(1,N): dt = times[i]-times[i-1] nu[:,i,:] = np.abs(nu[:,i-1,:] + kappa*(theta-nu[:,i-1,:])*dt+xi*np.sqrt(nu[:,i-1,:])*dW[:,i-1,d:]) S = S0*np.exp(integral(np.sqrt(nu),dF = dW[:,:,:d],axis=1,cumulative = True)+integral(mu - 0.5*nu,F = times,axis=1,trapez=False,cumulative = True)) return np.concatenate((S,nu),axis=-1)
[ "Return", "M", "Euler", "-", "Maruyama", "sample", "paths", "with", "N", "time", "steps", "of", "(", "S_t", "v_t", ")", "where", "(", "S_t", "v_t", ")", "follows", "the", "Heston", "model", "of", "mathematical", "finance" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/stochastic_processes.py#L33-L61
[ "def", "heston", "(", "times", ",", "mu", ",", "rho", ",", "kappa", ",", "theta", ",", "xi", ",", "S0", ",", "nu0", ",", "d", ",", "M", ",", "nu_1d", "=", "True", ")", ":", "d_nu", "=", "1", "if", "nu_1d", "else", "d", "nu", "=", "np", ".", "zeros", "(", "(", "M", ",", "len", "(", "times", ")", ",", "d_nu", ")", ")", "S", "=", "np", ".", "zeros", "(", "(", "M", ",", "len", "(", "times", ")", ",", "d", ")", ")", "nu", "[", ":", ",", "0", ",", ":", "]", "=", "nu0", "S", "[", ":", ",", "0", ",", ":", "]", "=", "S0", "if", "2", "*", "kappa", "*", "theta", "<=", "xi", "**", "2", ":", "raise", "ValueError", "(", "'Feller condition not satisfied'", ")", "test", "=", "np", ".", "std", "(", "np", ".", "diff", "(", "times", ".", "flatten", "(", ")", ")", ")", "if", "test", ">", "1e-12", ":", "raise", "ValueError", "dt", "=", "times", "[", "1", "]", "-", "times", "[", "0", "]", "N", "=", "len", "(", "times", ")", "if", "d", "==", "1", ":", "if", "np", ".", "array", "(", "rho", ")", ".", "size", "==", "1", ":", "rho", "=", "np", ".", "array", "(", "[", "[", "1", ",", "rho", "]", ",", "[", "rho", ",", "1", "]", "]", ")", "chol", "=", "np", ".", "linalg", ".", "cholesky", "(", "rho", ")", "dW", "=", "np", ".", "sqrt", "(", "dt", ")", "*", "np", ".", "einsum", "(", "'ij,...j'", ",", "chol", ",", "np", ".", "random", ".", "normal", "(", "size", "=", "(", "M", ",", "N", "-", "1", ",", "d", "+", "d_nu", ")", ")", ")", "for", "i", "in", "range", "(", "1", ",", "N", ")", ":", "dt", "=", "times", "[", "i", "]", "-", "times", "[", "i", "-", "1", "]", "nu", "[", ":", ",", "i", ",", ":", "]", "=", "np", ".", "abs", "(", "nu", "[", ":", ",", "i", "-", "1", ",", ":", "]", "+", "kappa", "*", "(", "theta", "-", "nu", "[", ":", ",", "i", "-", "1", ",", ":", "]", ")", "*", "dt", "+", "xi", "*", "np", ".", "sqrt", "(", "nu", "[", ":", ",", "i", "-", "1", ",", ":", "]", ")", "*", "dW", "[", ":", ",", "i", "-", "1", ",", "d", ":", "]", ")", "S", "=", "S0", "*", "np", ".", "exp", "(", "integral", "(", "np", ".", "sqrt", "(", "nu", ")", ",", "dF", "=", "dW", "[", ":", ",", ":", ",", ":", "d", "]", ",", "axis", "=", "1", ",", "cumulative", "=", "True", ")", "+", "integral", "(", "mu", "-", "0.5", "*", "nu", ",", "F", "=", "times", ",", "axis", "=", "1", ",", "trapez", "=", "False", ",", "cumulative", "=", "True", ")", ")", "return", "np", ".", "concatenate", "(", "(", "S", ",", "nu", ")", ",", "axis", "=", "-", "1", ")" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
fBrown
Sample fractional Brownian motion with differentiability index H on interval [0,T] (H=1/2 yields standard Brownian motion) :param H: Differentiability, larger than 0 :param T: Final time :param N: Number of time steps :param M: Number of samples :param dW: Driving noise, optional
swutil/stochastic_processes.py
def fBrown(H,T,N,M,dW = None,cholesky = False): ''' Sample fractional Brownian motion with differentiability index H on interval [0,T] (H=1/2 yields standard Brownian motion) :param H: Differentiability, larger than 0 :param T: Final time :param N: Number of time steps :param M: Number of samples :param dW: Driving noise, optional ''' alpha = 0.5-H times = np.linspace(0, T, N) dt = T/(N-1) if cholesky: if dW is not None: raise ValueError('Cannot use provided dW if Cholesky method is used') times = times[1:] tdt = times/np.reshape(times,(-1,1)) tdt[np.tril_indices(N-1,-1)]=0 cov = np.reshape(times,(-1,1))**(1-2*alpha)*(1/(1-alpha))*(tdt-1)**(-alpha)*scipy.special.hyp2f1(alpha,1-alpha,2-alpha,1/(1-tdt)) cov[0,:] = 0 np.fill_diagonal(cov,times**(1-2*alpha)/(1-2*alpha)) cov[np.tril_indices(N-1,-1)] = cov.T[np.tril_indices(N-1,-1)] L = scipy.linalg.cholesky(cov) return np.concatenate((np.zeros((1,M)),L.T@np.random.normal(size=(N-1,M)))) if dW is None: dW = np.sqrt(dt)*np.random.normal(size=(N-1,M)) if H == 0.5: return integral(dF = dW,cumulative = True) a = 1/dt/(1-alpha)*((T-times[N-2::-1])**(1-alpha)-(T-times[:0:-1])**(1-alpha))#a is array that is convolved with dW. Values arise from conditioning integral pieces on dW out = toeplitz_multiplication(a,np.zeros_like(a),dW[::-1])[::-1] out -=a[0]*dW#Redo last bit of defining integral with exact simulation below cov = np.array([[ dt**(1-2*alpha)/(1-2*alpha),dt**(1-alpha)/(1-alpha)],[dt**(1-alpha)/(1-alpha),dt]]) var = cov[0,0]-cov[0,1]**2/cov[1,1] out += cov[0,1]/cov[1,1]*dW #Conditional mean out += np.sqrt(var)*np.random.normal(size = (N-1,M))#Conditional variance out = np.concatenate((np.zeros((1,M)),out)) return out
def fBrown(H,T,N,M,dW = None,cholesky = False): ''' Sample fractional Brownian motion with differentiability index H on interval [0,T] (H=1/2 yields standard Brownian motion) :param H: Differentiability, larger than 0 :param T: Final time :param N: Number of time steps :param M: Number of samples :param dW: Driving noise, optional ''' alpha = 0.5-H times = np.linspace(0, T, N) dt = T/(N-1) if cholesky: if dW is not None: raise ValueError('Cannot use provided dW if Cholesky method is used') times = times[1:] tdt = times/np.reshape(times,(-1,1)) tdt[np.tril_indices(N-1,-1)]=0 cov = np.reshape(times,(-1,1))**(1-2*alpha)*(1/(1-alpha))*(tdt-1)**(-alpha)*scipy.special.hyp2f1(alpha,1-alpha,2-alpha,1/(1-tdt)) cov[0,:] = 0 np.fill_diagonal(cov,times**(1-2*alpha)/(1-2*alpha)) cov[np.tril_indices(N-1,-1)] = cov.T[np.tril_indices(N-1,-1)] L = scipy.linalg.cholesky(cov) return np.concatenate((np.zeros((1,M)),L.T@np.random.normal(size=(N-1,M)))) if dW is None: dW = np.sqrt(dt)*np.random.normal(size=(N-1,M)) if H == 0.5: return integral(dF = dW,cumulative = True) a = 1/dt/(1-alpha)*((T-times[N-2::-1])**(1-alpha)-(T-times[:0:-1])**(1-alpha))#a is array that is convolved with dW. Values arise from conditioning integral pieces on dW out = toeplitz_multiplication(a,np.zeros_like(a),dW[::-1])[::-1] out -=a[0]*dW#Redo last bit of defining integral with exact simulation below cov = np.array([[ dt**(1-2*alpha)/(1-2*alpha),dt**(1-alpha)/(1-alpha)],[dt**(1-alpha)/(1-alpha),dt]]) var = cov[0,0]-cov[0,1]**2/cov[1,1] out += cov[0,1]/cov[1,1]*dW #Conditional mean out += np.sqrt(var)*np.random.normal(size = (N-1,M))#Conditional variance out = np.concatenate((np.zeros((1,M)),out)) return out
[ "Sample", "fractional", "Brownian", "motion", "with", "differentiability", "index", "H", "on", "interval", "[", "0", "T", "]", "(", "H", "=", "1", "/", "2", "yields", "standard", "Brownian", "motion", ")", ":", "param", "H", ":", "Differentiability", "larger", "than", "0", ":", "param", "T", ":", "Final", "time", ":", "param", "N", ":", "Number", "of", "time", "steps", ":", "param", "M", ":", "Number", "of", "samples", ":", "param", "dW", ":", "Driving", "noise", "optional" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/stochastic_processes.py#L64-L102
[ "def", "fBrown", "(", "H", ",", "T", ",", "N", ",", "M", ",", "dW", "=", "None", ",", "cholesky", "=", "False", ")", ":", "alpha", "=", "0.5", "-", "H", "times", "=", "np", ".", "linspace", "(", "0", ",", "T", ",", "N", ")", "dt", "=", "T", "/", "(", "N", "-", "1", ")", "if", "cholesky", ":", "if", "dW", "is", "not", "None", ":", "raise", "ValueError", "(", "'Cannot use provided dW if Cholesky method is used'", ")", "times", "=", "times", "[", "1", ":", "]", "tdt", "=", "times", "/", "np", ".", "reshape", "(", "times", ",", "(", "-", "1", ",", "1", ")", ")", "tdt", "[", "np", ".", "tril_indices", "(", "N", "-", "1", ",", "-", "1", ")", "]", "=", "0", "cov", "=", "np", ".", "reshape", "(", "times", ",", "(", "-", "1", ",", "1", ")", ")", "**", "(", "1", "-", "2", "*", "alpha", ")", "*", "(", "1", "/", "(", "1", "-", "alpha", ")", ")", "*", "(", "tdt", "-", "1", ")", "**", "(", "-", "alpha", ")", "*", "scipy", ".", "special", ".", "hyp2f1", "(", "alpha", ",", "1", "-", "alpha", ",", "2", "-", "alpha", ",", "1", "/", "(", "1", "-", "tdt", ")", ")", "cov", "[", "0", ",", ":", "]", "=", "0", "np", ".", "fill_diagonal", "(", "cov", ",", "times", "**", "(", "1", "-", "2", "*", "alpha", ")", "/", "(", "1", "-", "2", "*", "alpha", ")", ")", "cov", "[", "np", ".", "tril_indices", "(", "N", "-", "1", ",", "-", "1", ")", "]", "=", "cov", ".", "T", "[", "np", ".", "tril_indices", "(", "N", "-", "1", ",", "-", "1", ")", "]", "L", "=", "scipy", ".", "linalg", ".", "cholesky", "(", "cov", ")", "return", "np", ".", "concatenate", "(", "(", "np", ".", "zeros", "(", "(", "1", ",", "M", ")", ")", ",", "L", ".", "T", "@", "np", ".", "random", ".", "normal", "(", "size", "=", "(", "N", "-", "1", ",", "M", ")", ")", ")", ")", "if", "dW", "is", "None", ":", "dW", "=", "np", ".", "sqrt", "(", "dt", ")", "*", "np", ".", "random", ".", "normal", "(", "size", "=", "(", "N", "-", "1", ",", "M", ")", ")", "if", "H", "==", "0.5", ":", "return", "integral", "(", "dF", "=", "dW", ",", "cumulative", "=", "True", ")", "a", "=", "1", "/", "dt", "/", "(", "1", "-", "alpha", ")", "*", "(", "(", "T", "-", "times", "[", "N", "-", "2", ":", ":", "-", "1", "]", ")", "**", "(", "1", "-", "alpha", ")", "-", "(", "T", "-", "times", "[", ":", "0", ":", "-", "1", "]", ")", "**", "(", "1", "-", "alpha", ")", ")", "#a is array that is convolved with dW. 
Values arise from conditioning integral pieces on dW ", "out", "=", "toeplitz_multiplication", "(", "a", ",", "np", ".", "zeros_like", "(", "a", ")", ",", "dW", "[", ":", ":", "-", "1", "]", ")", "[", ":", ":", "-", "1", "]", "out", "-=", "a", "[", "0", "]", "*", "dW", "#Redo last bit of defining integral with exact simulation below", "cov", "=", "np", ".", "array", "(", "[", "[", "dt", "**", "(", "1", "-", "2", "*", "alpha", ")", "/", "(", "1", "-", "2", "*", "alpha", ")", ",", "dt", "**", "(", "1", "-", "alpha", ")", "/", "(", "1", "-", "alpha", ")", "]", ",", "[", "dt", "**", "(", "1", "-", "alpha", ")", "/", "(", "1", "-", "alpha", ")", ",", "dt", "]", "]", ")", "var", "=", "cov", "[", "0", ",", "0", "]", "-", "cov", "[", "0", ",", "1", "]", "**", "2", "/", "cov", "[", "1", ",", "1", "]", "out", "+=", "cov", "[", "0", ",", "1", "]", "/", "cov", "[", "1", ",", "1", "]", "*", "dW", "#Conditional mean", "out", "+=", "np", ".", "sqrt", "(", "var", ")", "*", "np", ".", "random", ".", "normal", "(", "size", "=", "(", "N", "-", "1", ",", "M", ")", ")", "#Conditional variance", "out", "=", "np", ".", "concatenate", "(", "(", "np", ".", "zeros", "(", "(", "1", ",", "M", ")", ")", ",", "out", ")", ")", "return", "out" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
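Usage note (editorial): a minimal sketch of sampling fractional Brownian motion with fBrown above; the returned array stacks time along the first axis and samples along the second, i.e. shape (N, M).

    from swutil.stochastic_processes import fBrown

    # 500 paths with Hurst index H = 0.3 on [0, 1], 200 time points each
    paths = fBrown(H=0.3, T=1.0, N=200, M=500)
    print(paths.shape)  # expected: (200, 500)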
valid
r_bergomi
Return M Euler-Maruyama sample paths with N time steps of (S_t,v_t), where (S_t,v_t) follows the rBergomi model of mathematical finance :rtype: M x N x d array
swutil/stochastic_processes.py
def r_bergomi(H,T,eta,xi,rho,S0,r,N,M,dW=None,dW_orth=None,cholesky = False,return_v=False): ''' Return M Euler-Maruyama sample paths with N time steps of (S_t,v_t), where (S_t,v_t) follows the rBergomi model of mathematical finance :rtype: M x N x d array ''' times = np.linspace(0, T, N) dt = T/(N-1) times = np.reshape(times,(-1,1)) if dW is None: dW = np.sqrt(dt)*np.random.normal(size=(N-1,M)) if dW_orth is None: dW_orth = np.sqrt(dt)*np.random.normal(size=(N-1,M)) dZ = rho*dW+np.sqrt(1-rho**2)*dW_orth Y = eta*np.sqrt(2*H)*fBrown(H,T,N,M,dW =dW,cholesky = cholesky) v = xi*np.exp(Y-0.5*(eta**2)*times**(2*H)) S = S0*np.exp(integral(np.sqrt(v),dF = dZ,axis=0,cumulative = True)+integral(r - 0.5*v,F = times,axis=0,trapez=False,cumulative = True)) if return_v: return np.array([S,v]).T else: return np.array([S]).T
def r_bergomi(H,T,eta,xi,rho,S0,r,N,M,dW=None,dW_orth=None,cholesky = False,return_v=False): ''' Return M Euler-Maruyama sample paths with N time steps of (S_t,v_t), where (S_t,v_t) follows the rBergomi model of mathematical finance :rtype: M x N x d array ''' times = np.linspace(0, T, N) dt = T/(N-1) times = np.reshape(times,(-1,1)) if dW is None: dW = np.sqrt(dt)*np.random.normal(size=(N-1,M)) if dW_orth is None: dW_orth = np.sqrt(dt)*np.random.normal(size=(N-1,M)) dZ = rho*dW+np.sqrt(1-rho**2)*dW_orth Y = eta*np.sqrt(2*H)*fBrown(H,T,N,M,dW =dW,cholesky = cholesky) v = xi*np.exp(Y-0.5*(eta**2)*times**(2*H)) S = S0*np.exp(integral(np.sqrt(v),dF = dZ,axis=0,cumulative = True)+integral(r - 0.5*v,F = times,axis=0,trapez=False,cumulative = True)) if return_v: return np.array([S,v]).T else: return np.array([S]).T
[ "Return", "M", "Euler", "-", "Maruyama", "sample", "paths", "with", "N", "time", "steps", "of", "(", "S_t", "v_t", ")", "where", "(", "S_t", "v_t", ")", "follows", "the", "rBergomi", "model", "of", "mathematical", "finance" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/stochastic_processes.py#L104-L125
[ "def", "r_bergomi", "(", "H", ",", "T", ",", "eta", ",", "xi", ",", "rho", ",", "S0", ",", "r", ",", "N", ",", "M", ",", "dW", "=", "None", ",", "dW_orth", "=", "None", ",", "cholesky", "=", "False", ",", "return_v", "=", "False", ")", ":", "times", "=", "np", ".", "linspace", "(", "0", ",", "T", ",", "N", ")", "dt", "=", "T", "/", "(", "N", "-", "1", ")", "times", "=", "np", ".", "reshape", "(", "times", ",", "(", "-", "1", ",", "1", ")", ")", "if", "dW", "is", "None", ":", "dW", "=", "np", ".", "sqrt", "(", "dt", ")", "*", "np", ".", "random", ".", "normal", "(", "size", "=", "(", "N", "-", "1", ",", "M", ")", ")", "if", "dW_orth", "is", "None", ":", "dW_orth", "=", "np", ".", "sqrt", "(", "dt", ")", "*", "np", ".", "random", ".", "normal", "(", "size", "=", "(", "N", "-", "1", ",", "M", ")", ")", "dZ", "=", "rho", "*", "dW", "+", "np", ".", "sqrt", "(", "1", "-", "rho", "**", "2", ")", "*", "dW_orth", "Y", "=", "eta", "*", "np", ".", "sqrt", "(", "2", "*", "H", ")", "*", "fBrown", "(", "H", ",", "T", ",", "N", ",", "M", ",", "dW", "=", "dW", ",", "cholesky", "=", "cholesky", ")", "v", "=", "xi", "*", "np", ".", "exp", "(", "Y", "-", "0.5", "*", "(", "eta", "**", "2", ")", "*", "times", "**", "(", "2", "*", "H", ")", ")", "S", "=", "S0", "*", "np", ".", "exp", "(", "integral", "(", "np", ".", "sqrt", "(", "v", ")", ",", "dF", "=", "dZ", ",", "axis", "=", "0", ",", "cumulative", "=", "True", ")", "+", "integral", "(", "r", "-", "0.5", "*", "v", ",", "F", "=", "times", ",", "axis", "=", "0", ",", "trapez", "=", "False", ",", "cumulative", "=", "True", ")", ")", "if", "return_v", ":", "return", "np", ".", "array", "(", "[", "S", ",", "v", "]", ")", ".", "T", "else", ":", "return", "np", ".", "array", "(", "[", "S", "]", ")", ".", "T" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
valid
unique
https://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-whilst-preserving-order
swutil/collections.py
def unique(seq): ''' https://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-whilst-preserving-order ''' has = [] return [x for x in seq if not (x in has or has.append(x))]
def unique(seq): ''' https://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-whilst-preserving-order ''' has = [] return [x for x in seq if not (x in has or has.append(x))]
[ "https", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "480214", "/", "how", "-", "do", "-", "you", "-", "remove", "-", "duplicates", "-", "from", "-", "a", "-", "list", "-", "in", "-", "whilst", "-", "preserving", "-", "order" ]
soerenwolfers/swutil
python
https://github.com/soerenwolfers/swutil/blob/2d598f2deac8b7e20df95dbc68017e5ab5d6180c/swutil/collections.py#L5-L10
[ "def", "unique", "(", "seq", ")", ":", "has", "=", "[", "]", "return", "[", "x", "for", "x", "in", "seq", "if", "not", "(", "x", "in", "has", "or", "has", ".", "append", "(", "x", ")", ")", "]" ]
2d598f2deac8b7e20df95dbc68017e5ab5d6180c
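Usage note (editorial): unique above removes duplicates while keeping first-occurrence order, for example:

    from swutil.collections import unique

    unique([3, 1, 3, 2, 1])  # -> [3, 1, 2]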
valid
ModelMixin.get_default_fields
get all fields of model, execpt id
easyui/mixins/model_mixins.py
def get_default_fields(self): """ get all fields of model, execpt id """ field_names = self._meta.get_all_field_names() if 'id' in field_names: field_names.remove('id') return field_names
def get_default_fields(self): """ get all fields of model, execpt id """ field_names = self._meta.get_all_field_names() if 'id' in field_names: field_names.remove('id') return field_names
[ "get", "all", "fields", "of", "model", "execpt", "id" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/model_mixins.py#L14-L22
[ "def", "get_default_fields", "(", "self", ")", ":", "field_names", "=", "self", ".", "_meta", ".", "get_all_field_names", "(", ")", "if", "'id'", "in", "field_names", ":", "field_names", ".", "remove", "(", "'id'", ")", "return", "field_names" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
ModelMixin.get_field_value
返回显示的值,而不是单纯的数据库中的值 field 是model中的field type value_verbose 为True,返回数据的显示数据,会转换为choice的内容, 如果value_verbose 为False, 返回数据的实际值
easyui/mixins/model_mixins.py
def get_field_value(self, field, value_verbose=True): """ 返回显示的值,而不是单纯的数据库中的值 field 是model中的field type value_verbose 为True,返回数据的显示数据,会转换为choice的内容, 如果value_verbose 为False, 返回数据的实际值 """ if not value_verbose: """ value_verbose == false, return raw value """ value = field._get_val_from_obj(self) else: if isinstance(field, ForeignKey): # 获取外键的内容 value = getattr(self, field.name) else: # 非外键 try: value = self._get_FIELD_display(field) except : value = field._get_val_from_obj(self) if(value == True or value == False or isinstance(value, (int, float))): return value return unicode(value)
def get_field_value(self, field, value_verbose=True): """ 返回显示的值,而不是单纯的数据库中的值 field 是model中的field type value_verbose 为True,返回数据的显示数据,会转换为choice的内容, 如果value_verbose 为False, 返回数据的实际值 """ if not value_verbose: """ value_verbose == false, return raw value """ value = field._get_val_from_obj(self) else: if isinstance(field, ForeignKey): # 获取外键的内容 value = getattr(self, field.name) else: # 非外键 try: value = self._get_FIELD_display(field) except : value = field._get_val_from_obj(self) if(value == True or value == False or isinstance(value, (int, float))): return value return unicode(value)
[ "返回显示的值,而不是单纯的数据库中的值", "field", "是model中的field", "type", "value_verbose", "为True,返回数据的显示数据,会转换为choice的内容,", "如果value_verbose", "为False,", "返回数据的实际值" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/model_mixins.py#L24-L48
[ "def", "get_field_value", "(", "self", ",", "field", ",", "value_verbose", "=", "True", ")", ":", "if", "not", "value_verbose", ":", "\"\"\"\n value_verbose == false, return raw value\n \"\"\"", "value", "=", "field", ".", "_get_val_from_obj", "(", "self", ")", "else", ":", "if", "isinstance", "(", "field", ",", "ForeignKey", ")", ":", "# 获取外键的内容", "value", "=", "getattr", "(", "self", ",", "field", ".", "name", ")", "else", ":", "# 非外键", "try", ":", "value", "=", "self", ".", "_get_FIELD_display", "(", "field", ")", "except", ":", "value", "=", "field", ".", "_get_val_from_obj", "(", "self", ")", "if", "(", "value", "==", "True", "or", "value", "==", "False", "or", "isinstance", "(", "value", ",", "(", "int", ",", "float", ")", ")", ")", ":", "return", "value", "return", "unicode", "(", "value", ")" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
ModelMixin.get_fields
返回字段名及其对应值的列表 field_verbose 为True,返回定义中的字段的verbose_name, False返回其name value_verbose 为True,返回数据的显示数据,会转换为choice的内容,为False, 返回数据的实际值 fields 指定了要显示的字段 extra_fields 指定了要特殊处理的非field,比如是函数 remove_fields 指定了不显示的字段
easyui/mixins/model_mixins.py
def get_fields(self, field_verbose=True, value_verbose=True, fields=[], extra_fields=[], remove_fields = []): ''' 返回字段名及其对应值的列表 field_verbose 为True,返回定义中的字段的verbose_name, False返回其name value_verbose 为True,返回数据的显示数据,会转换为choice的内容,为False, 返回数据的实际值 fields 指定了要显示的字段 extra_fields 指定了要特殊处理的非field,比如是函数 remove_fields 指定了不显示的字段 ''' field_list = [] for field in self.__class__._meta.fields: if field.name in remove_fields: # 不显示的字段,跳过循环 continue if fields and field.name not in fields: # fields 不为空列表,即指定了要显示的字段,并且field.name 不再指定的列表中,跳过循环 continue if field.verbose_name and field_verbose: value_tuple = (field.verbose_name, self.get_field_value(field, value_verbose)) else: value_tuple = (field.name, self.get_field_value(field, value_verbose)) field_list.append(value_tuple) for name in extra_fields: # 处理函数 method = getattr(self, name) result = method() value_tuple = (name, result) field_list.append(value_tuple) return field_list
def get_fields(self, field_verbose=True, value_verbose=True, fields=[], extra_fields=[], remove_fields = []): ''' 返回字段名及其对应值的列表 field_verbose 为True,返回定义中的字段的verbose_name, False返回其name value_verbose 为True,返回数据的显示数据,会转换为choice的内容,为False, 返回数据的实际值 fields 指定了要显示的字段 extra_fields 指定了要特殊处理的非field,比如是函数 remove_fields 指定了不显示的字段 ''' field_list = [] for field in self.__class__._meta.fields: if field.name in remove_fields: # 不显示的字段,跳过循环 continue if fields and field.name not in fields: # fields 不为空列表,即指定了要显示的字段,并且field.name 不再指定的列表中,跳过循环 continue if field.verbose_name and field_verbose: value_tuple = (field.verbose_name, self.get_field_value(field, value_verbose)) else: value_tuple = (field.name, self.get_field_value(field, value_verbose)) field_list.append(value_tuple) for name in extra_fields: # 处理函数 method = getattr(self, name) result = method() value_tuple = (name, result) field_list.append(value_tuple) return field_list
[ "返回字段名及其对应值的列表", "field_verbose", "为True,返回定义中的字段的verbose_name,", "False返回其name", "value_verbose", "为True,返回数据的显示数据,会转换为choice的内容,为False,", "返回数据的实际值", "fields", "指定了要显示的字段", "extra_fields", "指定了要特殊处理的非field,比如是函数", "remove_fields", "指定了不显示的字段" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/model_mixins.py#L51-L84
[ "def", "get_fields", "(", "self", ",", "field_verbose", "=", "True", ",", "value_verbose", "=", "True", ",", "fields", "=", "[", "]", ",", "extra_fields", "=", "[", "]", ",", "remove_fields", "=", "[", "]", ")", ":", "field_list", "=", "[", "]", "for", "field", "in", "self", ".", "__class__", ".", "_meta", ".", "fields", ":", "if", "field", ".", "name", "in", "remove_fields", ":", "# 不显示的字段,跳过循环", "continue", "if", "fields", "and", "field", ".", "name", "not", "in", "fields", ":", "# fields 不为空列表,即指定了要显示的字段,并且field.name 不再指定的列表中,跳过循环", "continue", "if", "field", ".", "verbose_name", "and", "field_verbose", ":", "value_tuple", "=", "(", "field", ".", "verbose_name", ",", "self", ".", "get_field_value", "(", "field", ",", "value_verbose", ")", ")", "else", ":", "value_tuple", "=", "(", "field", ".", "name", ",", "self", ".", "get_field_value", "(", "field", ",", "value_verbose", ")", ")", "field_list", ".", "append", "(", "value_tuple", ")", "for", "name", "in", "extra_fields", ":", "# 处理函数", "method", "=", "getattr", "(", "self", ",", "name", ")", "result", "=", "method", "(", ")", "value_tuple", "=", "(", "name", ",", "result", ")", "field_list", ".", "append", "(", "value_tuple", ")", "return", "field_list" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
get_url
通过menu_id,获取对应的URL eg. /easyui/MenuListView/
easyui/views.py
def get_url(request): """ 通过menu_id,获取对应的URL eg. /easyui/MenuListView/ """ menu_id = request.GET.get('menu_id') m_object = Menu.objects.get(pk=menu_id) namespace = m_object.namespace viewname = m_object.viewname url_string = '%s:%s' %(namespace, viewname) url = reverse(url_string) return HttpResponse(url)
def get_url(request): """ 通过menu_id,获取对应的URL eg. /easyui/MenuListView/ """ menu_id = request.GET.get('menu_id') m_object = Menu.objects.get(pk=menu_id) namespace = m_object.namespace viewname = m_object.viewname url_string = '%s:%s' %(namespace, viewname) url = reverse(url_string) return HttpResponse(url)
[ "通过menu_id,获取对应的URL", "eg", ".", "/", "easyui", "/", "MenuListView", "/" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/views.py#L27-L40
[ "def", "get_url", "(", "request", ")", ":", "menu_id", "=", "request", ".", "GET", ".", "get", "(", "'menu_id'", ")", "m_object", "=", "Menu", ".", "objects", ".", "get", "(", "pk", "=", "menu_id", ")", "namespace", "=", "m_object", ".", "namespace", "viewname", "=", "m_object", ".", "viewname", "url_string", "=", "'%s:%s'", "%", "(", "namespace", ",", "viewname", ")", "url", "=", "reverse", "(", "url_string", ")", "return", "HttpResponse", "(", "url", ")" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
AjaxUpdateView.post
Handles POST requests only argument: row_index HTML中第几行的标记,原值返回 app_label model_name pk app_label + model_name + pk 可以获取一个object method object + method 得到要调用的方法 其它参数,html和method中同时定义, 在上面的方法中使用
easyui/views.py
def post(self, request, *args, **kwargs): """ Handles POST requests only argument: row_index HTML中第几行的标记,原值返回 app_label model_name pk app_label + model_name + pk 可以获取一个object method object + method 得到要调用的方法 其它参数,html和method中同时定义, 在上面的方法中使用 """ query_dict = dict(self.request.POST.items()) # row_index原值返回,在datagrid对应行显示结果 row_index = query_dict.pop('row_index') # 如果命令执行成功,并且没有返回值,则返回 "text+'成功'" 的提示 text = query_dict.pop('text', None) app_label = query_dict.pop('app_label') model_name = query_dict.pop('model_name') method = query_dict.pop('method') pk = query_dict.pop('pk') model = get_model(app_label, model_name) object = model.objects.get(pk=pk) try: status = 0 # 0 success; else fail func = getattr(object, method) # query_dict中的其它参数传递给调用的方法, 所有参数都是字符串 print query_dict return_value = func(**query_dict) message = return_value except Exception, error_message: # ajax 处理失败 status = 1 # 1 means fail message = unicode(error_message) # 如果命令执行成功,并且没有返回值,则返回 "text+'成功'" 的提示 if not message: message = text+'成功' return self.render_to_json_response({'status':status, 'message':message, 'row_index':row_index})
def post(self, request, *args, **kwargs): """ Handles POST requests only argument: row_index HTML中第几行的标记,原值返回 app_label model_name pk app_label + model_name + pk 可以获取一个object method object + method 得到要调用的方法 其它参数,html和method中同时定义, 在上面的方法中使用 """ query_dict = dict(self.request.POST.items()) # row_index原值返回,在datagrid对应行显示结果 row_index = query_dict.pop('row_index') # 如果命令执行成功,并且没有返回值,则返回 "text+'成功'" 的提示 text = query_dict.pop('text', None) app_label = query_dict.pop('app_label') model_name = query_dict.pop('model_name') method = query_dict.pop('method') pk = query_dict.pop('pk') model = get_model(app_label, model_name) object = model.objects.get(pk=pk) try: status = 0 # 0 success; else fail func = getattr(object, method) # query_dict中的其它参数传递给调用的方法, 所有参数都是字符串 print query_dict return_value = func(**query_dict) message = return_value except Exception, error_message: # ajax 处理失败 status = 1 # 1 means fail message = unicode(error_message) # 如果命令执行成功,并且没有返回值,则返回 "text+'成功'" 的提示 if not message: message = text+'成功' return self.render_to_json_response({'status':status, 'message':message, 'row_index':row_index})
[ "Handles", "POST", "requests", "only", "argument", ":", "row_index", "HTML中第几行的标记,原值返回", "app_label", "model_name", "pk", "app_label", "+", "model_name", "+", "pk", "可以获取一个object", "method", "object", "+", "method", "得到要调用的方法", "其它参数,html和method中同时定义", "在上面的方法中使用" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/views.py#L53-L93
[ "def", "post", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "query_dict", "=", "dict", "(", "self", ".", "request", ".", "POST", ".", "items", "(", ")", ")", "# row_index原值返回,在datagrid对应行显示结果 ", "row_index", "=", "query_dict", ".", "pop", "(", "'row_index'", ")", "# 如果命令执行成功,并且没有返回值,则返回 \"text+'成功'\" 的提示", "text", "=", "query_dict", ".", "pop", "(", "'text'", ",", "None", ")", "app_label", "=", "query_dict", ".", "pop", "(", "'app_label'", ")", "model_name", "=", "query_dict", ".", "pop", "(", "'model_name'", ")", "method", "=", "query_dict", ".", "pop", "(", "'method'", ")", "pk", "=", "query_dict", ".", "pop", "(", "'pk'", ")", "model", "=", "get_model", "(", "app_label", ",", "model_name", ")", "object", "=", "model", ".", "objects", ".", "get", "(", "pk", "=", "pk", ")", "try", ":", "status", "=", "0", "# 0 success; else fail", "func", "=", "getattr", "(", "object", ",", "method", ")", "# query_dict中的其它参数传递给调用的方法, 所有参数都是字符串", "print", "query_dict", "return_value", "=", "func", "(", "*", "*", "query_dict", ")", "message", "=", "return_value", "except", "Exception", ",", "error_message", ":", "# ajax 处理失败", "status", "=", "1", "# 1 means fail", "message", "=", "unicode", "(", "error_message", ")", "# 如果命令执行成功,并且没有返回值,则返回 \"text+'成功'\" 的提示", "if", "not", "message", ":", "message", "=", "text", "+", "'成功'", "return", "self", ".", "render_to_json_response", "(", "{", "'status'", ":", "status", ",", "'message'", ":", "message", ",", "'row_index'", ":", "row_index", "}", ")" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
MenuListView.get_menu_checked
获取用户或者用户组checked的菜单列表 usermenu_form.html 中定义 usermenu 这两个model的定义类似,比如menus_checked和menus_show groupmenu @return eg. ['1', '8', '9', '10' ] 获取用户或者用户组的check_ids,会给出app_label, model_name, pk eg. /easyui/menulistview/?app_label=easyui&model_name=UserMenu&pk=1
easyui/views.py
def get_menu_checked(self, request): """ 获取用户或者用户组checked的菜单列表 usermenu_form.html 中定义 usermenu 这两个model的定义类似,比如menus_checked和menus_show groupmenu @return eg. ['1', '8', '9', '10' ] 获取用户或者用户组的check_ids,会给出app_label, model_name, pk eg. /easyui/menulistview/?app_label=easyui&model_name=UserMenu&pk=1 """ checked_id = [] qd = request.GET query_dict = dict(qd.items()) if query_dict: #object = get_object(**query_dict) app_label = query_dict['app_label'] model_name = query_dict['model_name'] pk = query_dict['pk'] model = get_model(app_label, model_name) object = model.objects.get(pk=pk) checked_id = object.menus_checked.split(',') return checked_id
def get_menu_checked(self, request): """ 获取用户或者用户组checked的菜单列表 usermenu_form.html 中定义 usermenu 这两个model的定义类似,比如menus_checked和menus_show groupmenu @return eg. ['1', '8', '9', '10' ] 获取用户或者用户组的check_ids,会给出app_label, model_name, pk eg. /easyui/menulistview/?app_label=easyui&model_name=UserMenu&pk=1 """ checked_id = [] qd = request.GET query_dict = dict(qd.items()) if query_dict: #object = get_object(**query_dict) app_label = query_dict['app_label'] model_name = query_dict['model_name'] pk = query_dict['pk'] model = get_model(app_label, model_name) object = model.objects.get(pk=pk) checked_id = object.menus_checked.split(',') return checked_id
[ "获取用户或者用户组checked的菜单列表", "usermenu_form", ".", "html", "中定义", "usermenu", "这两个model的定义类似,比如menus_checked和menus_show", "groupmenu" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/views.py#L156-L177
[ "def", "get_menu_checked", "(", "self", ",", "request", ")", ":", "checked_id", "=", "[", "]", "qd", "=", "request", ".", "GET", "query_dict", "=", "dict", "(", "qd", ".", "items", "(", ")", ")", "if", "query_dict", ":", "#object = get_object(**query_dict)", "app_label", "=", "query_dict", "[", "'app_label'", "]", "model_name", "=", "query_dict", "[", "'model_name'", "]", "pk", "=", "query_dict", "[", "'pk'", "]", "model", "=", "get_model", "(", "app_label", ",", "model_name", ")", "object", "=", "model", ".", "objects", ".", "get", "(", "pk", "=", "pk", ")", "checked_id", "=", "object", ".", "menus_checked", ".", "split", "(", "','", ")", "return", "checked_id" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
DownloaderBase.fetch
Verify if the file is already downloaded and complete. If they don't exists or if are not complete, use homura download function to fetch files. Return a list with the path of the downloaded file and the size of the remote file.
lc8_download/lc8.py
def fetch(self, url, path, filename): """Verify if the file is already downloaded and complete. If they don't exists or if are not complete, use homura download function to fetch files. Return a list with the path of the downloaded file and the size of the remote file. """ logger.debug('initializing download in ', url) remote_file_size = self.get_remote_file_size(url) if exists(join(path, filename)): size = getsize(join(path, filename)) if size == remote_file_size: logger.error('%s already exists on your system' % filename) print('%s already exists on your system' % filename) return [join(path, filename), size] logger.debug('Downloading: %s' % filename) print('Downloading: %s' % filename) fetch(url, path) print('stored at %s' % path) logger.debug('stored at %s' % path) return [join(path, filename), remote_file_size]
def fetch(self, url, path, filename): """Verify if the file is already downloaded and complete. If they don't exists or if are not complete, use homura download function to fetch files. Return a list with the path of the downloaded file and the size of the remote file. """ logger.debug('initializing download in ', url) remote_file_size = self.get_remote_file_size(url) if exists(join(path, filename)): size = getsize(join(path, filename)) if size == remote_file_size: logger.error('%s already exists on your system' % filename) print('%s already exists on your system' % filename) return [join(path, filename), size] logger.debug('Downloading: %s' % filename) print('Downloading: %s' % filename) fetch(url, path) print('stored at %s' % path) logger.debug('stored at %s' % path) return [join(path, filename), remote_file_size]
[ "Verify", "if", "the", "file", "is", "already", "downloaded", "and", "complete", ".", "If", "they", "don", "t", "exists", "or", "if", "are", "not", "complete", "use", "homura", "download", "function", "to", "fetch", "files", ".", "Return", "a", "list", "with", "the", "path", "of", "the", "downloaded", "file", "and", "the", "size", "of", "the", "remote", "file", "." ]
cenima-ibama/lc8_download
python
https://github.com/cenima-ibama/lc8_download/blob/d366e8b42b143597c71663ccb838bf8375c8d817/lc8_download/lc8.py#L44-L65
[ "def", "fetch", "(", "self", ",", "url", ",", "path", ",", "filename", ")", ":", "logger", ".", "debug", "(", "'initializing download in '", ",", "url", ")", "remote_file_size", "=", "self", ".", "get_remote_file_size", "(", "url", ")", "if", "exists", "(", "join", "(", "path", ",", "filename", ")", ")", ":", "size", "=", "getsize", "(", "join", "(", "path", ",", "filename", ")", ")", "if", "size", "==", "remote_file_size", ":", "logger", ".", "error", "(", "'%s already exists on your system'", "%", "filename", ")", "print", "(", "'%s already exists on your system'", "%", "filename", ")", "return", "[", "join", "(", "path", ",", "filename", ")", ",", "size", "]", "logger", ".", "debug", "(", "'Downloading: %s'", "%", "filename", ")", "print", "(", "'Downloading: %s'", "%", "filename", ")", "fetch", "(", "url", ",", "path", ")", "print", "(", "'stored at %s'", "%", "path", ")", "logger", ".", "debug", "(", "'stored at %s'", "%", "path", ")", "return", "[", "join", "(", "path", ",", "filename", ")", ",", "remote_file_size", "]" ]
d366e8b42b143597c71663ccb838bf8375c8d817
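The fetch logic above boils down to a "skip when a complete local copy exists" check before downloading. A minimal standalone sketch of that pattern, using only the standard library; download_fn is a hypothetical stand-in for the real homura-based fetch:

import os

def fetch_if_missing(url, path, filename, remote_size, download_fn):
    # Skip the transfer when a local file of the expected size already exists.
    target = os.path.join(path, filename)
    if os.path.exists(target) and os.path.getsize(target) == remote_size:
        return [target, remote_size]
    download_fn(url, path)  # placeholder for the real downloader
    return [target, remote_size]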
valid
DownloaderBase.validate_bands
Validate bands parameter.
lc8_download/lc8.py
def validate_bands(self, bands): """Validate bands parameter.""" if not isinstance(bands, list): logger.error('Parameter bands must be a "list"') raise TypeError('Parameter bands must be a "list"') valid_bands = list(range(1, 12)) + ['BQA'] for band in bands: if band not in valid_bands: logger.error('%s is not a valid band' % band) raise InvalidBandError('%s is not a valid band' % band)
def validate_bands(self, bands): """Validate bands parameter.""" if not isinstance(bands, list): logger.error('Parameter bands must be a "list"') raise TypeError('Parameter bands must be a "list"') valid_bands = list(range(1, 12)) + ['BQA'] for band in bands: if band not in valid_bands: logger.error('%s is not a valid band' % band) raise InvalidBandError('%s is not a valid band' % band)
[ "Validate", "bands", "parameter", "." ]
cenima-ibama/lc8_download
python
https://github.com/cenima-ibama/lc8_download/blob/d366e8b42b143597c71663ccb838bf8375c8d817/lc8_download/lc8.py#L76-L85
[ "def", "validate_bands", "(", "self", ",", "bands", ")", ":", "if", "not", "isinstance", "(", "bands", ",", "list", ")", ":", "logger", ".", "error", "(", "'Parameter bands must be a \"list\"'", ")", "raise", "TypeError", "(", "'Parameter bands must be a \"list\"'", ")", "valid_bands", "=", "list", "(", "range", "(", "1", ",", "12", ")", ")", "+", "[", "'BQA'", "]", "for", "band", "in", "bands", ":", "if", "band", "not", "in", "valid_bands", ":", "logger", ".", "error", "(", "'%s is not a valid band'", "%", "band", ")", "raise", "InvalidBandError", "(", "'%s is not a valid band'", "%", "band", ")" ]
d366e8b42b143597c71663ccb838bf8375c8d817
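The validation rule above accepts the integers 1 through 11 plus the string 'BQA'. A quick standalone check of that rule (not a call into the library itself):

valid_bands = list(range(1, 12)) + ['BQA']
for band in [4, 3, 2, 'BQA']:
    assert band in valid_bands          # all accepted
for band in [0, 12, 'B4']:
    assert band not in valid_bands      # these would raise InvalidBandError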
valid
GoogleDownloader.validate_sceneInfo
Check scene name and whether remote file exists. Raises WrongSceneNameError if the scene name is wrong.
lc8_download/lc8.py
def validate_sceneInfo(self): """Check scene name and whether remote file exists. Raises WrongSceneNameError if the scene name is wrong. """ if self.sceneInfo.prefix not in self.__satellitesMap: logger.error('Google Downloader: Prefix of %s (%s) is invalid' % (self.sceneInfo.name, self.sceneInfo.prefix)) raise WrongSceneNameError('Google Downloader: Prefix of %s (%s) is invalid' % (self.sceneInfo.name, self.sceneInfo.prefix))
def validate_sceneInfo(self): """Check scene name and whether remote file exists. Raises WrongSceneNameError if the scene name is wrong. """ if self.sceneInfo.prefix not in self.__satellitesMap: logger.error('Google Downloader: Prefix of %s (%s) is invalid' % (self.sceneInfo.name, self.sceneInfo.prefix)) raise WrongSceneNameError('Google Downloader: Prefix of %s (%s) is invalid' % (self.sceneInfo.name, self.sceneInfo.prefix))
[ "Check", "scene", "name", "and", "whether", "remote", "file", "exists", ".", "Raises", "WrongSceneNameError", "if", "the", "scene", "name", "is", "wrong", "." ]
cenima-ibama/lc8_download
python
https://github.com/cenima-ibama/lc8_download/blob/d366e8b42b143597c71663ccb838bf8375c8d817/lc8_download/lc8.py#L117-L125
[ "def", "validate_sceneInfo", "(", "self", ")", ":", "if", "self", ".", "sceneInfo", ".", "prefix", "not", "in", "self", ".", "__satellitesMap", ":", "logger", ".", "error", "(", "'Google Downloader: Prefix of %s (%s) is invalid'", "%", "(", "self", ".", "sceneInfo", ".", "name", ",", "self", ".", "sceneInfo", ".", "prefix", ")", ")", "raise", "WrongSceneNameError", "(", "'Google Downloader: Prefix of %s (%s) is invalid'", "%", "(", "self", ".", "sceneInfo", ".", "name", ",", "self", ".", "sceneInfo", ".", "prefix", ")", ")" ]
d366e8b42b143597c71663ccb838bf8375c8d817
valid
GoogleDownloader.download
Download remote .tar.bz file.
lc8_download/lc8.py
def download(self, bands, download_dir=None, metadata=False): """Download remote .tar.bz file.""" super(GoogleDownloader, self).validate_bands(bands) pattern = re.compile('^[^\s]+_(.+)\.tiff?', re.I) image_list = [] band_list = ['B%i' % (i,) if isinstance(i, int) else i for i in bands] if download_dir is None: download_dir = DOWNLOAD_DIR check_create_folder(join(download_dir, self.sceneInfo.name)) filename = "%s%s" % (self.sceneInfo.name, self.__remote_file_ext) downloaded = self.fetch(self.remote_file_url, download_dir, filename) try: tar = tarfile.open(downloaded[0], 'r') folder_path = join(download_dir, self.sceneInfo.name) logger.debug('Starting data extraction in directory ', folder_path) tar.extractall(folder_path) remove(downloaded[0]) images_path = listdir(folder_path) for image_path in images_path: matched = pattern.match(image_path) file_path = join(folder_path, image_path) if matched and matched.group(1) in band_list: image_list.append([file_path, getsize(file_path)]) elif matched: remove(file_path) except tarfile.ReadError as error: logger.error('Error when extracting files: ', error) print('Error when extracting files.') return image_list
def download(self, bands, download_dir=None, metadata=False): """Download remote .tar.bz file.""" super(GoogleDownloader, self).validate_bands(bands) pattern = re.compile('^[^\s]+_(.+)\.tiff?', re.I) image_list = [] band_list = ['B%i' % (i,) if isinstance(i, int) else i for i in bands] if download_dir is None: download_dir = DOWNLOAD_DIR check_create_folder(join(download_dir, self.sceneInfo.name)) filename = "%s%s" % (self.sceneInfo.name, self.__remote_file_ext) downloaded = self.fetch(self.remote_file_url, download_dir, filename) try: tar = tarfile.open(downloaded[0], 'r') folder_path = join(download_dir, self.sceneInfo.name) logger.debug('Starting data extraction in directory ', folder_path) tar.extractall(folder_path) remove(downloaded[0]) images_path = listdir(folder_path) for image_path in images_path: matched = pattern.match(image_path) file_path = join(folder_path, image_path) if matched and matched.group(1) in band_list: image_list.append([file_path, getsize(file_path)]) elif matched: remove(file_path) except tarfile.ReadError as error: logger.error('Error when extracting files: ', error) print('Error when extracting files.') return image_list
[ "Download", "remote", ".", "tar", ".", "bz", "file", "." ]
cenima-ibama/lc8_download
python
https://github.com/cenima-ibama/lc8_download/blob/d366e8b42b143597c71663ccb838bf8375c8d817/lc8_download/lc8.py#L131-L165
[ "def", "download", "(", "self", ",", "bands", ",", "download_dir", "=", "None", ",", "metadata", "=", "False", ")", ":", "super", "(", "GoogleDownloader", ",", "self", ")", ".", "validate_bands", "(", "bands", ")", "pattern", "=", "re", ".", "compile", "(", "'^[^\\s]+_(.+)\\.tiff?'", ",", "re", ".", "I", ")", "image_list", "=", "[", "]", "band_list", "=", "[", "'B%i'", "%", "(", "i", ",", ")", "if", "isinstance", "(", "i", ",", "int", ")", "else", "i", "for", "i", "in", "bands", "]", "if", "download_dir", "is", "None", ":", "download_dir", "=", "DOWNLOAD_DIR", "check_create_folder", "(", "join", "(", "download_dir", ",", "self", ".", "sceneInfo", ".", "name", ")", ")", "filename", "=", "\"%s%s\"", "%", "(", "self", ".", "sceneInfo", ".", "name", ",", "self", ".", "__remote_file_ext", ")", "downloaded", "=", "self", ".", "fetch", "(", "self", ".", "remote_file_url", ",", "download_dir", ",", "filename", ")", "try", ":", "tar", "=", "tarfile", ".", "open", "(", "downloaded", "[", "0", "]", ",", "'r'", ")", "folder_path", "=", "join", "(", "download_dir", ",", "self", ".", "sceneInfo", ".", "name", ")", "logger", ".", "debug", "(", "'Starting data extraction in directory '", ",", "folder_path", ")", "tar", ".", "extractall", "(", "folder_path", ")", "remove", "(", "downloaded", "[", "0", "]", ")", "images_path", "=", "listdir", "(", "folder_path", ")", "for", "image_path", "in", "images_path", ":", "matched", "=", "pattern", ".", "match", "(", "image_path", ")", "file_path", "=", "join", "(", "folder_path", ",", "image_path", ")", "if", "matched", "and", "matched", ".", "group", "(", "1", ")", "in", "band_list", ":", "image_list", ".", "append", "(", "[", "file_path", ",", "getsize", "(", "file_path", ")", "]", ")", "elif", "matched", ":", "remove", "(", "file_path", ")", "except", "tarfile", ".", "ReadError", "as", "error", ":", "logger", ".", "error", "(", "'Error when extracting files: '", ",", "error", ")", "print", "(", "'Error when extracting files.'", ")", "return", "image_list" ]
d366e8b42b143597c71663ccb838bf8375c8d817
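The band filtering in the Google downloader is driven by the regular expression and band_list shown in the code above. A small reproduction of just that matching step, with made-up file names, to show which extracted files are kept:

import re

pattern = re.compile(r'^[^\s]+_(.+)\.tiff?', re.I)
band_list = ['B%i' % b if isinstance(b, int) else b for b in [4, 3, 'BQA']]
for name in ['SCENE_B4.TIF', 'SCENE_B8.TIF', 'SCENE_BQA.TIF', 'SCENE_MTL.txt']:
    m = pattern.match(name)
    kept = bool(m) and m.group(1) in band_list
    print(name, 'kept' if kept else 'discarded')
# SCENE_B4.TIF and SCENE_BQA.TIF are kept; B8 matches the pattern but is filtered
# out by band_list, and the .txt metadata file never matches the pattern at all.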
valid
AWSDownloader.validate_sceneInfo
Check whether sceneInfo is valid to download from AWS Storage.
lc8_download/lc8.py
def validate_sceneInfo(self): """Check whether sceneInfo is valid to download from AWS Storage.""" if self.sceneInfo.prefix not in self.__prefixesValid: raise WrongSceneNameError('AWS: Prefix of %s (%s) is invalid' % (self.sceneInfo.name, self.sceneInfo.prefix))
def validate_sceneInfo(self): """Check whether sceneInfo is valid to download from AWS Storage.""" if self.sceneInfo.prefix not in self.__prefixesValid: raise WrongSceneNameError('AWS: Prefix of %s (%s) is invalid' % (self.sceneInfo.name, self.sceneInfo.prefix))
[ "Check", "whether", "sceneInfo", "is", "valid", "to", "download", "from", "AWS", "Storage", "." ]
cenima-ibama/lc8_download
python
https://github.com/cenima-ibama/lc8_download/blob/d366e8b42b143597c71663ccb838bf8375c8d817/lc8_download/lc8.py#L194-L198
[ "def", "validate_sceneInfo", "(", "self", ")", ":", "if", "self", ".", "sceneInfo", ".", "prefix", "not", "in", "self", ".", "__prefixesValid", ":", "raise", "WrongSceneNameError", "(", "'AWS: Prefix of %s (%s) is invalid'", "%", "(", "self", ".", "sceneInfo", ".", "name", ",", "self", ".", "sceneInfo", ".", "prefix", ")", ")" ]
d366e8b42b143597c71663ccb838bf8375c8d817
valid
AWSDownloader.remote_file_exists
Verify whether the file (scene) exists on AWS Storage.
lc8_download/lc8.py
def remote_file_exists(self): """Verify whether the file (scene) exists on AWS Storage.""" url = join(self.base_url, 'index.html') return super(AWSDownloader, self).remote_file_exists(url)
def remote_file_exists(self): """Verify whether the file (scene) exists on AWS Storage.""" url = join(self.base_url, 'index.html') return super(AWSDownloader, self).remote_file_exists(url)
[ "Verify", "whether", "the", "file", "(", "scene", ")", "exists", "on", "AWS", "Storage", "." ]
cenima-ibama/lc8_download
python
https://github.com/cenima-ibama/lc8_download/blob/d366e8b42b143597c71663ccb838bf8375c8d817/lc8_download/lc8.py#L200-L203
[ "def", "remote_file_exists", "(", "self", ")", ":", "url", "=", "join", "(", "self", ".", "base_url", ",", "'index.html'", ")", "return", "super", "(", "AWSDownloader", ",", "self", ")", ".", "remote_file_exists", "(", "url", ")" ]
d366e8b42b143597c71663ccb838bf8375c8d817
valid
AWSDownloader.download
Download each specified band and metadata.
lc8_download/lc8.py
def download(self, bands, download_dir=None, metadata=False): """Download each specified band and metadata.""" super(AWSDownloader, self).validate_bands(bands) if download_dir is None: download_dir = DOWNLOAD_DIR dest_dir = check_create_folder(join(download_dir, self.sceneInfo.name)) downloaded = [] for band in bands: if band == 'BQA': filename = '%s_%s.%s' % (self.sceneInfo.name, band, self.__remote_file_ext) else: filename = '%s_B%s.%s' % (self.sceneInfo.name, band, self.__remote_file_ext) band_url = join(self.base_url, filename) downloaded.append(self.fetch(band_url, dest_dir, filename)) if metadata: filename = '%s_MTL.txt' % (self.sceneInfo.name) url = join(self.base_url, filename) self.fetch(url, dest_dir, filename) return downloaded
def download(self, bands, download_dir=None, metadata=False): """Download each specified band and metadata.""" super(AWSDownloader, self).validate_bands(bands) if download_dir is None: download_dir = DOWNLOAD_DIR dest_dir = check_create_folder(join(download_dir, self.sceneInfo.name)) downloaded = [] for band in bands: if band == 'BQA': filename = '%s_%s.%s' % (self.sceneInfo.name, band, self.__remote_file_ext) else: filename = '%s_B%s.%s' % (self.sceneInfo.name, band, self.__remote_file_ext) band_url = join(self.base_url, filename) downloaded.append(self.fetch(band_url, dest_dir, filename)) if metadata: filename = '%s_MTL.txt' % (self.sceneInfo.name) url = join(self.base_url, filename) self.fetch(url, dest_dir, filename) return downloaded
[ "Download", "each", "specified", "band", "and", "metadata", "." ]
cenima-ibama/lc8_download
python
https://github.com/cenima-ibama/lc8_download/blob/d366e8b42b143597c71663ccb838bf8375c8d817/lc8_download/lc8.py#L205-L227
[ "def", "download", "(", "self", ",", "bands", ",", "download_dir", "=", "None", ",", "metadata", "=", "False", ")", ":", "super", "(", "AWSDownloader", ",", "self", ")", ".", "validate_bands", "(", "bands", ")", "if", "download_dir", "is", "None", ":", "download_dir", "=", "DOWNLOAD_DIR", "dest_dir", "=", "check_create_folder", "(", "join", "(", "download_dir", ",", "self", ".", "sceneInfo", ".", "name", ")", ")", "downloaded", "=", "[", "]", "for", "band", "in", "bands", ":", "if", "band", "==", "'BQA'", ":", "filename", "=", "'%s_%s.%s'", "%", "(", "self", ".", "sceneInfo", ".", "name", ",", "band", ",", "self", ".", "__remote_file_ext", ")", "else", ":", "filename", "=", "'%s_B%s.%s'", "%", "(", "self", ".", "sceneInfo", ".", "name", ",", "band", ",", "self", ".", "__remote_file_ext", ")", "band_url", "=", "join", "(", "self", ".", "base_url", ",", "filename", ")", "downloaded", ".", "append", "(", "self", ".", "fetch", "(", "band_url", ",", "dest_dir", ",", "filename", ")", ")", "if", "metadata", ":", "filename", "=", "'%s_MTL.txt'", "%", "(", "self", ".", "sceneInfo", ".", "name", ")", "url", "=", "join", "(", "self", ".", "base_url", ",", "filename", ")", "self", ".", "fetch", "(", "url", ",", "dest_dir", ",", "filename", ")", "return", "downloaded" ]
d366e8b42b143597c71663ccb838bf8375c8d817
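The per-band file names the AWS downloader requests follow the string patterns in the code above. A worked example of that formatting, where the scene name and the 'TIF' extension are placeholders (the real extension is stored in the class's private attribute):

scene = 'LC80010012015001LGN00'    # illustrative scene name
ext = 'TIF'                        # stand-in for the private remote-file extension
print('%s_B%s.%s' % (scene, 4, ext))      # LC80010012015001LGN00_B4.TIF
print('%s_%s.%s' % (scene, 'BQA', ext))   # LC80010012015001LGN00_BQA.TIF
print('%s_MTL.txt' % scene)               # metadata file requested when metadata=True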
valid
open_archive
Open an archive on a filesystem. This function tries to mimic the behaviour of `fs.open_fs` as closely as possible: it accepts either a FS URL or a filesystem instance, and will close all resources it had to open. Arguments: fs_url (FS or text_type): a FS URL, or a filesystem instance, where the archive file is located. archive (text_type): the path to the archive file on the given filesystem. Raises: `fs.opener._errors.Unsupported`: when the archive type is not supported (either the file extension is unknown or the opener requires unmet dependencies). Example: >>> from fs.archive import open_archive >>> with open_archive('mem://', 'test.tar.gz') as archive_fs: ... type(archive_fs) <class 'fs.archive.tarfs.TarFS'> Hint: This function finds the entry points defined in group ``fs.archive.open_archive``, using the names of the entry point as the registered extension.
fs/archive/opener.py
def open_archive(fs_url, archive): """Open an archive on a filesystem. This function tries to mimick the behaviour of `fs.open_fs` as closely as possible: it accepts either a FS URL or a filesystem instance, and will close all resources it had to open. Arguments: fs_url (FS or text_type): a FS URL, or a filesystem instance, where the archive file is located. archive (text_type): the path to the archive file on the given filesystem. Raises: `fs.opener._errors.Unsupported`: when the archive type is not supported (either the file extension is unknown or the opener requires unmet dependencies). Example: >>> from fs.archive import open_archive >>> with open_archive('mem://', 'test.tar.gz') as archive_fs: ... type(archive_fs) <class 'fs.archive.tarfs.TarFS'> Hint: This function finds the entry points defined in group ``fs.archive.open_archive``, using the names of the entry point as the registered extension. """ it = pkg_resources.iter_entry_points('fs.archive.open_archive') entry_point = next((ep for ep in it if archive.endswith(ep.name)), None) if entry_point is None: raise UnsupportedProtocol( 'unknown archive extension: {}'.format(archive)) try: archive_opener = entry_point.load() except pkg_resources.DistributionNotFound as df: # pragma: no cover six.raise_from(UnsupportedProtocol( 'extension {} requires {}'.format(entry_point.name, df.req)), None) try: binfile = None archive_fs = None fs = open_fs(fs_url) if issubclass(archive_opener, base.ArchiveFS): try: binfile = fs.openbin(archive, 'r+') except errors.ResourceNotFound: binfile = fs.openbin(archive, 'w') except errors.ResourceReadOnly: binfile = fs.openbin(archive, 'r') archive_opener = archive_opener._read_fs_cls elif issubclass(archive_opener, base.ArchiveReadFS): binfile = fs.openbin(archive, 'r') if not hasattr(binfile, 'name'): binfile.name = basename(archive) archive_fs = archive_opener(binfile) except Exception: getattr(archive_fs, 'close', lambda: None)() getattr(binfile, 'close', lambda: None)() raise else: return archive_fs
def open_archive(fs_url, archive): """Open an archive on a filesystem. This function tries to mimick the behaviour of `fs.open_fs` as closely as possible: it accepts either a FS URL or a filesystem instance, and will close all resources it had to open. Arguments: fs_url (FS or text_type): a FS URL, or a filesystem instance, where the archive file is located. archive (text_type): the path to the archive file on the given filesystem. Raises: `fs.opener._errors.Unsupported`: when the archive type is not supported (either the file extension is unknown or the opener requires unmet dependencies). Example: >>> from fs.archive import open_archive >>> with open_archive('mem://', 'test.tar.gz') as archive_fs: ... type(archive_fs) <class 'fs.archive.tarfs.TarFS'> Hint: This function finds the entry points defined in group ``fs.archive.open_archive``, using the names of the entry point as the registered extension. """ it = pkg_resources.iter_entry_points('fs.archive.open_archive') entry_point = next((ep for ep in it if archive.endswith(ep.name)), None) if entry_point is None: raise UnsupportedProtocol( 'unknown archive extension: {}'.format(archive)) try: archive_opener = entry_point.load() except pkg_resources.DistributionNotFound as df: # pragma: no cover six.raise_from(UnsupportedProtocol( 'extension {} requires {}'.format(entry_point.name, df.req)), None) try: binfile = None archive_fs = None fs = open_fs(fs_url) if issubclass(archive_opener, base.ArchiveFS): try: binfile = fs.openbin(archive, 'r+') except errors.ResourceNotFound: binfile = fs.openbin(archive, 'w') except errors.ResourceReadOnly: binfile = fs.openbin(archive, 'r') archive_opener = archive_opener._read_fs_cls elif issubclass(archive_opener, base.ArchiveReadFS): binfile = fs.openbin(archive, 'r') if not hasattr(binfile, 'name'): binfile.name = basename(archive) archive_fs = archive_opener(binfile) except Exception: getattr(archive_fs, 'close', lambda: None)() getattr(binfile, 'close', lambda: None)() raise else: return archive_fs
[ "Open", "an", "archive", "on", "a", "filesystem", "." ]
althonos/fs.archive
python
https://github.com/althonos/fs.archive/blob/a09bb5da56da6b96aca3e20841fa86dea7c5b79a/fs/archive/opener.py#L18-L89
[ "def", "open_archive", "(", "fs_url", ",", "archive", ")", ":", "it", "=", "pkg_resources", ".", "iter_entry_points", "(", "'fs.archive.open_archive'", ")", "entry_point", "=", "next", "(", "(", "ep", "for", "ep", "in", "it", "if", "archive", ".", "endswith", "(", "ep", ".", "name", ")", ")", ",", "None", ")", "if", "entry_point", "is", "None", ":", "raise", "UnsupportedProtocol", "(", "'unknown archive extension: {}'", ".", "format", "(", "archive", ")", ")", "try", ":", "archive_opener", "=", "entry_point", ".", "load", "(", ")", "except", "pkg_resources", ".", "DistributionNotFound", "as", "df", ":", "# pragma: no cover", "six", ".", "raise_from", "(", "UnsupportedProtocol", "(", "'extension {} requires {}'", ".", "format", "(", "entry_point", ".", "name", ",", "df", ".", "req", ")", ")", ",", "None", ")", "try", ":", "binfile", "=", "None", "archive_fs", "=", "None", "fs", "=", "open_fs", "(", "fs_url", ")", "if", "issubclass", "(", "archive_opener", ",", "base", ".", "ArchiveFS", ")", ":", "try", ":", "binfile", "=", "fs", ".", "openbin", "(", "archive", ",", "'r+'", ")", "except", "errors", ".", "ResourceNotFound", ":", "binfile", "=", "fs", ".", "openbin", "(", "archive", ",", "'w'", ")", "except", "errors", ".", "ResourceReadOnly", ":", "binfile", "=", "fs", ".", "openbin", "(", "archive", ",", "'r'", ")", "archive_opener", "=", "archive_opener", ".", "_read_fs_cls", "elif", "issubclass", "(", "archive_opener", ",", "base", ".", "ArchiveReadFS", ")", ":", "binfile", "=", "fs", ".", "openbin", "(", "archive", ",", "'r'", ")", "if", "not", "hasattr", "(", "binfile", ",", "'name'", ")", ":", "binfile", ".", "name", "=", "basename", "(", "archive", ")", "archive_fs", "=", "archive_opener", "(", "binfile", ")", "except", "Exception", ":", "getattr", "(", "archive_fs", ",", "'close'", ",", "lambda", ":", "None", ")", "(", ")", "getattr", "(", "binfile", ",", "'close'", ",", "lambda", ":", "None", ")", "(", ")", "raise", "else", ":", "return", "archive_fs" ]
a09bb5da56da6b96aca3e20841fa86dea7c5b79a
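A small usage sketch for open_archive in addition to the doctest above; the directory and archive name are hypothetical, and only standard pyfilesystem2 methods are used on the returned filesystem:

from fs.archive import open_archive

# assumes ./data exists and already contains project.tar.gz
with open_archive('osfs://./data', 'project.tar.gz') as archive_fs:
    print(archive_fs.listdir('/'))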
valid
iso_name_slugify
Slugify a name in the ISO-9660 way. Example: >>> iso_name_slugify('épatant') "_patant"
fs/archive/isofs/_utils.py
def iso_name_slugify(name): """Slugify a name in the ISO-9660 way. Example: >>> slugify('épatant') "_patant" """ name = name.encode('ascii', 'replace').replace(b'?', b'_') return name.decode('ascii')
def iso_name_slugify(name): """Slugify a name in the ISO-9660 way. Example: >>> slugify('épatant') "_patant" """ name = name.encode('ascii', 'replace').replace(b'?', b'_') return name.decode('ascii')
[ "Slugify", "a", "name", "in", "the", "ISO", "-", "9660", "way", "." ]
althonos/fs.archive
python
https://github.com/althonos/fs.archive/blob/a09bb5da56da6b96aca3e20841fa86dea7c5b79a/fs/archive/isofs/_utils.py#L11-L19
[ "def", "iso_name_slugify", "(", "name", ")", ":", "name", "=", "name", ".", "encode", "(", "'ascii'", ",", "'replace'", ")", ".", "replace", "(", "b'?'", ",", "b'_'", ")", "return", "name", ".", "decode", "(", "'ascii'", ")" ]
a09bb5da56da6b96aca3e20841fa86dea7c5b79a
valid
iso_name_increment
Increment an ISO name to avoid name collision. Example: >>> iso_name_increment('foo.txt') 'foo1.txt' >>> iso_name_increment('bar10') 'bar11' >>> iso_name_increment('bar99', max_length=5) 'ba100'
fs/archive/isofs/_utils.py
def iso_name_increment(name, is_dir=False, max_length=8): """Increment an ISO name to avoid name collision. Example: >>> iso_name_increment('foo.txt') 'foo1.txt' >>> iso_name_increment('bar10') 'bar11' >>> iso_name_increment('bar99', max_length=5) 'ba100' """ # Split the extension if needed if not is_dir and '.' in name: name, ext = name.rsplit('.') ext = '.{}'.format(ext) else: ext = '' # Find the position of the last letter for position, char in reversed(list(enumerate(name))): if char not in string.digits: break # Extract the numbers and the text from the name base, tag = name[:position+1], name[position+1:] tag = str(int(tag or 0) + 1) # Crop the text if the numbers are too long if len(tag) + len(base) > max_length: base = base[:max_length - len(tag)] # Return the name with the extension return ''.join([base, tag, ext])
def iso_name_increment(name, is_dir=False, max_length=8): """Increment an ISO name to avoid name collision. Example: >>> iso_name_increment('foo.txt') 'foo1.txt' >>> iso_name_increment('bar10') 'bar11' >>> iso_name_increment('bar99', max_length=5) 'ba100' """ # Split the extension if needed if not is_dir and '.' in name: name, ext = name.rsplit('.') ext = '.{}'.format(ext) else: ext = '' # Find the position of the last letter for position, char in reversed(list(enumerate(name))): if char not in string.digits: break # Extract the numbers and the text from the name base, tag = name[:position+1], name[position+1:] tag = str(int(tag or 0) + 1) # Crop the text if the numbers are too long if len(tag) + len(base) > max_length: base = base[:max_length - len(tag)] # Return the name with the extension return ''.join([base, tag, ext])
[ "Increment", "an", "ISO", "name", "to", "avoid", "name", "collision", "." ]
althonos/fs.archive
python
https://github.com/althonos/fs.archive/blob/a09bb5da56da6b96aca3e20841fa86dea7c5b79a/fs/archive/isofs/_utils.py#L22-L54
[ "def", "iso_name_increment", "(", "name", ",", "is_dir", "=", "False", ",", "max_length", "=", "8", ")", ":", "# Split the extension if needed", "if", "not", "is_dir", "and", "'.'", "in", "name", ":", "name", ",", "ext", "=", "name", ".", "rsplit", "(", "'.'", ")", "ext", "=", "'.{}'", ".", "format", "(", "ext", ")", "else", ":", "ext", "=", "''", "# Find the position of the last letter", "for", "position", ",", "char", "in", "reversed", "(", "list", "(", "enumerate", "(", "name", ")", ")", ")", ":", "if", "char", "not", "in", "string", ".", "digits", ":", "break", "# Extract the numbers and the text from the name", "base", ",", "tag", "=", "name", "[", ":", "position", "+", "1", "]", ",", "name", "[", "position", "+", "1", ":", "]", "tag", "=", "str", "(", "int", "(", "tag", "or", "0", ")", "+", "1", ")", "# Crop the text if the numbers are too long", "if", "len", "(", "tag", ")", "+", "len", "(", "base", ")", ">", "max_length", ":", "base", "=", "base", "[", ":", "max_length", "-", "len", "(", "tag", ")", "]", "# Return the name with the extension", "return", "''", ".", "join", "(", "[", "base", ",", "tag", ",", "ext", "]", ")" ]
a09bb5da56da6b96aca3e20841fa86dea7c5b79a
valid
iso_path_slugify
Slugify a path, maintaining a map with the previously slugified paths. The path table is used to prevent slugified names from colliding, using the `iso_name_increment` function to deduplicate slugs. Example: >>> path_table = {'/': '/'} >>> iso_path_slugify('/ébc.txt', path_table) '/_BC.TXT' >>> iso_path_slugify('/àbc.txt', path_table) '/_BC2.TXT'
fs/archive/isofs/_utils.py
def iso_path_slugify(path, path_table, is_dir=False, strict=True): """Slugify a path, maintaining a map with the previously slugified paths. The path table is used to prevent slugified names from collisioning, using the `iso_name_increment` function to deduplicate slugs. Example: >>> path_table = {'/': '/'} >>> iso_path_slugify('/ébc.txt', path_table) '/_BC.TXT' >>> iso_path_slugify('/àbc.txt', path_table) '/_BC2.TXT' """ # Split the path to extract the parent and basename parent, base = split(path) # Get the parent in slugified form slug_parent = path_table[parent] # Slugify the base name if is_dir: slug_base = iso_name_slugify(base)[:8] else: name, ext = base.rsplit('.', 1) if '.' in base else (base, '') slug_base = '.'.join([iso_name_slugify(name)[:8], ext]) if strict: slug_base = slug_base.upper() # Deduplicate slug if needed and update path_table slugs = set(path_table.values()) path_table[path] = slug = join(slug_parent, slug_base) while slug in slugs: slug_base = iso_name_increment(slug_base, is_dir) path_table[path] = slug = join(slug_parent, slug_base) # Return the unique slug return slug
def iso_path_slugify(path, path_table, is_dir=False, strict=True): """Slugify a path, maintaining a map with the previously slugified paths. The path table is used to prevent slugified names from collisioning, using the `iso_name_increment` function to deduplicate slugs. Example: >>> path_table = {'/': '/'} >>> iso_path_slugify('/ébc.txt', path_table) '/_BC.TXT' >>> iso_path_slugify('/àbc.txt', path_table) '/_BC2.TXT' """ # Split the path to extract the parent and basename parent, base = split(path) # Get the parent in slugified form slug_parent = path_table[parent] # Slugify the base name if is_dir: slug_base = iso_name_slugify(base)[:8] else: name, ext = base.rsplit('.', 1) if '.' in base else (base, '') slug_base = '.'.join([iso_name_slugify(name)[:8], ext]) if strict: slug_base = slug_base.upper() # Deduplicate slug if needed and update path_table slugs = set(path_table.values()) path_table[path] = slug = join(slug_parent, slug_base) while slug in slugs: slug_base = iso_name_increment(slug_base, is_dir) path_table[path] = slug = join(slug_parent, slug_base) # Return the unique slug return slug
[ "Slugify", "a", "path", "maintaining", "a", "map", "with", "the", "previously", "slugified", "paths", "." ]
althonos/fs.archive
python
https://github.com/althonos/fs.archive/blob/a09bb5da56da6b96aca3e20841fa86dea7c5b79a/fs/archive/isofs/_utils.py#L57-L93
[ "def", "iso_path_slugify", "(", "path", ",", "path_table", ",", "is_dir", "=", "False", ",", "strict", "=", "True", ")", ":", "# Split the path to extract the parent and basename", "parent", ",", "base", "=", "split", "(", "path", ")", "# Get the parent in slugified form", "slug_parent", "=", "path_table", "[", "parent", "]", "# Slugify the base name", "if", "is_dir", ":", "slug_base", "=", "iso_name_slugify", "(", "base", ")", "[", ":", "8", "]", "else", ":", "name", ",", "ext", "=", "base", ".", "rsplit", "(", "'.'", ",", "1", ")", "if", "'.'", "in", "base", "else", "(", "base", ",", "''", ")", "slug_base", "=", "'.'", ".", "join", "(", "[", "iso_name_slugify", "(", "name", ")", "[", ":", "8", "]", ",", "ext", "]", ")", "if", "strict", ":", "slug_base", "=", "slug_base", ".", "upper", "(", ")", "# Deduplicate slug if needed and update path_table", "slugs", "=", "set", "(", "path_table", ".", "values", "(", ")", ")", "path_table", "[", "path", "]", "=", "slug", "=", "join", "(", "slug_parent", ",", "slug_base", ")", "while", "slug", "in", "slugs", ":", "slug_base", "=", "iso_name_increment", "(", "slug_base", ",", "is_dir", ")", "path_table", "[", "path", "]", "=", "slug", "=", "join", "(", "slug_parent", ",", "slug_base", ")", "# Return the unique slug", "return", "slug" ]
a09bb5da56da6b96aca3e20841fa86dea7c5b79a
valid
EasyUIListMixin.get_querydict
This function depends on self.method. self.method is not used for now; the querydict always comes from POST.
easyui/mixins/easyui_mixins.py
def get_querydict(self): """ 这个函数跟 self.method有关 self.method 暂时没用, querydict都是POST的 """ if self.method: querydict = getattr(self.request, self.method.upper()) else: querydict = getattr(self.request, 'POST'.upper()) # copy make querydict mutable query_dict = dict(querydict.items()) return query_dict
def get_querydict(self): """ 这个函数跟 self.method有关 self.method 暂时没用, querydict都是POST的 """ if self.method: querydict = getattr(self.request, self.method.upper()) else: querydict = getattr(self.request, 'POST'.upper()) # copy make querydict mutable query_dict = dict(querydict.items()) return query_dict
[ "这个函数跟", "self", ".", "method有关", "self", ".", "method", "暂时没用", "querydict都是POST的" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/easyui_mixins.py#L85-L97
[ "def", "get_querydict", "(", "self", ")", ":", "if", "self", ".", "method", ":", "querydict", "=", "getattr", "(", "self", ".", "request", ",", "self", ".", "method", ".", "upper", "(", ")", ")", "else", ":", "querydict", "=", "getattr", "(", "self", ".", "request", ",", "'POST'", ".", "upper", "(", ")", ")", "# copy make querydict mutable", "query_dict", "=", "dict", "(", "querydict", ".", "items", "(", ")", ")", "return", "query_dict" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
EasyUIListMixin.get_filter_dict
Process the filter fields. rows: how many rows per page. page: page number, starting at 1. order: desc or asc. sort: the field to sort by, used as order_by(sort). Field names and values in the querydict must be directly usable in a query.
easyui/mixins/easyui_mixins.py
def get_filter_dict(self): ''' 处理过滤字段 rows 一页显示多少行 page 第几页, 1开始 order desc, asc sort 指定排序的字段 order_by(sort) querydict 中的字段名和格式需要可以直接查询 ''' querydict = self.get_querydict() # post ,在cookie中设置了csrfmiddlewaretoken if querydict.has_key('csrfmiddlewaretoken'): querydict.pop('csrfmiddlewaretoken') try: page = int(querydict.pop('page')) rows = int(querydict.pop('rows')) setattr(self, 'easyui_page', page) setattr(self, 'easyui_rows', rows) except KeyError: setattr(self, 'easyui_page', None) setattr(self, 'easyui_rows', None) try: # order-> string The default sort order, can only be 'asc' or 'desc' # sort-> filed name # order_by('id') order_by('-id') order = querydict.pop('order') sort = querydict.pop('sort') # order = 1 # sort = 1 if order == 'asc': setattr(self, 'easyui_order', sort) else: setattr(self, 'easyui_order', '-%s'% sort) except KeyError: setattr(self, 'easyui_order', None) # 过滤掉那些没有填写数据的input字段 remove_key = [] for key in querydict: if querydict[key] == '': remove_key.append(key) for key in remove_key: querydict.pop(key) return querydict
def get_filter_dict(self): ''' 处理过滤字段 rows 一页显示多少行 page 第几页, 1开始 order desc, asc sort 指定排序的字段 order_by(sort) querydict 中的字段名和格式需要可以直接查询 ''' querydict = self.get_querydict() # post ,在cookie中设置了csrfmiddlewaretoken if querydict.has_key('csrfmiddlewaretoken'): querydict.pop('csrfmiddlewaretoken') try: page = int(querydict.pop('page')) rows = int(querydict.pop('rows')) setattr(self, 'easyui_page', page) setattr(self, 'easyui_rows', rows) except KeyError: setattr(self, 'easyui_page', None) setattr(self, 'easyui_rows', None) try: # order-> string The default sort order, can only be 'asc' or 'desc' # sort-> filed name # order_by('id') order_by('-id') order = querydict.pop('order') sort = querydict.pop('sort') # order = 1 # sort = 1 if order == 'asc': setattr(self, 'easyui_order', sort) else: setattr(self, 'easyui_order', '-%s'% sort) except KeyError: setattr(self, 'easyui_order', None) # 过滤掉那些没有填写数据的input字段 remove_key = [] for key in querydict: if querydict[key] == '': remove_key.append(key) for key in remove_key: querydict.pop(key) return querydict
[ "处理过滤字段", "rows", "一页显示多少行", "page", "第几页", "1开始", "order", "desc", "asc", "sort", "指定排序的字段", "order_by", "(", "sort", ")", "querydict", "中的字段名和格式需要可以直接查询" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/easyui_mixins.py#L101-L149
[ "def", "get_filter_dict", "(", "self", ")", ":", "querydict", "=", "self", ".", "get_querydict", "(", ")", "# post ,在cookie中设置了csrfmiddlewaretoken", "if", "querydict", ".", "has_key", "(", "'csrfmiddlewaretoken'", ")", ":", "querydict", ".", "pop", "(", "'csrfmiddlewaretoken'", ")", "try", ":", "page", "=", "int", "(", "querydict", ".", "pop", "(", "'page'", ")", ")", "rows", "=", "int", "(", "querydict", ".", "pop", "(", "'rows'", ")", ")", "setattr", "(", "self", ",", "'easyui_page'", ",", "page", ")", "setattr", "(", "self", ",", "'easyui_rows'", ",", "rows", ")", "except", "KeyError", ":", "setattr", "(", "self", ",", "'easyui_page'", ",", "None", ")", "setattr", "(", "self", ",", "'easyui_rows'", ",", "None", ")", "try", ":", "# order-> string The default sort order, can only be 'asc' or 'desc' ", "# sort-> filed name", "# order_by('id') order_by('-id')", "order", "=", "querydict", ".", "pop", "(", "'order'", ")", "sort", "=", "querydict", ".", "pop", "(", "'sort'", ")", "# order = 1", "# sort = 1", "if", "order", "==", "'asc'", ":", "setattr", "(", "self", ",", "'easyui_order'", ",", "sort", ")", "else", ":", "setattr", "(", "self", ",", "'easyui_order'", ",", "'-%s'", "%", "sort", ")", "except", "KeyError", ":", "setattr", "(", "self", ",", "'easyui_order'", ",", "None", ")", "# 过滤掉那些没有填写数据的input字段", "remove_key", "=", "[", "]", "for", "key", "in", "querydict", ":", "if", "querydict", "[", "key", "]", "==", "''", ":", "remove_key", ".", "append", "(", "key", ")", "for", "key", "in", "remove_key", ":", "querydict", ".", "pop", "(", "key", ")", "return", "querydict" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
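The parsing rules described above (pop page/rows, turn order plus sort into an order_by argument, drop empty inputs) can be summarised in a standalone sketch; parse_easyui_params is a hypothetical helper, not part of the easyui package:

def parse_easyui_params(querydict):
    qd = dict(querydict)
    qd.pop('csrfmiddlewaretoken', None)
    page = int(qd.pop('page')) if 'page' in qd else None
    rows = int(qd.pop('rows')) if 'rows' in qd else None
    order, sort = qd.pop('order', None), qd.pop('sort', None)
    ordering = None if sort is None else (sort if order == 'asc' else '-%s' % sort)
    filters = {k: v for k, v in qd.items() if v != ''}   # drop empty inputs
    return page, rows, ordering, filters

print(parse_easyui_params({'page': '2', 'rows': '10', 'order': 'desc', 'sort': 'id', 'name': ''}))
# -> (2, 10, '-id', {})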
valid
EasyUIListMixin.get_slice_start
Return the start of the queryset slice.
easyui/mixins/easyui_mixins.py
def get_slice_start(self): """ 返回queryset切片的头 """ value = None if self.easyui_page: value = (self.easyui_page -1) * self.easyui_rows return value
def get_slice_start(self): """ 返回queryset切片的头 """ value = None if self.easyui_page: value = (self.easyui_page -1) * self.easyui_rows return value
[ "返回queryset切片的头" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/easyui_mixins.py#L151-L158
[ "def", "get_slice_start", "(", "self", ")", ":", "value", "=", "None", "if", "self", ".", "easyui_page", ":", "value", "=", "(", "self", ".", "easyui_page", "-", "1", ")", "*", "self", ".", "easyui_rows", "return", "value" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
EasyUIListMixin.get_slice_end
Return the end of the queryset slice.
easyui/mixins/easyui_mixins.py
def get_slice_end(self): """ 返回queryset切片的尾巴 """ value = None if self.easyui_page: value = self.easyui_page * self.easyui_rows return value
def get_slice_end(self): """ 返回queryset切片的尾巴 """ value = None if self.easyui_page: value = self.easyui_page * self.easyui_rows return value
[ "返回queryset切片的尾巴" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/easyui_mixins.py#L160-L167
[ "def", "get_slice_end", "(", "self", ")", ":", "value", "=", "None", "if", "self", ".", "easyui_page", ":", "value", "=", "self", ".", "easyui_page", "*", "self", ".", "easyui_rows", "return", "value" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
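The two slice helpers above implement standard offset/limit pagination. Worked numbers, assuming page and rows have been parsed as in get_filter_dict:

page, rows = 3, 20
start, end = (page - 1) * rows, page * rows
assert (start, end) == (40, 60)   # queryset.all()[40:60] is the third page of 20 rows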
valid
EasyUIListMixin.get_queryset
queryset
easyui/mixins/easyui_mixins.py
def get_queryset(self): """ queryset """ filter_dict = self.get_filter_dict() queryset = super(EasyUIListMixin, self).get_queryset() queryset = queryset.filter(**filter_dict) if self.easyui_order: # 如果指定了排序字段,返回排序的queryset queryset = queryset.order_by(self.easyui_order) return queryset
def get_queryset(self): """ queryset """ filter_dict = self.get_filter_dict() queryset = super(EasyUIListMixin, self).get_queryset() queryset = queryset.filter(**filter_dict) if self.easyui_order: # 如果指定了排序字段,返回排序的queryset queryset = queryset.order_by(self.easyui_order) return queryset
[ "queryset" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/easyui_mixins.py#L169-L180
[ "def", "get_queryset", "(", "self", ")", ":", "filter_dict", "=", "self", ".", "get_filter_dict", "(", ")", "queryset", "=", "super", "(", "EasyUIListMixin", ",", "self", ")", ".", "get_queryset", "(", ")", "queryset", "=", "queryset", ".", "filter", "(", "*", "*", "filter_dict", ")", "if", "self", ".", "easyui_order", ":", "# 如果指定了排序字段,返回排序的queryset", "queryset", "=", "queryset", ".", "order_by", "(", "self", ".", "easyui_order", ")", "return", "queryset" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
EasyUIListMixin.get_limit_queryset
Return the queryset after pagination.
easyui/mixins/easyui_mixins.py
def get_limit_queryset(self): """ 返回分页之后的queryset """ queryset = self.get_queryset() limit_queryset = queryset.all()[self.get_slice_start() :self.get_slice_end()] #等增加排序 return limit_queryset
def get_limit_queryset(self): """ 返回分页之后的queryset """ queryset = self.get_queryset() limit_queryset = queryset.all()[self.get_slice_start() :self.get_slice_end()] #等增加排序 return limit_queryset
[ "返回分页之后的queryset" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/easyui_mixins.py#L183-L189
[ "def", "get_limit_queryset", "(", "self", ")", ":", "queryset", "=", "self", ".", "get_queryset", "(", ")", "limit_queryset", "=", "queryset", ".", "all", "(", ")", "[", "self", ".", "get_slice_start", "(", ")", ":", "self", ".", "get_slice_end", "(", ")", "]", "#等增加排序", "return", "limit_queryset" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
EasyUIListMixin.get_easyui_context
Initialize an empty context.
easyui/mixins/easyui_mixins.py
def get_easyui_context(self, **kwargs): """ 初始化一个空的context """ context = {} queryset = self.get_queryset() limit_queryset = self.get_limit_queryset() data = model_serialize(limit_queryset, self.extra_fields, self.remove_fields) count = queryset.count() # datagrid 返回的数据中,total是总的行数,rows是查询到的结果集 context.update(rows=data) context.update(total=count) return context
def get_easyui_context(self, **kwargs): """ 初始化一个空的context """ context = {} queryset = self.get_queryset() limit_queryset = self.get_limit_queryset() data = model_serialize(limit_queryset, self.extra_fields, self.remove_fields) count = queryset.count() # datagrid 返回的数据中,total是总的行数,rows是查询到的结果集 context.update(rows=data) context.update(total=count) return context
[ "初始化一个空的context" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/easyui_mixins.py#L191-L203
[ "def", "get_easyui_context", "(", "self", ",", "*", "*", "kwargs", ")", ":", "context", "=", "{", "}", "queryset", "=", "self", ".", "get_queryset", "(", ")", "limit_queryset", "=", "self", ".", "get_limit_queryset", "(", ")", "data", "=", "model_serialize", "(", "limit_queryset", ",", "self", ".", "extra_fields", ",", "self", ".", "remove_fields", ")", "count", "=", "queryset", ".", "count", "(", ")", "# datagrid 返回的数据中,total是总的行数,rows是查询到的结果集", "context", ".", "update", "(", "rows", "=", "data", ")", "context", ".", "update", "(", "total", "=", "count", ")", "return", "context" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
register_views
app_name: the app name. view_filename: the file where the views live. urlpatterns: urlpatterns that already exist in the URLconf. return urlpatterns. Only class-based views whose names end with 'View' are imported.
easyui/utils.py
def register_views(app_name, view_filename, urlpatterns=None): """ app_name APP名 view_filename views 所在的文件 urlpatterns url中已经存在的urlpatterns return urlpatterns 只导入View结尾的,是类的视图 """ app_module = __import__(app_name) view_module = getattr(app_module, view_filename) views = dir(view_module) for view_name in views: if view_name.endswith('View'): view = getattr(view_module, view_name) if isinstance(view, object): if urlpatterns: urlpatterns += patterns('', url(r'^(?i)%s/$' % view_name, view.as_view(), name=view_name), ) else: urlpatterns = patterns('', url(r'^(?i)%s/$' % view_name, view.as_view(), name=view_name), ) else: pass return urlpatterns
def register_views(app_name, view_filename, urlpatterns=None): """ app_name APP名 view_filename views 所在的文件 urlpatterns url中已经存在的urlpatterns return urlpatterns 只导入View结尾的,是类的视图 """ app_module = __import__(app_name) view_module = getattr(app_module, view_filename) views = dir(view_module) for view_name in views: if view_name.endswith('View'): view = getattr(view_module, view_name) if isinstance(view, object): if urlpatterns: urlpatterns += patterns('', url(r'^(?i)%s/$' % view_name, view.as_view(), name=view_name), ) else: urlpatterns = patterns('', url(r'^(?i)%s/$' % view_name, view.as_view(), name=view_name), ) else: pass return urlpatterns
[ "app_name", "APP名", "view_filename", "views", "所在的文件", "urlpatterns", "url中已经存在的urlpatterns" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/utils.py#L21-L48
[ "def", "register_views", "(", "app_name", ",", "view_filename", ",", "urlpatterns", "=", "None", ")", ":", "app_module", "=", "__import__", "(", "app_name", ")", "view_module", "=", "getattr", "(", "app_module", ",", "view_filename", ")", "views", "=", "dir", "(", "view_module", ")", "for", "view_name", "in", "views", ":", "if", "view_name", ".", "endswith", "(", "'View'", ")", ":", "view", "=", "getattr", "(", "view_module", ",", "view_name", ")", "if", "isinstance", "(", "view", ",", "object", ")", ":", "if", "urlpatterns", ":", "urlpatterns", "+=", "patterns", "(", "''", ",", "url", "(", "r'^(?i)%s/$'", "%", "view_name", ",", "view", ".", "as_view", "(", ")", ",", "name", "=", "view_name", ")", ",", ")", "else", ":", "urlpatterns", "=", "patterns", "(", "''", ",", "url", "(", "r'^(?i)%s/$'", "%", "view_name", ",", "view", ".", "as_view", "(", ")", ",", "name", "=", "view_name", ")", ",", ")", "else", ":", "pass", "return", "urlpatterns" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
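A hedged usage sketch for register_views in a project's URLconf; 'myapp' is hypothetical, and the helper relies on the old django.conf.urls patterns API just like the code above:

# urls.py (illustrative)
from easyui.utils import register_views

# routes every class in myapp/views.py whose name ends with 'View'
# at a case-insensitive URL named after the class, e.g. /bookdetailview/
urlpatterns = register_views('myapp', 'views')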
valid
EasyUIDatagridView.get_template_names
The default datagrid template.
easyui/mixins/view_mixins.py
def get_template_names(self): """ datagrid的默认模板 """ names = super(EasyUIDatagridView, self).get_template_names() names.append('easyui/datagrid.html') return names
def get_template_names(self): """ datagrid的默认模板 """ names = super(EasyUIDatagridView, self).get_template_names() names.append('easyui/datagrid.html') return names
[ "datagrid的默认模板" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/view_mixins.py#L21-L27
[ "def", "get_template_names", "(", "self", ")", ":", "names", "=", "super", "(", "EasyUIDatagridView", ",", "self", ")", ".", "get_template_names", "(", ")", "names", ".", "append", "(", "'easyui/datagrid.html'", ")", "return", "names" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
EasyUICreateView.get_template_names
The default datagrid template.
easyui/mixins/view_mixins.py
def get_template_names(self): """ datagrid的默认模板 """ names = super(EasyUICreateView, self).get_template_names() names.append('easyui/form.html') return names
def get_template_names(self): """ datagrid的默认模板 """ names = super(EasyUICreateView, self).get_template_names() names.append('easyui/form.html') return names
[ "datagrid的默认模板" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/view_mixins.py#L35-L41
[ "def", "get_template_names", "(", "self", ")", ":", "names", "=", "super", "(", "EasyUICreateView", ",", "self", ")", ".", "get_template_names", "(", ")", "names", ".", "append", "(", "'easyui/form.html'", ")", "return", "names" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
EasyUIUpdateView.get_template_names
The default datagrid template.
easyui/mixins/view_mixins.py
def get_template_names(self): """ datagrid的默认模板 """ names = super(EasyUIUpdateView, self).get_template_names() names.append('easyui/form.html') return names
def get_template_names(self): """ datagrid的默认模板 """ names = super(EasyUIUpdateView, self).get_template_names() names.append('easyui/form.html') return names
[ "datagrid的默认模板" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/view_mixins.py#L50-L56
[ "def", "get_template_names", "(", "self", ")", ":", "names", "=", "super", "(", "EasyUIUpdateView", ",", "self", ")", ".", "get_template_names", "(", ")", "names", ".", "append", "(", "'easyui/form.html'", ")", "return", "names" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
EasyUIDeleteView.get_template_names
The default datagrid template.
easyui/mixins/view_mixins.py
def get_template_names(self): """ datagrid的默认模板 """ names = super(EasyUIDeleteView, self).get_template_names() names.append('easyui/confirm_delete.html') return names
def get_template_names(self): """ datagrid的默认模板 """ names = super(EasyUIDeleteView, self).get_template_names() names.append('easyui/confirm_delete.html') return names
[ "datagrid的默认模板" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/view_mixins.py#L65-L71
[ "def", "get_template_names", "(", "self", ")", ":", "names", "=", "super", "(", "EasyUIDeleteView", ",", "self", ")", ".", "get_template_names", "(", ")", "names", ".", "append", "(", "'easyui/confirm_delete.html'", ")", "return", "names" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
CommandDatagridView.get_template_names
The default datagrid template.
easyui/mixins/view_mixins.py
def get_template_names(self): """ datagrid的默认模板 """ names = super(CommandDatagridView, self).get_template_names() names.append('easyui/command_datagrid.html') return names
def get_template_names(self): """ datagrid的默认模板 """ names = super(CommandDatagridView, self).get_template_names() names.append('easyui/command_datagrid.html') return names
[ "datagrid的默认模板" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/view_mixins.py#L79-L85
[ "def", "get_template_names", "(", "self", ")", ":", "names", "=", "super", "(", "CommandDatagridView", ",", "self", ")", ".", "get_template_names", "(", ")", "names", ".", "append", "(", "'easyui/command_datagrid.html'", ")", "return", "names" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
valid
LoginRequiredMixin.dispatch
Adds permission control: the permission is checked only when self has both model and permission_required.
easyui/mixins/permission_mixins.py
def dispatch(self, request, *args, **kwargs): """ 增加了权限控制,当self存在model和permission_required时,才会检查权限 """ if getattr(self, 'model', None) and self.permission_required: app_label = self.model._meta.app_label model_name = self.model.__name__.lower() permission_required = self.permission_required.lower() permission = '%(app_label)s.%(permission_required)s_%(model_name)s' % { 'app_label':app_label, 'permission_required':permission_required, 'model_name': model_name } if not self.request.user.has_perm(permission): return HttpResponseRedirect(reverse_lazy('easyui:login')) return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
def dispatch(self, request, *args, **kwargs): """ 增加了权限控制,当self存在model和permission_required时,才会检查权限 """ if getattr(self, 'model', None) and self.permission_required: app_label = self.model._meta.app_label model_name = self.model.__name__.lower() permission_required = self.permission_required.lower() permission = '%(app_label)s.%(permission_required)s_%(model_name)s' % { 'app_label':app_label, 'permission_required':permission_required, 'model_name': model_name } if not self.request.user.has_perm(permission): return HttpResponseRedirect(reverse_lazy('easyui:login')) return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
[ "增加了权限控制,当self存在model和permission_required时,才会检查权限" ]
xu2243051/easyui-menu
python
https://github.com/xu2243051/easyui-menu/blob/4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb/easyui/mixins/permission_mixins.py#L25-L41
[ "def", "dispatch", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "getattr", "(", "self", ",", "'model'", ",", "None", ")", "and", "self", ".", "permission_required", ":", "app_label", "=", "self", ".", "model", ".", "_meta", ".", "app_label", "model_name", "=", "self", ".", "model", ".", "__name__", ".", "lower", "(", ")", "permission_required", "=", "self", ".", "permission_required", ".", "lower", "(", ")", "permission", "=", "'%(app_label)s.%(permission_required)s_%(model_name)s'", "%", "{", "'app_label'", ":", "app_label", ",", "'permission_required'", ":", "permission_required", ",", "'model_name'", ":", "model_name", "}", "if", "not", "self", ".", "request", ".", "user", ".", "has_perm", "(", "permission", ")", ":", "return", "HttpResponseRedirect", "(", "reverse_lazy", "(", "'easyui:login'", ")", ")", "return", "super", "(", "LoginRequiredMixin", ",", "self", ")", ".", "dispatch", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
4da0b50cf2d3ddb0f1ec7a4da65fd3c4339f8dfb
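A worked example of the permission string that dispatch() builds before calling request.user.has_perm(); the app label, action and model name below are illustrative:

app_label, permission_required, model_name = 'easyui', 'change', 'menu'
permission = '%(app_label)s.%(permission_required)s_%(model_name)s' % {
    'app_label': app_label,
    'permission_required': permission_required,
    'model_name': model_name,
}
assert permission == 'easyui.change_menu'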
valid
writable_path
Test whether a path can be written to.
fs/archive/_utils.py
def writable_path(path): """Test whether a path can be written to. """ if os.path.exists(path): return os.access(path, os.W_OK) try: with open(path, 'w'): pass except (OSError, IOError): return False else: os.remove(path) return True
def writable_path(path): """Test whether a path can be written to. """ if os.path.exists(path): return os.access(path, os.W_OK) try: with open(path, 'w'): pass except (OSError, IOError): return False else: os.remove(path) return True
[ "Test", "whether", "a", "path", "can", "be", "written", "to", "." ]
althonos/fs.archive
python
https://github.com/althonos/fs.archive/blob/a09bb5da56da6b96aca3e20841fa86dea7c5b79a/fs/archive/_utils.py#L98-L110
[ "def", "writable_path", "(", "path", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "return", "os", ".", "access", "(", "path", ",", "os", ".", "W_OK", ")", "try", ":", "with", "open", "(", "path", ",", "'w'", ")", ":", "pass", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "False", "else", ":", "os", ".", "remove", "(", "path", ")", "return", "True" ]
a09bb5da56da6b96aca3e20841fa86dea7c5b79a
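A quick behavioural check of writable_path, assuming a normal writable temporary directory; the probe file names are arbitrary:

import os
import tempfile
from fs.archive._utils import writable_path

with tempfile.TemporaryDirectory() as tmp:
    print(writable_path(os.path.join(tmp, 'probe.bin')))         # True: file is created, then removed
    print(writable_path(os.path.join(tmp, 'missing', 'probe')))  # False: parent directory does not exist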
valid
writable_stream
Test whether a stream can be written to.
fs/archive/_utils.py
def writable_stream(handle): """Test whether a stream can be written to. """ if isinstance(handle, io.IOBase) and sys.version_info >= (3, 5): return handle.writable() try: handle.write(b'') except (io.UnsupportedOperation, IOError): return False else: return True
def writable_stream(handle): """Test whether a stream can be written to. """ if isinstance(handle, io.IOBase) and sys.version_info >= (3, 5): return handle.writable() try: handle.write(b'') except (io.UnsupportedOperation, IOError): return False else: return True
[ "Test", "whether", "a", "stream", "can", "be", "written", "to", "." ]
althonos/fs.archive
python
https://github.com/althonos/fs.archive/blob/a09bb5da56da6b96aca3e20841fa86dea7c5b79a/fs/archive/_utils.py#L113-L123
[ "def", "writable_stream", "(", "handle", ")", ":", "if", "isinstance", "(", "handle", ",", "io", ".", "IOBase", ")", "and", "sys", ".", "version_info", ">=", "(", "3", ",", "5", ")", ":", "return", "handle", ".", "writable", "(", ")", "try", ":", "handle", ".", "write", "(", "b''", ")", "except", "(", "io", ".", "UnsupportedOperation", ",", "IOError", ")", ":", "return", "False", "else", ":", "return", "True" ]
a09bb5da56da6b96aca3e20841fa86dea7c5b79a
valid
QuadContourGenerator.from_curvilinear
Construct a contour generator from a curvilinear grid. Note ---- This is an alias for the default constructor. Parameters ---------- x : array_like x coordinates of each point in `z`. Must be the same size as `z`. y : array_like y coordinates of each point in `z`. Must be the same size as `z`. z : array_like The 2-dimensional curvilinear grid of data to compute contours for. Masked arrays are supported. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator.
contours/quad.py
def from_curvilinear(cls, x, y, z, formatter=numpy_formatter): """Construct a contour generator from a curvilinear grid. Note ---- This is an alias for the default constructor. Parameters ---------- x : array_like x coordinates of each point in `z`. Must be the same size as `z`. y : array_like y coordinates of each point in `z`. Must be the same size as `z`. z : array_like The 2-dimensional curvilinear grid of data to compute contours for. Masked arrays are supported. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator. """ return cls(x, y, z, formatter)
def from_curvilinear(cls, x, y, z, formatter=numpy_formatter): """Construct a contour generator from a curvilinear grid. Note ---- This is an alias for the default constructor. Parameters ---------- x : array_like x coordinates of each point in `z`. Must be the same size as `z`. y : array_like y coordinates of each point in `z`. Must be the same size as `z`. z : array_like The 2-dimensional curvilinear grid of data to compute contours for. Masked arrays are supported. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator. """ return cls(x, y, z, formatter)
[ "Construct", "a", "contour", "generator", "from", "a", "curvilinear", "grid", "." ]
ccarocean/python-contours
python
https://github.com/ccarocean/python-contours/blob/d154a679a2ea6a324c3308c1d087d88d0eb79622/contours/quad.py#L114-L141
[ "def", "from_curvilinear", "(", "cls", ",", "x", ",", "y", ",", "z", ",", "formatter", "=", "numpy_formatter", ")", ":", "return", "cls", "(", "x", ",", "y", ",", "z", ",", "formatter", ")" ]
d154a679a2ea6a324c3308c1d087d88d0eb79622
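A hedged usage sketch for `from_curvilinear`, assuming the `contours` package from the repo above is installed together with its NumPy/Matplotlib dependencies; the grid values are made up for illustration.

    import numpy as np

    from contours.quad import QuadContourGenerator

    # Curvilinear grids require x, y and z to share the same 2-D shape.
    y, x = np.mgrid[0.0:5.0:0.1, 0.0:5.0:0.1]
    z = np.hypot(x - 2.5, y - 2.5)

    gen = QuadContourGenerator.from_curvilinear(x, y, z)
    lines = gen.contour(1.0)  # list of Nx2 vertex arrays with the default numpy_formatter
    print(len(lines), lines[0].shape)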
valid
QuadContourGenerator.from_rectilinear
Construct a contour generator from a rectilinear grid. Parameters ---------- x : array_like x coordinates of each column of `z`. Must be the same length as the number of columns in `z`. (len(x) == z.shape[1]) y : array_like y coordinates of each row of `z`. Must be the same length as the number of rows in `z`. (len(y) == z.shape[0]) z : array_like The 2-dimensional rectilinear grid of data to compute contours for. Masked arrays are supported. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator.
contours/quad.py
def from_rectilinear(cls, x, y, z, formatter=numpy_formatter): """Construct a contour generator from a rectilinear grid. Parameters ---------- x : array_like x coordinates of each column of `z`. Must be the same length as the number of columns in `z`. (len(x) == z.shape[1]) y : array_like y coordinates of each row of `z`. Must be the same length as the number of columns in `z`. (len(y) == z.shape[0]) z : array_like The 2-dimensional rectilinear grid of data to compute contours for. Masked arrays are supported. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator. """ x = np.asarray(x, dtype=np.float64) y = np.asarray(y, dtype=np.float64) z = np.ma.asarray(z, dtype=np.float64) # Check arguments. if x.ndim != 1: raise TypeError( "'x' must be a 1D array but is a {:d}D array".format(x.ndim)) if y.ndim != 1: raise TypeError( "'y' must be a 1D array but is a {:d}D array".format(y.ndim)) if z.ndim != 2: raise TypeError( "'z' must be a 2D array but it a {:d}D array".format(z.ndim)) if x.size != z.shape[1]: raise TypeError( ("the length of 'x' must be equal to the number of columns in " "'z' but the length of 'x' is {:d} and 'z' has {:d} " "columns").format(x.size, z.shape[1])) if y.size != z.shape[0]: raise TypeError( ("the length of 'y' must be equal to the number of rows in " "'z' but the length of 'y' is {:d} and 'z' has {:d} " "rows").format(y.size, z.shape[0])) # Convert to curvilinear format and call constructor. y, x = np.meshgrid(y, x, indexing='ij') return cls(x, y, z, formatter)
def from_rectilinear(cls, x, y, z, formatter=numpy_formatter): """Construct a contour generator from a rectilinear grid. Parameters ---------- x : array_like x coordinates of each column of `z`. Must be the same length as the number of columns in `z`. (len(x) == z.shape[1]) y : array_like y coordinates of each row of `z`. Must be the same length as the number of columns in `z`. (len(y) == z.shape[0]) z : array_like The 2-dimensional rectilinear grid of data to compute contours for. Masked arrays are supported. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator. """ x = np.asarray(x, dtype=np.float64) y = np.asarray(y, dtype=np.float64) z = np.ma.asarray(z, dtype=np.float64) # Check arguments. if x.ndim != 1: raise TypeError( "'x' must be a 1D array but is a {:d}D array".format(x.ndim)) if y.ndim != 1: raise TypeError( "'y' must be a 1D array but is a {:d}D array".format(y.ndim)) if z.ndim != 2: raise TypeError( "'z' must be a 2D array but it a {:d}D array".format(z.ndim)) if x.size != z.shape[1]: raise TypeError( ("the length of 'x' must be equal to the number of columns in " "'z' but the length of 'x' is {:d} and 'z' has {:d} " "columns").format(x.size, z.shape[1])) if y.size != z.shape[0]: raise TypeError( ("the length of 'y' must be equal to the number of rows in " "'z' but the length of 'y' is {:d} and 'z' has {:d} " "rows").format(y.size, z.shape[0])) # Convert to curvilinear format and call constructor. y, x = np.meshgrid(y, x, indexing='ij') return cls(x, y, z, formatter)
[ "Construct", "a", "contour", "generator", "from", "a", "rectilinear", "grid", "." ]
ccarocean/python-contours
python
https://github.com/ccarocean/python-contours/blob/d154a679a2ea6a324c3308c1d087d88d0eb79622/contours/quad.py#L144-L194
[ "def", "from_rectilinear", "(", "cls", ",", "x", ",", "y", ",", "z", ",", "formatter", "=", "numpy_formatter", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ",", "dtype", "=", "np", ".", "float64", ")", "y", "=", "np", ".", "asarray", "(", "y", ",", "dtype", "=", "np", ".", "float64", ")", "z", "=", "np", ".", "ma", ".", "asarray", "(", "z", ",", "dtype", "=", "np", ".", "float64", ")", "# Check arguments.", "if", "x", ".", "ndim", "!=", "1", ":", "raise", "TypeError", "(", "\"'x' must be a 1D array but is a {:d}D array\"", ".", "format", "(", "x", ".", "ndim", ")", ")", "if", "y", ".", "ndim", "!=", "1", ":", "raise", "TypeError", "(", "\"'y' must be a 1D array but is a {:d}D array\"", ".", "format", "(", "y", ".", "ndim", ")", ")", "if", "z", ".", "ndim", "!=", "2", ":", "raise", "TypeError", "(", "\"'z' must be a 2D array but it a {:d}D array\"", ".", "format", "(", "z", ".", "ndim", ")", ")", "if", "x", ".", "size", "!=", "z", ".", "shape", "[", "1", "]", ":", "raise", "TypeError", "(", "(", "\"the length of 'x' must be equal to the number of columns in \"", "\"'z' but the length of 'x' is {:d} and 'z' has {:d} \"", "\"columns\"", ")", ".", "format", "(", "x", ".", "size", ",", "z", ".", "shape", "[", "1", "]", ")", ")", "if", "y", ".", "size", "!=", "z", ".", "shape", "[", "0", "]", ":", "raise", "TypeError", "(", "(", "\"the length of 'y' must be equal to the number of rows in \"", "\"'z' but the length of 'y' is {:d} and 'z' has {:d} \"", "\"rows\"", ")", ".", "format", "(", "y", ".", "size", ",", "z", ".", "shape", "[", "0", "]", ")", ")", "# Convert to curvilinear format and call constructor.", "y", ",", "x", "=", "np", ".", "meshgrid", "(", "y", ",", "x", ",", "indexing", "=", "'ij'", ")", "return", "cls", "(", "x", ",", "y", ",", "z", ",", "formatter", ")" ]
d154a679a2ea6a324c3308c1d087d88d0eb79622
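A hedged sketch of the rectilinear constructor under the same assumptions; the 1-D axes must match the column and row counts of `z`.

    import numpy as np

    from contours.quad import QuadContourGenerator

    x = np.linspace(-3.0, 3.0, 61)        # len(x) == z.shape[1]
    y = np.linspace(-2.0, 2.0, 41)        # len(y) == z.shape[0]
    z = np.hypot(*np.meshgrid(x, y))      # shape (41, 61)

    gen = QuadContourGenerator.from_rectilinear(x, y, z)
    lines = gen.contour(1.5)              # contour lines at z == 1.5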
valid
QuadContourGenerator.from_uniform
Construct a contour generator from a uniform grid. NOTE ---- The default `origin` and `step` values are equivalent to calling :meth:`matplotlib.axes.Axes.contour` with only the `z` argument. Parameters ---------- z : array_like The 2-dimensional uniform grid of data to compute contours for. Masked arrays are supported. origin : (number.Number, number.Number) The (x, y) coordinate of data point `z[0,0]`. step : (number.Number, number.Number) The (x, y) distance between data points in `z`. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator.
contours/quad.py
def from_uniform( cls, z, origin=(0, 0), step=(1, 1), formatter=numpy_formatter): """Construct a contour generator from a uniform grid. NOTE ---- The default `origin` and `step` values is equivalent to calling :meth:`matplotlib.axes.Axes.contour` with only the `z` argument. Parameters ---------- z : array_like The 2-dimensional uniform grid of data to compute contours for. Masked arrays are supported. origin : (number.Number, number.Number) The (x, y) coordinate of data point `z[0,0]`. step : (number.Number, number.Number) The (x, y) distance between data points in `z`. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator. """ z = np.ma.asarray(z, dtype=np.float64) # Check arguments. if z.ndim != 2: raise TypeError( "'z' must be a 2D array but it a {:d}D array".format(z.ndim)) if len(origin) != 2: raise TypeError( "'origin' must be of length 2 but has length {:d}".format( len(origin))) if len(step) != 2: raise TypeError( "'step' must be of length 2 but has length {:d}".format( len(step))) if any(s == 0 for s in step): raise ValueError( "'step' must have non-zero values but is {:s}".format( str(step))) # Convert to curvilinear format and call constructor. y, x = np.mgrid[ origin[0]:(origin[0]+step[0]*z.shape[0]):step[0], origin[1]:(origin[1]+step[1]*z.shape[1]):step[1]] return cls(x, y, z, formatter)
def from_uniform( cls, z, origin=(0, 0), step=(1, 1), formatter=numpy_formatter): """Construct a contour generator from a uniform grid. NOTE ---- The default `origin` and `step` values is equivalent to calling :meth:`matplotlib.axes.Axes.contour` with only the `z` argument. Parameters ---------- z : array_like The 2-dimensional uniform grid of data to compute contours for. Masked arrays are supported. origin : (number.Number, number.Number) The (x, y) coordinate of data point `z[0,0]`. step : (number.Number, number.Number) The (x, y) distance between data points in `z`. formatter : callable A conversion function to convert from the internal `Matplotlib`_ contour format to an external format. See :ref:`formatters` for more information. Returns ------- : :class:`QuadContourGenerator` Initialized contour generator. """ z = np.ma.asarray(z, dtype=np.float64) # Check arguments. if z.ndim != 2: raise TypeError( "'z' must be a 2D array but it a {:d}D array".format(z.ndim)) if len(origin) != 2: raise TypeError( "'origin' must be of length 2 but has length {:d}".format( len(origin))) if len(step) != 2: raise TypeError( "'step' must be of length 2 but has length {:d}".format( len(step))) if any(s == 0 for s in step): raise ValueError( "'step' must have non-zero values but is {:s}".format( str(step))) # Convert to curvilinear format and call constructor. y, x = np.mgrid[ origin[0]:(origin[0]+step[0]*z.shape[0]):step[0], origin[1]:(origin[1]+step[1]*z.shape[1]):step[1]] return cls(x, y, z, formatter)
[ "Construct", "a", "contour", "generator", "from", "a", "uniform", "grid", "." ]
ccarocean/python-contours
python
https://github.com/ccarocean/python-contours/blob/d154a679a2ea6a324c3308c1d087d88d0eb79622/contours/quad.py#L197-L247
[ "def", "from_uniform", "(", "cls", ",", "z", ",", "origin", "=", "(", "0", ",", "0", ")", ",", "step", "=", "(", "1", ",", "1", ")", ",", "formatter", "=", "numpy_formatter", ")", ":", "z", "=", "np", ".", "ma", ".", "asarray", "(", "z", ",", "dtype", "=", "np", ".", "float64", ")", "# Check arguments.", "if", "z", ".", "ndim", "!=", "2", ":", "raise", "TypeError", "(", "\"'z' must be a 2D array but it a {:d}D array\"", ".", "format", "(", "z", ".", "ndim", ")", ")", "if", "len", "(", "origin", ")", "!=", "2", ":", "raise", "TypeError", "(", "\"'origin' must be of length 2 but has length {:d}\"", ".", "format", "(", "len", "(", "origin", ")", ")", ")", "if", "len", "(", "step", ")", "!=", "2", ":", "raise", "TypeError", "(", "\"'step' must be of length 2 but has length {:d}\"", ".", "format", "(", "len", "(", "step", ")", ")", ")", "if", "any", "(", "s", "==", "0", "for", "s", "in", "step", ")", ":", "raise", "ValueError", "(", "\"'step' must have non-zero values but is {:s}\"", ".", "format", "(", "str", "(", "step", ")", ")", ")", "# Convert to curvilinear format and call constructor.", "y", ",", "x", "=", "np", ".", "mgrid", "[", "origin", "[", "0", "]", ":", "(", "origin", "[", "0", "]", "+", "step", "[", "0", "]", "*", "z", ".", "shape", "[", "0", "]", ")", ":", "step", "[", "0", "]", ",", "origin", "[", "1", "]", ":", "(", "origin", "[", "1", "]", "+", "step", "[", "1", "]", "*", "z", ".", "shape", "[", "1", "]", ")", ":", "step", "[", "1", "]", "]", "return", "cls", "(", "x", ",", "y", ",", "z", ",", "formatter", ")" ]
d154a679a2ea6a324c3308c1d087d88d0eb79622
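A hedged sketch of the uniform-grid constructor; with the default `origin` and `step` this mirrors calling matplotlib's `contour` with only `z`. The grid here is symmetric, so the choice of origin/step ordering does not matter for the example.

    import numpy as np

    from contours.quad import QuadContourGenerator

    yy, xx = np.mgrid[-5.0:5.0:0.5, -5.0:5.0:0.5]
    z = np.hypot(xx, yy)

    # Only z is required; origin/step describe where z[0, 0] sits and the grid spacing.
    gen = QuadContourGenerator.from_uniform(z, origin=(-5.0, -5.0), step=(0.5, 0.5))
    lines = gen.contour(2.5)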
valid
SphinxSearchPlugin.options
Sphinx config file that can optionally take the following python template string arguments: ``database_name`` ``database_password`` ``database_username`` ``database_host`` ``database_port`` ``sphinx_search_data_dir`` ``searchd_log_dir``
nosedjango/plugins/sphinxsearch_plugin.py
def options(self, parser, env=None): """ Sphinx config file that can optionally take the following python template string arguments: ``database_name`` ``database_password`` ``database_username`` ``database_host`` ``database_port`` ``sphinx_search_data_dir`` ``searchd_log_dir`` """ if env is None: env = os.environ parser.add_option( '--sphinx-config-tpl', help='Path to the Sphinx configuration file template.', ) super(SphinxSearchPlugin, self).options(parser, env)
def options(self, parser, env=None): """ Sphinx config file that can optionally take the following python template string arguments: ``database_name`` ``database_password`` ``database_username`` ``database_host`` ``database_port`` ``sphinx_search_data_dir`` ``searchd_log_dir`` """ if env is None: env = os.environ parser.add_option( '--sphinx-config-tpl', help='Path to the Sphinx configuration file template.', ) super(SphinxSearchPlugin, self).options(parser, env)
[ "Sphinx", "config", "file", "that", "can", "optionally", "take", "the", "following", "python", "template", "string", "arguments", ":" ]
nosedjango/nosedjango
python
https://github.com/nosedjango/nosedjango/blob/cd4d06857c88291769bc38e5c9573f43b7ffcd6a/nosedjango/plugins/sphinxsearch_plugin.py#L31-L51
[ "def", "options", "(", "self", ",", "parser", ",", "env", "=", "None", ")", ":", "if", "env", "is", "None", ":", "env", "=", "os", ".", "environ", "parser", ".", "add_option", "(", "'--sphinx-config-tpl'", ",", "help", "=", "'Path to the Sphinx configuration file template.'", ",", ")", "super", "(", "SphinxSearchPlugin", ",", "self", ")", ".", "options", "(", "parser", ",", "env", ")" ]
cd4d06857c88291769bc38e5c9573f43b7ffcd6a
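A hedged stand-alone illustration of the command-line surface this method registers. Nose wires up the real parser itself, so the plain optparse parser and the template path below are only stand-ins.

    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option(
        '--sphinx-config-tpl',
        help='Path to the Sphinx configuration file template.',
    )
    opts, _ = parser.parse_args(['--sphinx-config-tpl', 'conf/sphinx.conf.tpl'])
    print(opts.sphinx_config_tpl)  # conf/sphinx.conf.tpl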
valid
SphinxSearchPlugin._wait_for_connection
Wait until we can make a socket connection to sphinx.
nosedjango/plugins/sphinxsearch_plugin.py
def _wait_for_connection(self, port): """ Wait until we can make a socket connection to sphinx. """ connected = False max_tries = 10 num_tries = 0 wait_time = 0.5 while not connected or num_tries >= max_tries: time.sleep(wait_time) try: af = socket.AF_INET addr = ('127.0.0.1', port) sock = socket.socket(af, socket.SOCK_STREAM) sock.connect(addr) except socket.error: if sock: sock.close() num_tries += 1 continue connected = True if not connected: print("Error connecting to sphinx searchd", file=sys.stderr)
def _wait_for_connection(self, port): """ Wait until we can make a socket connection to sphinx. """ connected = False max_tries = 10 num_tries = 0 wait_time = 0.5 while not connected or num_tries >= max_tries: time.sleep(wait_time) try: af = socket.AF_INET addr = ('127.0.0.1', port) sock = socket.socket(af, socket.SOCK_STREAM) sock.connect(addr) except socket.error: if sock: sock.close() num_tries += 1 continue connected = True if not connected: print("Error connecting to sphinx searchd", file=sys.stderr)
[ "Wait", "until", "we", "can", "make", "a", "socket", "connection", "to", "sphinx", "." ]
nosedjango/nosedjango
python
https://github.com/nosedjango/nosedjango/blob/cd4d06857c88291769bc38e5c9573f43b7ffcd6a/nosedjango/plugins/sphinxsearch_plugin.py#L151-L174
[ "def", "_wait_for_connection", "(", "self", ",", "port", ")", ":", "connected", "=", "False", "max_tries", "=", "10", "num_tries", "=", "0", "wait_time", "=", "0.5", "while", "not", "connected", "or", "num_tries", ">=", "max_tries", ":", "time", ".", "sleep", "(", "wait_time", ")", "try", ":", "af", "=", "socket", ".", "AF_INET", "addr", "=", "(", "'127.0.0.1'", ",", "port", ")", "sock", "=", "socket", ".", "socket", "(", "af", ",", "socket", ".", "SOCK_STREAM", ")", "sock", ".", "connect", "(", "addr", ")", "except", "socket", ".", "error", ":", "if", "sock", ":", "sock", ".", "close", "(", ")", "num_tries", "+=", "1", "continue", "connected", "=", "True", "if", "not", "connected", ":", "print", "(", "\"Error connecting to sphinx searchd\"", ",", "file", "=", "sys", ".", "stderr", ")" ]
cd4d06857c88291769bc38e5c9573f43b7ffcd6a
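A hedged, stand-alone rewrite of the same wait-and-retry idea with an explicit retry cap; all names here are hypothetical and not part of nosedjango.

    import socket
    import time


    def wait_for_port(port, host='127.0.0.1', max_tries=10, wait_time=0.5):
        """Return True once a TCP connection succeeds, False after max_tries attempts."""
        for _ in range(max_tries):
            time.sleep(wait_time)
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                sock.connect((host, port))
            except socket.error:
                continue
            else:
                return True
            finally:
                sock.close()
        return False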
valid
Plugin.get_unique_token
Get a unique token for usage in differentiating test runs that need to run in parallel.
nosedjango/plugins/base_plugin.py
def get_unique_token(self): """ Get a unique token for usage in differentiating test runs that need to run in parallel. """ if self._unique_token is None: self._unique_token = self._random_token() return self._unique_token
def get_unique_token(self): """ Get a unique token for usage in differentiating test runs that need to run in parallel. """ if self._unique_token is None: self._unique_token = self._random_token() return self._unique_token
[ "Get", "a", "unique", "token", "for", "usage", "in", "differentiating", "test", "runs", "that", "need", "to", "run", "in", "parallel", "." ]
nosedjango/nosedjango
python
https://github.com/nosedjango/nosedjango/blob/cd4d06857c88291769bc38e5c9573f43b7ffcd6a/nosedjango/plugins/base_plugin.py#L12-L20
[ "def", "get_unique_token", "(", "self", ")", ":", "if", "self", ".", "_unique_token", "is", "None", ":", "self", ".", "_unique_token", "=", "self", ".", "_random_token", "(", ")", "return", "self", ".", "_unique_token" ]
cd4d06857c88291769bc38e5c9573f43b7ffcd6a
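A hedged stand-alone mirror of the lazy caching this method performs; the class below is hypothetical and uses uuid in place of the plugin's own token helper.

    import uuid


    class TokenCache(object):
        _unique_token = None

        def get_unique_token(self):
            # Create the token on first use, then keep returning the same value.
            if self._unique_token is None:
                self._unique_token = uuid.uuid4().hex
            return self._unique_token


    cache = TokenCache()
    assert cache.get_unique_token() == cache.get_unique_token()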
valid
Plugin._random_token
Generates a random token, using the url-safe base64 alphabet. The "bits" argument specifies the bits of randomness to use.
nosedjango/plugins/base_plugin.py
def _random_token(self, bits=128): """ Generates a random token, using the url-safe base64 alphabet. The "bits" argument specifies the bits of randomness to use. """ alphabet = string.ascii_letters + string.digits + '-_' # alphabet length is 64, so each letter provides lg(64) = 6 bits num_letters = int(math.ceil(bits / 6.0)) return ''.join(random.choice(alphabet) for i in range(num_letters))
def _random_token(self, bits=128): """ Generates a random token, using the url-safe base64 alphabet. The "bits" argument specifies the bits of randomness to use. """ alphabet = string.ascii_letters + string.digits + '-_' # alphabet length is 64, so each letter provides lg(64) = 6 bits num_letters = int(math.ceil(bits / 6.0)) return ''.join(random.choice(alphabet) for i in range(num_letters))
[ "Generates", "a", "random", "token", "using", "the", "url", "-", "safe", "base64", "alphabet", ".", "The", "bits", "argument", "specifies", "the", "bits", "of", "randomness", "to", "use", "." ]
nosedjango/nosedjango
python
https://github.com/nosedjango/nosedjango/blob/cd4d06857c88291769bc38e5c9573f43b7ffcd6a/nosedjango/plugins/base_plugin.py#L22-L30
[ "def", "_random_token", "(", "self", ",", "bits", "=", "128", ")", ":", "alphabet", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", "+", "'-_'", "# alphabet length is 64, so each letter provides lg(64) = 6 bits", "num_letters", "=", "int", "(", "math", ".", "ceil", "(", "bits", "/", "6.0", ")", ")", "return", "''", ".", "join", "(", "random", ".", "choice", "(", "alphabet", ")", "for", "i", "in", "range", "(", "num_letters", ")", ")" ]
cd4d06857c88291769bc38e5c9573f43b7ffcd6a
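A quick check of the arithmetic behind the token length: the 64-symbol alphabet carries 6 bits per character, so the default 128 bits needs ceil(128 / 6) = 22 characters.

    import math

    for bits in (128, 256):
        print(bits, int(math.ceil(bits / 6.0)))  # 128 -> 22 characters, 256 -> 43 characters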
valid
Poll.url
Returns the url of the poll. If the poll has not been submitted yet, an empty string is returned instead.
strawpoll/poll.py
def url(self): """Returns the url of the poll. If the poll has not been submitted yet, an empty string is returned instead. """ if self.id is None: return '' return '{}/{}'.format(strawpoll.API._BASE_URL, self.id)
def url(self): """Returns the url of the poll. If the poll has not been submitted yet, an empty string is returned instead. """ if self.id is None: return '' return '{}/{}'.format(strawpoll.API._BASE_URL, self.id)
[ "Returns", "the", "url", "of", "the", "poll", ".", "If", "the", "poll", "has", "not", "been", "submitted", "yet", "an", "empty", "string", "is", "returned", "instead", "." ]
PapyrusThePlant/strawpoll.py
python
https://github.com/PapyrusThePlant/strawpoll.py/blob/bce8a8d89d2d9d44c86431b5993b4da196bdd8eb/strawpoll/poll.py#L68-L74
[ "def", "url", "(", "self", ")", ":", "if", "self", ".", "id", "is", "None", ":", "return", "''", "return", "'{}/{}'", ".", "format", "(", "strawpoll", ".", "API", ".", "_BASE_URL", ",", "self", ".", "id", ")" ]
bce8a8d89d2d9d44c86431b5993b4da196bdd8eb
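A hedged sketch; the `Poll(title, options)` constructor signature is assumed from the strawpoll.py documentation and is not shown in this record.

    import strawpoll

    poll = strawpoll.Poll('Best pet?', ['Cats', 'Dogs'])
    print(repr(poll.url))  # '' -- no id until the poll has been submitted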
valid
API.get_poll
Retrieves a poll from strawpoll. :param arg: Either the ID of the poll or its strawpoll url. :param request_policy: Overrides :attr:`API.requests_policy` for that \ request. :type request_policy: Optional[:class:`RequestsPolicy`] :raises HTTPException: Requesting the poll failed. :returns: A poll constructed with the requested data. :rtype: :class:`Poll`
strawpoll/api.py
def get_poll(self, arg, *, request_policy=None): """Retrieves a poll from strawpoll. :param arg: Either the ID of the poll or its strawpoll url. :param request_policy: Overrides :attr:`API.requests_policy` for that \ request. :type request_policy: Optional[:class:`RequestsPolicy`] :raises HTTPException: Requesting the poll failed. :returns: A poll constructed with the requested data. :rtype: :class:`Poll` """ if isinstance(arg, str): # Maybe we received an url to parse match = self._url_re.match(arg) if match: arg = match.group('id') return self._http_client.get('{}/{}'.format(self._POLLS, arg), request_policy=request_policy, cls=strawpoll.Poll)
def get_poll(self, arg, *, request_policy=None): """Retrieves a poll from strawpoll. :param arg: Either the ID of the poll or its strawpoll url. :param request_policy: Overrides :attr:`API.requests_policy` for that \ request. :type request_policy: Optional[:class:`RequestsPolicy`] :raises HTTPException: Requesting the poll failed. :returns: A poll constructed with the requested data. :rtype: :class:`Poll` """ if isinstance(arg, str): # Maybe we received an url to parse match = self._url_re.match(arg) if match: arg = match.group('id') return self._http_client.get('{}/{}'.format(self._POLLS, arg), request_policy=request_policy, cls=strawpoll.Poll)
[ "Retrieves", "a", "poll", "from", "strawpoll", "." ]
PapyrusThePlant/strawpoll.py
python
https://github.com/PapyrusThePlant/strawpoll.py/blob/bce8a8d89d2d9d44c86431b5993b4da196bdd8eb/strawpoll/api.py#L38-L59
[ "def", "get_poll", "(", "self", ",", "arg", ",", "*", ",", "request_policy", "=", "None", ")", ":", "if", "isinstance", "(", "arg", ",", "str", ")", ":", "# Maybe we received an url to parse", "match", "=", "self", ".", "_url_re", ".", "match", "(", "arg", ")", "if", "match", ":", "arg", "=", "match", ".", "group", "(", "'id'", ")", "return", "self", ".", "_http_client", ".", "get", "(", "'{}/{}'", ".", "format", "(", "self", ".", "_POLLS", ",", "arg", ")", ",", "request_policy", "=", "request_policy", ",", "cls", "=", "strawpoll", ".", "Poll", ")" ]
bce8a8d89d2d9d44c86431b5993b4da196bdd8eb
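A hedged sketch of fetching a poll, assuming the library's default requests policy is asynchronous (a synchronous RequestsPolicy can be passed via `request_policy` instead); the poll url below is illustrative only.

    import asyncio

    import strawpoll


    async def main():
        api = strawpoll.API()
        # Either the numeric id or the full strawpoll url is accepted.
        poll = await api.get_poll('https://www.strawpoll.me/11682852')
        print(poll.title, poll.url)


    asyncio.run(main())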
valid
API.submit_poll
Submits a poll on strawpoll. :param poll: The poll to submit. :type poll: :class:`Poll` :param request_policy: Overrides :attr:`API.requests_policy` for that \ request. :type request_policy: Optional[:class:`RequestsPolicy`] :raises ExistingPoll: This poll instance has already been submitted. :raises HTTPException: The submission failed. :returns: The given poll updated with the data sent back from the submission. :rtype: :class:`Poll` .. note:: Only polls that have a non empty title and between 2 and 30 options can be submitted.
strawpoll/api.py
def submit_poll(self, poll, *, request_policy=None): """Submits a poll on strawpoll. :param poll: The poll to submit. :type poll: :class:`Poll` :param request_policy: Overrides :attr:`API.requests_policy` for that \ request. :type request_policy: Optional[:class:`RequestsPolicy`] :raises ExistingPoll: This poll instance has already been submitted. :raises HTTPException: The submission failed. :returns: The given poll updated with the data sent back from the submission. :rtype: :class:`Poll` .. note:: Only polls that have a non empty title and between 2 and 30 options can be submitted. """ if poll.id is not None: raise ExistingPoll() options = poll.options data = { 'title': poll.title, 'options': options, 'multi': poll.multi, 'dupcheck': poll.dupcheck, 'captcha': poll.captcha } return self._http_client.post(self._POLLS, data=data, request_policy=request_policy, cls=strawpoll.Poll)
def submit_poll(self, poll, *, request_policy=None): """Submits a poll on strawpoll. :param poll: The poll to submit. :type poll: :class:`Poll` :param request_policy: Overrides :attr:`API.requests_policy` for that \ request. :type request_policy: Optional[:class:`RequestsPolicy`] :raises ExistingPoll: This poll instance has already been submitted. :raises HTTPException: The submission failed. :returns: The given poll updated with the data sent back from the submission. :rtype: :class:`Poll` .. note:: Only polls that have a non empty title and between 2 and 30 options can be submitted. """ if poll.id is not None: raise ExistingPoll() options = poll.options data = { 'title': poll.title, 'options': options, 'multi': poll.multi, 'dupcheck': poll.dupcheck, 'captcha': poll.captcha } return self._http_client.post(self._POLLS, data=data, request_policy=request_policy, cls=strawpoll.Poll)
[ "Submits", "a", "poll", "on", "strawpoll", "." ]
PapyrusThePlant/strawpoll.py
python
https://github.com/PapyrusThePlant/strawpoll.py/blob/bce8a8d89d2d9d44c86431b5993b4da196bdd8eb/strawpoll/api.py#L61-L95
[ "def", "submit_poll", "(", "self", ",", "poll", ",", "*", ",", "request_policy", "=", "None", ")", ":", "if", "poll", ".", "id", "is", "not", "None", ":", "raise", "ExistingPoll", "(", ")", "options", "=", "poll", ".", "options", "data", "=", "{", "'title'", ":", "poll", ".", "title", ",", "'options'", ":", "options", ",", "'multi'", ":", "poll", ".", "multi", ",", "'dupcheck'", ":", "poll", ".", "dupcheck", ",", "'captcha'", ":", "poll", ".", "captcha", "}", "return", "self", ".", "_http_client", ".", "post", "(", "self", ".", "_POLLS", ",", "data", "=", "data", ",", "request_policy", "=", "request_policy", ",", "cls", "=", "strawpoll", ".", "Poll", ")" ]
bce8a8d89d2d9d44c86431b5993b4da196bdd8eb
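A hedged sketch of submitting a new poll under the same async and constructor assumptions; submitting the same instance a second time would raise ExistingPoll, since it already carries an id.

    import asyncio

    import strawpoll


    async def main():
        api = strawpoll.API()
        poll = strawpoll.Poll('Best pet?', ['Cats', 'Dogs', 'Ferrets'])
        poll = await api.submit_poll(poll)
        print(poll.id, poll.url)  # populated from the data strawpoll sends back


    asyncio.run(main())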
valid
numpy_formatter
`NumPy`_ style contour formatter. Contours are returned as a list of Nx2 arrays containing the x and y vertices of the contour line. For filled contours the direction of vertices matters: * CCW (ACW): The vertices give the exterior of a contour polygon. * CW: The vertices give a hole of a contour polygon. This hole will always be inside the exterior of the last contour exterior. .. note:: This is the fastest format. .. _NumPy: http://www.numpy.org
contours/core.py
def numpy_formatter(_, vertices, codes=None): """`NumPy`_ style contour formatter. Contours are returned as a list of Nx2 arrays containing the x and y vertices of the contour line. For filled contours the direction of vertices matters: * CCW (ACW): The vertices give the exterior of a contour polygon. * CW: The vertices give a hole of a contour polygon. This hole will always be inside the exterior of the last contour exterior. .. note:: This is the fastest format. .. _NumPy: http://www.numpy.org """ if codes is None: return vertices numpy_vertices = [] for vertices_, codes_ in zip(vertices, codes): starts = np.nonzero(codes_ == MPLPATHCODE.MOVETO)[0] stops = np.nonzero(codes_ == MPLPATHCODE.CLOSEPOLY)[0] for start, stop in zip(starts, stops): numpy_vertices.append(vertices_[start:stop+1, :]) return numpy_vertices
def numpy_formatter(_, vertices, codes=None): """`NumPy`_ style contour formatter. Contours are returned as a list of Nx2 arrays containing the x and y vertices of the contour line. For filled contours the direction of vertices matters: * CCW (ACW): The vertices give the exterior of a contour polygon. * CW: The vertices give a hole of a contour polygon. This hole will always be inside the exterior of the last contour exterior. .. note:: This is the fastest format. .. _NumPy: http://www.numpy.org """ if codes is None: return vertices numpy_vertices = [] for vertices_, codes_ in zip(vertices, codes): starts = np.nonzero(codes_ == MPLPATHCODE.MOVETO)[0] stops = np.nonzero(codes_ == MPLPATHCODE.CLOSEPOLY)[0] for start, stop in zip(starts, stops): numpy_vertices.append(vertices_[start:stop+1, :]) return numpy_vertices
[ "NumPy", "_", "style", "contour", "formatter", "." ]
ccarocean/python-contours
python
https://github.com/ccarocean/python-contours/blob/d154a679a2ea6a324c3308c1d087d88d0eb79622/contours/core.py#L80-L105
[ "def", "numpy_formatter", "(", "_", ",", "vertices", ",", "codes", "=", "None", ")", ":", "if", "codes", "is", "None", ":", "return", "vertices", "numpy_vertices", "=", "[", "]", "for", "vertices_", ",", "codes_", "in", "zip", "(", "vertices", ",", "codes", ")", ":", "starts", "=", "np", ".", "nonzero", "(", "codes_", "==", "MPLPATHCODE", ".", "MOVETO", ")", "[", "0", "]", "stops", "=", "np", ".", "nonzero", "(", "codes_", "==", "MPLPATHCODE", ".", "CLOSEPOLY", ")", "[", "0", "]", "for", "start", ",", "stop", "in", "zip", "(", "starts", ",", "stops", ")", ":", "numpy_vertices", ".", "append", "(", "vertices_", "[", "start", ":", "stop", "+", "1", ",", ":", "]", ")", "return", "numpy_vertices" ]
d154a679a2ea6a324c3308c1d087d88d0eb79622
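A hedged sketch of the default NumPy-style output: plain Nx2 vertex arrays for line contours, and one Nx2 array per ring (exterior and holes) for filled contours. The grid is illustrative.

    import numpy as np

    from contours.core import numpy_formatter
    from contours.quad import QuadContourGenerator

    y, x = np.mgrid[0.0:50.0, 0.0:50.0]
    gen = QuadContourGenerator(x, y, np.hypot(x - 25.0, y - 25.0), formatter=numpy_formatter)

    for line in gen.contour(10.0):
        print(line.shape)                          # (N, 2) vertex arrays
    rings = gen.filled_contour(min=5.0, max=10.0)  # one Nx2 array per ring of the band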
valid
matlab_formatter
`MATLAB`_ style contour formatter. Contours are returned as a single Nx2, `MATLAB`_ style, contour array. There are two types of rows in this format: * Header: The first element of a header row is the level of the contour (the lower level for filled contours) and the second element is the number of vertices (to follow) belonging to this contour line. * Vertex: x,y coordinate pairs of the vertex. A header row is always followed by the corresponding number of vertices. Another header row may follow if there are more contour lines. For filled contours the direction of vertices matters: * CCW (ACW): The vertices give the exterior of a contour polygon. * CW: The vertices give a hole of a contour polygon. This hole will always be inside the exterior of the last contour exterior. For further explanation of this format see the `Mathworks documentation <https://www.mathworks.com/help/matlab/ref/contour-properties.html#prop_ContourMatrix>`_ noting that the MATLAB format used in the `contours` package is the transpose of that used by `MATLAB`_ (since `MATLAB`_ is column-major and `NumPy`_ is row-major by default). .. _NumPy: http://www.numpy.org .. _MATLAB: https://www.mathworks.com/products/matlab.html
contours/core.py
def matlab_formatter(level, vertices, codes=None): """`MATLAB`_ style contour formatter. Contours are returned as a single Nx2, `MATLAB`_ style, contour array. There are two types of rows in this format: * Header: The first element of a header row is the level of the contour (the lower level for filled contours) and the second element is the number of vertices (to follow) belonging to this contour line. * Vertex: x,y coordinate pairs of the vertex. A header row is always followed by the coresponding number of vertices. Another header row may follow if there are more contour lines. For filled contours the direction of vertices matters: * CCW (ACW): The vertices give the exterior of a contour polygon. * CW: The vertices give a hole of a contour polygon. This hole will always be inside the exterior of the last contour exterior. For further explanation of this format see the `Mathworks documentation <https://www.mathworks.com/help/matlab/ref/contour-properties.html#prop_ContourMatrix>`_ noting that the MATLAB format used in the `contours` package is the transpose of that used by `MATLAB`_ (since `MATLAB`_ is column-major and `NumPy`_ is row-major by default). .. _NumPy: http://www.numpy.org .. _MATLAB: https://www.mathworks.com/products/matlab.html """ vertices = numpy_formatter(level, vertices, codes) if codes is not None: level = level[0] headers = np.vstack(( [v.shape[0] for v in vertices], [level]*len(vertices))).T vertices = np.vstack( list(it.__next__() for it in itertools.cycle((iter(headers), iter(vertices))))) return vertices
def matlab_formatter(level, vertices, codes=None): """`MATLAB`_ style contour formatter. Contours are returned as a single Nx2, `MATLAB`_ style, contour array. There are two types of rows in this format: * Header: The first element of a header row is the level of the contour (the lower level for filled contours) and the second element is the number of vertices (to follow) belonging to this contour line. * Vertex: x,y coordinate pairs of the vertex. A header row is always followed by the coresponding number of vertices. Another header row may follow if there are more contour lines. For filled contours the direction of vertices matters: * CCW (ACW): The vertices give the exterior of a contour polygon. * CW: The vertices give a hole of a contour polygon. This hole will always be inside the exterior of the last contour exterior. For further explanation of this format see the `Mathworks documentation <https://www.mathworks.com/help/matlab/ref/contour-properties.html#prop_ContourMatrix>`_ noting that the MATLAB format used in the `contours` package is the transpose of that used by `MATLAB`_ (since `MATLAB`_ is column-major and `NumPy`_ is row-major by default). .. _NumPy: http://www.numpy.org .. _MATLAB: https://www.mathworks.com/products/matlab.html """ vertices = numpy_formatter(level, vertices, codes) if codes is not None: level = level[0] headers = np.vstack(( [v.shape[0] for v in vertices], [level]*len(vertices))).T vertices = np.vstack( list(it.__next__() for it in itertools.cycle((iter(headers), iter(vertices))))) return vertices
[ "MATLAB", "_", "style", "contour", "formatter", "." ]
ccarocean/python-contours
python
https://github.com/ccarocean/python-contours/blob/d154a679a2ea6a324c3308c1d087d88d0eb79622/contours/core.py#L108-L148
[ "def", "matlab_formatter", "(", "level", ",", "vertices", ",", "codes", "=", "None", ")", ":", "vertices", "=", "numpy_formatter", "(", "level", ",", "vertices", ",", "codes", ")", "if", "codes", "is", "not", "None", ":", "level", "=", "level", "[", "0", "]", "headers", "=", "np", ".", "vstack", "(", "(", "[", "v", ".", "shape", "[", "0", "]", "for", "v", "in", "vertices", "]", ",", "[", "level", "]", "*", "len", "(", "vertices", ")", ")", ")", ".", "T", "vertices", "=", "np", ".", "vstack", "(", "list", "(", "it", ".", "__next__", "(", ")", "for", "it", "in", "itertools", ".", "cycle", "(", "(", "iter", "(", "headers", ")", ",", "iter", "(", "vertices", ")", ")", ")", ")", ")", "return", "vertices" ]
d154a679a2ea6a324c3308c1d087d88d0eb79622
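A hedged sketch of the MATLAB-style output: one flat Nx2 array in which a header row (carrying the contour level and the vertex count) precedes each block of vertex rows.

    import numpy as np

    from contours.core import matlab_formatter
    from contours.quad import QuadContourGenerator

    y, x = np.mgrid[0.0:50.0, 0.0:50.0]
    gen = QuadContourGenerator(x, y, np.hypot(x - 25.0, y - 25.0), formatter=matlab_formatter)

    c = gen.contour(10.0)
    print(c.shape)  # (1 + N, 2): one header row plus N vertex rows for a single contour line
    print(c[0])     # the header row; vertex coordinates start at c[1]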
valid
shapely_formatter
`Shapely`_ style contour formatter. Contours are returned as a list of :class:`shapely.geometry.LineString`, :class:`shapely.geometry.LinearRing`, and :class:`shapely.geometry.Point` geometry elements. Filled contours return a list of :class:`shapely.geometry.Polygon` elements instead. .. note:: If possible, `Shapely speedups`_ will be enabled. .. _Shapely: http://toblerity.org/shapely/manual.html .. _Shapely speedups: http://toblerity.org/shapely/manual.html#performance See Also -------- `descartes <https://bitbucket.org/sgillies/descartes/>`_ : Use `Shapely`_ or GeoJSON-like geometric objects as matplotlib paths and patches.
contours/core.py
def shapely_formatter(_, vertices, codes=None): """`Shapely`_ style contour formatter. Contours are returned as a list of :class:`shapely.geometry.LineString`, :class:`shapely.geometry.LinearRing`, and :class:`shapely.geometry.Point` geometry elements. Filled contours return a list of :class:`shapely.geometry.Polygon` elements instead. .. note:: If possible, `Shapely speedups`_ will be enabled. .. _Shapely: http://toblerity.org/shapely/manual.html .. _Shapely speedups: http://toblerity.org/shapely/manual.html#performance See Also -------- `descartes <https://bitbucket.org/sgillies/descartes/>`_ : Use `Shapely`_ or GeoJSON-like geometric objects as matplotlib paths and patches. """ elements = [] if codes is None: for vertices_ in vertices: if np.all(vertices_[0, :] == vertices_[-1, :]): # Contour is single point. if len(vertices) < 3: elements.append(Point(vertices_[0, :])) # Contour is closed. else: elements.append(LinearRing(vertices_)) # Contour is open. else: elements.append(LineString(vertices_)) else: for vertices_, codes_ in zip(vertices, codes): starts = np.nonzero(codes_ == MPLPATHCODE.MOVETO)[0] stops = np.nonzero(codes_ == MPLPATHCODE.CLOSEPOLY)[0] try: rings = [LinearRing(vertices_[start:stop+1, :]) for start, stop in zip(starts, stops)] elements.append(Polygon(rings[0], rings[1:])) except ValueError as err: # Verify error is from degenerate (single point) polygon. if np.any(stop - start - 1 == 0): # Polygon is single point, remove the polygon. if stops[0] < starts[0]+2: pass # Polygon has single point hole, remove the hole. else: rings = [ LinearRing(vertices_[start:stop+1, :]) for start, stop in zip(starts, stops) if stop >= start+2] elements.append(Polygon(rings[0], rings[1:])) else: raise(err) return elements
def shapely_formatter(_, vertices, codes=None): """`Shapely`_ style contour formatter. Contours are returned as a list of :class:`shapely.geometry.LineString`, :class:`shapely.geometry.LinearRing`, and :class:`shapely.geometry.Point` geometry elements. Filled contours return a list of :class:`shapely.geometry.Polygon` elements instead. .. note:: If possible, `Shapely speedups`_ will be enabled. .. _Shapely: http://toblerity.org/shapely/manual.html .. _Shapely speedups: http://toblerity.org/shapely/manual.html#performance See Also -------- `descartes <https://bitbucket.org/sgillies/descartes/>`_ : Use `Shapely`_ or GeoJSON-like geometric objects as matplotlib paths and patches. """ elements = [] if codes is None: for vertices_ in vertices: if np.all(vertices_[0, :] == vertices_[-1, :]): # Contour is single point. if len(vertices) < 3: elements.append(Point(vertices_[0, :])) # Contour is closed. else: elements.append(LinearRing(vertices_)) # Contour is open. else: elements.append(LineString(vertices_)) else: for vertices_, codes_ in zip(vertices, codes): starts = np.nonzero(codes_ == MPLPATHCODE.MOVETO)[0] stops = np.nonzero(codes_ == MPLPATHCODE.CLOSEPOLY)[0] try: rings = [LinearRing(vertices_[start:stop+1, :]) for start, stop in zip(starts, stops)] elements.append(Polygon(rings[0], rings[1:])) except ValueError as err: # Verify error is from degenerate (single point) polygon. if np.any(stop - start - 1 == 0): # Polygon is single point, remove the polygon. if stops[0] < starts[0]+2: pass # Polygon has single point hole, remove the hole. else: rings = [ LinearRing(vertices_[start:stop+1, :]) for start, stop in zip(starts, stops) if stop >= start+2] elements.append(Polygon(rings[0], rings[1:])) else: raise(err) return elements
[ "Shapely", "_", "style", "contour", "formatter", "." ]
ccarocean/python-contours
python
https://github.com/ccarocean/python-contours/blob/d154a679a2ea6a324c3308c1d087d88d0eb79622/contours/core.py#L151-L210
[ "def", "shapely_formatter", "(", "_", ",", "vertices", ",", "codes", "=", "None", ")", ":", "elements", "=", "[", "]", "if", "codes", "is", "None", ":", "for", "vertices_", "in", "vertices", ":", "if", "np", ".", "all", "(", "vertices_", "[", "0", ",", ":", "]", "==", "vertices_", "[", "-", "1", ",", ":", "]", ")", ":", "# Contour is single point.", "if", "len", "(", "vertices", ")", "<", "3", ":", "elements", ".", "append", "(", "Point", "(", "vertices_", "[", "0", ",", ":", "]", ")", ")", "# Contour is closed.", "else", ":", "elements", ".", "append", "(", "LinearRing", "(", "vertices_", ")", ")", "# Contour is open.", "else", ":", "elements", ".", "append", "(", "LineString", "(", "vertices_", ")", ")", "else", ":", "for", "vertices_", ",", "codes_", "in", "zip", "(", "vertices", ",", "codes", ")", ":", "starts", "=", "np", ".", "nonzero", "(", "codes_", "==", "MPLPATHCODE", ".", "MOVETO", ")", "[", "0", "]", "stops", "=", "np", ".", "nonzero", "(", "codes_", "==", "MPLPATHCODE", ".", "CLOSEPOLY", ")", "[", "0", "]", "try", ":", "rings", "=", "[", "LinearRing", "(", "vertices_", "[", "start", ":", "stop", "+", "1", ",", ":", "]", ")", "for", "start", ",", "stop", "in", "zip", "(", "starts", ",", "stops", ")", "]", "elements", ".", "append", "(", "Polygon", "(", "rings", "[", "0", "]", ",", "rings", "[", "1", ":", "]", ")", ")", "except", "ValueError", "as", "err", ":", "# Verify error is from degenerate (single point) polygon.", "if", "np", ".", "any", "(", "stop", "-", "start", "-", "1", "==", "0", ")", ":", "# Polygon is single point, remove the polygon.", "if", "stops", "[", "0", "]", "<", "starts", "[", "0", "]", "+", "2", ":", "pass", "# Polygon has single point hole, remove the hole.", "else", ":", "rings", "=", "[", "LinearRing", "(", "vertices_", "[", "start", ":", "stop", "+", "1", ",", ":", "]", ")", "for", "start", ",", "stop", "in", "zip", "(", "starts", ",", "stops", ")", "if", "stop", ">=", "start", "+", "2", "]", "elements", ".", "append", "(", "Polygon", "(", "rings", "[", "0", "]", ",", "rings", "[", "1", ":", "]", ")", ")", "else", ":", "raise", "(", "err", ")", "return", "elements" ]
d154a679a2ea6a324c3308c1d087d88d0eb79622
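A hedged sketch assuming Shapely is installed; filled contours come back as shapely Polygons (an annulus with a hole in this example), so the usual geometry API applies to the result.

    import numpy as np

    from contours.core import shapely_formatter
    from contours.quad import QuadContourGenerator

    y, x = np.mgrid[0.0:50.0, 0.0:50.0]
    gen = QuadContourGenerator(x, y, np.hypot(x - 25.0, y - 25.0), formatter=shapely_formatter)

    polygons = gen.filled_contour(min=5.0, max=10.0)
    print(polygons[0].area)  # roughly pi * (10**2 - 5**2) for the annulus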
valid
ContourMixin.contour
Get contour lines at the given level. Parameters ---------- level : numbers.Number The data level to calculate the contour lines for. Returns ------- : The result of the :attr:`formatter` called on the contour at the given `level`.
contours/core.py
def contour(self, level): """Get contour lines at the given level. Parameters ---------- level : numbers.Number The data level to calculate the contour lines for. Returns ------- : The result of the :attr:`formatter` called on the contour at the given `level`. """ if not isinstance(level, numbers.Number): raise TypeError( ("'_level' must be of type 'numbers.Number' but is " "'{:s}'").format(type(level))) vertices = self._contour_generator.create_contour(level) return self.formatter(level, vertices)
def contour(self, level): """Get contour lines at the given level. Parameters ---------- level : numbers.Number The data level to calculate the contour lines for. Returns ------- : The result of the :attr:`formatter` called on the contour at the given `level`. """ if not isinstance(level, numbers.Number): raise TypeError( ("'_level' must be of type 'numbers.Number' but is " "'{:s}'").format(type(level))) vertices = self._contour_generator.create_contour(level) return self.formatter(level, vertices)
[ "Get", "contour", "lines", "at", "the", "given", "level", "." ]
ccarocean/python-contours
python
https://github.com/ccarocean/python-contours/blob/d154a679a2ea6a324c3308c1d087d88d0eb79622/contours/core.py#L239-L259
[ "def", "contour", "(", "self", ",", "level", ")", ":", "if", "not", "isinstance", "(", "level", ",", "numbers", ".", "Number", ")", ":", "raise", "TypeError", "(", "(", "\"'_level' must be of type 'numbers.Number' but is \"", "\"'{:s}'\"", ")", ".", "format", "(", "type", "(", "level", ")", ")", ")", "vertices", "=", "self", ".", "_contour_generator", ".", "create_contour", "(", "level", ")", "return", "self", ".", "formatter", "(", "level", ",", "vertices", ")" ]
d154a679a2ea6a324c3308c1d087d88d0eb79622
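A hedged sketch: `contour` takes a single numeric level, so several levels mean several calls; a non-numeric argument is rejected with a TypeError.

    import numpy as np

    from contours.quad import QuadContourGenerator

    y, x = np.mgrid[0.0:40.0, 0.0:40.0]
    gen = QuadContourGenerator.from_curvilinear(x, y, np.hypot(x - 20.0, y - 20.0))

    for level in (5.0, 10.0, 15.0):
        lines = gen.contour(level)                     # default formatter: list of Nx2 arrays
        print(level, sum(len(line) for line in lines))  # total vertex count at this level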
valid
ContourMixin.filled_contour
Get contour polygons between the given levels. Parameters ---------- min : numbers.Number or None The minimum data level of the contour polygon. If :obj:`None`, ``numpy.finfo(numpy.float64).min`` will be used. max : numbers.Number or None The maximum data level of the contour polygon. If :obj:`None`, ``numpy.finfo(numpy.float64).max`` will be used. Returns ------- : The result of the :attr:`formatter` called on the filled contour between `min` and `max`.
contours/core.py
def filled_contour(self, min=None, max=None): """Get contour polygons between the given levels. Parameters ---------- min : numbers.Number or None The minimum data level of the contour polygon. If :obj:`None`, ``numpy.finfo(numpy.float64).min`` will be used. max : numbers.Number or None The maximum data level of the contour polygon. If :obj:`None`, ``numpy.finfo(numpy.float64).max`` will be used. Returns ------- : The result of the :attr:`formatter` called on the filled contour between `min` and `max`. """ # pylint: disable=redefined-builtin,redefined-outer-name # Get the contour vertices. if min is None: min = np.finfo(np.float64).min if max is None: max = np.finfo(np.float64).max vertices, codes = ( self._contour_generator.create_filled_contour(min, max)) return self.formatter((min, max), vertices, codes)
def filled_contour(self, min=None, max=None): """Get contour polygons between the given levels. Parameters ---------- min : numbers.Number or None The minimum data level of the contour polygon. If :obj:`None`, ``numpy.finfo(numpy.float64).min`` will be used. max : numbers.Number or None The maximum data level of the contour polygon. If :obj:`None`, ``numpy.finfo(numpy.float64).max`` will be used. Returns ------- : The result of the :attr:`formatter` called on the filled contour between `min` and `max`. """ # pylint: disable=redefined-builtin,redefined-outer-name # Get the contour vertices. if min is None: min = np.finfo(np.float64).min if max is None: max = np.finfo(np.float64).max vertices, codes = ( self._contour_generator.create_filled_contour(min, max)) return self.formatter((min, max), vertices, codes)
[ "Get", "contour", "polygons", "between", "the", "given", "levels", "." ]
ccarocean/python-contours
python
https://github.com/ccarocean/python-contours/blob/d154a679a2ea6a324c3308c1d087d88d0eb79622/contours/core.py#L261-L288
[ "def", "filled_contour", "(", "self", ",", "min", "=", "None", ",", "max", "=", "None", ")", ":", "# pylint: disable=redefined-builtin,redefined-outer-name", "# Get the contour vertices.", "if", "min", "is", "None", ":", "min", "=", "np", ".", "finfo", "(", "np", ".", "float64", ")", ".", "min", "if", "max", "is", "None", ":", "max", "=", "np", ".", "finfo", "(", "np", ".", "float64", ")", ".", "max", "vertices", ",", "codes", "=", "(", "self", ".", "_contour_generator", ".", "create_filled_contour", "(", "min", ",", "max", ")", ")", "return", "self", ".", "formatter", "(", "(", "min", ",", "max", ")", ",", "vertices", ",", "codes", ")" ]
d154a679a2ea6a324c3308c1d087d88d0eb79622
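A hedged sketch: bands are bounded by `min` and `max`, and leaving either as None opens that side of the band (the float64 extremes are substituted internally).

    import numpy as np

    from contours.quad import QuadContourGenerator

    y, x = np.mgrid[0.0:40.0, 0.0:40.0]
    gen = QuadContourGenerator.from_curvilinear(x, y, np.hypot(x - 20.0, y - 20.0))

    band = gen.filled_contour(min=5.0, max=10.0)  # the 5 <= z <= 10 annulus
    low = gen.filled_contour(max=5.0)             # everything up to 5
    print(len(band), len(low))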