Dataset columns (field name, type, and observed size range or class count):

    partition          stringclasses    3 values
    func_name          stringlengths    1 - 134
    docstring          stringlengths    1 - 46.9k
    path               stringlengths    4 - 223
    original_string    stringlengths    75 - 104k
    code               stringlengths    75 - 104k
    docstring_tokens   listlengths      1 - 1.97k
    repo               stringlengths    7 - 55
    language           stringclasses    1 value
    url                stringlengths    87 - 315
    code_tokens        listlengths      19 - 28.4k
    sha                stringlengths    40 - 40
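Each record below fills these columns in order. A minimal sketch of consuming such rows, assuming a JSON Lines export with exactly these field names (the file name and the train/test split names are assumptions; only the 'valid' split appears in this preview):

```python
import json

def check_row(row):
    # 'partition' has 3 classes; this preview shows 'valid' (the others are
    # presumably the usual 'train'/'test' splits -- an assumption)
    assert row["partition"] in ("train", "valid", "test")
    # 'sha' is always a 40-character git commit hash
    assert len(row["sha"]) == 40
    # token lists stay within the listlengths bounds quoted above
    assert 1 <= len(row["docstring_tokens"])
    assert 19 <= len(row["code_tokens"])
    return row["repo"], row["func_name"], row["path"]

# "rows.jsonl" is an illustrative file name for a JSON Lines export of the rows
with open("rows.jsonl") as fh:
    for line in fh:
        print(check_row(json.loads(line)))
```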
valid
RegistryThing.createFromSource
returns a registry component for anything that's a valid package name (this does not guarantee that the component actually exists in the registry: use availableVersions() for that).
yotta/lib/registry_access.py
def createFromSource(cls, vs, name, registry): ''' returns a registry component for anything that's a valid package name (this does not guarantee that the component actually exists in the registry: use availableVersions() for that). ''' # we deliberately allow only lowercase, hyphen, and (unfortunately) # numbers in package names, to reduce the possibility of confusingly # similar names: if the name doesn't match this then escalate to make # the user fix it. Targets also allow + if registry == 'targets': name_match = re.match('^[a-z]+[a-z0-9+-]*$', name) if not name_match: raise access_common.AccessException( 'Target name "%s" is not valid (must contain only lowercase letters, hyphen, plus, and numbers)' % name ) else: name_match = re.match('^[a-z]+[a-z0-9-]*$', name) if not name_match: raise access_common.AccessException( 'Module name "%s" is not valid (must contain only lowercase letters, hyphen, and numbers)' % name ) assert(vs.semantic_spec) return RegistryThing(name, vs.semantic_spec, registry)
def createFromSource(cls, vs, name, registry): ''' returns a registry component for anything that's a valid package name (this does not guarantee that the component actually exists in the registry: use availableVersions() for that). ''' # we deliberately allow only lowercase, hyphen, and (unfortunately) # numbers in package names, to reduce the possibility of confusingly # similar names: if the name doesn't match this then escalate to make # the user fix it. Targets also allow + if registry == 'targets': name_match = re.match('^[a-z]+[a-z0-9+-]*$', name) if not name_match: raise access_common.AccessException( 'Target name "%s" is not valid (must contain only lowercase letters, hyphen, plus, and numbers)' % name ) else: name_match = re.match('^[a-z]+[a-z0-9-]*$', name) if not name_match: raise access_common.AccessException( 'Module name "%s" is not valid (must contain only lowercase letters, hyphen, and numbers)' % name ) assert(vs.semantic_spec) return RegistryThing(name, vs.semantic_spec, registry)
[ "returns", "a", "registry", "component", "for", "anything", "that", "s", "a", "valid", "package", "name", "(", "this", "does", "not", "guarantee", "that", "the", "component", "actually", "exists", "in", "the", "registry", ":", "use", "availableVersions", "()", "for", "that", ")", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/registry_access.py#L455-L477
[ "def", "createFromSource", "(", "cls", ",", "vs", ",", "name", ",", "registry", ")", ":", "# we deliberately allow only lowercase, hyphen, and (unfortunately)", "# numbers in package names, to reduce the possibility of confusingly", "# similar names: if the name doesn't match this then escalate to make", "# the user fix it. Targets also allow +", "if", "registry", "==", "'targets'", ":", "name_match", "=", "re", ".", "match", "(", "'^[a-z]+[a-z0-9+-]*$'", ",", "name", ")", "if", "not", "name_match", ":", "raise", "access_common", ".", "AccessException", "(", "'Target name \"%s\" is not valid (must contain only lowercase letters, hyphen, plus, and numbers)'", "%", "name", ")", "else", ":", "name_match", "=", "re", ".", "match", "(", "'^[a-z]+[a-z0-9-]*$'", ",", "name", ")", "if", "not", "name_match", ":", "raise", "access_common", ".", "AccessException", "(", "'Module name \"%s\" is not valid (must contain only lowercase letters, hyphen, and numbers)'", "%", "name", ")", "assert", "(", "vs", ".", "semantic_spec", ")", "return", "RegistryThing", "(", "name", ",", "vs", ".", "semantic_spec", ",", "registry", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
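For reference, the two validation patterns used above behave like this on a few made-up names (standalone sketch; the example names are illustrative):

```python
import re

MODULE_NAME = re.compile(r'^[a-z]+[a-z0-9-]*$')    # modules: lowercase letters, digits, hyphen
TARGET_NAME = re.compile(r'^[a-z]+[a-z0-9+-]*$')   # targets: additionally allow '+'

for name in ('simplelog', 'my-module2', 'frdm-k64f-gcc+', 'Not-Valid', '2bad'):
    print(name, bool(MODULE_NAME.match(name)), bool(TARGET_NAME.match(name)))
# 'frdm-k64f-gcc+' is only a valid target name; the last two fail both
# patterns because names must start with a lowercase letter.
```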
valid
_JSONConfigParser.read
Read a list of files. Their configuration values are merged, with preference to values from files earlier in the list.
yotta/lib/settings.py
def read(self, filenames): '''' Read a list of files. Their configuration values are merged, with preference to values from files earlier in the list. ''' for fn in filenames: try: self.configs[fn] = ordered_json.load(fn) except IOError: self.configs[fn] = OrderedDict() except Exception as e: self.configs[fn] = OrderedDict() logging.warning( "Failed to read settings file %s, it will be ignored. The error was: %s", fn, e )
def read(self, filenames): '''' Read a list of files. Their configuration values are merged, with preference to values from files earlier in the list. ''' for fn in filenames: try: self.configs[fn] = ordered_json.load(fn) except IOError: self.configs[fn] = OrderedDict() except Exception as e: self.configs[fn] = OrderedDict() logging.warning( "Failed to read settings file %s, it will be ignored. The error was: %s", fn, e )
[ "Read", "a", "list", "of", "files", ".", "Their", "configuration", "values", "are", "merged", "with", "preference", "to", "values", "from", "files", "earlier", "in", "the", "list", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/settings.py#L60-L74
[ "def", "read", "(", "self", ",", "filenames", ")", ":", "for", "fn", "in", "filenames", ":", "try", ":", "self", ".", "configs", "[", "fn", "]", "=", "ordered_json", ".", "load", "(", "fn", ")", "except", "IOError", ":", "self", ".", "configs", "[", "fn", "]", "=", "OrderedDict", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "configs", "[", "fn", "]", "=", "OrderedDict", "(", ")", "logging", ".", "warning", "(", "\"Failed to read settings file %s, it will be ignored. The error was: %s\"", ",", "fn", ",", "e", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
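The error handling in read() means a missing settings file simply contributes an empty mapping, while any other failure is logged and the file is skipped. A standalone sketch of that behaviour (plain json with object_pairs_hook stands in for the internal ordered_json helper, and the file name is illustrative):

```python
import json, logging
from collections import OrderedDict

configs = OrderedDict()
for fn in ('/tmp/does-not-exist.json',):   # illustrative path
    try:
        with open(fn) as fh:
            configs[fn] = json.load(fh, object_pairs_hook=OrderedDict)
    except IOError:
        # missing file: fall back to an empty mapping, silently
        configs[fn] = OrderedDict()
    except Exception as e:
        # anything else (e.g. malformed JSON): warn and skip the file
        configs[fn] = OrderedDict()
        logging.warning(
            "Failed to read settings file %s, it will be ignored. The error was: %s", fn, e
        )

print(configs)   # OrderedDict([('/tmp/does-not-exist.json', OrderedDict())])
```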
valid
_JSONConfigParser.get
Return a configuration value. Usage: get('section.property'). Note that currently array indexes are not supported; you must get the whole array. Returns None if any path element or the property is missing.
yotta/lib/settings.py
def get(self, path): ''' return a configuration value usage: get('section.property') Note that currently array indexes are not supported. You must get the whole array. returns None if any path element or the property is missing ''' path = _splitPath(path) for config in self.configs.values(): cur = config for el in path: if el in cur: cur = cur[el] else: cur = None break if cur is not None: return cur return None
def get(self, path): ''' return a configuration value usage: get('section.property') Note that currently array indexes are not supported. You must get the whole array. returns None if any path element or the property is missing ''' path = _splitPath(path) for config in self.configs.values(): cur = config for el in path: if el in cur: cur = cur[el] else: cur = None break if cur is not None: return cur return None
[ "return", "a", "configuration", "value" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/settings.py#L76-L98
[ "def", "get", "(", "self", ",", "path", ")", ":", "path", "=", "_splitPath", "(", "path", ")", "for", "config", "in", "self", ".", "configs", ".", "values", "(", ")", ":", "cur", "=", "config", "for", "el", "in", "path", ":", "if", "el", "in", "cur", ":", "cur", "=", "cur", "[", "el", "]", "else", ":", "cur", "=", "None", "break", "if", "cur", "is", "not", "None", ":", "return", "cur", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
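The dotted-path lookup and the earlier-file-wins precedence can be demonstrated standalone (file names and values below are illustrative; splitting the path on '.' stands in for the internal _splitPath helper):

```python
from collections import OrderedDict

configs = OrderedDict([
    ('~/.yotta/config.json',   {'github': {'authtoken': 'abc123'}}),
    ('/etc/yotta/config.json', {'github': {'authtoken': 'ignored'},
                                'mbed':   {'url': 'https://example.invalid'}}),
])

def get(path):
    for config in configs.values():          # the earliest-read file is checked first
        cur = config
        for el in path.split('.'):            # stand-in for _splitPath()
            if isinstance(cur, dict) and el in cur:
                cur = cur[el]
            else:
                cur = None
                break
        if cur is not None:
            return cur
    return None

print(get('github.authtoken'))   # 'abc123' -- the earlier file wins
print(get('mbed.url'))           # falls through to the later file
print(get('missing.property'))   # None
```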
valid
_JSONConfigParser.set
Set a configuration value. If no filename is specified, the property is set in the first configuration file. Note that if a filename is specified and the property path is present in an earlier filename, then the set property will be hidden. Usage: set('section.property', value='somevalue'). Note that currently array indexes are not supported; you must set the whole array.
yotta/lib/settings.py
def set(self, path, value=None, filename=None): ''' Set a configuration value. If no filename is specified, the property is set in the first configuration file. Note that if a filename is specified and the property path is present in an earlier filename then set property will be hidden. usage: set('section.property', value='somevalue') Note that currently array indexes are not supported. You must set the whole array. ''' if filename is None: config = self._firstConfig()[1] else: config = self.configs[filename] path = _splitPath(path) for el in path[:-1]: if el in config: config = config[el] else: config[el] = OrderedDict() config = config[el] config[path[-1]] = value
def set(self, path, value=None, filename=None): ''' Set a configuration value. If no filename is specified, the property is set in the first configuration file. Note that if a filename is specified and the property path is present in an earlier filename then set property will be hidden. usage: set('section.property', value='somevalue') Note that currently array indexes are not supported. You must set the whole array. ''' if filename is None: config = self._firstConfig()[1] else: config = self.configs[filename] path = _splitPath(path) for el in path[:-1]: if el in config: config = config[el] else: config[el] = OrderedDict() config = config[el] config[path[-1]] = value
[ "Set", "a", "configuration", "value", ".", "If", "no", "filename", "is", "specified", "the", "property", "is", "set", "in", "the", "first", "configuration", "file", ".", "Note", "that", "if", "a", "filename", "is", "specified", "and", "the", "property", "path", "is", "present", "in", "an", "earlier", "filename", "then", "set", "property", "will", "be", "hidden", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/settings.py#L100-L124
[ "def", "set", "(", "self", ",", "path", ",", "value", "=", "None", ",", "filename", "=", "None", ")", ":", "if", "filename", "is", "None", ":", "config", "=", "self", ".", "_firstConfig", "(", ")", "[", "1", "]", "else", ":", "config", "=", "self", ".", "configs", "[", "filename", "]", "path", "=", "_splitPath", "(", "path", ")", "for", "el", "in", "path", "[", ":", "-", "1", "]", ":", "if", "el", "in", "config", ":", "config", "=", "config", "[", "el", "]", "else", ":", "config", "[", "el", "]", "=", "OrderedDict", "(", ")", "config", "=", "config", "[", "el", "]", "config", "[", "path", "[", "-", "1", "]", "]", "=", "value" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
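Setting a dotted path creates any missing intermediate sections on the way down, as in this standalone sketch (the path and value are made up):

```python
from collections import OrderedDict

config = OrderedDict()
path = 'build.debug.flags'.split('.')        # illustrative property path
cur = config
for el in path[:-1]:
    cur = cur.setdefault(el, OrderedDict())  # create missing sections as needed
cur[path[-1]] = '-Og'                        # illustrative value
print(config)   # nested OrderedDicts: build -> debug -> flags == '-Og'
```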
valid
islast
indicate whether the current item is the last one in a generator
yotta/lib/utils.py
def islast(generator): ''' indicate whether the current item is the last one in a generator ''' next_x = None first = True for x in generator: if not first: yield (next_x, False) next_x = x first = False if not first: yield (next_x, True)
def islast(generator): ''' indicate whether the current item is the last one in a generator ''' next_x = None first = True for x in generator: if not first: yield (next_x, False) next_x = x first = False if not first: yield (next_x, True)
[ "indicate", "whether", "the", "current", "item", "is", "the", "last", "one", "in", "a", "generator" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/utils.py#L9-L20
[ "def", "islast", "(", "generator", ")", ":", "next_x", "=", "None", "first", "=", "True", "for", "x", "in", "generator", ":", "if", "not", "first", ":", "yield", "(", "next_x", ",", "False", ")", "next_x", "=", "x", "first", "=", "False", "if", "not", "first", ":", "yield", "(", "next_x", ",", "True", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
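A quick usage example of islast() (the function body is copied from the row above):

```python
def islast(generator):
    next_x = None
    first = True
    for x in generator:
        if not first:
            yield (next_x, False)
        next_x = x
        first = False
    if not first:
        yield (next_x, True)

print(list(islast(iter(['a', 'b', 'c']))))   # [('a', False), ('b', False), ('c', True)]
print(list(islast(iter([]))))                # [] -- an empty generator yields nothing
```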
valid
remoteComponentFor
Return a RemoteComponent subclass for the specified component name and source URL (or version specification). Raises an exception if any arguments are invalid.
yotta/lib/access.py
def remoteComponentFor(name, version_required, registry='modules'): ''' Return a RemoteComponent sublclass for the specified component name and source url (or version specification) Raises an exception if any arguments are invalid. ''' try: vs = sourceparse.parseSourceURL(version_required) except ValueError as e: raise access_common.Unavailable( '%s' % (e) ) if vs.source_type == 'registry': if registry not in ('modules', 'targets'): raise Exception('no known registry namespace "%s"' % registry) return registry_access.RegistryThing.createFromSource( vs, name, registry=registry ) elif vs.source_type == 'github': return github_access.GithubComponent.createFromSource(vs, name) elif vs.source_type == 'git': return git_access.GitComponent.createFromSource(vs, name) elif vs.source_type == 'hg': return hg_access.HGComponent.createFromSource(vs, name) else: raise Exception('unsupported module source: "%s"' % vs.source_type)
def remoteComponentFor(name, version_required, registry='modules'): ''' Return a RemoteComponent sublclass for the specified component name and source url (or version specification) Raises an exception if any arguments are invalid. ''' try: vs = sourceparse.parseSourceURL(version_required) except ValueError as e: raise access_common.Unavailable( '%s' % (e) ) if vs.source_type == 'registry': if registry not in ('modules', 'targets'): raise Exception('no known registry namespace "%s"' % registry) return registry_access.RegistryThing.createFromSource( vs, name, registry=registry ) elif vs.source_type == 'github': return github_access.GithubComponent.createFromSource(vs, name) elif vs.source_type == 'git': return git_access.GitComponent.createFromSource(vs, name) elif vs.source_type == 'hg': return hg_access.HGComponent.createFromSource(vs, name) else: raise Exception('unsupported module source: "%s"' % vs.source_type)
[ "Return", "a", "RemoteComponent", "sublclass", "for", "the", "specified", "component", "name", "and", "source", "url", "(", "or", "version", "specification", ")", "Raises", "an", "exception", "if", "any", "arguments", "are", "invalid", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access.py#L48-L74
[ "def", "remoteComponentFor", "(", "name", ",", "version_required", ",", "registry", "=", "'modules'", ")", ":", "try", ":", "vs", "=", "sourceparse", ".", "parseSourceURL", "(", "version_required", ")", "except", "ValueError", "as", "e", ":", "raise", "access_common", ".", "Unavailable", "(", "'%s'", "%", "(", "e", ")", ")", "if", "vs", ".", "source_type", "==", "'registry'", ":", "if", "registry", "not", "in", "(", "'modules'", ",", "'targets'", ")", ":", "raise", "Exception", "(", "'no known registry namespace \"%s\"'", "%", "registry", ")", "return", "registry_access", ".", "RegistryThing", ".", "createFromSource", "(", "vs", ",", "name", ",", "registry", "=", "registry", ")", "elif", "vs", ".", "source_type", "==", "'github'", ":", "return", "github_access", ".", "GithubComponent", ".", "createFromSource", "(", "vs", ",", "name", ")", "elif", "vs", ".", "source_type", "==", "'git'", ":", "return", "git_access", ".", "GitComponent", ".", "createFromSource", "(", "vs", ",", "name", ")", "elif", "vs", ".", "source_type", "==", "'hg'", ":", "return", "hg_access", ".", "HGComponent", ".", "createFromSource", "(", "vs", ",", "name", ")", "else", ":", "raise", "Exception", "(", "'unsupported module source: \"%s\"'", "%", "vs", ".", "source_type", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
satisfyVersionFromSearchPaths
returns a Component/Target for the specified version, if found in the list of search paths. If `update' is True, then also check for newer versions of the found component, and update it in-place (unless it was installed via a symlink).
yotta/lib/access.py
def satisfyVersionFromSearchPaths(name, version_required, search_paths, update=False, type='module', inherit_shrinkwrap=None): ''' returns a Component/Target for the specified version, if found in the list of search paths. If `update' is True, then also check for newer versions of the found component, and update it in-place (unless it was installed via a symlink). ''' # Pack, , base class for targets and components, internal from yotta.lib import pack v = None try: sv = sourceparse.parseSourceURL(version_required) except ValueError as e: logging.error(e) return None try: local_version = searchPathsFor( name, sv.semanticSpec(), search_paths, type, inherit_shrinkwrap = inherit_shrinkwrap ) except pack.InvalidDescription as e: logger.error(e) return None logger.debug("%s %s locally" % (('found', 'not found')[not local_version], name)) if local_version: if update and not local_version.installedLinked(): #logger.debug('attempt to check latest version of %s @%s...' % (name, version_required)) v = latestSuitableVersion(name, version_required, registry=_registryNamespaceForType(type)) if local_version: local_version.setLatestAvailable(v) # if we don't need to update, then we're done if local_version.installedLinked() or not local_version.outdated(): logger.debug("satisfy component from directory: %s" % local_version.path) # if a component exists (has a valid description file), and either is # not outdated, or we are not updating if name != local_version.getName(): raise Exception('Component %s found in incorrectly named directory %s (%s)' % ( local_version.getName(), name, local_version.path )) return local_version # otherwise, we need to update the installed component logger.info('update outdated: %s@%s -> %s' % ( name, local_version.getVersion(), v )) # must rm the old component before continuing fsutils.rmRf(local_version.path) return _satisfyVersionByInstallingVersion( name, version_required, local_version.path, v, type=type, inherit_shrinkwrap=inherit_shrinkwrap ) return None
def satisfyVersionFromSearchPaths(name, version_required, search_paths, update=False, type='module', inherit_shrinkwrap=None): ''' returns a Component/Target for the specified version, if found in the list of search paths. If `update' is True, then also check for newer versions of the found component, and update it in-place (unless it was installed via a symlink). ''' # Pack, , base class for targets and components, internal from yotta.lib import pack v = None try: sv = sourceparse.parseSourceURL(version_required) except ValueError as e: logging.error(e) return None try: local_version = searchPathsFor( name, sv.semanticSpec(), search_paths, type, inherit_shrinkwrap = inherit_shrinkwrap ) except pack.InvalidDescription as e: logger.error(e) return None logger.debug("%s %s locally" % (('found', 'not found')[not local_version], name)) if local_version: if update and not local_version.installedLinked(): #logger.debug('attempt to check latest version of %s @%s...' % (name, version_required)) v = latestSuitableVersion(name, version_required, registry=_registryNamespaceForType(type)) if local_version: local_version.setLatestAvailable(v) # if we don't need to update, then we're done if local_version.installedLinked() or not local_version.outdated(): logger.debug("satisfy component from directory: %s" % local_version.path) # if a component exists (has a valid description file), and either is # not outdated, or we are not updating if name != local_version.getName(): raise Exception('Component %s found in incorrectly named directory %s (%s)' % ( local_version.getName(), name, local_version.path )) return local_version # otherwise, we need to update the installed component logger.info('update outdated: %s@%s -> %s' % ( name, local_version.getVersion(), v )) # must rm the old component before continuing fsutils.rmRf(local_version.path) return _satisfyVersionByInstallingVersion( name, version_required, local_version.path, v, type=type, inherit_shrinkwrap=inherit_shrinkwrap ) return None
[ "returns", "a", "Component", "/", "Target", "for", "the", "specified", "version", "if", "found", "in", "the", "list", "of", "search", "paths", ".", "If", "update", "is", "True", "then", "also", "check", "for", "newer", "versions", "of", "the", "found", "component", "and", "update", "it", "in", "-", "place", "(", "unless", "it", "was", "installed", "via", "a", "symlink", ")", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access.py#L262-L320
[ "def", "satisfyVersionFromSearchPaths", "(", "name", ",", "version_required", ",", "search_paths", ",", "update", "=", "False", ",", "type", "=", "'module'", ",", "inherit_shrinkwrap", "=", "None", ")", ":", "# Pack, , base class for targets and components, internal", "from", "yotta", ".", "lib", "import", "pack", "v", "=", "None", "try", ":", "sv", "=", "sourceparse", ".", "parseSourceURL", "(", "version_required", ")", "except", "ValueError", "as", "e", ":", "logging", ".", "error", "(", "e", ")", "return", "None", "try", ":", "local_version", "=", "searchPathsFor", "(", "name", ",", "sv", ".", "semanticSpec", "(", ")", ",", "search_paths", ",", "type", ",", "inherit_shrinkwrap", "=", "inherit_shrinkwrap", ")", "except", "pack", ".", "InvalidDescription", "as", "e", ":", "logger", ".", "error", "(", "e", ")", "return", "None", "logger", ".", "debug", "(", "\"%s %s locally\"", "%", "(", "(", "'found'", ",", "'not found'", ")", "[", "not", "local_version", "]", ",", "name", ")", ")", "if", "local_version", ":", "if", "update", "and", "not", "local_version", ".", "installedLinked", "(", ")", ":", "#logger.debug('attempt to check latest version of %s @%s...' % (name, version_required))", "v", "=", "latestSuitableVersion", "(", "name", ",", "version_required", ",", "registry", "=", "_registryNamespaceForType", "(", "type", ")", ")", "if", "local_version", ":", "local_version", ".", "setLatestAvailable", "(", "v", ")", "# if we don't need to update, then we're done", "if", "local_version", ".", "installedLinked", "(", ")", "or", "not", "local_version", ".", "outdated", "(", ")", ":", "logger", ".", "debug", "(", "\"satisfy component from directory: %s\"", "%", "local_version", ".", "path", ")", "# if a component exists (has a valid description file), and either is", "# not outdated, or we are not updating", "if", "name", "!=", "local_version", ".", "getName", "(", ")", ":", "raise", "Exception", "(", "'Component %s found in incorrectly named directory %s (%s)'", "%", "(", "local_version", ".", "getName", "(", ")", ",", "name", ",", "local_version", ".", "path", ")", ")", "return", "local_version", "# otherwise, we need to update the installed component", "logger", ".", "info", "(", "'update outdated: %s@%s -> %s'", "%", "(", "name", ",", "local_version", ".", "getVersion", "(", ")", ",", "v", ")", ")", "# must rm the old component before continuing", "fsutils", ".", "rmRf", "(", "local_version", ".", "path", ")", "return", "_satisfyVersionByInstallingVersion", "(", "name", ",", "version_required", ",", "local_version", ".", "path", ",", "v", ",", "type", "=", "type", ",", "inherit_shrinkwrap", "=", "inherit_shrinkwrap", ")", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
satisfyVersionByInstalling
installs and returns a Component/Target for the specified name+version requirement, into a subdirectory of `working_directory'
yotta/lib/access.py
def satisfyVersionByInstalling(name, version_required, working_directory, type='module', inherit_shrinkwrap=None): ''' installs and returns a Component/Target for the specified name+version requirement, into a subdirectory of `working_directory' ''' v = latestSuitableVersion(name, version_required, _registryNamespaceForType(type)) install_into = os.path.join(working_directory, name) return _satisfyVersionByInstallingVersion( name, version_required, install_into, v, type=type, inherit_shrinkwrap = inherit_shrinkwrap )
def satisfyVersionByInstalling(name, version_required, working_directory, type='module', inherit_shrinkwrap=None): ''' installs and returns a Component/Target for the specified name+version requirement, into a subdirectory of `working_directory' ''' v = latestSuitableVersion(name, version_required, _registryNamespaceForType(type)) install_into = os.path.join(working_directory, name) return _satisfyVersionByInstallingVersion( name, version_required, install_into, v, type=type, inherit_shrinkwrap = inherit_shrinkwrap )
[ "installs", "and", "returns", "a", "Component", "/", "Target", "for", "the", "specified", "name", "+", "version", "requirement", "into", "a", "subdirectory", "of", "working_directory" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access.py#L322-L330
[ "def", "satisfyVersionByInstalling", "(", "name", ",", "version_required", ",", "working_directory", ",", "type", "=", "'module'", ",", "inherit_shrinkwrap", "=", "None", ")", ":", "v", "=", "latestSuitableVersion", "(", "name", ",", "version_required", ",", "_registryNamespaceForType", "(", "type", ")", ")", "install_into", "=", "os", ".", "path", ".", "join", "(", "working_directory", ",", "name", ")", "return", "_satisfyVersionByInstallingVersion", "(", "name", ",", "version_required", ",", "install_into", ",", "v", ",", "type", "=", "type", ",", "inherit_shrinkwrap", "=", "inherit_shrinkwrap", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
_satisfyVersionByInstallingVersion
installs and returns a Component/Target for the specified version requirement into 'working_directory' using the provided remote version object. This function is normally called via `satisfyVersionByInstalling', which looks up a suitable remote version object.
yotta/lib/access.py
def _satisfyVersionByInstallingVersion(name, version_required, working_directory, version, type='module', inherit_shrinkwrap=None): ''' installs and returns a Component/Target for the specified version requirement into 'working_directory' using the provided remote version object. This function is not normally called via `satisfyVersionByInstalling', which looks up a suitable remote version object. ''' assert(version) logger.info('download %s', version) version.unpackInto(working_directory) r = _clsForType(type)(working_directory, inherit_shrinkwrap = inherit_shrinkwrap) if not r: raise Exception( 'Dependency "%s":"%s" is not a valid %s.' % (name, version_required, type) ) if name != r.getName(): raise Exception('%s %s (specification %s) has incorrect name %s' % ( type, name, version_required, r.getName() )) # error code deliberately ignored here for now, it isn't clear what the # behaviour should be (abort? remove the unpacked state then abort? # continue?) r.runScript('postInstall') return r
def _satisfyVersionByInstallingVersion(name, version_required, working_directory, version, type='module', inherit_shrinkwrap=None): ''' installs and returns a Component/Target for the specified version requirement into 'working_directory' using the provided remote version object. This function is not normally called via `satisfyVersionByInstalling', which looks up a suitable remote version object. ''' assert(version) logger.info('download %s', version) version.unpackInto(working_directory) r = _clsForType(type)(working_directory, inherit_shrinkwrap = inherit_shrinkwrap) if not r: raise Exception( 'Dependency "%s":"%s" is not a valid %s.' % (name, version_required, type) ) if name != r.getName(): raise Exception('%s %s (specification %s) has incorrect name %s' % ( type, name, version_required, r.getName() )) # error code deliberately ignored here for now, it isn't clear what the # behaviour should be (abort? remove the unpacked state then abort? # continue?) r.runScript('postInstall') return r
[ "installs", "and", "returns", "a", "Component", "/", "Target", "for", "the", "specified", "version", "requirement", "into", "working_directory", "using", "the", "provided", "remote", "version", "object", ".", "This", "function", "is", "not", "normally", "called", "via", "satisfyVersionByInstalling", "which", "looks", "up", "a", "suitable", "remote", "version", "object", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access.py#L332-L354
[ "def", "_satisfyVersionByInstallingVersion", "(", "name", ",", "version_required", ",", "working_directory", ",", "version", ",", "type", "=", "'module'", ",", "inherit_shrinkwrap", "=", "None", ")", ":", "assert", "(", "version", ")", "logger", ".", "info", "(", "'download %s'", ",", "version", ")", "version", ".", "unpackInto", "(", "working_directory", ")", "r", "=", "_clsForType", "(", "type", ")", "(", "working_directory", ",", "inherit_shrinkwrap", "=", "inherit_shrinkwrap", ")", "if", "not", "r", ":", "raise", "Exception", "(", "'Dependency \"%s\":\"%s\" is not a valid %s.'", "%", "(", "name", ",", "version_required", ",", "type", ")", ")", "if", "name", "!=", "r", ".", "getName", "(", ")", ":", "raise", "Exception", "(", "'%s %s (specification %s) has incorrect name %s'", "%", "(", "type", ",", "name", ",", "version_required", ",", "r", ".", "getName", "(", ")", ")", ")", "# error code deliberately ignored here for now, it isn't clear what the", "# behaviour should be (abort? remove the unpacked state then abort?", "# continue?)", "r", ".", "runScript", "(", "'postInstall'", ")", "return", "r" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
satisfyVersion
returns a Component/Target for the specified version (either to an already installed copy (from the available list, or from disk), or to a newly downloaded one), or None if the version could not be satisfied.

update_installed = None / 'Update'
    None:   prevent any attempt to look for new versions if the component/target already exists
    Update: replace any existing version with the newest available, if the newest available has a higher version
yotta/lib/access.py
def satisfyVersion( name, version_required, available, search_paths, working_directory, update_installed=None, type='module', # or 'target' inherit_shrinkwrap=None ): ''' returns a Component/Target for the specified version (either to an already installed copy (from the available list, or from disk), or to a newly downloaded one), or None if the version could not be satisfied. update_installed = None / 'Update' None: prevent any attempt to look for new versions if the component/target already exists Update: replace any existing version with the newest available, if the newest available has a higher version ''' r = satisfyFromAvailable(name, available, type=type) if r is not None: if not sourceparse.parseSourceURL(version_required).semanticSpecMatches(r.getVersion()): raise access_common.SpecificationNotMet( "Installed %s %s doesn't match specification %s" % (type, name, version_required) ) return r r = satisfyVersionFromSearchPaths( name, version_required, search_paths, (update_installed == 'Update'), type = type, inherit_shrinkwrap = inherit_shrinkwrap ) if r is not None: return r return satisfyVersionByInstalling( name, version_required, working_directory, type=type, inherit_shrinkwrap = inherit_shrinkwrap )
def satisfyVersion( name, version_required, available, search_paths, working_directory, update_installed=None, type='module', # or 'target' inherit_shrinkwrap=None ): ''' returns a Component/Target for the specified version (either to an already installed copy (from the available list, or from disk), or to a newly downloaded one), or None if the version could not be satisfied. update_installed = None / 'Update' None: prevent any attempt to look for new versions if the component/target already exists Update: replace any existing version with the newest available, if the newest available has a higher version ''' r = satisfyFromAvailable(name, available, type=type) if r is not None: if not sourceparse.parseSourceURL(version_required).semanticSpecMatches(r.getVersion()): raise access_common.SpecificationNotMet( "Installed %s %s doesn't match specification %s" % (type, name, version_required) ) return r r = satisfyVersionFromSearchPaths( name, version_required, search_paths, (update_installed == 'Update'), type = type, inherit_shrinkwrap = inherit_shrinkwrap ) if r is not None: return r return satisfyVersionByInstalling( name, version_required, working_directory, type=type, inherit_shrinkwrap = inherit_shrinkwrap )
[ "returns", "a", "Component", "/", "Target", "for", "the", "specified", "version", "(", "either", "to", "an", "already", "installed", "copy", "(", "from", "the", "available", "list", "or", "from", "disk", ")", "or", "to", "a", "newly", "downloaded", "one", ")", "or", "None", "if", "the", "version", "could", "not", "be", "satisfied", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access.py#L356-L398
[ "def", "satisfyVersion", "(", "name", ",", "version_required", ",", "available", ",", "search_paths", ",", "working_directory", ",", "update_installed", "=", "None", ",", "type", "=", "'module'", ",", "# or 'target'", "inherit_shrinkwrap", "=", "None", ")", ":", "r", "=", "satisfyFromAvailable", "(", "name", ",", "available", ",", "type", "=", "type", ")", "if", "r", "is", "not", "None", ":", "if", "not", "sourceparse", ".", "parseSourceURL", "(", "version_required", ")", ".", "semanticSpecMatches", "(", "r", ".", "getVersion", "(", ")", ")", ":", "raise", "access_common", ".", "SpecificationNotMet", "(", "\"Installed %s %s doesn't match specification %s\"", "%", "(", "type", ",", "name", ",", "version_required", ")", ")", "return", "r", "r", "=", "satisfyVersionFromSearchPaths", "(", "name", ",", "version_required", ",", "search_paths", ",", "(", "update_installed", "==", "'Update'", ")", ",", "type", "=", "type", ",", "inherit_shrinkwrap", "=", "inherit_shrinkwrap", ")", "if", "r", "is", "not", "None", ":", "return", "r", "return", "satisfyVersionByInstalling", "(", "name", ",", "version_required", ",", "working_directory", ",", "type", "=", "type", ",", "inherit_shrinkwrap", "=", "inherit_shrinkwrap", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
sourceDirValidationError
validate source directory names in components
yotta/lib/validate.py
def sourceDirValidationError(dirname, component_name): ''' validate source directory names in components ''' if dirname == component_name: return 'Module %s public include directory %s should not contain source files' % (component_name, dirname) elif dirname.lower() in ('source', 'src') and dirname != 'source': return 'Module %s has non-standard source directory name: "%s" should be "source"' % (component_name, dirname) elif isPotentialTestDir(dirname) and dirname != 'test': return 'Module %s has non-standard test directory name: "%s" should be "test"' % (component_name, dirname) elif not Source_Dir_Regex.match(dirname): corrected = Source_Dir_Invalid_Regex.sub('', dirname.lower()) if not corrected: corrected = 'source' return 'Module %s has non-standard source directory name: "%s" should be "%s"' % (component_name, dirname, corrected) else: return None
def sourceDirValidationError(dirname, component_name): ''' validate source directory names in components ''' if dirname == component_name: return 'Module %s public include directory %s should not contain source files' % (component_name, dirname) elif dirname.lower() in ('source', 'src') and dirname != 'source': return 'Module %s has non-standard source directory name: "%s" should be "source"' % (component_name, dirname) elif isPotentialTestDir(dirname) and dirname != 'test': return 'Module %s has non-standard test directory name: "%s" should be "test"' % (component_name, dirname) elif not Source_Dir_Regex.match(dirname): corrected = Source_Dir_Invalid_Regex.sub('', dirname.lower()) if not corrected: corrected = 'source' return 'Module %s has non-standard source directory name: "%s" should be "%s"' % (component_name, dirname, corrected) else: return None
[ "validate", "source", "directory", "names", "in", "components" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/validate.py#L28-L42
[ "def", "sourceDirValidationError", "(", "dirname", ",", "component_name", ")", ":", "if", "dirname", "==", "component_name", ":", "return", "'Module %s public include directory %s should not contain source files'", "%", "(", "component_name", ",", "dirname", ")", "elif", "dirname", ".", "lower", "(", ")", "in", "(", "'source'", ",", "'src'", ")", "and", "dirname", "!=", "'source'", ":", "return", "'Module %s has non-standard source directory name: \"%s\" should be \"source\"'", "%", "(", "component_name", ",", "dirname", ")", "elif", "isPotentialTestDir", "(", "dirname", ")", "and", "dirname", "!=", "'test'", ":", "return", "'Module %s has non-standard test directory name: \"%s\" should be \"test\"'", "%", "(", "component_name", ",", "dirname", ")", "elif", "not", "Source_Dir_Regex", ".", "match", "(", "dirname", ")", ":", "corrected", "=", "Source_Dir_Invalid_Regex", ".", "sub", "(", "''", ",", "dirname", ".", "lower", "(", ")", ")", "if", "not", "corrected", ":", "corrected", "=", "'source'", "return", "'Module %s has non-standard source directory name: \"%s\" should be \"%s\"'", "%", "(", "component_name", ",", "dirname", ",", "corrected", ")", "else", ":", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
displayOutdated
print information about outdated modules, return 0 if there is nothing to be done and nonzero otherwise
yotta/outdated.py
def displayOutdated(modules, dependency_specs, use_colours): ''' print information about outdated modules, return 0 if there is nothing to be done and nonzero otherwise ''' if use_colours: DIM = colorama.Style.DIM #pylint: disable=no-member NORMAL = colorama.Style.NORMAL #pylint: disable=no-member BRIGHT = colorama.Style.BRIGHT #pylint: disable=no-member YELLOW = colorama.Fore.YELLOW #pylint: disable=no-member RED = colorama.Fore.RED #pylint: disable=no-member GREEN = colorama.Fore.GREEN #pylint: disable=no-member RESET = colorama.Style.RESET_ALL #pylint: disable=no-member else: DIM = BRIGHT = YELLOW = RED = GREEN = RESET = u'' status = 0 # access, , get components, internal from yotta.lib import access from yotta.lib import access_common # sourceparse, , parse version source urls, internal from yotta.lib import sourceparse for name, m in modules.items(): if m.isTestDependency(): continue try: latest_v = access.latestSuitableVersion(name, '*', registry='modules', quiet=True) except access_common.Unavailable as e: latest_v = None if not m: m_version = u' ' + RESET + BRIGHT + RED + u"missing" + RESET else: m_version = DIM + u'@%s' % (m.version) if not latest_v: print(u'%s%s%s%s not available from the registry%s' % (RED, name, m_version, NORMAL, RESET)) status = 2 continue elif not m or m.version < latest_v: update_prevented_by = '' if m: specs_preventing_update = [ x for x in dependency_specs if x.name == name and not sourceparse.parseSourceURL(x.nonShrinkwrappedVersionReq()).semanticSpecMatches(latest_v) ] shrinkwrap_prevents_update = [ x for x in dependency_specs if x.name == name and x.isShrinkwrapped() and not sourceparse.parseSourceURL(x.versionReq()).semanticSpecMatches(latest_v) ] if len(specs_preventing_update): update_prevented_by = ' (update prevented by specifications: %s)' % ( ', '.join(['%s from %s' % (x.version_req, x.specifying_module) for x in specs_preventing_update]) ) if len(shrinkwrap_prevents_update): update_prevented_by += ' yotta-shrinkwrap.json prevents update' if m.version.major() < latest_v.major(): # major versions being outdated might be deliberate, so not # that bad: colour = GREEN elif m.version.minor() < latest_v.minor(): # minor outdated versions is moderately bad colour = YELLOW else: # patch-outdated versions is really bad, because there should # be no reason not to update: colour = RED else: colour = RED print(u'%s%s%s latest: %s%s%s%s' % (name, m_version, RESET, colour, latest_v.version, update_prevented_by, RESET)) if not status: status = 1 return status
def displayOutdated(modules, dependency_specs, use_colours): ''' print information about outdated modules, return 0 if there is nothing to be done and nonzero otherwise ''' if use_colours: DIM = colorama.Style.DIM #pylint: disable=no-member NORMAL = colorama.Style.NORMAL #pylint: disable=no-member BRIGHT = colorama.Style.BRIGHT #pylint: disable=no-member YELLOW = colorama.Fore.YELLOW #pylint: disable=no-member RED = colorama.Fore.RED #pylint: disable=no-member GREEN = colorama.Fore.GREEN #pylint: disable=no-member RESET = colorama.Style.RESET_ALL #pylint: disable=no-member else: DIM = BRIGHT = YELLOW = RED = GREEN = RESET = u'' status = 0 # access, , get components, internal from yotta.lib import access from yotta.lib import access_common # sourceparse, , parse version source urls, internal from yotta.lib import sourceparse for name, m in modules.items(): if m.isTestDependency(): continue try: latest_v = access.latestSuitableVersion(name, '*', registry='modules', quiet=True) except access_common.Unavailable as e: latest_v = None if not m: m_version = u' ' + RESET + BRIGHT + RED + u"missing" + RESET else: m_version = DIM + u'@%s' % (m.version) if not latest_v: print(u'%s%s%s%s not available from the registry%s' % (RED, name, m_version, NORMAL, RESET)) status = 2 continue elif not m or m.version < latest_v: update_prevented_by = '' if m: specs_preventing_update = [ x for x in dependency_specs if x.name == name and not sourceparse.parseSourceURL(x.nonShrinkwrappedVersionReq()).semanticSpecMatches(latest_v) ] shrinkwrap_prevents_update = [ x for x in dependency_specs if x.name == name and x.isShrinkwrapped() and not sourceparse.parseSourceURL(x.versionReq()).semanticSpecMatches(latest_v) ] if len(specs_preventing_update): update_prevented_by = ' (update prevented by specifications: %s)' % ( ', '.join(['%s from %s' % (x.version_req, x.specifying_module) for x in specs_preventing_update]) ) if len(shrinkwrap_prevents_update): update_prevented_by += ' yotta-shrinkwrap.json prevents update' if m.version.major() < latest_v.major(): # major versions being outdated might be deliberate, so not # that bad: colour = GREEN elif m.version.minor() < latest_v.minor(): # minor outdated versions is moderately bad colour = YELLOW else: # patch-outdated versions is really bad, because there should # be no reason not to update: colour = RED else: colour = RED print(u'%s%s%s latest: %s%s%s%s' % (name, m_version, RESET, colour, latest_v.version, update_prevented_by, RESET)) if not status: status = 1 return status
[ "print", "information", "about", "outdated", "modules", "return", "0", "if", "there", "is", "nothing", "to", "be", "done", "and", "nonzero", "otherwise" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/outdated.py#L40-L114
[ "def", "displayOutdated", "(", "modules", ",", "dependency_specs", ",", "use_colours", ")", ":", "if", "use_colours", ":", "DIM", "=", "colorama", ".", "Style", ".", "DIM", "#pylint: disable=no-member", "NORMAL", "=", "colorama", ".", "Style", ".", "NORMAL", "#pylint: disable=no-member", "BRIGHT", "=", "colorama", ".", "Style", ".", "BRIGHT", "#pylint: disable=no-member", "YELLOW", "=", "colorama", ".", "Fore", ".", "YELLOW", "#pylint: disable=no-member", "RED", "=", "colorama", ".", "Fore", ".", "RED", "#pylint: disable=no-member", "GREEN", "=", "colorama", ".", "Fore", ".", "GREEN", "#pylint: disable=no-member", "RESET", "=", "colorama", ".", "Style", ".", "RESET_ALL", "#pylint: disable=no-member", "else", ":", "DIM", "=", "BRIGHT", "=", "YELLOW", "=", "RED", "=", "GREEN", "=", "RESET", "=", "u''", "status", "=", "0", "# access, , get components, internal", "from", "yotta", ".", "lib", "import", "access", "from", "yotta", ".", "lib", "import", "access_common", "# sourceparse, , parse version source urls, internal", "from", "yotta", ".", "lib", "import", "sourceparse", "for", "name", ",", "m", "in", "modules", ".", "items", "(", ")", ":", "if", "m", ".", "isTestDependency", "(", ")", ":", "continue", "try", ":", "latest_v", "=", "access", ".", "latestSuitableVersion", "(", "name", ",", "'*'", ",", "registry", "=", "'modules'", ",", "quiet", "=", "True", ")", "except", "access_common", ".", "Unavailable", "as", "e", ":", "latest_v", "=", "None", "if", "not", "m", ":", "m_version", "=", "u' '", "+", "RESET", "+", "BRIGHT", "+", "RED", "+", "u\"missing\"", "+", "RESET", "else", ":", "m_version", "=", "DIM", "+", "u'@%s'", "%", "(", "m", ".", "version", ")", "if", "not", "latest_v", ":", "print", "(", "u'%s%s%s%s not available from the registry%s'", "%", "(", "RED", ",", "name", ",", "m_version", ",", "NORMAL", ",", "RESET", ")", ")", "status", "=", "2", "continue", "elif", "not", "m", "or", "m", ".", "version", "<", "latest_v", ":", "update_prevented_by", "=", "''", "if", "m", ":", "specs_preventing_update", "=", "[", "x", "for", "x", "in", "dependency_specs", "if", "x", ".", "name", "==", "name", "and", "not", "sourceparse", ".", "parseSourceURL", "(", "x", ".", "nonShrinkwrappedVersionReq", "(", ")", ")", ".", "semanticSpecMatches", "(", "latest_v", ")", "]", "shrinkwrap_prevents_update", "=", "[", "x", "for", "x", "in", "dependency_specs", "if", "x", ".", "name", "==", "name", "and", "x", ".", "isShrinkwrapped", "(", ")", "and", "not", "sourceparse", ".", "parseSourceURL", "(", "x", ".", "versionReq", "(", ")", ")", ".", "semanticSpecMatches", "(", "latest_v", ")", "]", "if", "len", "(", "specs_preventing_update", ")", ":", "update_prevented_by", "=", "' (update prevented by specifications: %s)'", "%", "(", "', '", ".", "join", "(", "[", "'%s from %s'", "%", "(", "x", ".", "version_req", ",", "x", ".", "specifying_module", ")", "for", "x", "in", "specs_preventing_update", "]", ")", ")", "if", "len", "(", "shrinkwrap_prevents_update", ")", ":", "update_prevented_by", "+=", "' yotta-shrinkwrap.json prevents update'", "if", "m", ".", "version", ".", "major", "(", ")", "<", "latest_v", ".", "major", "(", ")", ":", "# major versions being outdated might be deliberate, so not", "# that bad:", "colour", "=", "GREEN", "elif", "m", ".", "version", ".", "minor", "(", ")", "<", "latest_v", ".", "minor", "(", ")", ":", "# minor outdated versions is moderately bad", "colour", "=", "YELLOW", "else", ":", "# patch-outdated versions is really bad, because there should", "# be no reason not to update:", 
"colour", "=", "RED", "else", ":", "colour", "=", "RED", "print", "(", "u'%s%s%s latest: %s%s%s%s'", "%", "(", "name", ",", "m_version", ",", "RESET", ",", "colour", ",", "latest_v", ".", "version", ",", "update_prevented_by", ",", "RESET", ")", ")", "if", "not", "status", ":", "status", "=", "1", "return", "status" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Pack.origin
Read the .yotta_origin.json file (if present), and return the value of the 'url' property
yotta/lib/pack.py
def origin(self): ''' Read the .yotta_origin.json file (if present), and return the value of the 'url' property ''' if self.origin_info is None: self.origin_info = {} try: self.origin_info = ordered_json.load(os.path.join(self.path, Origin_Info_Fname)) except IOError: pass return self.origin_info.get('url', None)
def origin(self): ''' Read the .yotta_origin.json file (if present), and return the value of the 'url' property ''' if self.origin_info is None: self.origin_info = {} try: self.origin_info = ordered_json.load(os.path.join(self.path, Origin_Info_Fname)) except IOError: pass return self.origin_info.get('url', None)
[ "Read", "the", ".", "yotta_origin", ".", "json", "file", "(", "if", "present", ")", "and", "return", "the", "value", "of", "the", "url", "property" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L282-L291
[ "def", "origin", "(", "self", ")", ":", "if", "self", ".", "origin_info", "is", "None", ":", "self", ".", "origin_info", "=", "{", "}", "try", ":", "self", ".", "origin_info", "=", "ordered_json", ".", "load", "(", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "Origin_Info_Fname", ")", ")", "except", "IOError", ":", "pass", "return", "self", ".", "origin_info", ".", "get", "(", "'url'", ",", "None", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Pack.outdated
Return a truthy object if a newer suitable version is available, otherwise return None. (in fact the object returned is a ComponentVersion that can be used to get the newer version)
yotta/lib/pack.py
def outdated(self): ''' Return a truthy object if a newer suitable version is available, otherwise return None. (in fact the object returned is a ComponentVersion that can be used to get the newer version) ''' if self.latest_suitable_version and self.latest_suitable_version > self.version: return self.latest_suitable_version else: return None
def outdated(self): ''' Return a truthy object if a newer suitable version is available, otherwise return None. (in fact the object returned is a ComponentVersion that can be used to get the newer version) ''' if self.latest_suitable_version and self.latest_suitable_version > self.version: return self.latest_suitable_version else: return None
[ "Return", "a", "truthy", "object", "if", "a", "newer", "suitable", "version", "is", "available", "otherwise", "return", "None", ".", "(", "in", "fact", "the", "object", "returned", "is", "a", "ComponentVersion", "that", "can", "be", "used", "to", "get", "the", "newer", "version", ")" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L319-L328
[ "def", "outdated", "(", "self", ")", ":", "if", "self", ".", "latest_suitable_version", "and", "self", ".", "latest_suitable_version", ">", "self", ".", "version", ":", "return", "self", ".", "latest_suitable_version", "else", ":", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Pack.commitVCS
Commit the current working directory state (or do nothing if the working directory is not version controlled)
yotta/lib/pack.py
def commitVCS(self, tag=None): ''' Commit the current working directory state (or do nothing if the working directory is not version controlled) ''' if not self.vcs: return self.vcs.commit(message='version %s' % tag, tag=tag)
def commitVCS(self, tag=None): ''' Commit the current working directory state (or do nothing if the working directory is not version controlled) ''' if not self.vcs: return self.vcs.commit(message='version %s' % tag, tag=tag)
[ "Commit", "the", "current", "working", "directory", "state", "(", "or", "do", "nothing", "if", "the", "working", "directory", "is", "not", "version", "controlled", ")" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L338-L344
[ "def", "commitVCS", "(", "self", ",", "tag", "=", "None", ")", ":", "if", "not", "self", ".", "vcs", ":", "return", "self", ".", "vcs", ".", "commit", "(", "message", "=", "'version %s'", "%", "tag", ",", "tag", "=", "tag", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Pack.ignores
Test if this module ignores the file at "path", which must be a path relative to the root of the module. If a file is within a directory that is ignored, the file is also ignored.
yotta/lib/pack.py
def ignores(self, path): ''' Test if this module ignores the file at "path", which must be a path relative to the root of the module. If a file is within a directory that is ignored, the file is also ignored. ''' test_path = PurePath('/', path) # also check any parent directories of this path against the ignore # patterns: test_paths = tuple([test_path] + list(test_path.parents)) for exp in self.ignore_patterns: for tp in test_paths: if tp.match(exp): logger.debug('"%s" ignored ("%s" matched "%s")', path, tp, exp) return True return False
def ignores(self, path): ''' Test if this module ignores the file at "path", which must be a path relative to the root of the module. If a file is within a directory that is ignored, the file is also ignored. ''' test_path = PurePath('/', path) # also check any parent directories of this path against the ignore # patterns: test_paths = tuple([test_path] + list(test_path.parents)) for exp in self.ignore_patterns: for tp in test_paths: if tp.match(exp): logger.debug('"%s" ignored ("%s" matched "%s")', path, tp, exp) return True return False
[ "Test", "if", "this", "module", "ignores", "the", "file", "at", "path", "which", "must", "be", "a", "path", "relative", "to", "the", "root", "of", "the", "module", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L378-L396
[ "def", "ignores", "(", "self", ",", "path", ")", ":", "test_path", "=", "PurePath", "(", "'/'", ",", "path", ")", "# also check any parent directories of this path against the ignore", "# patterns:", "test_paths", "=", "tuple", "(", "[", "test_path", "]", "+", "list", "(", "test_path", ".", "parents", ")", ")", "for", "exp", "in", "self", ".", "ignore_patterns", ":", "for", "tp", "in", "test_paths", ":", "if", "tp", ".", "match", "(", "exp", ")", ":", "logger", ".", "debug", "(", "'\"%s\" ignored (\"%s\" matched \"%s\")'", ",", "path", ",", "tp", ",", "exp", ")", "return", "True", "return", "False" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
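The parent-directory behaviour of ignores() comes from matching each ancestor of the path against the patterns; a standalone sketch using pathlib (the patterns and paths are made up):

```python
from pathlib import PurePath

ignore_patterns = ['/build', '*.o']          # illustrative patterns

def ignores(path):
    test_path = PurePath('/', path)
    # check the path itself and every parent directory against the patterns
    for tp in (test_path,) + tuple(test_path.parents):
        for exp in ignore_patterns:
            if tp.match(exp):
                return True
    return False

print(ignores('build/output.bin'))   # True: the parent directory '/build' matches
print(ignores('source/main.o'))      # True: '*.o' matches the file itself
print(ignores('source/main.c'))      # False
```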
valid
Pack.writeDescription
Write the current (possibly modified) component description to a package description file in the component directory.
yotta/lib/pack.py
def writeDescription(self): ''' Write the current (possibly modified) component description to a package description file in the component directory. ''' ordered_json.dump(os.path.join(self.path, self.description_filename), self.description) if self.vcs: self.vcs.markForCommit(self.description_filename)
def writeDescription(self): ''' Write the current (possibly modified) component description to a package description file in the component directory. ''' ordered_json.dump(os.path.join(self.path, self.description_filename), self.description) if self.vcs: self.vcs.markForCommit(self.description_filename)
[ "Write", "the", "current", "(", "possibly", "modified", ")", "component", "description", "to", "a", "package", "description", "file", "in", "the", "component", "directory", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L405-L411
[ "def", "writeDescription", "(", "self", ")", ":", "ordered_json", ".", "dump", "(", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "self", ".", "description_filename", ")", ",", "self", ".", "description", ")", "if", "self", ".", "vcs", ":", "self", ".", "vcs", ".", "markForCommit", "(", "self", ".", "description_filename", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Pack.generateTarball
Write a tarball of the current component/target to the file object "file_object", which must already be open for writing at position 0
yotta/lib/pack.py
def generateTarball(self, file_object): ''' Write a tarball of the current component/target to the file object "file_object", which must already be open for writing at position 0 ''' archive_name = '%s-%s' % (self.getName(), self.getVersion()) def filterArchive(tarinfo): if tarinfo.name.find(archive_name) == 0 : unprefixed_name = tarinfo.name[len(archive_name)+1:] tarinfo.mode &= 0o775 else: unprefixed_name = tarinfo.name if self.ignores(unprefixed_name): return None else: return tarinfo with tarfile.open(fileobj=file_object, mode='w:gz') as tf: logger.info('generate archive extracting to "%s"' % archive_name) tf.add(self.path, arcname=archive_name, filter=filterArchive)
def generateTarball(self, file_object): ''' Write a tarball of the current component/target to the file object "file_object", which must already be open for writing at position 0 ''' archive_name = '%s-%s' % (self.getName(), self.getVersion()) def filterArchive(tarinfo): if tarinfo.name.find(archive_name) == 0 : unprefixed_name = tarinfo.name[len(archive_name)+1:] tarinfo.mode &= 0o775 else: unprefixed_name = tarinfo.name if self.ignores(unprefixed_name): return None else: return tarinfo with tarfile.open(fileobj=file_object, mode='w:gz') as tf: logger.info('generate archive extracting to "%s"' % archive_name) tf.add(self.path, arcname=archive_name, filter=filterArchive)
[ "Write", "a", "tarball", "of", "the", "current", "component", "/", "target", "to", "the", "file", "object", "file_object", "which", "must", "already", "be", "open", "for", "writing", "at", "position", "0" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L413-L430
[ "def", "generateTarball", "(", "self", ",", "file_object", ")", ":", "archive_name", "=", "'%s-%s'", "%", "(", "self", ".", "getName", "(", ")", ",", "self", ".", "getVersion", "(", ")", ")", "def", "filterArchive", "(", "tarinfo", ")", ":", "if", "tarinfo", ".", "name", ".", "find", "(", "archive_name", ")", "==", "0", ":", "unprefixed_name", "=", "tarinfo", ".", "name", "[", "len", "(", "archive_name", ")", "+", "1", ":", "]", "tarinfo", ".", "mode", "&=", "0o775", "else", ":", "unprefixed_name", "=", "tarinfo", ".", "name", "if", "self", ".", "ignores", "(", "unprefixed_name", ")", ":", "return", "None", "else", ":", "return", "tarinfo", "with", "tarfile", ".", "open", "(", "fileobj", "=", "file_object", ",", "mode", "=", "'w:gz'", ")", "as", "tf", ":", "logger", ".", "info", "(", "'generate archive extracting to \"%s\"'", "%", "archive_name", ")", "tf", ".", "add", "(", "self", ".", "path", ",", "arcname", "=", "archive_name", ",", "filter", "=", "filterArchive", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
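The generateTarball record above builds a gzipped archive through a tarfile filter callback that clamps entry permissions and can drop ignored entries. As a rough standalone sketch of that pattern (not yotta code: the archive name, the temporary source directory, and the in-memory buffer standing in for file_object are all made up for illustration):

import io, os, tarfile, tempfile

archive_name = "example-module-1.0.0"      # hypothetical <name>-<version> prefix

def filter_archive(tarinfo):
    tarinfo.mode &= 0o775                  # clamp permissions, as the code above does
    return tarinfo                         # returning None instead would exclude the entry

with tempfile.TemporaryDirectory() as src:
    with open(os.path.join(src, "module.json"), "w") as f:
        f.write("{}")
    buf = io.BytesIO()                     # stands in for the already-open file_object
    with tarfile.open(fileobj=buf, mode="w:gz") as tf:
        tf.add(src, arcname=archive_name, filter=filter_archive)
    print(len(buf.getvalue()), "bytes of gzipped archive written")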
valid
Pack.publish
Publish to the appropriate registry, return a description of any errors that occured, or None if successful. No VCS tagging is performed.
yotta/lib/pack.py
def publish(self, registry=None): ''' Publish to the appropriate registry, return a description of any errors that occured, or None if successful. No VCS tagging is performed. ''' if (registry is None) or (registry == registry_access.Registry_Base_URL): if 'private' in self.description and self.description['private']: return "this %s is private and cannot be published" % (self.description_filename.split('.')[0]) upload_archive = os.path.join(self.path, 'upload.tar.gz') fsutils.rmF(upload_archive) fd = os.open(upload_archive, os.O_CREAT | os.O_EXCL | os.O_RDWR | getattr(os, "O_BINARY", 0)) with os.fdopen(fd, 'rb+') as tar_file: tar_file.truncate() self.generateTarball(tar_file) logger.debug('generated tar file of length %s', tar_file.tell()) tar_file.seek(0) # calculate the hash of the file before we upload it: shasum = hashlib.sha256() while True: chunk = tar_file.read(1000) if not chunk: break shasum.update(chunk) logger.debug('generated tar file has hash %s', shasum.hexdigest()) tar_file.seek(0) with self.findAndOpenReadme() as readme_file_wrapper: if not readme_file_wrapper: logger.warning("no readme.md file detected") with open(self.getDescriptionFile(), 'r') as description_file: return registry_access.publish( self.getRegistryNamespace(), self.getName(), self.getVersion(), description_file, tar_file, readme_file_wrapper.file, readme_file_wrapper.extension().lower(), registry=registry )
def publish(self, registry=None): ''' Publish to the appropriate registry, return a description of any errors that occured, or None if successful. No VCS tagging is performed. ''' if (registry is None) or (registry == registry_access.Registry_Base_URL): if 'private' in self.description and self.description['private']: return "this %s is private and cannot be published" % (self.description_filename.split('.')[0]) upload_archive = os.path.join(self.path, 'upload.tar.gz') fsutils.rmF(upload_archive) fd = os.open(upload_archive, os.O_CREAT | os.O_EXCL | os.O_RDWR | getattr(os, "O_BINARY", 0)) with os.fdopen(fd, 'rb+') as tar_file: tar_file.truncate() self.generateTarball(tar_file) logger.debug('generated tar file of length %s', tar_file.tell()) tar_file.seek(0) # calculate the hash of the file before we upload it: shasum = hashlib.sha256() while True: chunk = tar_file.read(1000) if not chunk: break shasum.update(chunk) logger.debug('generated tar file has hash %s', shasum.hexdigest()) tar_file.seek(0) with self.findAndOpenReadme() as readme_file_wrapper: if not readme_file_wrapper: logger.warning("no readme.md file detected") with open(self.getDescriptionFile(), 'r') as description_file: return registry_access.publish( self.getRegistryNamespace(), self.getName(), self.getVersion(), description_file, tar_file, readme_file_wrapper.file, readme_file_wrapper.extension().lower(), registry=registry )
[ "Publish", "to", "the", "appropriate", "registry", "return", "a", "description", "of", "any", "errors", "that", "occured", "or", "None", "if", "successful", ".", "No", "VCS", "tagging", "is", "performed", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L448-L486
[ "def", "publish", "(", "self", ",", "registry", "=", "None", ")", ":", "if", "(", "registry", "is", "None", ")", "or", "(", "registry", "==", "registry_access", ".", "Registry_Base_URL", ")", ":", "if", "'private'", "in", "self", ".", "description", "and", "self", ".", "description", "[", "'private'", "]", ":", "return", "\"this %s is private and cannot be published\"", "%", "(", "self", ".", "description_filename", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "upload_archive", "=", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "'upload.tar.gz'", ")", "fsutils", ".", "rmF", "(", "upload_archive", ")", "fd", "=", "os", ".", "open", "(", "upload_archive", ",", "os", ".", "O_CREAT", "|", "os", ".", "O_EXCL", "|", "os", ".", "O_RDWR", "|", "getattr", "(", "os", ",", "\"O_BINARY\"", ",", "0", ")", ")", "with", "os", ".", "fdopen", "(", "fd", ",", "'rb+'", ")", "as", "tar_file", ":", "tar_file", ".", "truncate", "(", ")", "self", ".", "generateTarball", "(", "tar_file", ")", "logger", ".", "debug", "(", "'generated tar file of length %s'", ",", "tar_file", ".", "tell", "(", ")", ")", "tar_file", ".", "seek", "(", "0", ")", "# calculate the hash of the file before we upload it:", "shasum", "=", "hashlib", ".", "sha256", "(", ")", "while", "True", ":", "chunk", "=", "tar_file", ".", "read", "(", "1000", ")", "if", "not", "chunk", ":", "break", "shasum", ".", "update", "(", "chunk", ")", "logger", ".", "debug", "(", "'generated tar file has hash %s'", ",", "shasum", ".", "hexdigest", "(", ")", ")", "tar_file", ".", "seek", "(", "0", ")", "with", "self", ".", "findAndOpenReadme", "(", ")", "as", "readme_file_wrapper", ":", "if", "not", "readme_file_wrapper", ":", "logger", ".", "warning", "(", "\"no readme.md file detected\"", ")", "with", "open", "(", "self", ".", "getDescriptionFile", "(", ")", ",", "'r'", ")", "as", "description_file", ":", "return", "registry_access", ".", "publish", "(", "self", ".", "getRegistryNamespace", "(", ")", ",", "self", ".", "getName", "(", ")", ",", "self", ".", "getVersion", "(", ")", ",", "description_file", ",", "tar_file", ",", "readme_file_wrapper", ".", "file", ",", "readme_file_wrapper", ".", "extension", "(", ")", ".", "lower", "(", ")", ",", "registry", "=", "registry", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
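The publish record above streams the generated tarball through SHA-256 in 1000-byte chunks before uploading it. A minimal, self-contained illustration of that chunked hashing step, with an in-memory buffer standing in for the real upload archive, might look like this:

import hashlib, io

tar_file = io.BytesIO(b"example archive contents")   # placeholder for the tarball
shasum = hashlib.sha256()
while True:
    chunk = tar_file.read(1000)            # same 1000-byte chunk size as the code above
    if not chunk:
        break
    shasum.update(chunk)
print("sha256:", shasum.hexdigest())
tar_file.seek(0)                           # rewind before the archive is read again for upload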
valid
Pack.unpublish
Try to un-publish the current version. Return a description of any errors that occured, or None if successful.
yotta/lib/pack.py
def unpublish(self, registry=None): ''' Try to un-publish the current version. Return a description of any errors that occured, or None if successful. ''' return registry_access.unpublish( self.getRegistryNamespace(), self.getName(), self.getVersion(), registry=registry )
def unpublish(self, registry=None): ''' Try to un-publish the current version. Return a description of any errors that occured, or None if successful. ''' return registry_access.unpublish( self.getRegistryNamespace(), self.getName(), self.getVersion(), registry=registry )
[ "Try", "to", "un", "-", "publish", "the", "current", "version", ".", "Return", "a", "description", "of", "any", "errors", "that", "occured", "or", "None", "if", "successful", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L488-L497
[ "def", "unpublish", "(", "self", ",", "registry", "=", "None", ")", ":", "return", "registry_access", ".", "unpublish", "(", "self", ".", "getRegistryNamespace", "(", ")", ",", "self", ".", "getName", "(", ")", ",", "self", ".", "getVersion", "(", ")", ",", "registry", "=", "registry", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Pack.getScript
Return the specified script command. If the first part of the command is a .py file, then the current python interpreter is prepended. If the script is a single string, rather than an array, it is shlex-split.
yotta/lib/pack.py
def getScript(self, scriptname): ''' Return the specified script command. If the first part of the command is a .py file, then the current python interpreter is prepended. If the script is a single string, rather than an array, it is shlex-split. ''' script = self.description.get('scripts', {}).get(scriptname, None) if script is not None: if isinstance(script, str) or isinstance(script, type(u'unicode string')): import shlex script = shlex.split(script) # if the command is a python script, run it with the python # interpreter being used to run yotta, also fetch the absolute path # to the script relative to this module (so that the script can be # distributed with the module, no matter what current working # directory it will be executed in): if len(script) and script[0].lower().endswith('.py'): if not os.path.isabs(script[0]): absscript = os.path.abspath(os.path.join(self.path, script[0])) logger.debug('rewriting script %s to be absolute path %s', script[0], absscript) script[0] = absscript import sys script = [sys.executable] + script return script
def getScript(self, scriptname): ''' Return the specified script command. If the first part of the command is a .py file, then the current python interpreter is prepended. If the script is a single string, rather than an array, it is shlex-split. ''' script = self.description.get('scripts', {}).get(scriptname, None) if script is not None: if isinstance(script, str) or isinstance(script, type(u'unicode string')): import shlex script = shlex.split(script) # if the command is a python script, run it with the python # interpreter being used to run yotta, also fetch the absolute path # to the script relative to this module (so that the script can be # distributed with the module, no matter what current working # directory it will be executed in): if len(script) and script[0].lower().endswith('.py'): if not os.path.isabs(script[0]): absscript = os.path.abspath(os.path.join(self.path, script[0])) logger.debug('rewriting script %s to be absolute path %s', script[0], absscript) script[0] = absscript import sys script = [sys.executable] + script return script
[ "Return", "the", "specified", "script", "command", ".", "If", "the", "first", "part", "of", "the", "command", "is", "a", ".", "py", "file", "then", "the", "current", "python", "interpreter", "is", "prepended", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L499-L525
[ "def", "getScript", "(", "self", ",", "scriptname", ")", ":", "script", "=", "self", ".", "description", ".", "get", "(", "'scripts'", ",", "{", "}", ")", ".", "get", "(", "scriptname", ",", "None", ")", "if", "script", "is", "not", "None", ":", "if", "isinstance", "(", "script", ",", "str", ")", "or", "isinstance", "(", "script", ",", "type", "(", "u'unicode string'", ")", ")", ":", "import", "shlex", "script", "=", "shlex", ".", "split", "(", "script", ")", "# if the command is a python script, run it with the python", "# interpreter being used to run yotta, also fetch the absolute path", "# to the script relative to this module (so that the script can be", "# distributed with the module, no matter what current working", "# directory it will be executed in):", "if", "len", "(", "script", ")", "and", "script", "[", "0", "]", ".", "lower", "(", ")", ".", "endswith", "(", "'.py'", ")", ":", "if", "not", "os", ".", "path", ".", "isabs", "(", "script", "[", "0", "]", ")", ":", "absscript", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "script", "[", "0", "]", ")", ")", "logger", ".", "debug", "(", "'rewriting script %s to be absolute path %s'", ",", "script", "[", "0", "]", ",", "absscript", ")", "script", "[", "0", "]", "=", "absscript", "import", "sys", "script", "=", "[", "sys", ".", "executable", "]", "+", "script", "return", "script" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
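The getScript record above describes three behaviours: a string script is shlex-split, a relative .py path is made absolute against the module directory, and the current Python interpreter is prepended. A rough re-statement of that logic outside yotta (the script name "testReporter" and the path "scripts/report.py" are invented for the example) is:

import os, shlex, sys

description = {"scripts": {"testReporter": "scripts/report.py --verbose"}}   # hypothetical module.json fragment
module_path = os.getcwd()                        # stands in for the module's directory

script = description["scripts"].get("testReporter")
if isinstance(script, str):
    script = shlex.split(script)                 # "a b c" -> ["a", "b", "c"]
if script and script[0].lower().endswith(".py"):
    if not os.path.isabs(script[0]):
        script[0] = os.path.abspath(os.path.join(module_path, script[0]))
    script = [sys.executable] + script           # run .py scripts with this interpreter
print(script)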
valid
Pack.runScript
Run the specified script from the scripts section of the module.json file in the directory of this module.
yotta/lib/pack.py
def runScript(self, scriptname, additional_environment=None): ''' Run the specified script from the scripts section of the module.json file in the directory of this module. ''' import subprocess import shlex command = self.getScript(scriptname) if command is None: logger.debug('%s has no script %s', self, scriptname) return 0 if not len(command): logger.error("script %s of %s is empty", scriptname, self.getName()) return 1 # define additional environment variables for scripts: env = os.environ.copy() if additional_environment is not None: env.update(additional_environment) errcode = 0 child = None try: logger.debug('running script: %s', command) child = subprocess.Popen( command, cwd = self.path, env = env ) child.wait() if child.returncode: logger.error( "script %s (from %s) exited with non-zero status %s", scriptname, self.getName(), child.returncode ) errcode = child.returncode child = None finally: if child is not None: tryTerminate(child) return errcode
def runScript(self, scriptname, additional_environment=None): ''' Run the specified script from the scripts section of the module.json file in the directory of this module. ''' import subprocess import shlex command = self.getScript(scriptname) if command is None: logger.debug('%s has no script %s', self, scriptname) return 0 if not len(command): logger.error("script %s of %s is empty", scriptname, self.getName()) return 1 # define additional environment variables for scripts: env = os.environ.copy() if additional_environment is not None: env.update(additional_environment) errcode = 0 child = None try: logger.debug('running script: %s', command) child = subprocess.Popen( command, cwd = self.path, env = env ) child.wait() if child.returncode: logger.error( "script %s (from %s) exited with non-zero status %s", scriptname, self.getName(), child.returncode ) errcode = child.returncode child = None finally: if child is not None: tryTerminate(child) return errcode
[ "Run", "the", "specified", "script", "from", "the", "scripts", "section", "of", "the", "module", ".", "json", "file", "in", "the", "directory", "of", "this", "module", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/pack.py#L529-L570
[ "def", "runScript", "(", "self", ",", "scriptname", ",", "additional_environment", "=", "None", ")", ":", "import", "subprocess", "import", "shlex", "command", "=", "self", ".", "getScript", "(", "scriptname", ")", "if", "command", "is", "None", ":", "logger", ".", "debug", "(", "'%s has no script %s'", ",", "self", ",", "scriptname", ")", "return", "0", "if", "not", "len", "(", "command", ")", ":", "logger", ".", "error", "(", "\"script %s of %s is empty\"", ",", "scriptname", ",", "self", ".", "getName", "(", ")", ")", "return", "1", "# define additional environment variables for scripts:", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", "if", "additional_environment", "is", "not", "None", ":", "env", ".", "update", "(", "additional_environment", ")", "errcode", "=", "0", "child", "=", "None", "try", ":", "logger", ".", "debug", "(", "'running script: %s'", ",", "command", ")", "child", "=", "subprocess", ".", "Popen", "(", "command", ",", "cwd", "=", "self", ".", "path", ",", "env", "=", "env", ")", "child", ".", "wait", "(", ")", "if", "child", ".", "returncode", ":", "logger", ".", "error", "(", "\"script %s (from %s) exited with non-zero status %s\"", ",", "scriptname", ",", "self", ".", "getName", "(", ")", ",", "child", ".", "returncode", ")", "errcode", "=", "child", ".", "returncode", "child", "=", "None", "finally", ":", "if", "child", "is", "not", "None", ":", "tryTerminate", "(", "child", ")", "return", "errcode" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
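The runScript record above executes the resolved command with the module directory as the working directory and any additional variables merged over a copy of the environment, then reports a non-zero exit status. A small sketch of that subprocess pattern, using a placeholder command and environment variable, could be:

import os, subprocess, sys

command = [sys.executable, "-c", "import os; print(os.environ['EXAMPLE_VAR'])"]   # placeholder command
env = os.environ.copy()
env.update({"EXAMPLE_VAR": "1"})               # additional_environment merged on top

child = subprocess.Popen(command, cwd=os.getcwd(), env=env)
child.wait()
print("exit status:", child.returncode)        # non-zero would be logged as an error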
valid
_truthyConfValue
Determine yotta-config truthiness. In yotta config land truthiness is different to python or json truthiness (in order to map nicely only preprocessor and CMake definediness): json -> python -> truthy/falsey false -> False -> Falsey null -> None -> Falsey undefined -> None -> Falsey 0 -> 0 -> Falsey "" -> "" -> Truthy (different from python) "0" -> "0" -> Truthy {} -> {} -> Truthy (different from python) [] -> [] -> Truthy (different from python) everything else is truthy
yotta/lib/component.py
def _truthyConfValue(v): ''' Determine yotta-config truthiness. In yotta config land truthiness is different to python or json truthiness (in order to map nicely only preprocessor and CMake definediness): json -> python -> truthy/falsey false -> False -> Falsey null -> None -> Falsey undefined -> None -> Falsey 0 -> 0 -> Falsey "" -> "" -> Truthy (different from python) "0" -> "0" -> Truthy {} -> {} -> Truthy (different from python) [] -> [] -> Truthy (different from python) everything else is truthy ''' if v is False: return False elif v is None: return False elif v == 0: return False else: # everything else is truthy! return True
def _truthyConfValue(v): ''' Determine yotta-config truthiness. In yotta config land truthiness is different to python or json truthiness (in order to map nicely only preprocessor and CMake definediness): json -> python -> truthy/falsey false -> False -> Falsey null -> None -> Falsey undefined -> None -> Falsey 0 -> 0 -> Falsey "" -> "" -> Truthy (different from python) "0" -> "0" -> Truthy {} -> {} -> Truthy (different from python) [] -> [] -> Truthy (different from python) everything else is truthy ''' if v is False: return False elif v is None: return False elif v == 0: return False else: # everything else is truthy! return True
[ "Determine", "yotta", "-", "config", "truthiness", ".", "In", "yotta", "config", "land", "truthiness", "is", "different", "to", "python", "or", "json", "truthiness", "(", "in", "order", "to", "map", "nicely", "only", "preprocessor", "and", "CMake", "definediness", ")", ":" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L41-L65
[ "def", "_truthyConfValue", "(", "v", ")", ":", "if", "v", "is", "False", ":", "return", "False", "elif", "v", "is", "None", ":", "return", "False", "elif", "v", "==", "0", ":", "return", "False", "else", ":", "# everything else is truthy!", "return", "True" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
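Because yotta-config truthiness deliberately differs from Python's (empty strings, lists and objects count as truthy, while false, null/undefined and 0 do not), the mapping documented above can be spot-checked with a local re-statement of the rule; the function below is written for this illustration and is not yotta's own symbol.

def truthy_conf_value(v):
    # false, null/undefined (None) and 0 are falsey; everything else is truthy
    return not (v is False or v is None or v == 0)

assert truthy_conf_value(None) is False    # undefined/null -> falsey
assert truthy_conf_value(0) is False       # 0 -> falsey
assert truthy_conf_value("") is True       # "" -> truthy (differs from Python)
assert truthy_conf_value("0") is True      # "0" -> truthy
assert truthy_conf_value({}) is True       # {} -> truthy (differs from Python)
assert truthy_conf_value([]) is True       # [] -> truthy (differs from Python)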
valid
Component.getDependencySpecs
Returns [DependencySpec] These are returned in the order that they are listed in the component description file: this is so that dependency resolution proceeds in a predictable way.
yotta/lib/component.py
def getDependencySpecs(self, target=None): ''' Returns [DependencySpec] These are returned in the order that they are listed in the component description file: this is so that dependency resolution proceeds in a predictable way. ''' deps = [] def specForDependency(name, version_spec, istest): shrinkwrap = self.getShrinkwrapMapping() shrinkwrap_version_req = None if name in shrinkwrap: # exact version, and pull from registry: shrinkwrap_version_req = shrinkwrap[name] logger.debug( 'respecting %s shrinkwrap version %s for %s', self.getName(), shrinkwrap_version_req, name ) return pack.DependencySpec( name, version_spec, istest, shrinkwrap_version_req = shrinkwrap_version_req, specifying_module = self.getName() ) deps += [specForDependency(x[0], x[1], False) for x in self.description.get('dependencies', {}).items()] target_deps = self.description.get('targetDependencies', {}) if target is not None: for conf_key, target_conf_deps in target_deps.items(): if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated(): logger.debug( 'Adding target-dependent dependency specs for target config %s to component %s' % (conf_key, self.getName()) ) deps += [specForDependency(x[0], x[1], False) for x in target_conf_deps.items()] deps += [specForDependency(x[0], x[1], True) for x in self.description.get('testDependencies', {}).items()] target_deps = self.description.get('testTargetDependencies', {}) if target is not None: for conf_key, target_conf_deps in target_deps.items(): if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated(): logger.debug( 'Adding test-target-dependent dependency specs for target config %s to component %s' % (conf_key, self.getName()) ) deps += [specForDependency(x[0], x[1], True) for x in target_conf_deps.items()] # remove duplicates (use the first occurrence) seen = set() r = [] for dep in deps: if not dep.name in seen: r.append(dep) seen.add(dep.name) return r
def getDependencySpecs(self, target=None): ''' Returns [DependencySpec] These are returned in the order that they are listed in the component description file: this is so that dependency resolution proceeds in a predictable way. ''' deps = [] def specForDependency(name, version_spec, istest): shrinkwrap = self.getShrinkwrapMapping() shrinkwrap_version_req = None if name in shrinkwrap: # exact version, and pull from registry: shrinkwrap_version_req = shrinkwrap[name] logger.debug( 'respecting %s shrinkwrap version %s for %s', self.getName(), shrinkwrap_version_req, name ) return pack.DependencySpec( name, version_spec, istest, shrinkwrap_version_req = shrinkwrap_version_req, specifying_module = self.getName() ) deps += [specForDependency(x[0], x[1], False) for x in self.description.get('dependencies', {}).items()] target_deps = self.description.get('targetDependencies', {}) if target is not None: for conf_key, target_conf_deps in target_deps.items(): if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated(): logger.debug( 'Adding target-dependent dependency specs for target config %s to component %s' % (conf_key, self.getName()) ) deps += [specForDependency(x[0], x[1], False) for x in target_conf_deps.items()] deps += [specForDependency(x[0], x[1], True) for x in self.description.get('testDependencies', {}).items()] target_deps = self.description.get('testTargetDependencies', {}) if target is not None: for conf_key, target_conf_deps in target_deps.items(): if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated(): logger.debug( 'Adding test-target-dependent dependency specs for target config %s to component %s' % (conf_key, self.getName()) ) deps += [specForDependency(x[0], x[1], True) for x in target_conf_deps.items()] # remove duplicates (use the first occurrence) seen = set() r = [] for dep in deps: if not dep.name in seen: r.append(dep) seen.add(dep.name) return r
[ "Returns", "[", "DependencySpec", "]" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L147-L204
[ "def", "getDependencySpecs", "(", "self", ",", "target", "=", "None", ")", ":", "deps", "=", "[", "]", "def", "specForDependency", "(", "name", ",", "version_spec", ",", "istest", ")", ":", "shrinkwrap", "=", "self", ".", "getShrinkwrapMapping", "(", ")", "shrinkwrap_version_req", "=", "None", "if", "name", "in", "shrinkwrap", ":", "# exact version, and pull from registry:", "shrinkwrap_version_req", "=", "shrinkwrap", "[", "name", "]", "logger", ".", "debug", "(", "'respecting %s shrinkwrap version %s for %s'", ",", "self", ".", "getName", "(", ")", ",", "shrinkwrap_version_req", ",", "name", ")", "return", "pack", ".", "DependencySpec", "(", "name", ",", "version_spec", ",", "istest", ",", "shrinkwrap_version_req", "=", "shrinkwrap_version_req", ",", "specifying_module", "=", "self", ".", "getName", "(", ")", ")", "deps", "+=", "[", "specForDependency", "(", "x", "[", "0", "]", ",", "x", "[", "1", "]", ",", "False", ")", "for", "x", "in", "self", ".", "description", ".", "get", "(", "'dependencies'", ",", "{", "}", ")", ".", "items", "(", ")", "]", "target_deps", "=", "self", ".", "description", ".", "get", "(", "'targetDependencies'", ",", "{", "}", ")", "if", "target", "is", "not", "None", ":", "for", "conf_key", ",", "target_conf_deps", "in", "target_deps", ".", "items", "(", ")", ":", "if", "_truthyConfValue", "(", "target", ".", "getConfigValue", "(", "conf_key", ")", ")", "or", "conf_key", "in", "target", ".", "getSimilarTo_Deprecated", "(", ")", ":", "logger", ".", "debug", "(", "'Adding target-dependent dependency specs for target config %s to component %s'", "%", "(", "conf_key", ",", "self", ".", "getName", "(", ")", ")", ")", "deps", "+=", "[", "specForDependency", "(", "x", "[", "0", "]", ",", "x", "[", "1", "]", ",", "False", ")", "for", "x", "in", "target_conf_deps", ".", "items", "(", ")", "]", "deps", "+=", "[", "specForDependency", "(", "x", "[", "0", "]", ",", "x", "[", "1", "]", ",", "True", ")", "for", "x", "in", "self", ".", "description", ".", "get", "(", "'testDependencies'", ",", "{", "}", ")", ".", "items", "(", ")", "]", "target_deps", "=", "self", ".", "description", ".", "get", "(", "'testTargetDependencies'", ",", "{", "}", ")", "if", "target", "is", "not", "None", ":", "for", "conf_key", ",", "target_conf_deps", "in", "target_deps", ".", "items", "(", ")", ":", "if", "_truthyConfValue", "(", "target", ".", "getConfigValue", "(", "conf_key", ")", ")", "or", "conf_key", "in", "target", ".", "getSimilarTo_Deprecated", "(", ")", ":", "logger", ".", "debug", "(", "'Adding test-target-dependent dependency specs for target config %s to component %s'", "%", "(", "conf_key", ",", "self", ".", "getName", "(", ")", ")", ")", "deps", "+=", "[", "specForDependency", "(", "x", "[", "0", "]", ",", "x", "[", "1", "]", ",", "True", ")", "for", "x", "in", "target_conf_deps", ".", "items", "(", ")", "]", "# remove duplicates (use the first occurrence)", "seen", "=", "set", "(", ")", "r", "=", "[", "]", "for", "dep", "in", "deps", ":", "if", "not", "dep", ".", "name", "in", "seen", ":", "r", ".", "append", "(", "dep", ")", "seen", ".", "add", "(", "dep", ".", "name", ")", "return", "r" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
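The getDependencySpecs record above ends by removing duplicate specs while keeping the first occurrence, so that dependency resolution order stays predictable. That order-preserving de-duplication idiom, shown here on plain example names rather than DependencySpec objects, is simply:

deps = ["minar", "uvisor-lib", "minar", "mbed-drivers", "uvisor-lib"]   # example names only

seen = set()
ordered_unique = []
for name in deps:
    if name not in seen:            # keep only the first occurrence
        ordered_unique.append(name)
        seen.add(name)
print(ordered_unique)               # ['minar', 'uvisor-lib', 'mbed-drivers']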
valid
Component.hasDependency
Check if this module has any dependencies with the specified name in its dependencies list, or in target dependencies for the specified target
yotta/lib/component.py
def hasDependency(self, name, target=None, test_dependencies=False): ''' Check if this module has any dependencies with the specified name in its dependencies list, or in target dependencies for the specified target ''' if name in self.description.get('dependencies', {}).keys(): return True target_deps = self.description.get('targetDependencies', {}) if target is not None: for conf_key, target_conf_deps in target_deps.items(): if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated(): if name in target_conf_deps: return True if test_dependencies: if name in self.description.get('testDependencies', {}).keys(): return True if target is not None: test_target_deps = self.description.get('testTargetDependencies', {}) for conf_key, target_conf_deps in test_target_deps.items(): if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated(): if name in target_conf_deps: return True return False
def hasDependency(self, name, target=None, test_dependencies=False): ''' Check if this module has any dependencies with the specified name in its dependencies list, or in target dependencies for the specified target ''' if name in self.description.get('dependencies', {}).keys(): return True target_deps = self.description.get('targetDependencies', {}) if target is not None: for conf_key, target_conf_deps in target_deps.items(): if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated(): if name in target_conf_deps: return True if test_dependencies: if name in self.description.get('testDependencies', {}).keys(): return True if target is not None: test_target_deps = self.description.get('testTargetDependencies', {}) for conf_key, target_conf_deps in test_target_deps.items(): if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated(): if name in target_conf_deps: return True return False
[ "Check", "if", "this", "module", "has", "any", "dependencies", "with", "the", "specified", "name", "in", "its", "dependencies", "list", "or", "in", "target", "dependencies", "for", "the", "specified", "target" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L206-L231
[ "def", "hasDependency", "(", "self", ",", "name", ",", "target", "=", "None", ",", "test_dependencies", "=", "False", ")", ":", "if", "name", "in", "self", ".", "description", ".", "get", "(", "'dependencies'", ",", "{", "}", ")", ".", "keys", "(", ")", ":", "return", "True", "target_deps", "=", "self", ".", "description", ".", "get", "(", "'targetDependencies'", ",", "{", "}", ")", "if", "target", "is", "not", "None", ":", "for", "conf_key", ",", "target_conf_deps", "in", "target_deps", ".", "items", "(", ")", ":", "if", "_truthyConfValue", "(", "target", ".", "getConfigValue", "(", "conf_key", ")", ")", "or", "conf_key", "in", "target", ".", "getSimilarTo_Deprecated", "(", ")", ":", "if", "name", "in", "target_conf_deps", ":", "return", "True", "if", "test_dependencies", ":", "if", "name", "in", "self", ".", "description", ".", "get", "(", "'testDependencies'", ",", "{", "}", ")", ".", "keys", "(", ")", ":", "return", "True", "if", "target", "is", "not", "None", ":", "test_target_deps", "=", "self", ".", "description", ".", "get", "(", "'testTargetDependencies'", ",", "{", "}", ")", "for", "conf_key", ",", "target_conf_deps", "in", "test_target_deps", ".", "items", "(", ")", ":", "if", "_truthyConfValue", "(", "target", ".", "getConfigValue", "(", "conf_key", ")", ")", "or", "conf_key", "in", "target", ".", "getSimilarTo_Deprecated", "(", ")", ":", "if", "name", "in", "target_conf_deps", ":", "return", "True", "return", "False" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.hasDependencyRecursively
Check if this module, or any of its dependencies, have a dependencies with the specified name in their dependencies, or in their targetDependencies corresponding to the specified target. Note that if recursive dependencies are not installed, this test may return a false-negative.
yotta/lib/component.py
def hasDependencyRecursively(self, name, target=None, test_dependencies=False): ''' Check if this module, or any of its dependencies, have a dependencies with the specified name in their dependencies, or in their targetDependencies corresponding to the specified target. Note that if recursive dependencies are not installed, this test may return a false-negative. ''' # checking dependencies recursively isn't entirely straightforward, so # use the existing method to resolve them all before checking: dependencies = self.getDependenciesRecursive( target = target, test = test_dependencies ) return (name in dependencies)
def hasDependencyRecursively(self, name, target=None, test_dependencies=False): ''' Check if this module, or any of its dependencies, have a dependencies with the specified name in their dependencies, or in their targetDependencies corresponding to the specified target. Note that if recursive dependencies are not installed, this test may return a false-negative. ''' # checking dependencies recursively isn't entirely straightforward, so # use the existing method to resolve them all before checking: dependencies = self.getDependenciesRecursive( target = target, test = test_dependencies ) return (name in dependencies)
[ "Check", "if", "this", "module", "or", "any", "of", "its", "dependencies", "have", "a", "dependencies", "with", "the", "specified", "name", "in", "their", "dependencies", "or", "in", "their", "targetDependencies", "corresponding", "to", "the", "specified", "target", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L233-L247
[ "def", "hasDependencyRecursively", "(", "self", ",", "name", ",", "target", "=", "None", ",", "test_dependencies", "=", "False", ")", ":", "# checking dependencies recursively isn't entirely straightforward, so", "# use the existing method to resolve them all before checking:", "dependencies", "=", "self", ".", "getDependenciesRecursive", "(", "target", "=", "target", ",", "test", "=", "test_dependencies", ")", "return", "(", "name", "in", "dependencies", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.getDependencies
Returns {component_name:component}
yotta/lib/component.py
def getDependencies(self, available_components = None, search_dirs = None, target = None, available_only = False, test = False, warnings = True ): ''' Returns {component_name:component} ''' if search_dirs is None: search_dirs = [self.modulesPath()] available_components = self.ensureOrderedDict(available_components) components, errors = self.__getDependenciesWithProvider( available_components = available_components, search_dirs = search_dirs, target = target, update_installed = False, provider = self.provideInstalled, test = test ) if warnings: for error in errors: logger.warning(error) if available_only: components = OrderedDict((k, v) for k, v in components.items() if v) return components
def getDependencies(self, available_components = None, search_dirs = None, target = None, available_only = False, test = False, warnings = True ): ''' Returns {component_name:component} ''' if search_dirs is None: search_dirs = [self.modulesPath()] available_components = self.ensureOrderedDict(available_components) components, errors = self.__getDependenciesWithProvider( available_components = available_components, search_dirs = search_dirs, target = target, update_installed = False, provider = self.provideInstalled, test = test ) if warnings: for error in errors: logger.warning(error) if available_only: components = OrderedDict((k, v) for k, v in components.items() if v) return components
[ "Returns", "{", "component_name", ":", "component", "}" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L250-L277
[ "def", "getDependencies", "(", "self", ",", "available_components", "=", "None", ",", "search_dirs", "=", "None", ",", "target", "=", "None", ",", "available_only", "=", "False", ",", "test", "=", "False", ",", "warnings", "=", "True", ")", ":", "if", "search_dirs", "is", "None", ":", "search_dirs", "=", "[", "self", ".", "modulesPath", "(", ")", "]", "available_components", "=", "self", ".", "ensureOrderedDict", "(", "available_components", ")", "components", ",", "errors", "=", "self", ".", "__getDependenciesWithProvider", "(", "available_components", "=", "available_components", ",", "search_dirs", "=", "search_dirs", ",", "target", "=", "target", ",", "update_installed", "=", "False", ",", "provider", "=", "self", ".", "provideInstalled", ",", "test", "=", "test", ")", "if", "warnings", ":", "for", "error", "in", "errors", ":", "logger", ".", "warning", "(", "error", ")", "if", "available_only", ":", "components", "=", "OrderedDict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "components", ".", "items", "(", ")", "if", "v", ")", "return", "components" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.__getDependenciesWithProvider
Get installed components using "provider" to find (and possibly install) components. See documentation for __getDependenciesRecursiveWithProvider returns (components, errors)
yotta/lib/component.py
def __getDependenciesWithProvider(self, available_components = None, search_dirs = None, target = None, update_installed = False, provider = None, test = False ): ''' Get installed components using "provider" to find (and possibly install) components. See documentation for __getDependenciesRecursiveWithProvider returns (components, errors) ''' # sourceparse, , parse version source urls, internal from yotta.lib import sourceparse errors = [] modules_path = self.modulesPath() def satisfyDep(dspec): try: r = provider( dspec, available_components, search_dirs, modules_path, update_installed, self ) if r and not sourceparse.parseSourceURL(dspec.versionReq()).semanticSpecMatches(r.getVersion()): shrinkwrap_msg = '' if dspec.isShrinkwrapped(): shrinkwrap_msg = 'shrinkwrap on ' msg = 'does not meet specification %s required by %s%s' % ( dspec.versionReq(), shrinkwrap_msg, self.getName() ) logger.debug('%s %s', r.getName(), msg) r.setError(msg) return r except access_common.Unavailable as e: errors.append(e) self.dependencies_failed = True except vcs.VCSError as e: errors.append(e) self.dependencies_failed = True specs = self.getDependencySpecs(target=target) if not test: # filter out things that aren't test dependencies if necessary: specs = [x for x in specs if not x.is_test_dependency] #dependencies = pool.map( dependencies = map( satisfyDep, specs ) self.installed_dependencies = True # stable order is important! return (OrderedDict([((d and d.getName()) or specs[i].name, d) for i, d in enumerate(dependencies)]), errors)
def __getDependenciesWithProvider(self, available_components = None, search_dirs = None, target = None, update_installed = False, provider = None, test = False ): ''' Get installed components using "provider" to find (and possibly install) components. See documentation for __getDependenciesRecursiveWithProvider returns (components, errors) ''' # sourceparse, , parse version source urls, internal from yotta.lib import sourceparse errors = [] modules_path = self.modulesPath() def satisfyDep(dspec): try: r = provider( dspec, available_components, search_dirs, modules_path, update_installed, self ) if r and not sourceparse.parseSourceURL(dspec.versionReq()).semanticSpecMatches(r.getVersion()): shrinkwrap_msg = '' if dspec.isShrinkwrapped(): shrinkwrap_msg = 'shrinkwrap on ' msg = 'does not meet specification %s required by %s%s' % ( dspec.versionReq(), shrinkwrap_msg, self.getName() ) logger.debug('%s %s', r.getName(), msg) r.setError(msg) return r except access_common.Unavailable as e: errors.append(e) self.dependencies_failed = True except vcs.VCSError as e: errors.append(e) self.dependencies_failed = True specs = self.getDependencySpecs(target=target) if not test: # filter out things that aren't test dependencies if necessary: specs = [x for x in specs if not x.is_test_dependency] #dependencies = pool.map( dependencies = map( satisfyDep, specs ) self.installed_dependencies = True # stable order is important! return (OrderedDict([((d and d.getName()) or specs[i].name, d) for i, d in enumerate(dependencies)]), errors)
[ "Get", "installed", "components", "using", "provider", "to", "find", "(", "and", "possibly", "install", ")", "components", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L279-L334
[ "def", "__getDependenciesWithProvider", "(", "self", ",", "available_components", "=", "None", ",", "search_dirs", "=", "None", ",", "target", "=", "None", ",", "update_installed", "=", "False", ",", "provider", "=", "None", ",", "test", "=", "False", ")", ":", "# sourceparse, , parse version source urls, internal", "from", "yotta", ".", "lib", "import", "sourceparse", "errors", "=", "[", "]", "modules_path", "=", "self", ".", "modulesPath", "(", ")", "def", "satisfyDep", "(", "dspec", ")", ":", "try", ":", "r", "=", "provider", "(", "dspec", ",", "available_components", ",", "search_dirs", ",", "modules_path", ",", "update_installed", ",", "self", ")", "if", "r", "and", "not", "sourceparse", ".", "parseSourceURL", "(", "dspec", ".", "versionReq", "(", ")", ")", ".", "semanticSpecMatches", "(", "r", ".", "getVersion", "(", ")", ")", ":", "shrinkwrap_msg", "=", "''", "if", "dspec", ".", "isShrinkwrapped", "(", ")", ":", "shrinkwrap_msg", "=", "'shrinkwrap on '", "msg", "=", "'does not meet specification %s required by %s%s'", "%", "(", "dspec", ".", "versionReq", "(", ")", ",", "shrinkwrap_msg", ",", "self", ".", "getName", "(", ")", ")", "logger", ".", "debug", "(", "'%s %s'", ",", "r", ".", "getName", "(", ")", ",", "msg", ")", "r", ".", "setError", "(", "msg", ")", "return", "r", "except", "access_common", ".", "Unavailable", "as", "e", ":", "errors", ".", "append", "(", "e", ")", "self", ".", "dependencies_failed", "=", "True", "except", "vcs", ".", "VCSError", "as", "e", ":", "errors", ".", "append", "(", "e", ")", "self", ".", "dependencies_failed", "=", "True", "specs", "=", "self", ".", "getDependencySpecs", "(", "target", "=", "target", ")", "if", "not", "test", ":", "# filter out things that aren't test dependencies if necessary:", "specs", "=", "[", "x", "for", "x", "in", "specs", "if", "not", "x", ".", "is_test_dependency", "]", "#dependencies = pool.map(", "dependencies", "=", "map", "(", "satisfyDep", ",", "specs", ")", "self", ".", "installed_dependencies", "=", "True", "# stable order is important!", "return", "(", "OrderedDict", "(", "[", "(", "(", "d", "and", "d", ".", "getName", "(", ")", ")", "or", "specs", "[", "i", "]", ".", "name", ",", "d", ")", "for", "i", ",", "d", "in", "enumerate", "(", "dependencies", ")", "]", ")", ",", "errors", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.__getDependenciesRecursiveWithProvider
Get installed components using "provider" to find (and possibly install) components. This function is called with different provider functions in order to retrieve a list of all of the dependencies, or install all dependencies. Returns ======= (components, errors) components: dictionary of name:Component errors: sequence of errors Parameters ========== available_components: None (default) or a dictionary of name:component. This is searched before searching directories or fetching remote components search_dirs: None (default), or sequence of directories to search for already installed, (but not yet loaded) components. Used so that manually installed or linked components higher up the dependency tree are found by their users lower down. These directories are searched in order, and finally the current directory is checked. target: None (default), or a Target object. If specified the target name and it's similarTo list will be used in resolving dependencies. If None, then only target-independent dependencies will be installed traverse_links: False (default) or True: whether to recurse into linked dependencies. You normally want to set this to "True" when getting a list of dependencies, and False when installing them (unless the user has explicitly asked dependencies to be installed in linked components). provider: None (default) or function: provider( dependency_spec, available_components, search_dirs, working_directory, update_if_installed ) test: True, False, 'toplevel': should test-only dependencies be included (yes, no, or only at this level, not recursively)
yotta/lib/component.py
def __getDependenciesRecursiveWithProvider(self, available_components = None, search_dirs = None, target = None, traverse_links = False, update_installed = False, provider = None, test = False, _processed = None ): ''' Get installed components using "provider" to find (and possibly install) components. This function is called with different provider functions in order to retrieve a list of all of the dependencies, or install all dependencies. Returns ======= (components, errors) components: dictionary of name:Component errors: sequence of errors Parameters ========== available_components: None (default) or a dictionary of name:component. This is searched before searching directories or fetching remote components search_dirs: None (default), or sequence of directories to search for already installed, (but not yet loaded) components. Used so that manually installed or linked components higher up the dependency tree are found by their users lower down. These directories are searched in order, and finally the current directory is checked. target: None (default), or a Target object. If specified the target name and it's similarTo list will be used in resolving dependencies. If None, then only target-independent dependencies will be installed traverse_links: False (default) or True: whether to recurse into linked dependencies. You normally want to set this to "True" when getting a list of dependencies, and False when installing them (unless the user has explicitly asked dependencies to be installed in linked components). provider: None (default) or function: provider( dependency_spec, available_components, search_dirs, working_directory, update_if_installed ) test: True, False, 'toplevel': should test-only dependencies be included (yes, no, or only at this level, not recursively) ''' def recursionFilter(c): if not c: logger.debug('do not recurse into failed component') # don't recurse into failed components return False if c.getName() in _processed: logger.debug('do not recurse into already processed component: %s' % c) return False if c.installedLinked() and not traverse_links: return False return True available_components = self.ensureOrderedDict(available_components) if search_dirs is None: search_dirs = [] if _processed is None: _processed = set() assert(test in [True, False, 'toplevel']) search_dirs.append(self.modulesPath()) logger.debug('process %s\nsearch dirs:%s' % (self.getName(), search_dirs)) if self.isTestDependency(): logger.debug("won't provide test dependencies recursively for test dependency %s", self.getName()) test = False components, errors = self.__getDependenciesWithProvider( available_components = available_components, search_dirs = search_dirs, update_installed = update_installed, target = target, provider = provider, test = test ) _processed.add(self.getName()) if errors: errors = ['Failed to satisfy dependencies of %s:' % self.path] + errors need_recursion = [x for x in filter(recursionFilter, components.values())] available_components.update(components) logger.debug('processed %s\nneed recursion: %s\navailable:%s\nsearch dirs:%s' % (self.getName(), need_recursion, available_components, search_dirs)) if test == 'toplevel': test = False # NB: can't perform this step in parallel, since the available # components list must be updated in order for c in need_recursion: dep_components, dep_errors = c.__getDependenciesRecursiveWithProvider( available_components = available_components, search_dirs = search_dirs, target = target, traverse_links = traverse_links, update_installed = update_installed, provider = provider, test = test, _processed = _processed ) available_components.update(dep_components) components.update(dep_components) errors += dep_errors return (components, errors)
def __getDependenciesRecursiveWithProvider(self, available_components = None, search_dirs = None, target = None, traverse_links = False, update_installed = False, provider = None, test = False, _processed = None ): ''' Get installed components using "provider" to find (and possibly install) components. This function is called with different provider functions in order to retrieve a list of all of the dependencies, or install all dependencies. Returns ======= (components, errors) components: dictionary of name:Component errors: sequence of errors Parameters ========== available_components: None (default) or a dictionary of name:component. This is searched before searching directories or fetching remote components search_dirs: None (default), or sequence of directories to search for already installed, (but not yet loaded) components. Used so that manually installed or linked components higher up the dependency tree are found by their users lower down. These directories are searched in order, and finally the current directory is checked. target: None (default), or a Target object. If specified the target name and it's similarTo list will be used in resolving dependencies. If None, then only target-independent dependencies will be installed traverse_links: False (default) or True: whether to recurse into linked dependencies. You normally want to set this to "True" when getting a list of dependencies, and False when installing them (unless the user has explicitly asked dependencies to be installed in linked components). provider: None (default) or function: provider( dependency_spec, available_components, search_dirs, working_directory, update_if_installed ) test: True, False, 'toplevel': should test-only dependencies be included (yes, no, or only at this level, not recursively) ''' def recursionFilter(c): if not c: logger.debug('do not recurse into failed component') # don't recurse into failed components return False if c.getName() in _processed: logger.debug('do not recurse into already processed component: %s' % c) return False if c.installedLinked() and not traverse_links: return False return True available_components = self.ensureOrderedDict(available_components) if search_dirs is None: search_dirs = [] if _processed is None: _processed = set() assert(test in [True, False, 'toplevel']) search_dirs.append(self.modulesPath()) logger.debug('process %s\nsearch dirs:%s' % (self.getName(), search_dirs)) if self.isTestDependency(): logger.debug("won't provide test dependencies recursively for test dependency %s", self.getName()) test = False components, errors = self.__getDependenciesWithProvider( available_components = available_components, search_dirs = search_dirs, update_installed = update_installed, target = target, provider = provider, test = test ) _processed.add(self.getName()) if errors: errors = ['Failed to satisfy dependencies of %s:' % self.path] + errors need_recursion = [x for x in filter(recursionFilter, components.values())] available_components.update(components) logger.debug('processed %s\nneed recursion: %s\navailable:%s\nsearch dirs:%s' % (self.getName(), need_recursion, available_components, search_dirs)) if test == 'toplevel': test = False # NB: can't perform this step in parallel, since the available # components list must be updated in order for c in need_recursion: dep_components, dep_errors = c.__getDependenciesRecursiveWithProvider( available_components = available_components, search_dirs = search_dirs, target = target, traverse_links = traverse_links, update_installed = update_installed, provider = provider, test = test, _processed = _processed ) available_components.update(dep_components) components.update(dep_components) errors += dep_errors return (components, errors)
[ "Get", "installed", "components", "using", "provider", "to", "find", "(", "and", "possibly", "install", ")", "components", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L337-L456
[ "def", "__getDependenciesRecursiveWithProvider", "(", "self", ",", "available_components", "=", "None", ",", "search_dirs", "=", "None", ",", "target", "=", "None", ",", "traverse_links", "=", "False", ",", "update_installed", "=", "False", ",", "provider", "=", "None", ",", "test", "=", "False", ",", "_processed", "=", "None", ")", ":", "def", "recursionFilter", "(", "c", ")", ":", "if", "not", "c", ":", "logger", ".", "debug", "(", "'do not recurse into failed component'", ")", "# don't recurse into failed components", "return", "False", "if", "c", ".", "getName", "(", ")", "in", "_processed", ":", "logger", ".", "debug", "(", "'do not recurse into already processed component: %s'", "%", "c", ")", "return", "False", "if", "c", ".", "installedLinked", "(", ")", "and", "not", "traverse_links", ":", "return", "False", "return", "True", "available_components", "=", "self", ".", "ensureOrderedDict", "(", "available_components", ")", "if", "search_dirs", "is", "None", ":", "search_dirs", "=", "[", "]", "if", "_processed", "is", "None", ":", "_processed", "=", "set", "(", ")", "assert", "(", "test", "in", "[", "True", ",", "False", ",", "'toplevel'", "]", ")", "search_dirs", ".", "append", "(", "self", ".", "modulesPath", "(", ")", ")", "logger", ".", "debug", "(", "'process %s\\nsearch dirs:%s'", "%", "(", "self", ".", "getName", "(", ")", ",", "search_dirs", ")", ")", "if", "self", ".", "isTestDependency", "(", ")", ":", "logger", ".", "debug", "(", "\"won't provide test dependencies recursively for test dependency %s\"", ",", "self", ".", "getName", "(", ")", ")", "test", "=", "False", "components", ",", "errors", "=", "self", ".", "__getDependenciesWithProvider", "(", "available_components", "=", "available_components", ",", "search_dirs", "=", "search_dirs", ",", "update_installed", "=", "update_installed", ",", "target", "=", "target", ",", "provider", "=", "provider", ",", "test", "=", "test", ")", "_processed", ".", "add", "(", "self", ".", "getName", "(", ")", ")", "if", "errors", ":", "errors", "=", "[", "'Failed to satisfy dependencies of %s:'", "%", "self", ".", "path", "]", "+", "errors", "need_recursion", "=", "[", "x", "for", "x", "in", "filter", "(", "recursionFilter", ",", "components", ".", "values", "(", ")", ")", "]", "available_components", ".", "update", "(", "components", ")", "logger", ".", "debug", "(", "'processed %s\\nneed recursion: %s\\navailable:%s\\nsearch dirs:%s'", "%", "(", "self", ".", "getName", "(", ")", ",", "need_recursion", ",", "available_components", ",", "search_dirs", ")", ")", "if", "test", "==", "'toplevel'", ":", "test", "=", "False", "# NB: can't perform this step in parallel, since the available", "# components list must be updated in order", "for", "c", "in", "need_recursion", ":", "dep_components", ",", "dep_errors", "=", "c", ".", "__getDependenciesRecursiveWithProvider", "(", "available_components", "=", "available_components", ",", "search_dirs", "=", "search_dirs", ",", "target", "=", "target", ",", "traverse_links", "=", "traverse_links", ",", "update_installed", "=", "update_installed", ",", "provider", "=", "provider", ",", "test", "=", "test", ",", "_processed", "=", "_processed", ")", "available_components", ".", "update", "(", "dep_components", ")", "components", ".", "update", "(", "dep_components", ")", "errors", "+=", "dep_errors", "return", "(", "components", ",", "errors", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.getDependenciesRecursive
Get available and already installed components, don't check for remotely available components. See also satisfyDependenciesRecursive() Returns {component_name:component}
yotta/lib/component.py
def getDependenciesRecursive(self, available_components = None, processed = None, search_dirs = None, target = None, available_only = False, test = False ): ''' Get available and already installed components, don't check for remotely available components. See also satisfyDependenciesRecursive() Returns {component_name:component} ''' components, errors = self.__getDependenciesRecursiveWithProvider( available_components = available_components, search_dirs = search_dirs, target = target, traverse_links = True, update_installed = False, provider = self.provideInstalled, test = test ) for error in errors: logger.error(error) if available_only: components = OrderedDict((k, v) for k, v in components.items() if v) return components
def getDependenciesRecursive(self, available_components = None, processed = None, search_dirs = None, target = None, available_only = False, test = False ): ''' Get available and already installed components, don't check for remotely available components. See also satisfyDependenciesRecursive() Returns {component_name:component} ''' components, errors = self.__getDependenciesRecursiveWithProvider( available_components = available_components, search_dirs = search_dirs, target = target, traverse_links = True, update_installed = False, provider = self.provideInstalled, test = test ) for error in errors: logger.error(error) if available_only: components = OrderedDict((k, v) for k, v in components.items() if v) return components
[ "Get", "available", "and", "already", "installed", "components", "don", "t", "check", "for", "remotely", "available", "components", ".", "See", "also", "satisfyDependenciesRecursive", "()" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L505-L532
[ "def", "getDependenciesRecursive", "(", "self", ",", "available_components", "=", "None", ",", "processed", "=", "None", ",", "search_dirs", "=", "None", ",", "target", "=", "None", ",", "available_only", "=", "False", ",", "test", "=", "False", ")", ":", "components", ",", "errors", "=", "self", ".", "__getDependenciesRecursiveWithProvider", "(", "available_components", "=", "available_components", ",", "search_dirs", "=", "search_dirs", ",", "target", "=", "target", ",", "traverse_links", "=", "True", ",", "update_installed", "=", "False", ",", "provider", "=", "self", ".", "provideInstalled", ",", "test", "=", "test", ")", "for", "error", "in", "errors", ":", "logger", ".", "error", "(", "error", ")", "if", "available_only", ":", "components", "=", "OrderedDict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "components", ".", "items", "(", ")", "if", "v", ")", "return", "components" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.satisfyDependenciesRecursive
Retrieve and install all the dependencies of this component and its dependencies, recursively, or satisfy them from a collection of available_components or from disk. Returns ======= (components, errors) components: dictionary of name:Component errors: sequence of errors Parameters ========== available_components: None (default) or a dictionary of name:component. This is searched before searching directories or fetching remote components search_dirs: None (default), or sequence of directories to search for already installed, (but not yet loaded) components. Used so that manually installed or linked components higher up the dependency tree are found by their users lower down. These directories are searched in order, and finally the current directory is checked. update_installed: False (default), True, or set(): whether to check the available versions of installed components, and update if a newer version is available. If this is a set(), only update things in the specified set. traverse_links: False (default) or True: whether to recurse into linked dependencies when updating/installing. target: None (default), or a Target object. If specified the target name and it's similarTo list will be used in resolving dependencies. If None, then only target-independent dependencies will be installed test: True, False, or 'toplevel: should test-only dependencies be installed? (yes, no, or only for this module, not its dependencies).
yotta/lib/component.py
def satisfyDependenciesRecursive( self, available_components = None, search_dirs = None, update_installed = False, traverse_links = False, target = None, test = False ): ''' Retrieve and install all the dependencies of this component and its dependencies, recursively, or satisfy them from a collection of available_components or from disk. Returns ======= (components, errors) components: dictionary of name:Component errors: sequence of errors Parameters ========== available_components: None (default) or a dictionary of name:component. This is searched before searching directories or fetching remote components search_dirs: None (default), or sequence of directories to search for already installed, (but not yet loaded) components. Used so that manually installed or linked components higher up the dependency tree are found by their users lower down. These directories are searched in order, and finally the current directory is checked. update_installed: False (default), True, or set(): whether to check the available versions of installed components, and update if a newer version is available. If this is a set(), only update things in the specified set. traverse_links: False (default) or True: whether to recurse into linked dependencies when updating/installing. target: None (default), or a Target object. If specified the target name and it's similarTo list will be used in resolving dependencies. If None, then only target-independent dependencies will be installed test: True, False, or 'toplevel: should test-only dependencies be installed? (yes, no, or only for this module, not its dependencies). ''' def provider( dspec, available_components, search_dirs, working_directory, update_installed, dep_of=None ): r = access.satisfyFromAvailable(dspec.name, available_components) if r: if r.isTestDependency() and not dspec.is_test_dependency: logger.debug('test dependency subsequently occurred as real dependency: %s', r.getName()) r.setTestDependency(False) return r update_if_installed = False if update_installed is True: update_if_installed = True elif update_installed: update_if_installed = dspec.name in update_installed r = access.satisfyVersionFromSearchPaths( dspec.name, dspec.versionReq(), search_dirs, update_if_installed, inherit_shrinkwrap = dep_of.getShrinkwrap() ) if r: r.setTestDependency(dspec.is_test_dependency) return r # before resorting to install this module, check if we have an # existing linked module (which wasn't picked up because it didn't # match the version specification) - if we do, then we shouldn't # try to install, but should return that anyway: default_path = os.path.join(self.modulesPath(), dspec.name) if fsutils.isLink(default_path): r = Component( default_path, test_dependency = dspec.is_test_dependency, installed_linked = fsutils.isLink(default_path), inherit_shrinkwrap = dep_of.getShrinkwrap() ) if r: assert(r.installedLinked()) return r else: logger.error('linked module %s is invalid: %s', dspec.name, r.getError()) return r r = access.satisfyVersionByInstalling( dspec.name, dspec.versionReq(), self.modulesPath(), inherit_shrinkwrap = dep_of.getShrinkwrap() ) if not r: logger.error('could not install %s' % dspec.name) if r is not None: r.setTestDependency(dspec.is_test_dependency) return r return self.__getDependenciesRecursiveWithProvider( available_components = available_components, search_dirs = search_dirs, target = target, traverse_links = traverse_links, update_installed = update_installed, provider = provider, test = test )
def satisfyDependenciesRecursive( self, available_components = None, search_dirs = None, update_installed = False, traverse_links = False, target = None, test = False ): ''' Retrieve and install all the dependencies of this component and its dependencies, recursively, or satisfy them from a collection of available_components or from disk. Returns ======= (components, errors) components: dictionary of name:Component errors: sequence of errors Parameters ========== available_components: None (default) or a dictionary of name:component. This is searched before searching directories or fetching remote components search_dirs: None (default), or sequence of directories to search for already installed, (but not yet loaded) components. Used so that manually installed or linked components higher up the dependency tree are found by their users lower down. These directories are searched in order, and finally the current directory is checked. update_installed: False (default), True, or set(): whether to check the available versions of installed components, and update if a newer version is available. If this is a set(), only update things in the specified set. traverse_links: False (default) or True: whether to recurse into linked dependencies when updating/installing. target: None (default), or a Target object. If specified the target name and it's similarTo list will be used in resolving dependencies. If None, then only target-independent dependencies will be installed test: True, False, or 'toplevel: should test-only dependencies be installed? (yes, no, or only for this module, not its dependencies). ''' def provider( dspec, available_components, search_dirs, working_directory, update_installed, dep_of=None ): r = access.satisfyFromAvailable(dspec.name, available_components) if r: if r.isTestDependency() and not dspec.is_test_dependency: logger.debug('test dependency subsequently occurred as real dependency: %s', r.getName()) r.setTestDependency(False) return r update_if_installed = False if update_installed is True: update_if_installed = True elif update_installed: update_if_installed = dspec.name in update_installed r = access.satisfyVersionFromSearchPaths( dspec.name, dspec.versionReq(), search_dirs, update_if_installed, inherit_shrinkwrap = dep_of.getShrinkwrap() ) if r: r.setTestDependency(dspec.is_test_dependency) return r # before resorting to install this module, check if we have an # existing linked module (which wasn't picked up because it didn't # match the version specification) - if we do, then we shouldn't # try to install, but should return that anyway: default_path = os.path.join(self.modulesPath(), dspec.name) if fsutils.isLink(default_path): r = Component( default_path, test_dependency = dspec.is_test_dependency, installed_linked = fsutils.isLink(default_path), inherit_shrinkwrap = dep_of.getShrinkwrap() ) if r: assert(r.installedLinked()) return r else: logger.error('linked module %s is invalid: %s', dspec.name, r.getError()) return r r = access.satisfyVersionByInstalling( dspec.name, dspec.versionReq(), self.modulesPath(), inherit_shrinkwrap = dep_of.getShrinkwrap() ) if not r: logger.error('could not install %s' % dspec.name) if r is not None: r.setTestDependency(dspec.is_test_dependency) return r return self.__getDependenciesRecursiveWithProvider( available_components = available_components, search_dirs = search_dirs, target = target, traverse_links = traverse_links, update_installed = update_installed, provider = provider, test = test )
[ "Retrieve", "and", "install", "all", "the", "dependencies", "of", "this", "component", "and", "its", "dependencies", "recursively", "or", "satisfy", "them", "from", "a", "collection", "of", "available_components", "or", "from", "disk", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L540-L667
[ "def", "satisfyDependenciesRecursive", "(", "self", ",", "available_components", "=", "None", ",", "search_dirs", "=", "None", ",", "update_installed", "=", "False", ",", "traverse_links", "=", "False", ",", "target", "=", "None", ",", "test", "=", "False", ")", ":", "def", "provider", "(", "dspec", ",", "available_components", ",", "search_dirs", ",", "working_directory", ",", "update_installed", ",", "dep_of", "=", "None", ")", ":", "r", "=", "access", ".", "satisfyFromAvailable", "(", "dspec", ".", "name", ",", "available_components", ")", "if", "r", ":", "if", "r", ".", "isTestDependency", "(", ")", "and", "not", "dspec", ".", "is_test_dependency", ":", "logger", ".", "debug", "(", "'test dependency subsequently occurred as real dependency: %s'", ",", "r", ".", "getName", "(", ")", ")", "r", ".", "setTestDependency", "(", "False", ")", "return", "r", "update_if_installed", "=", "False", "if", "update_installed", "is", "True", ":", "update_if_installed", "=", "True", "elif", "update_installed", ":", "update_if_installed", "=", "dspec", ".", "name", "in", "update_installed", "r", "=", "access", ".", "satisfyVersionFromSearchPaths", "(", "dspec", ".", "name", ",", "dspec", ".", "versionReq", "(", ")", ",", "search_dirs", ",", "update_if_installed", ",", "inherit_shrinkwrap", "=", "dep_of", ".", "getShrinkwrap", "(", ")", ")", "if", "r", ":", "r", ".", "setTestDependency", "(", "dspec", ".", "is_test_dependency", ")", "return", "r", "# before resorting to install this module, check if we have an", "# existing linked module (which wasn't picked up because it didn't", "# match the version specification) - if we do, then we shouldn't", "# try to install, but should return that anyway:", "default_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "modulesPath", "(", ")", ",", "dspec", ".", "name", ")", "if", "fsutils", ".", "isLink", "(", "default_path", ")", ":", "r", "=", "Component", "(", "default_path", ",", "test_dependency", "=", "dspec", ".", "is_test_dependency", ",", "installed_linked", "=", "fsutils", ".", "isLink", "(", "default_path", ")", ",", "inherit_shrinkwrap", "=", "dep_of", ".", "getShrinkwrap", "(", ")", ")", "if", "r", ":", "assert", "(", "r", ".", "installedLinked", "(", ")", ")", "return", "r", "else", ":", "logger", ".", "error", "(", "'linked module %s is invalid: %s'", ",", "dspec", ".", "name", ",", "r", ".", "getError", "(", ")", ")", "return", "r", "r", "=", "access", ".", "satisfyVersionByInstalling", "(", "dspec", ".", "name", ",", "dspec", ".", "versionReq", "(", ")", ",", "self", ".", "modulesPath", "(", ")", ",", "inherit_shrinkwrap", "=", "dep_of", ".", "getShrinkwrap", "(", ")", ")", "if", "not", "r", ":", "logger", ".", "error", "(", "'could not install %s'", "%", "dspec", ".", "name", ")", "if", "r", "is", "not", "None", ":", "r", ".", "setTestDependency", "(", "dspec", ".", "is_test_dependency", ")", "return", "r", "return", "self", ".", "__getDependenciesRecursiveWithProvider", "(", "available_components", "=", "available_components", ",", "search_dirs", "=", "search_dirs", ",", "target", "=", "target", ",", "traverse_links", "=", "traverse_links", ",", "update_installed", "=", "update_installed", ",", "provider", "=", "provider", ",", "test", "=", "test", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.satisfyTarget
Ensure that the specified target name (and optionally version, github ref or URL) is installed in the targets directory of the current component. Returns (derived_target, errors)
yotta/lib/component.py
def satisfyTarget(self, target_name_and_version, update_installed=False, additional_config=None, install_missing=True): ''' Ensure that the specified target name (and optionally version, github ref or URL) is installed in the targets directory of the current component returns (derived_target, errors) ''' # Target, , represent an installed target, internal from yotta.lib import target application_dir = None if self.isApplication(): application_dir = self.path return target.getDerivedTarget( target_name_and_version, self.targetsPath(), install_missing = install_missing, application_dir = application_dir, update_installed = update_installed, additional_config = additional_config, shrinkwrap = self.getShrinkwrap() )
def satisfyTarget(self, target_name_and_version, update_installed=False, additional_config=None, install_missing=True): ''' Ensure that the specified target name (and optionally version, github ref or URL) is installed in the targets directory of the current component returns (derived_target, errors) ''' # Target, , represent an installed target, internal from yotta.lib import target application_dir = None if self.isApplication(): application_dir = self.path return target.getDerivedTarget( target_name_and_version, self.targetsPath(), install_missing = install_missing, application_dir = application_dir, update_installed = update_installed, additional_config = additional_config, shrinkwrap = self.getShrinkwrap() )
[ "Ensure", "that", "the", "specified", "target", "name", "(", "and", "optionally", "version", "github", "ref", "or", "URL", ")", "is", "installed", "in", "the", "targets", "directory", "of", "the", "current", "component" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L669-L689
[ "def", "satisfyTarget", "(", "self", ",", "target_name_and_version", ",", "update_installed", "=", "False", ",", "additional_config", "=", "None", ",", "install_missing", "=", "True", ")", ":", "# Target, , represent an installed target, internal", "from", "yotta", ".", "lib", "import", "target", "application_dir", "=", "None", "if", "self", ".", "isApplication", "(", ")", ":", "application_dir", "=", "self", ".", "path", "return", "target", ".", "getDerivedTarget", "(", "target_name_and_version", ",", "self", ".", "targetsPath", "(", ")", ",", "install_missing", "=", "install_missing", ",", "application_dir", "=", "application_dir", ",", "update_installed", "=", "update_installed", ",", "additional_config", "=", "additional_config", ",", "shrinkwrap", "=", "self", ".", "getShrinkwrap", "(", ")", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.getTarget
Return a derived target object representing the selected target: if the target is not installed, or is invalid, then the returned object will test false in a boolean context. Returns derived_target. Errors are not displayed.
yotta/lib/component.py
def getTarget(self, target_name_and_version, additional_config=None): ''' Return a derived target object representing the selected target: if the target is not installed, or is invalid then the returned object will test false in a boolean context. Returns derived_target Errors are not displayed. ''' derived_target, errors = self.satisfyTarget( target_name_and_version, additional_config = additional_config, install_missing = False ) if len(errors): return None else: return derived_target
def getTarget(self, target_name_and_version, additional_config=None): ''' Return a derived target object representing the selected target: if the target is not installed, or is invalid then the returned object will test false in a boolean context. Returns derived_target Errors are not displayed. ''' derived_target, errors = self.satisfyTarget( target_name_and_version, additional_config = additional_config, install_missing = False ) if len(errors): return None else: return derived_target
[ "Return", "a", "derived", "target", "object", "representing", "the", "selected", "target", ":", "if", "the", "target", "is", "not", "installed", "or", "is", "invalid", "then", "the", "returned", "object", "will", "test", "false", "in", "a", "boolean", "context", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L691-L708
[ "def", "getTarget", "(", "self", ",", "target_name_and_version", ",", "additional_config", "=", "None", ")", ":", "derived_target", ",", "errors", "=", "self", ".", "satisfyTarget", "(", "target_name_and_version", ",", "additional_config", "=", "additional_config", ",", "install_missing", "=", "False", ")", "if", "len", "(", "errors", ")", ":", "return", "None", "else", ":", "return", "derived_target" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.getBinaries
Return a dictionary of binaries to compile: {"dirname":"exename"}; this is used when automatically generating CMakeLists. Note that currently modules may define only a single executable binary or library to be built by the automatic build system, by specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib": "dir-to-be-built-into-library"`, and the bin/lib will always have the same name as the module. The default behaviour if nothing is specified is for the 'source' directory to be built into a library. The module.json syntax may allow for other combinations in the future (and callers of this function should not rely on it returning only a single item). For example, a "bin": {"dirname": "exename"} syntax might be supported; however, currently more complex builds must be controlled by custom CMakeLists.
yotta/lib/component.py
def getBinaries(self): ''' Return a dictionary of binaries to compile: {"dirname":"exename"}, this is used when automatically generating CMakeLists Note that currently modules may define only a single executable binary or library to be built by the automatic build system, by specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib": "dir-to-be-built-into-library"`, and the bin/lib will always have the same name as the module. The default behaviour if nothing is specified is for the 'source' directory to be built into a library. The module.json syntax may allow for other combinations in the future (and callers of this function should not rely on it returning only a single item). For example, a "bin": {"dirname": "exename"} syntax might be supported, however currently more complex builds must be controlled by custom CMakeLists. ''' # the module.json syntax is a subset of the package.json syntax: a # single string that defines the source directory to use to build an # executable with the same name as the component. This may be extended # to include the rest of the npm syntax in future (map of source-dir to # exe name). if 'bin' in self.description: return {os.path.normpath(self.description['bin']): self.getName()} else: return {}
def getBinaries(self): ''' Return a dictionary of binaries to compile: {"dirname":"exename"}, this is used when automatically generating CMakeLists Note that currently modules may define only a single executable binary or library to be built by the automatic build system, by specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib": "dir-to-be-built-into-library"`, and the bin/lib will always have the same name as the module. The default behaviour if nothing is specified is for the 'source' directory to be built into a library. The module.json syntax may allow for other combinations in the future (and callers of this function should not rely on it returning only a single item). For example, a "bin": {"dirname": "exename"} syntax might be supported, however currently more complex builds must be controlled by custom CMakeLists. ''' # the module.json syntax is a subset of the package.json syntax: a # single string that defines the source directory to use to build an # executable with the same name as the component. This may be extended # to include the rest of the npm syntax in future (map of source-dir to # exe name). if 'bin' in self.description: return {os.path.normpath(self.description['bin']): self.getName()} else: return {}
[ "Return", "a", "dictionary", "of", "binaries", "to", "compile", ":", "{", "dirname", ":", "exename", "}", "this", "is", "used", "when", "automatically", "generating", "CMakeLists" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L724-L749
[ "def", "getBinaries", "(", "self", ")", ":", "# the module.json syntax is a subset of the package.json syntax: a", "# single string that defines the source directory to use to build an", "# executable with the same name as the component. This may be extended", "# to include the rest of the npm syntax in future (map of source-dir to", "# exe name).", "if", "'bin'", "in", "self", ".", "description", ":", "return", "{", "os", ".", "path", ".", "normpath", "(", "self", ".", "description", "[", "'bin'", "]", ")", ":", "self", ".", "getName", "(", ")", "}", "else", ":", "return", "{", "}" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.getLibs
Return a dictionary of libraries to compile: {"dirname":"libname"}; this is used when automatically generating CMakeLists. If explicit_only is not set, then in the absence of both 'lib' and 'bin' sections in the module.json file, the "source" directory will be returned. Note that currently modules may define only a single executable binary or library to be built by the automatic build system, by specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib": "dir-to-be-built-into-library"`, and the bin/lib will always have the same name as the module. The default behaviour if nothing is specified is for the 'source' directory to be built into a library. The module.json syntax may allow for other combinations in the future (and callers of this function should not rely on it returning only a single item). For example, a "bin": {"dirname": "exename"} syntax might be supported; however, currently more complex builds must be controlled by custom CMakeLists.
yotta/lib/component.py
def getLibs(self, explicit_only=False): ''' Return a dictionary of libraries to compile: {"dirname":"libname"}, this is used when automatically generating CMakeLists. If explicit_only is not set, then in the absence of both 'lib' and 'bin' sections in the module.json file, the "source" directory will be returned. Note that currently modules may define only a single executable binary or library to be built by the automatic build system, by specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib": "dir-to-be-built-into-library"`, and the bin/lib will always have the same name as the module. The default behaviour if nothing is specified is for the 'source' directory to be built into a library. The module.json syntax may allow for other combinations in the future (and callers of this function should not rely on it returning only a single item). For example, a "bin": {"dirname": "exename"} syntax might be supported, however currently more complex builds must be controlled by custom CMakeLists. ''' if 'lib' in self.description: return {os.path.normpath(self.description['lib']): self.getName()} elif 'bin' not in self.description and not explicit_only: return {'source': self.getName()} else: return {}
def getLibs(self, explicit_only=False): ''' Return a dictionary of libraries to compile: {"dirname":"libname"}, this is used when automatically generating CMakeLists. If explicit_only is not set, then in the absence of both 'lib' and 'bin' sections in the module.json file, the "source" directory will be returned. Note that currently modules may define only a single executable binary or library to be built by the automatic build system, by specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib": "dir-to-be-built-into-library"`, and the bin/lib will always have the same name as the module. The default behaviour if nothing is specified is for the 'source' directory to be built into a library. The module.json syntax may allow for other combinations in the future (and callers of this function should not rely on it returning only a single item). For example, a "bin": {"dirname": "exename"} syntax might be supported, however currently more complex builds must be controlled by custom CMakeLists. ''' if 'lib' in self.description: return {os.path.normpath(self.description['lib']): self.getName()} elif 'bin' not in self.description and not explicit_only: return {'source': self.getName()} else: return {}
[ "Return", "a", "dictionary", "of", "libraries", "to", "compile", ":", "{", "dirname", ":", "libname", "}", "this", "is", "used", "when", "automatically", "generating", "CMakeLists", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L751-L777
[ "def", "getLibs", "(", "self", ",", "explicit_only", "=", "False", ")", ":", "if", "'lib'", "in", "self", ".", "description", ":", "return", "{", "os", ".", "path", ".", "normpath", "(", "self", ".", "description", "[", "'lib'", "]", ")", ":", "self", ".", "getName", "(", ")", "}", "elif", "'bin'", "not", "in", "self", ".", "description", "and", "not", "explicit_only", ":", "return", "{", "'source'", ":", "self", ".", "getName", "(", ")", "}", "else", ":", "return", "{", "}" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
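As a hedged illustration of the bin/lib/source convention described in the two entries above, the sketch below re-implements the lookup against a plain description dictionary; it is a simplified stand-in rather than yotta's Component class, and the module names are invented.
import os

def binaries(description, module_name):
    # "bin": a single source directory built into an executable named after the module
    if 'bin' in description:
        return {os.path.normpath(description['bin']): module_name}
    return {}

def libs(description, module_name, explicit_only=False):
    # "lib" wins; otherwise fall back to building 'source' into a library
    if 'lib' in description:
        return {os.path.normpath(description['lib']): module_name}
    if 'bin' not in description and not explicit_only:
        return {'source': module_name}
    return {}

print(binaries({'bin': './tool'}, 'mytool'))  # {'tool': 'mytool'}
print(libs({}, 'mylib'))                      # {'source': 'mylib'}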
valid
Component.licenses
Return a list of licenses that apply to this module. (Strings, which may be SPDX identifiers)
yotta/lib/component.py
def licenses(self): ''' Return a list of licenses that apply to this module. (Strings, which may be SPDX identifiers) ''' if 'license' in self.description: return [self.description['license']] else: return [x['type'] for x in self.description['licenses']]
def licenses(self): ''' Return a list of licenses that apply to this module. (Strings, which may be SPDX identifiers) ''' if 'license' in self.description: return [self.description['license']] else: return [x['type'] for x in self.description['licenses']]
[ "Return", "a", "list", "of", "licenses", "that", "apply", "to", "this", "module", ".", "(", "Strings", "which", "may", "be", "SPDX", "identifiers", ")" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L779-L786
[ "def", "licenses", "(", "self", ")", ":", "if", "'license'", "in", "self", ".", "description", ":", "return", "[", "self", ".", "description", "[", "'license'", "]", "]", "else", ":", "return", "[", "x", "[", "'type'", "]", "for", "x", "in", "self", ".", "description", "[", "'licenses'", "]", "]" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
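A small sketch of the two module.json license forms handled in the entry above; the description dictionaries are invented examples and the helper is not part of yotta.
def licenses(description):
    # single SPDX identifier, or the older list-of-objects form
    if 'license' in description:
        return [description['license']]
    return [x['type'] for x in description['licenses']]

print(licenses({'license': 'Apache-2.0'}))                                   # ['Apache-2.0']
print(licenses({'licenses': [{'type': 'MIT'}, {'type': 'BSD-3-Clause'}]}))   # ['MIT', 'BSD-3-Clause']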
valid
Component.getExtraIncludes
Some components must export whole directories full of headers into the search path. This is really really bad, and they shouldn't do it, but support is provided as a concession to compatibility.
yotta/lib/component.py
def getExtraIncludes(self): ''' Some components must export whole directories full of headers into the search path. This is really really bad, and they shouldn't do it, but support is provided as a concession to compatibility. ''' if 'extraIncludes' in self.description: return [os.path.normpath(x) for x in self.description['extraIncludes']] else: return []
def getExtraIncludes(self): ''' Some components must export whole directories full of headers into the search path. This is really really bad, and they shouldn't do it, but support is provided as a concession to compatibility. ''' if 'extraIncludes' in self.description: return [os.path.normpath(x) for x in self.description['extraIncludes']] else: return []
[ "Some", "components", "must", "export", "whole", "directories", "full", "of", "headers", "into", "the", "search", "path", ".", "This", "is", "really", "really", "bad", "and", "they", "shouldn", "t", "do", "it", "but", "support", "is", "provided", "as", "a", "concession", "to", "compatibility", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L788-L796
[ "def", "getExtraIncludes", "(", "self", ")", ":", "if", "'extraIncludes'", "in", "self", ".", "description", ":", "return", "[", "os", ".", "path", ".", "normpath", "(", "x", ")", "for", "x", "in", "self", ".", "description", "[", "'extraIncludes'", "]", "]", "else", ":", "return", "[", "]" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
Component.getExtraSysIncludes
Some components (e.g. libc) must export directories of header files into the system include search path. They do this by adding an 'extraSysIncludes' : [ array of directories ] field in their package description. This function returns the list of directories (or an empty list if the field doesn't exist).
yotta/lib/component.py
def getExtraSysIncludes(self): ''' Some components (e.g. libc) must export directories of header files into the system include search path. They do this by adding a 'extraSysIncludes' : [ array of directories ] field in their package description. This function returns the list of directories (or an empty list), if it doesn't exist. ''' if 'extraSysIncludes' in self.description: return [os.path.normpath(x) for x in self.description['extraSysIncludes']] else: return []
def getExtraSysIncludes(self): ''' Some components (e.g. libc) must export directories of header files into the system include search path. They do this by adding a 'extraSysIncludes' : [ array of directories ] field in their package description. This function returns the list of directories (or an empty list), if it doesn't exist. ''' if 'extraSysIncludes' in self.description: return [os.path.normpath(x) for x in self.description['extraSysIncludes']] else: return []
[ "Some", "components", "(", "e", ".", "g", ".", "libc", ")", "must", "export", "directories", "of", "header", "files", "into", "the", "system", "include", "search", "path", ".", "They", "do", "this", "by", "adding", "a", "extraSysIncludes", ":", "[", "array", "of", "directories", "]", "field", "in", "their", "package", "description", ".", "This", "function", "returns", "the", "list", "of", "directories", "(", "or", "an", "empty", "list", ")", "if", "it", "doesn", "t", "exist", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L798-L808
[ "def", "getExtraSysIncludes", "(", "self", ")", ":", "if", "'extraSysIncludes'", "in", "self", ".", "description", ":", "return", "[", "os", ".", "path", ".", "normpath", "(", "x", ")", "for", "x", "in", "self", ".", "description", "[", "'extraSysIncludes'", "]", "]", "else", ":", "return", "[", "]" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
checkDependenciesForShrinkwrap
return a list of errors encountered (e.g. dependency missing or specification not met)
yotta/shrinkwrap.py
def checkDependenciesForShrinkwrap(dependency_list): ''' return a list of errors encountered (e.g. dependency missing or specification not met ''' # sourceparse, , parse version specifications, internall from yotta.lib import sourceparse errors = [] # first gather the available versions of things: available_versions = {} for mod in dependency_list.get('modules', []): available_versions[mod['name']] = mod['version'] # now check that the available versions satisfy all of the specifications # from other modules: for mod in dependency_list.get('modules', []): for spec_info in mod.get('specifications', []): name = spec_info['name'] spec = spec_info['version'] if spec_info.get('testOnly', False): # test-only specifications are ignored for shrinkwrap continue if not name in available_versions: errors.append('dependency %s (required by %s) is missing' % ( name, mod['name'] )) else: available_version = available_versions[name] parsed_spec = sourceparse.parseSourceURL(spec) if not parsed_spec.semanticSpecMatches(available_version): errors.append('%s@%s does not meet specification %s required by %s' % ( name, available_version, parsed_spec.semanticSpec(), mod['name'] )) return errors
def checkDependenciesForShrinkwrap(dependency_list): ''' return a list of errors encountered (e.g. dependency missing or specification not met ''' # sourceparse, , parse version specifications, internall from yotta.lib import sourceparse errors = [] # first gather the available versions of things: available_versions = {} for mod in dependency_list.get('modules', []): available_versions[mod['name']] = mod['version'] # now check that the available versions satisfy all of the specifications # from other modules: for mod in dependency_list.get('modules', []): for spec_info in mod.get('specifications', []): name = spec_info['name'] spec = spec_info['version'] if spec_info.get('testOnly', False): # test-only specifications are ignored for shrinkwrap continue if not name in available_versions: errors.append('dependency %s (required by %s) is missing' % ( name, mod['name'] )) else: available_version = available_versions[name] parsed_spec = sourceparse.parseSourceURL(spec) if not parsed_spec.semanticSpecMatches(available_version): errors.append('%s@%s does not meet specification %s required by %s' % ( name, available_version, parsed_spec.semanticSpec(), mod['name'] )) return errors
[ "return", "a", "list", "of", "errors", "encountered", "(", "e", ".", "g", ".", "dependency", "missing", "or", "specification", "not", "met" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/shrinkwrap.py#L59-L91
[ "def", "checkDependenciesForShrinkwrap", "(", "dependency_list", ")", ":", "# sourceparse, , parse version specifications, internall", "from", "yotta", ".", "lib", "import", "sourceparse", "errors", "=", "[", "]", "# first gather the available versions of things:", "available_versions", "=", "{", "}", "for", "mod", "in", "dependency_list", ".", "get", "(", "'modules'", ",", "[", "]", ")", ":", "available_versions", "[", "mod", "[", "'name'", "]", "]", "=", "mod", "[", "'version'", "]", "# now check that the available versions satisfy all of the specifications", "# from other modules:", "for", "mod", "in", "dependency_list", ".", "get", "(", "'modules'", ",", "[", "]", ")", ":", "for", "spec_info", "in", "mod", ".", "get", "(", "'specifications'", ",", "[", "]", ")", ":", "name", "=", "spec_info", "[", "'name'", "]", "spec", "=", "spec_info", "[", "'version'", "]", "if", "spec_info", ".", "get", "(", "'testOnly'", ",", "False", ")", ":", "# test-only specifications are ignored for shrinkwrap", "continue", "if", "not", "name", "in", "available_versions", ":", "errors", ".", "append", "(", "'dependency %s (required by %s) is missing'", "%", "(", "name", ",", "mod", "[", "'name'", "]", ")", ")", "else", ":", "available_version", "=", "available_versions", "[", "name", "]", "parsed_spec", "=", "sourceparse", ".", "parseSourceURL", "(", "spec", ")", "if", "not", "parsed_spec", ".", "semanticSpecMatches", "(", "available_version", ")", ":", "errors", ".", "append", "(", "'%s@%s does not meet specification %s required by %s'", "%", "(", "name", ",", "available_version", ",", "parsed_spec", ".", "semanticSpec", "(", ")", ",", "mod", "[", "'name'", "]", ")", ")", "return", "errors" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
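To make the expected dependency_list shape concrete, here is a hedged, simplified sketch: it only reports missing dependencies and skips the semantic-version matching that yotta delegates to sourceparse. The module names and versions are invented.
dependency_list = {
    'modules': [
        {'name': 'app', 'version': '1.0.0',
         'specifications': [{'name': 'simplelog', 'version': '^2.0.0'},
                            {'name': 'testframework', 'version': '*', 'testOnly': True}]},
        # note: 'simplelog' itself is absent from the list, so it should be reported
    ]
}

available = {m['name']: m['version'] for m in dependency_list['modules']}
errors = []
for mod in dependency_list['modules']:
    for spec in mod.get('specifications', []):
        if spec.get('testOnly', False):
            continue  # test-only specifications are ignored for shrinkwrap
        if spec['name'] not in available:
            errors.append('dependency %s (required by %s) is missing' % (spec['name'], mod['name']))

print(errors)  # ['dependency simplelog (required by app) is missing']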
valid
GitWorkingCopy.availableVersions
return a list of GitCloneVersion objects for tags which are valid semantic version identifiers.
yotta/lib/git_access.py
def availableVersions(self): ''' return a list of GitCloneVersion objects for tags which are valid semantic version idenfitifiers. ''' r = [] for t in self.vcs.tags(): logger.debug("available version tag: %s", t) # ignore empty tags: if not len(t.strip()): continue try: r.append(GitCloneVersion(t, t, self)) except ValueError: logger.debug('invalid version tag: %s', t) return r
def availableVersions(self): ''' return a list of GitCloneVersion objects for tags which are valid semantic version idenfitifiers. ''' r = [] for t in self.vcs.tags(): logger.debug("available version tag: %s", t) # ignore empty tags: if not len(t.strip()): continue try: r.append(GitCloneVersion(t, t, self)) except ValueError: logger.debug('invalid version tag: %s', t) return r
[ "return", "a", "list", "of", "GitCloneVersion", "objects", "for", "tags", "which", "are", "valid", "semantic", "version", "idenfitifiers", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/git_access.py#L48-L62
[ "def", "availableVersions", "(", "self", ")", ":", "r", "=", "[", "]", "for", "t", "in", "self", ".", "vcs", ".", "tags", "(", ")", ":", "logger", ".", "debug", "(", "\"available version tag: %s\"", ",", "t", ")", "# ignore empty tags:", "if", "not", "len", "(", "t", ".", "strip", "(", ")", ")", ":", "continue", "try", ":", "r", ".", "append", "(", "GitCloneVersion", "(", "t", ",", "t", ",", "self", ")", ")", "except", "ValueError", ":", "logger", ".", "debug", "(", "'invalid version tag: %s'", ",", "t", ")", "return", "r" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
GitWorkingCopy.commitVersion
return a GitCloneVersion object for a specific commit if valid
yotta/lib/git_access.py
def commitVersion(self, spec): ''' return a GithubComponentVersion object for a specific commit if valid ''' import re commit_match = re.match('^[a-f0-9]{7,40}$', spec, re.I) if commit_match: return GitCloneVersion('', spec, self) return None
def commitVersion(self, spec): ''' return a GithubComponentVersion object for a specific commit if valid ''' import re commit_match = re.match('^[a-f0-9]{7,40}$', spec, re.I) if commit_match: return GitCloneVersion('', spec, self) return None
[ "return", "a", "GithubComponentVersion", "object", "for", "a", "specific", "commit", "if", "valid" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/git_access.py#L78-L87
[ "def", "commitVersion", "(", "self", ",", "spec", ")", ":", "import", "re", "commit_match", "=", "re", ".", "match", "(", "'^[a-f0-9]{7,40}$'", ",", "spec", ",", "re", ".", "I", ")", "if", "commit_match", ":", "return", "GitCloneVersion", "(", "''", ",", "spec", ",", "self", ")", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
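A quick standalone check of the commit-hash pattern used in the entry above (7 to 40 hex characters, case-insensitive); the helper name is invented for illustration.
import re

def looks_like_commit(spec):
    return bool(re.match('^[a-f0-9]{7,40}$', spec, re.I))

print(looks_like_commit('DEADBEEF'))  # True  (case-insensitive hex, at least 7 chars)
print(looks_like_commit('abc123'))    # False (only 6 characters)
print(looks_like_commit('v1.2.3'))    # False (not a hex string)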
valid
GitComponent.createFromSource
returns a git component for any git:// url, or None if this is not a git component. Normally version will be empty, unless the original url was of the form 'git://...#version', which can be used to grab a particular tag or branch, or ...#>=1.2.3, which can be used to specify semantic version specifications on tags.
yotta/lib/git_access.py
def createFromSource(cls, vs, name=None): ''' returns a git component for any git:// url, or None if this is not a git component. Normally version will be empty, unless the original url was of the form 'git://...#version', which can be used to grab a particular tag or branch, or ...#>=1.2.3, which can be used to specify semantic version specifications on tags. ''' return GitComponent(vs.location, vs.spec, vs.semantic_spec)
def createFromSource(cls, vs, name=None): ''' returns a git component for any git:// url, or None if this is not a git component. Normally version will be empty, unless the original url was of the form 'git://...#version', which can be used to grab a particular tag or branch, or ...#>=1.2.3, which can be used to specify semantic version specifications on tags. ''' return GitComponent(vs.location, vs.spec, vs.semantic_spec)
[ "returns", "a", "git", "component", "for", "any", "git", ":", "//", "url", "or", "None", "if", "this", "is", "not", "a", "git", "component", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/git_access.py#L98-L107
[ "def", "createFromSource", "(", "cls", ",", "vs", ",", "name", "=", "None", ")", ":", "return", "GitComponent", "(", "vs", ".", "location", ",", "vs", ".", "spec", ",", "vs", ".", "semantic_spec", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
_mergeDictionaries
merge dictionaries of dictionaries recursively, with elements from dictionaries earlier in the argument sequence taking precedence
yotta/lib/target.py
def _mergeDictionaries(*args): ''' merge dictionaries of dictionaries recursively, with elements from dictionaries earlier in the argument sequence taking precedence ''' # to support merging of OrderedDicts, copy the result type from the first # argument: result = type(args[0])() for k, v in itertools.chain(*[x.items() for x in args]): if not k in result: result[k] = v elif isinstance(result[k], dict) and isinstance(v, dict): result[k] = _mergeDictionaries(result[k], v) return result
def _mergeDictionaries(*args): ''' merge dictionaries of dictionaries recursively, with elements from dictionaries earlier in the argument sequence taking precedence ''' # to support merging of OrderedDicts, copy the result type from the first # argument: result = type(args[0])() for k, v in itertools.chain(*[x.items() for x in args]): if not k in result: result[k] = v elif isinstance(result[k], dict) and isinstance(v, dict): result[k] = _mergeDictionaries(result[k], v) return result
[ "merge", "dictionaries", "of", "dictionaries", "recursively", "with", "elements", "from", "dictionaries", "earlier", "in", "the", "argument", "sequence", "taking", "precedence" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L41-L53
[ "def", "_mergeDictionaries", "(", "*", "args", ")", ":", "# to support merging of OrderedDicts, copy the result type from the first", "# argument:", "result", "=", "type", "(", "args", "[", "0", "]", ")", "(", ")", "for", "k", ",", "v", "in", "itertools", ".", "chain", "(", "*", "[", "x", ".", "items", "(", ")", "for", "x", "in", "args", "]", ")", ":", "if", "not", "k", "in", "result", ":", "result", "[", "k", "]", "=", "v", "elif", "isinstance", "(", "result", "[", "k", "]", ",", "dict", ")", "and", "isinstance", "(", "v", ",", "dict", ")", ":", "result", "[", "k", "]", "=", "_mergeDictionaries", "(", "result", "[", "k", "]", ",", "v", ")", "return", "result" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
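A self-contained sketch of the recursive merge semantics documented above: keys from earlier dictionaries win, and nested dictionaries are merged rather than replaced. The function name and sample data are illustrative, not yotta APIs.
import itertools

def merge(*dicts):
    # earlier arguments take precedence; nested dicts are merged recursively
    result = type(dicts[0])()
    for k, v in itertools.chain(*[d.items() for d in dicts]):
        if k not in result:
            result[k] = v
        elif isinstance(result[k], dict) and isinstance(v, dict):
            result[k] = merge(result[k], v)
    return result

app = {'uart': {'baud': 115200}}
target = {'uart': {'baud': 9600, 'parity': 'none'}, 'debug': True}
print(merge(app, target))
# {'uart': {'baud': 115200, 'parity': 'none'}, 'debug': True}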
valid
_mirrorStructure
create a new nested dictionary object with the same structure as 'dictionary', but with all scalar values replaced with 'value'
yotta/lib/target.py
def _mirrorStructure(dictionary, value): ''' create a new nested dictionary object with the same structure as 'dictionary', but with all scalar values replaced with 'value' ''' result = type(dictionary)() for k in dictionary.keys(): if isinstance(dictionary[k], dict): result[k] = _mirrorStructure(dictionary[k], value) else: result[k] = value return result
def _mirrorStructure(dictionary, value): ''' create a new nested dictionary object with the same structure as 'dictionary', but with all scalar values replaced with 'value' ''' result = type(dictionary)() for k in dictionary.keys(): if isinstance(dictionary[k], dict): result[k] = _mirrorStructure(dictionary[k], value) else: result[k] = value return result
[ "create", "a", "new", "nested", "dictionary", "object", "with", "the", "same", "structure", "as", "dictionary", "but", "with", "all", "scalar", "values", "replaced", "with", "value" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L55-L65
[ "def", "_mirrorStructure", "(", "dictionary", ",", "value", ")", ":", "result", "=", "type", "(", "dictionary", ")", "(", ")", "for", "k", "in", "dictionary", ".", "keys", "(", ")", ":", "if", "isinstance", "(", "dictionary", "[", "k", "]", ",", "dict", ")", ":", "result", "[", "k", "]", "=", "_mirrorStructure", "(", "dictionary", "[", "k", "]", ",", "value", ")", "else", ":", "result", "[", "k", "]", "=", "value", "return", "result" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
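A minimal sketch of the structure-mirroring idea above, which yotta uses to record where each merged config value came from (see the DerivedTarget._loadConfig entry at the end of this section); the helper name and data are hypothetical.
def mirror(d, value):
    # same nested shape as d, but every leaf replaced with value
    out = type(d)()
    for k in d:
        out[k] = mirror(d[k], value) if isinstance(d[k], dict) else value
    return out

target_config = {'uart': {'baud': 9600, 'parity': 'none'}, 'debug': True}
print(mirror(target_config, 'frdm-k64f-gcc'))
# {'uart': {'baud': 'frdm-k64f-gcc', 'parity': 'frdm-k64f-gcc'}, 'debug': 'frdm-k64f-gcc'}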
valid
loadAdditionalConfig
returns (error, config)
yotta/lib/target.py
def loadAdditionalConfig(config_path): ''' returns (error, config) ''' error = None config = {} if not config_path: return (error, config) if os.path.isfile(config_path): try: config = ordered_json.load(config_path) except Exception as e: error = "Invalid syntax in file %s: %s" % (config_path, e) else: # try to interpret the argument as literal JSON try: config = ordered_json.loads(config_path) except Exception as e: # if this fails too, guess whether it was intended to be JSON or # not, and display an appropriate error message if '{' in config_path or '}' in config_path: error = "Invalid syntax in literal JSON: %s" % e else: error = "File \"%s\" does not exist" % config_path logger.debug('read additional config: %s', config) return (error, config)
def loadAdditionalConfig(config_path): ''' returns (error, config) ''' error = None config = {} if not config_path: return (error, config) if os.path.isfile(config_path): try: config = ordered_json.load(config_path) except Exception as e: error = "Invalid syntax in file %s: %s" % (config_path, e) else: # try to interpret the argument as literal JSON try: config = ordered_json.loads(config_path) except Exception as e: # if this fails too, guess whether it was intended to be JSON or # not, and display an appropriate error message if '{' in config_path or '}' in config_path: error = "Invalid syntax in literal JSON: %s" % e else: error = "File \"%s\" does not exist" % config_path logger.debug('read additional config: %s', config) return (error, config)
[ "returns", "(", "error", "config", ")" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L79-L103
[ "def", "loadAdditionalConfig", "(", "config_path", ")", ":", "error", "=", "None", "config", "=", "{", "}", "if", "not", "config_path", ":", "return", "(", "error", ",", "config", ")", "if", "os", ".", "path", ".", "isfile", "(", "config_path", ")", ":", "try", ":", "config", "=", "ordered_json", ".", "load", "(", "config_path", ")", "except", "Exception", "as", "e", ":", "error", "=", "\"Invalid syntax in file %s: %s\"", "%", "(", "config_path", ",", "e", ")", "else", ":", "# try to interpret the argument as literal JSON", "try", ":", "config", "=", "ordered_json", ".", "loads", "(", "config_path", ")", "except", "Exception", "as", "e", ":", "# if this fails too, guess whether it was intended to be JSON or", "# not, and display an appropriate error message", "if", "'{'", "in", "config_path", "or", "'}'", "in", "config_path", ":", "error", "=", "\"Invalid syntax in literal JSON: %s\"", "%", "e", "else", ":", "error", "=", "\"File \\\"%s\\\" does not exist\"", "%", "config_path", "logger", ".", "debug", "(", "'read additional config: %s'", ",", "config", ")", "return", "(", "error", ",", "config", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
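A hedged, standalone approximation of the file-or-literal-JSON fallback described above; it uses the standard json module rather than yotta's ordered_json, so key ordering differs, and the function name is illustrative only.
import json
import os

def load_additional_config(arg):
    error, config = None, {}
    if not arg:
        return error, config
    if os.path.isfile(arg):
        try:
            with open(arg) as f:
                config = json.load(f)
        except Exception as e:
            error = "Invalid syntax in file %s: %s" % (arg, e)
    else:
        # not a file: try to treat the argument as literal JSON
        try:
            config = json.loads(arg)
        except Exception as e:
            if '{' in arg or '}' in arg:
                error = "Invalid syntax in literal JSON: %s" % e
            else:
                error = 'File "%s" does not exist' % arg
    return error, config

print(load_additional_config('{"uart": {"baud": 115200}}'))
# (None, {'uart': {'baud': 115200}})
print(load_additional_config('no-such-file.json'))
# ('File "no-such-file.json" does not exist', {})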
valid
getDerivedTarget
Get the specified target description, optionally ensuring that it (and all dependencies) are installed in targets_path. Returns (DerivedTarget, errors), or (None, errors) if the leaf target could not be found/installed.
yotta/lib/target.py
def getDerivedTarget( target_name_and_version, targets_path, application_dir = None, install_missing = True, update_installed = False, additional_config = None, shrinkwrap = None ): # access, , get components, internal from yotta.lib import access from yotta.lib import access_common ''' Get the specified target description, optionally ensuring that it (and all dependencies) are installed in targets_path. Returns (DerivedTarget, errors), or (None, errors) if the leaf target could not be found/installed. ''' logger.debug('satisfy target: %s' % target_name_and_version); if ',' in target_name_and_version: name, version_req = target_name_and_version.split(',') else: name = target_name_and_version version_req = '*' # shrinkwrap is the raw json form, not mapping form here, so rearrange it # before indexing: if shrinkwrap is not None: shrinkwrap_version_req = { x['name']: x['version'] for x in shrinkwrap.get('targets', []) }.get(name, None) else: shrinkwrap_version_req = None if shrinkwrap_version_req is not None: logger.debug( 'respecting shrinkwrap version %s for %s', shrinkwrap_version_req, name ) dspec = pack.DependencySpec( name, version_req, shrinkwrap_version_req = shrinkwrap_version_req ) leaf_target = None previous_name = dspec.name search_dirs = [targets_path] target_hierarchy = [] errors = [] while True: t = None try: if install_missing: t = access.satisfyVersion( name = dspec.name, version_required = dspec.versionReq(), available = target_hierarchy, search_paths = search_dirs, working_directory = targets_path, update_installed = ('Update' if update_installed else None), type = 'target', inherit_shrinkwrap = shrinkwrap ) else: t = access.satisfyVersionFromSearchPaths( name = dspec.name, version_required = dspec.versionReq(), search_paths = search_dirs, type = 'target', inherit_shrinkwrap = shrinkwrap ) except access_common.AccessException as e: errors.append(e) if not t: if install_missing: logger.error( 'could not install target %s for %s' % (dspec, previous_name) ) break else: target_hierarchy.append(t) previous_name = dspec.name assert(isinstance(t, Target)) dspec = t.baseTargetSpec() #pylint: disable=no-member if not leaf_target: leaf_target = t if dspec is None: break if leaf_target is None: return (None, errors) # if we have a valid target, try to load the app-specific config data (if # any): app_config = {} if application_dir is not None: app_config_fname = os.path.join(application_dir, App_Config_File) if os.path.exists(app_config_fname): try: app_config = ordered_json.load(app_config_fname) except Exception as e: errors.append(Exception("Invalid application config.json: %s" % (e))) return (DerivedTarget(leaf_target, target_hierarchy[1:], app_config, additional_config), errors)
def getDerivedTarget( target_name_and_version, targets_path, application_dir = None, install_missing = True, update_installed = False, additional_config = None, shrinkwrap = None ): # access, , get components, internal from yotta.lib import access from yotta.lib import access_common ''' Get the specified target description, optionally ensuring that it (and all dependencies) are installed in targets_path. Returns (DerivedTarget, errors), or (None, errors) if the leaf target could not be found/installed. ''' logger.debug('satisfy target: %s' % target_name_and_version); if ',' in target_name_and_version: name, version_req = target_name_and_version.split(',') else: name = target_name_and_version version_req = '*' # shrinkwrap is the raw json form, not mapping form here, so rearrange it # before indexing: if shrinkwrap is not None: shrinkwrap_version_req = { x['name']: x['version'] for x in shrinkwrap.get('targets', []) }.get(name, None) else: shrinkwrap_version_req = None if shrinkwrap_version_req is not None: logger.debug( 'respecting shrinkwrap version %s for %s', shrinkwrap_version_req, name ) dspec = pack.DependencySpec( name, version_req, shrinkwrap_version_req = shrinkwrap_version_req ) leaf_target = None previous_name = dspec.name search_dirs = [targets_path] target_hierarchy = [] errors = [] while True: t = None try: if install_missing: t = access.satisfyVersion( name = dspec.name, version_required = dspec.versionReq(), available = target_hierarchy, search_paths = search_dirs, working_directory = targets_path, update_installed = ('Update' if update_installed else None), type = 'target', inherit_shrinkwrap = shrinkwrap ) else: t = access.satisfyVersionFromSearchPaths( name = dspec.name, version_required = dspec.versionReq(), search_paths = search_dirs, type = 'target', inherit_shrinkwrap = shrinkwrap ) except access_common.AccessException as e: errors.append(e) if not t: if install_missing: logger.error( 'could not install target %s for %s' % (dspec, previous_name) ) break else: target_hierarchy.append(t) previous_name = dspec.name assert(isinstance(t, Target)) dspec = t.baseTargetSpec() #pylint: disable=no-member if not leaf_target: leaf_target = t if dspec is None: break if leaf_target is None: return (None, errors) # if we have a valid target, try to load the app-specific config data (if # any): app_config = {} if application_dir is not None: app_config_fname = os.path.join(application_dir, App_Config_File) if os.path.exists(app_config_fname): try: app_config = ordered_json.load(app_config_fname) except Exception as e: errors.append(Exception("Invalid application config.json: %s" % (e))) return (DerivedTarget(leaf_target, target_hierarchy[1:], app_config, additional_config), errors)
[ "Get", "the", "specified", "target", "description", "optionally", "ensuring", "that", "it", "(", "and", "all", "dependencies", ")", "are", "installed", "in", "targets_path", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L105-L206
[ "def", "getDerivedTarget", "(", "target_name_and_version", ",", "targets_path", ",", "application_dir", "=", "None", ",", "install_missing", "=", "True", ",", "update_installed", "=", "False", ",", "additional_config", "=", "None", ",", "shrinkwrap", "=", "None", ")", ":", "# access, , get components, internal", "from", "yotta", ".", "lib", "import", "access", "from", "yotta", ".", "lib", "import", "access_common", "logger", ".", "debug", "(", "'satisfy target: %s'", "%", "target_name_and_version", ")", "if", "','", "in", "target_name_and_version", ":", "name", ",", "version_req", "=", "target_name_and_version", ".", "split", "(", "','", ")", "else", ":", "name", "=", "target_name_and_version", "version_req", "=", "'*'", "# shrinkwrap is the raw json form, not mapping form here, so rearrange it", "# before indexing:", "if", "shrinkwrap", "is", "not", "None", ":", "shrinkwrap_version_req", "=", "{", "x", "[", "'name'", "]", ":", "x", "[", "'version'", "]", "for", "x", "in", "shrinkwrap", ".", "get", "(", "'targets'", ",", "[", "]", ")", "}", ".", "get", "(", "name", ",", "None", ")", "else", ":", "shrinkwrap_version_req", "=", "None", "if", "shrinkwrap_version_req", "is", "not", "None", ":", "logger", ".", "debug", "(", "'respecting shrinkwrap version %s for %s'", ",", "shrinkwrap_version_req", ",", "name", ")", "dspec", "=", "pack", ".", "DependencySpec", "(", "name", ",", "version_req", ",", "shrinkwrap_version_req", "=", "shrinkwrap_version_req", ")", "leaf_target", "=", "None", "previous_name", "=", "dspec", ".", "name", "search_dirs", "=", "[", "targets_path", "]", "target_hierarchy", "=", "[", "]", "errors", "=", "[", "]", "while", "True", ":", "t", "=", "None", "try", ":", "if", "install_missing", ":", "t", "=", "access", ".", "satisfyVersion", "(", "name", "=", "dspec", ".", "name", ",", "version_required", "=", "dspec", ".", "versionReq", "(", ")", ",", "available", "=", "target_hierarchy", ",", "search_paths", "=", "search_dirs", ",", "working_directory", "=", "targets_path", ",", "update_installed", "=", "(", "'Update'", "if", "update_installed", "else", "None", ")", ",", "type", "=", "'target'", ",", "inherit_shrinkwrap", "=", "shrinkwrap", ")", "else", ":", "t", "=", "access", ".", "satisfyVersionFromSearchPaths", "(", "name", "=", "dspec", ".", "name", ",", "version_required", "=", "dspec", ".", "versionReq", "(", ")", ",", "search_paths", "=", "search_dirs", ",", "type", "=", "'target'", ",", "inherit_shrinkwrap", "=", "shrinkwrap", ")", "except", "access_common", ".", "AccessException", "as", "e", ":", "errors", ".", "append", "(", "e", ")", "if", "not", "t", ":", "if", "install_missing", ":", "logger", ".", "error", "(", "'could not install target %s for %s'", "%", "(", "dspec", ",", "previous_name", ")", ")", "break", "else", ":", "target_hierarchy", ".", "append", "(", "t", ")", "previous_name", "=", "dspec", ".", "name", "assert", "(", "isinstance", "(", "t", ",", "Target", ")", ")", "dspec", "=", "t", ".", "baseTargetSpec", "(", ")", "#pylint: disable=no-member", "if", "not", "leaf_target", ":", "leaf_target", "=", "t", "if", "dspec", "is", "None", ":", "break", "if", "leaf_target", "is", "None", ":", "return", "(", "None", ",", "errors", ")", "# if we have a valid target, try to load the app-specific config data (if", "# any):", "app_config", "=", "{", "}", "if", "application_dir", "is", "not", "None", ":", "app_config_fname", "=", "os", ".", "path", ".", "join", "(", "application_dir", ",", "App_Config_File", ")", "if", "os", ".", "path", ".", "exists", "(", 
"app_config_fname", ")", ":", "try", ":", "app_config", "=", "ordered_json", ".", "load", "(", "app_config_fname", ")", "except", "Exception", "as", "e", ":", "errors", ".", "append", "(", "Exception", "(", "\"Invalid application config.json: %s\"", "%", "(", "e", ")", ")", ")", "return", "(", "DerivedTarget", "(", "leaf_target", ",", "target_hierarchy", "[", "1", ":", "]", ",", "app_config", ",", "additional_config", ")", ",", "errors", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
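The 'name,version' parsing at the start of getDerivedTarget is easy to miss in the flattened code above; this tiny sketch shows just that step, with example target names, and does not attempt the install/search logic or shrinkwrap handling.
def split_target(target_name_and_version):
    # 'name,versionspec' selects a version; a bare name means any version ('*')
    if ',' in target_name_and_version:
        name, version_req = target_name_and_version.split(',')
    else:
        name, version_req = target_name_and_version, '*'
    return name, version_req

print(split_target('frdm-k64f-gcc,^2.0.0'))  # ('frdm-k64f-gcc', '^2.0.0')
print(split_target('x86-linux-native'))      # ('x86-linux-native', '*')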
valid
Target.baseTargetSpec
returns pack.DependencySpec for the base target of this target (or None if this target does not inherit from another target).
yotta/lib/target.py
def baseTargetSpec(self): ''' returns pack.DependencySpec for the base target of this target (or None if this target does not inherit from another target. ''' inherits = self.description.get('inherits', {}) if len(inherits) == 1: name, version_req = list(inherits.items())[0] shrinkwrap_version_req = self.getShrinkwrapMapping('targets').get(name, None) if shrinkwrap_version_req is not None: logger.debug( 'respecting shrinkwrap version %s for %s', shrinkwrap_version_req, name ) return pack.DependencySpec( name, version_req, shrinkwrap_version_req = shrinkwrap_version_req ) elif len(inherits) > 1: logger.error('target %s specifies multiple base targets, but only one is allowed', self.getName()) return None
def baseTargetSpec(self): ''' returns pack.DependencySpec for the base target of this target (or None if this target does not inherit from another target. ''' inherits = self.description.get('inherits', {}) if len(inherits) == 1: name, version_req = list(inherits.items())[0] shrinkwrap_version_req = self.getShrinkwrapMapping('targets').get(name, None) if shrinkwrap_version_req is not None: logger.debug( 'respecting shrinkwrap version %s for %s', shrinkwrap_version_req, name ) return pack.DependencySpec( name, version_req, shrinkwrap_version_req = shrinkwrap_version_req ) elif len(inherits) > 1: logger.error('target %s specifies multiple base targets, but only one is allowed', self.getName()) return None
[ "returns", "pack", ".", "DependencySpec", "for", "the", "base", "target", "of", "this", "target", "(", "or", "None", "if", "this", "target", "does", "not", "inherit", "from", "another", "target", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L235-L254
[ "def", "baseTargetSpec", "(", "self", ")", ":", "inherits", "=", "self", ".", "description", ".", "get", "(", "'inherits'", ",", "{", "}", ")", "if", "len", "(", "inherits", ")", "==", "1", ":", "name", ",", "version_req", "=", "list", "(", "inherits", ".", "items", "(", ")", ")", "[", "0", "]", "shrinkwrap_version_req", "=", "self", ".", "getShrinkwrapMapping", "(", "'targets'", ")", ".", "get", "(", "name", ",", "None", ")", "if", "shrinkwrap_version_req", "is", "not", "None", ":", "logger", ".", "debug", "(", "'respecting shrinkwrap version %s for %s'", ",", "shrinkwrap_version_req", ",", "name", ")", "return", "pack", ".", "DependencySpec", "(", "name", ",", "version_req", ",", "shrinkwrap_version_req", "=", "shrinkwrap_version_req", ")", "elif", "len", "(", "inherits", ")", ">", "1", ":", "logger", ".", "error", "(", "'target %s specifies multiple base targets, but only one is allowed'", ",", "self", ".", "getName", "(", ")", ")", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
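For context on the baseTargetSpec record above, here is a minimal standalone sketch (plain dictionaries, a hypothetical target name and version spec, no yotta imports) of how a single-entry 'inherits' mapping is unpacked before being wrapped in pack.DependencySpec.

# hypothetical target description; only the 'inherits' key matters here
description = {'inherits': {'some-base-target': '^1.0.0'}}

inherits = description.get('inherits', {})
if len(inherits) == 1:
    name, version_req = list(inherits.items())[0]
    print(name, version_req)        # -> some-base-target ^1.0.0
elif len(inherits) > 1:
    print('error: only one base target is allowed')
else:
    print('no base target')         # baseTargetSpec() returns None in this case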
valid
DerivedTarget.getScript
return the specified script if one exists (possibly inherited from a base target)
yotta/lib/target.py
def getScript(self, scriptname): ''' return the specified script if one exists (possibly inherited from a base target) ''' for t in self.hierarchy: s = t.getScript(scriptname) if s: return s return None
def getScript(self, scriptname): ''' return the specified script if one exists (possibly inherited from a base target) ''' for t in self.hierarchy: s = t.getScript(scriptname) if s: return s return None
[ "return", "the", "specified", "script", "if", "one", "exists", "(", "possibly", "inherited", "from", "a", "base", "target", ")" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L301-L309
[ "def", "getScript", "(", "self", ",", "scriptname", ")", ":", "for", "t", "in", "self", ".", "hierarchy", ":", "s", "=", "t", ".", "getScript", "(", "scriptname", ")", "if", "s", ":", "return", "s", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
DerivedTarget._loadConfig
load the configuration information from the target hierarchy
yotta/lib/target.py
def _loadConfig(self): ''' load the configuration information from the target hierarchy ''' config_dicts = [self.additional_config, self.app_config] + [t.getConfig() for t in self.hierarchy] # create an identical set of dictionaries, but with the names of the # sources in place of the values. When these are merged they will show # where each merged property came from: config_blame = [ _mirrorStructure(self.additional_config, 'command-line config'), _mirrorStructure(self.app_config, 'application\'s config.json'), ] + [ _mirrorStructure(t.getConfig(), t.getName()) for t in self.hierarchy ] self.config = _mergeDictionaries(*config_dicts) self.config_blame = _mergeDictionaries(*config_blame)
def _loadConfig(self): ''' load the configuration information from the target hierarchy ''' config_dicts = [self.additional_config, self.app_config] + [t.getConfig() for t in self.hierarchy] # create an identical set of dictionaries, but with the names of the # sources in place of the values. When these are merged they will show # where each merged property came from: config_blame = [ _mirrorStructure(self.additional_config, 'command-line config'), _mirrorStructure(self.app_config, 'application\'s config.json'), ] + [ _mirrorStructure(t.getConfig(), t.getName()) for t in self.hierarchy ] self.config = _mergeDictionaries(*config_dicts) self.config_blame = _mergeDictionaries(*config_blame)
[ "load", "the", "configuration", "information", "from", "the", "target", "hierarchy" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L311-L325
[ "def", "_loadConfig", "(", "self", ")", ":", "config_dicts", "=", "[", "self", ".", "additional_config", ",", "self", ".", "app_config", "]", "+", "[", "t", ".", "getConfig", "(", ")", "for", "t", "in", "self", ".", "hierarchy", "]", "# create an identical set of dictionaries, but with the names of the", "# sources in place of the values. When these are merged they will show", "# where each merged property came from:", "config_blame", "=", "[", "_mirrorStructure", "(", "self", ".", "additional_config", ",", "'command-line config'", ")", ",", "_mirrorStructure", "(", "self", ".", "app_config", ",", "'application\\'s config.json'", ")", ",", "]", "+", "[", "_mirrorStructure", "(", "t", ".", "getConfig", "(", ")", ",", "t", ".", "getName", "(", ")", ")", "for", "t", "in", "self", ".", "hierarchy", "]", "self", ".", "config", "=", "_mergeDictionaries", "(", "*", "config_dicts", ")", "self", ".", "config_blame", "=", "_mergeDictionaries", "(", "*", "config_blame", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
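A standalone illustration of the merge-plus-blame idea in the _loadConfig record above. It assumes (as the ordering of config_dicts suggests) that earlier dictionaries take precedence; merge() and mirror() below are simplified stand-ins for yotta's _mergeDictionaries and _mirrorStructure, not their actual implementations, and the config values are invented.

def merge(*dicts):
    # later dictionaries are applied first, so earlier ones override them
    result = {}
    for d in reversed(dicts):
        for k, v in d.items():
            if isinstance(v, dict) and isinstance(result.get(k), dict):
                result[k] = merge(v, result[k])
            else:
                result[k] = v
    return result

def mirror(structure, source_name):
    # same shape as 'structure', but every leaf is replaced by its source name
    return {k: mirror(v, source_name) if isinstance(v, dict) else source_name
            for k, v in structure.items()}

cmdline = {'debug': {'port': 4242}}
app     = {'debug': {'port': 3333, 'speed': 1000}}
target  = {'debug': {'speed': 115200}, 'mcu': 'hypothetical-mcu'}

config = merge(cmdline, app, target)
blame  = merge(mirror(cmdline, 'command-line config'),
               mirror(app, "application's config.json"),
               mirror(target, 'target'))
print(config)   # debug.port from cmdline, debug.speed from app, mcu from target
print(blame)    # the same structure, naming the source of each value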
valid
DerivedTarget.getToolchainFiles
return a list of toolchain file paths in override order (starting at the bottom/leaf of the hierarchy and ending at the base). The list is returned in the order they should be included (most-derived last).
yotta/lib/target.py
def getToolchainFiles(self): ''' return a list of toolchain file paths in override order (starting at the bottom/leaf of the hierarchy and ending at the base). The list is returned in the order they should be included (most-derived last). ''' return reversed([ os.path.join(x.path, x.description['toolchain']) for x in self.hierarchy if 'toolchain' in x.description ])
def getToolchainFiles(self): ''' return a list of toolchain file paths in override order (starting at the bottom/leaf of the hierarchy and ending at the base). The list is returned in the order they should be included (most-derived last). ''' return reversed([ os.path.join(x.path, x.description['toolchain']) for x in self.hierarchy if 'toolchain' in x.description ])
[ "return", "a", "list", "of", "toolchain", "file", "paths", "in", "override", "order", "(", "starting", "at", "the", "bottom", "/", "leaf", "of", "the", "hierarchy", "and", "ending", "at", "the", "base", ")", ".", "The", "list", "is", "returned", "in", "the", "order", "they", "should", "be", "included", "(", "most", "-", "derived", "last", ")", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L367-L375
[ "def", "getToolchainFiles", "(", "self", ")", ":", "return", "reversed", "(", "[", "os", ".", "path", ".", "join", "(", "x", ".", "path", ",", "x", ".", "description", "[", "'toolchain'", "]", ")", "for", "x", "in", "self", ".", "hierarchy", "if", "'toolchain'", "in", "x", ".", "description", "]", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
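A tiny illustration of the ordering promise in the getToolchainFiles record above: the target hierarchy is stored leaf-first (as its docstring notes), so reversing it yields the include order base-first / most-derived-last. The paths below are made up.

hierarchy_toolchains = [
    'leaf-target/toolchain.cmake',          # most-derived target
    'intermediate-target/toolchain.cmake',
    'base-target/toolchain.cmake',          # base of the hierarchy
]

for path in reversed(hierarchy_toolchains):
    print('include', path)                  # base first, most-derived last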
valid
DerivedTarget.getAdditionalIncludes
Return the list of cmake files which are to be included by yotta in every module built. The list is returned in the order they should be included (most-derived last).
yotta/lib/target.py
def getAdditionalIncludes(self): ''' Return the list of cmake files which are to be included by yotta in every module built. The list is returned in the order they should be included (most-derived last). ''' return reversed([ os.path.join(t.path, include_file) for t in self.hierarchy for include_file in t.description.get('cmakeIncludes', []) ])
def getAdditionalIncludes(self): ''' Return the list of cmake files which are to be included by yotta in every module built. The list is returned in the order they should be included (most-derived last). ''' return reversed([ os.path.join(t.path, include_file) for t in self.hierarchy for include_file in t.description.get('cmakeIncludes', []) ])
[ "Return", "the", "list", "of", "cmake", "files", "which", "are", "to", "be", "included", "by", "yotta", "in", "every", "module", "built", ".", "The", "list", "is", "returned", "in", "the", "order", "they", "should", "be", "included", "(", "most", "-", "derived", "last", ")", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L377-L386
[ "def", "getAdditionalIncludes", "(", "self", ")", ":", "return", "reversed", "(", "[", "os", ".", "path", ".", "join", "(", "t", ".", "path", ",", "include_file", ")", "for", "t", "in", "self", ".", "hierarchy", "for", "include_file", "in", "t", ".", "description", ".", "get", "(", "'cmakeIncludes'", ",", "[", "]", ")", "]", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
DerivedTarget.inheritsFrom
Return true if this target inherits from the named target (directly or indirectly). Also returns true if this target is the named target. Otherwise return false.
yotta/lib/target.py
def inheritsFrom(self, target_name): ''' Return true if this target inherits from the named target (directly or indirectly. Also returns true if this target is the named target. Otherwise return false. ''' for t in self.hierarchy: if t and t.getName() == target_name or target_name in t.description.get('inherits', {}): return True return False
def inheritsFrom(self, target_name): ''' Return true if this target inherits from the named target (directly or indirectly. Also returns true if this target is the named target. Otherwise return false. ''' for t in self.hierarchy: if t and t.getName() == target_name or target_name in t.description.get('inherits', {}): return True return False
[ "Return", "true", "if", "this", "target", "inherits", "from", "the", "named", "target", "(", "directly", "or", "indirectly", ".", "Also", "returns", "true", "if", "this", "target", "is", "the", "named", "target", ".", "Otherwise", "return", "false", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L388-L396
[ "def", "inheritsFrom", "(", "self", ",", "target_name", ")", ":", "for", "t", "in", "self", ".", "hierarchy", ":", "if", "t", "and", "t", ".", "getName", "(", ")", "==", "target_name", "or", "target_name", "in", "t", ".", "description", ".", "get", "(", "'inherits'", ",", "{", "}", ")", ":", "return", "True", "return", "False" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
DerivedTarget.exec_helper
Execute the given command, returning an error message if an error occurred or None if the command was successful.
yotta/lib/target.py
def exec_helper(self, cmd, builddir): ''' Execute the given command, returning an error message if an error occured or None if the command was succesful.''' try: child = subprocess.Popen(cmd, cwd=builddir) child.wait() except OSError as e: if e.errno == errno.ENOENT: if cmd[0] == 'cmake': return 'CMake is not installed, please follow the installation instructions at http://docs.yottabuild.org/#installing' else: return '%s is not installed' % (cmd[0]) else: return 'command %s failed' % (cmd) if child.returncode: return 'command %s failed' % (cmd)
def exec_helper(self, cmd, builddir): ''' Execute the given command, returning an error message if an error occured or None if the command was succesful.''' try: child = subprocess.Popen(cmd, cwd=builddir) child.wait() except OSError as e: if e.errno == errno.ENOENT: if cmd[0] == 'cmake': return 'CMake is not installed, please follow the installation instructions at http://docs.yottabuild.org/#installing' else: return '%s is not installed' % (cmd[0]) else: return 'command %s failed' % (cmd) if child.returncode: return 'command %s failed' % (cmd)
[ "Execute", "the", "given", "command", "returning", "an", "error", "message", "if", "an", "error", "occured", "or", "None", "if", "the", "command", "was", "succesful", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L460-L475
[ "def", "exec_helper", "(", "self", ",", "cmd", ",", "builddir", ")", ":", "try", ":", "child", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "cwd", "=", "builddir", ")", "child", ".", "wait", "(", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "if", "cmd", "[", "0", "]", "==", "'cmake'", ":", "return", "'CMake is not installed, please follow the installation instructions at http://docs.yottabuild.org/#installing'", "else", ":", "return", "'%s is not installed'", "%", "(", "cmd", "[", "0", "]", ")", "else", ":", "return", "'command %s failed'", "%", "(", "cmd", ")", "if", "child", ".", "returncode", ":", "return", "'command %s failed'", "%", "(", "cmd", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
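The exec_helper record above boils down to a small subprocess pattern: run a command in a working directory and turn "executable missing" (ENOENT) and non-zero exit codes into error strings rather than exceptions. A self-contained sketch, with a placeholder command chosen so the ENOENT branch is exercised:

import errno
import subprocess

def run_or_explain(cmd, cwd='.'):
    try:
        child = subprocess.Popen(cmd, cwd=cwd)
        child.wait()
    except OSError as e:
        if e.errno == errno.ENOENT:
            return '%s is not installed' % cmd[0]
        return 'command %s failed' % (cmd,)
    if child.returncode:
        return 'command %s failed' % (cmd,)
    return None   # success

print(run_or_explain(['definitely-not-a-real-tool', '--version']))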
valid
DerivedTarget.build
Execute the commands necessary to build this component, and all of its dependencies.
yotta/lib/target.py
def build(self, builddir, component, args, release_build=False, build_args=None, targets=None, release_no_debug_info_build=False): ''' Execute the commands necessary to build this component, and all of its dependencies. ''' if build_args is None: build_args = [] if targets is None: targets = [] # in the future this may be specified in the target description, but # for now we only support cmake, so everything is simple: if release_no_debug_info_build: build_type = 'Release' elif release_build: build_type = 'RelWithDebInfo' else: build_type = 'Debug' cmd = ['cmake', '-D', 'CMAKE_BUILD_TYPE=%s' % build_type, '-G', args.cmake_generator, '.'] res = self.exec_helper(cmd, builddir) if res is not None: return res # work-around various yotta-specific issues with the generated # Ninja/project files: from yotta.lib import cmake_fixups cmake_fixups.applyFixupsForFenerator(args.cmake_generator, builddir, component) build_command = self.overrideBuildCommand(args.cmake_generator, targets=targets) if build_command: cmd = build_command + build_args else: cmd = ['cmake', '--build', builddir] if len(targets): # !!! FIXME: support multiple targets with the default CMake # build command cmd += ['--target', targets[0]] cmd += build_args res = self.exec_helper(cmd, builddir) if res is not None: return res hint = self.hintForCMakeGenerator(args.cmake_generator, component) if hint: logger.info(hint)
def build(self, builddir, component, args, release_build=False, build_args=None, targets=None, release_no_debug_info_build=False): ''' Execute the commands necessary to build this component, and all of its dependencies. ''' if build_args is None: build_args = [] if targets is None: targets = [] # in the future this may be specified in the target description, but # for now we only support cmake, so everything is simple: if release_no_debug_info_build: build_type = 'Release' elif release_build: build_type = 'RelWithDebInfo' else: build_type = 'Debug' cmd = ['cmake', '-D', 'CMAKE_BUILD_TYPE=%s' % build_type, '-G', args.cmake_generator, '.'] res = self.exec_helper(cmd, builddir) if res is not None: return res # work-around various yotta-specific issues with the generated # Ninja/project files: from yotta.lib import cmake_fixups cmake_fixups.applyFixupsForFenerator(args.cmake_generator, builddir, component) build_command = self.overrideBuildCommand(args.cmake_generator, targets=targets) if build_command: cmd = build_command + build_args else: cmd = ['cmake', '--build', builddir] if len(targets): # !!! FIXME: support multiple targets with the default CMake # build command cmd += ['--target', targets[0]] cmd += build_args res = self.exec_helper(cmd, builddir) if res is not None: return res hint = self.hintForCMakeGenerator(args.cmake_generator, component) if hint: logger.info(hint)
[ "Execute", "the", "commands", "necessary", "to", "build", "this", "component", "and", "all", "of", "its", "dependencies", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L478-L519
[ "def", "build", "(", "self", ",", "builddir", ",", "component", ",", "args", ",", "release_build", "=", "False", ",", "build_args", "=", "None", ",", "targets", "=", "None", ",", "release_no_debug_info_build", "=", "False", ")", ":", "if", "build_args", "is", "None", ":", "build_args", "=", "[", "]", "if", "targets", "is", "None", ":", "targets", "=", "[", "]", "# in the future this may be specified in the target description, but", "# for now we only support cmake, so everything is simple:", "if", "release_no_debug_info_build", ":", "build_type", "=", "'Release'", "elif", "release_build", ":", "build_type", "=", "'RelWithDebInfo'", "else", ":", "build_type", "=", "'Debug'", "cmd", "=", "[", "'cmake'", ",", "'-D'", ",", "'CMAKE_BUILD_TYPE=%s'", "%", "build_type", ",", "'-G'", ",", "args", ".", "cmake_generator", ",", "'.'", "]", "res", "=", "self", ".", "exec_helper", "(", "cmd", ",", "builddir", ")", "if", "res", "is", "not", "None", ":", "return", "res", "# work-around various yotta-specific issues with the generated", "# Ninja/project files:", "from", "yotta", ".", "lib", "import", "cmake_fixups", "cmake_fixups", ".", "applyFixupsForFenerator", "(", "args", ".", "cmake_generator", ",", "builddir", ",", "component", ")", "build_command", "=", "self", ".", "overrideBuildCommand", "(", "args", ".", "cmake_generator", ",", "targets", "=", "targets", ")", "if", "build_command", ":", "cmd", "=", "build_command", "+", "build_args", "else", ":", "cmd", "=", "[", "'cmake'", ",", "'--build'", ",", "builddir", "]", "if", "len", "(", "targets", ")", ":", "# !!! FIXME: support multiple targets with the default CMake", "# build command", "cmd", "+=", "[", "'--target'", ",", "targets", "[", "0", "]", "]", "cmd", "+=", "build_args", "res", "=", "self", ".", "exec_helper", "(", "cmd", ",", "builddir", ")", "if", "res", "is", "not", "None", ":", "return", "res", "hint", "=", "self", ".", "hintForCMakeGenerator", "(", "args", ".", "cmake_generator", ",", "component", ")", "if", "hint", ":", "logger", ".", "info", "(", "hint", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
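For orientation, the two CMake invocations that the build record above issues (configure, then build) look roughly like the sketch below. The build directory, generator and build type are placeholder values, and the commands are only printed, not run; this is an approximation, not yotta's own code.

builddir   = 'build/some-target'    # hypothetical build directory
build_type = 'Debug'                # or 'RelWithDebInfo' / 'Release'
generator  = 'Ninja'                # normally taken from args.cmake_generator

configure_cmd = ['cmake', '-D', 'CMAKE_BUILD_TYPE=%s' % build_type,
                 '-G', generator, '.']
build_cmd = ['cmake', '--build', builddir]

print('configure:', ' '.join(configure_cmd))
print('build:    ', ' '.join(build_cmd))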
valid
DerivedTarget.findProgram
Return the builddir-relative path of program, if only a partial path is specified. Returns None and logs an error message if the program is ambiguous or not found
yotta/lib/target.py
def findProgram(self, builddir, program): ''' Return the builddir-relative path of program, if only a partial path is specified. Returns None and logs an error message if the program is ambiguous or not found ''' # if this is an exact match, do no further checking: if os.path.isfile(os.path.join(builddir, program)): logging.info('found %s' % program) return program exact_matches = [] insensitive_matches = [] approx_matches = [] for path, dirs, files in os.walk(builddir): if program in files: exact_matches.append(os.path.relpath(os.path.join(path, program), builddir)) continue files_lower = [f.lower() for f in files] if program.lower() in files_lower: insensitive_matches.append( os.path.relpath( os.path.join(path, files[files_lower.index(program.lower())]), builddir ) ) continue # !!! TODO: in the future add approximate string matching (typos, # etc.), for now we just test stripping any paths off program, and # looking for substring matches: pg_basen_lower_noext = os.path.splitext(os.path.basename(program).lower())[0] for f in files_lower: if pg_basen_lower_noext in f: approx_matches.append( os.path.relpath( os.path.join(path, files[files_lower.index(f)]), builddir ) ) if len(exact_matches) == 1: logging.info('found %s at %s', program, exact_matches[0]) return exact_matches[0] elif len(exact_matches) > 1: logging.error( '%s matches multiple executables, please use a full path (one of %s)' % ( program, ', or '.join(['"'+os.path.join(m, program)+'"' for m in exact_matches]) ) ) return None # if we have matches with and without a file extension, prefer the # no-file extension version, and discard the others (so we avoid # picking up post-processed files): reduced_approx_matches = [] for m in approx_matches: root = os.path.splitext(m)[0] if (m == root) or (root not in approx_matches): reduced_approx_matches.append(m) approx_matches = reduced_approx_matches for matches in (insensitive_matches, approx_matches): if len(matches) == 1: logging.info('found %s at %s' % ( program, matches[0] )) return matches[0] elif len(matches) > 1: logging.error( '%s is similar to several executables found. Please use an exact name:\n%s' % ( program, '\n'.join(matches) ) ) return None logging.error('could not find program "%s" to debug' % program) return None
def findProgram(self, builddir, program): ''' Return the builddir-relative path of program, if only a partial path is specified. Returns None and logs an error message if the program is ambiguous or not found ''' # if this is an exact match, do no further checking: if os.path.isfile(os.path.join(builddir, program)): logging.info('found %s' % program) return program exact_matches = [] insensitive_matches = [] approx_matches = [] for path, dirs, files in os.walk(builddir): if program in files: exact_matches.append(os.path.relpath(os.path.join(path, program), builddir)) continue files_lower = [f.lower() for f in files] if program.lower() in files_lower: insensitive_matches.append( os.path.relpath( os.path.join(path, files[files_lower.index(program.lower())]), builddir ) ) continue # !!! TODO: in the future add approximate string matching (typos, # etc.), for now we just test stripping any paths off program, and # looking for substring matches: pg_basen_lower_noext = os.path.splitext(os.path.basename(program).lower())[0] for f in files_lower: if pg_basen_lower_noext in f: approx_matches.append( os.path.relpath( os.path.join(path, files[files_lower.index(f)]), builddir ) ) if len(exact_matches) == 1: logging.info('found %s at %s', program, exact_matches[0]) return exact_matches[0] elif len(exact_matches) > 1: logging.error( '%s matches multiple executables, please use a full path (one of %s)' % ( program, ', or '.join(['"'+os.path.join(m, program)+'"' for m in exact_matches]) ) ) return None # if we have matches with and without a file extension, prefer the # no-file extension version, and discard the others (so we avoid # picking up post-processed files): reduced_approx_matches = [] for m in approx_matches: root = os.path.splitext(m)[0] if (m == root) or (root not in approx_matches): reduced_approx_matches.append(m) approx_matches = reduced_approx_matches for matches in (insensitive_matches, approx_matches): if len(matches) == 1: logging.info('found %s at %s' % ( program, matches[0] )) return matches[0] elif len(matches) > 1: logging.error( '%s is similar to several executables found. Please use an exact name:\n%s' % ( program, '\n'.join(matches) ) ) return None logging.error('could not find program "%s" to debug' % program) return None
[ "Return", "the", "builddir", "-", "relative", "path", "of", "program", "if", "only", "a", "partial", "path", "is", "specified", ".", "Returns", "None", "and", "logs", "an", "error", "message", "if", "the", "program", "is", "ambiguous", "or", "not", "found" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L521-L595
[ "def", "findProgram", "(", "self", ",", "builddir", ",", "program", ")", ":", "# if this is an exact match, do no further checking:", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "builddir", ",", "program", ")", ")", ":", "logging", ".", "info", "(", "'found %s'", "%", "program", ")", "return", "program", "exact_matches", "=", "[", "]", "insensitive_matches", "=", "[", "]", "approx_matches", "=", "[", "]", "for", "path", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "builddir", ")", ":", "if", "program", "in", "files", ":", "exact_matches", ".", "append", "(", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "join", "(", "path", ",", "program", ")", ",", "builddir", ")", ")", "continue", "files_lower", "=", "[", "f", ".", "lower", "(", ")", "for", "f", "in", "files", "]", "if", "program", ".", "lower", "(", ")", "in", "files_lower", ":", "insensitive_matches", ".", "append", "(", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "join", "(", "path", ",", "files", "[", "files_lower", ".", "index", "(", "program", ".", "lower", "(", ")", ")", "]", ")", ",", "builddir", ")", ")", "continue", "# !!! TODO: in the future add approximate string matching (typos,", "# etc.), for now we just test stripping any paths off program, and", "# looking for substring matches:", "pg_basen_lower_noext", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "program", ")", ".", "lower", "(", ")", ")", "[", "0", "]", "for", "f", "in", "files_lower", ":", "if", "pg_basen_lower_noext", "in", "f", ":", "approx_matches", ".", "append", "(", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "join", "(", "path", ",", "files", "[", "files_lower", ".", "index", "(", "f", ")", "]", ")", ",", "builddir", ")", ")", "if", "len", "(", "exact_matches", ")", "==", "1", ":", "logging", ".", "info", "(", "'found %s at %s'", ",", "program", ",", "exact_matches", "[", "0", "]", ")", "return", "exact_matches", "[", "0", "]", "elif", "len", "(", "exact_matches", ")", ">", "1", ":", "logging", ".", "error", "(", "'%s matches multiple executables, please use a full path (one of %s)'", "%", "(", "program", ",", "', or '", ".", "join", "(", "[", "'\"'", "+", "os", ".", "path", ".", "join", "(", "m", ",", "program", ")", "+", "'\"'", "for", "m", "in", "exact_matches", "]", ")", ")", ")", "return", "None", "# if we have matches with and without a file extension, prefer the", "# no-file extension version, and discard the others (so we avoid", "# picking up post-processed files):", "reduced_approx_matches", "=", "[", "]", "for", "m", "in", "approx_matches", ":", "root", "=", "os", ".", "path", ".", "splitext", "(", "m", ")", "[", "0", "]", "if", "(", "m", "==", "root", ")", "or", "(", "root", "not", "in", "approx_matches", ")", ":", "reduced_approx_matches", ".", "append", "(", "m", ")", "approx_matches", "=", "reduced_approx_matches", "for", "matches", "in", "(", "insensitive_matches", ",", "approx_matches", ")", ":", "if", "len", "(", "matches", ")", "==", "1", ":", "logging", ".", "info", "(", "'found %s at %s'", "%", "(", "program", ",", "matches", "[", "0", "]", ")", ")", "return", "matches", "[", "0", "]", "elif", "len", "(", "matches", ")", ">", "1", ":", "logging", ".", "error", "(", "'%s is similar to several executables found. 
Please use an exact name:\\n%s'", "%", "(", "program", ",", "'\\n'", ".", "join", "(", "matches", ")", ")", ")", "return", "None", "logging", ".", "error", "(", "'could not find program \"%s\" to debug'", "%", "program", ")", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
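A cut-down, standalone version of the search strategy in the findProgram record above: prefer an exact filename match anywhere under the build directory, then fall back to a case-insensitive match; the approximate-substring pass and the logging are omitted. The directory in the example call is just the current directory, and the program name is a placeholder.

import os

def find_program(builddir, program):
    exact, insensitive = [], []
    for path, dirs, files in os.walk(builddir):
        if program in files:
            exact.append(os.path.relpath(os.path.join(path, program), builddir))
            continue
        lower = [f.lower() for f in files]
        if program.lower() in lower:
            match = files[lower.index(program.lower())]
            insensitive.append(os.path.relpath(os.path.join(path, match), builddir))
    for matches in (exact, insensitive):
        if len(matches) == 1:
            return matches[0]
    return None   # not found, or ambiguous

print(find_program('.', 'example-program-name'))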
valid
DerivedTarget.start
Launch the specified program. Uses the `start` script if specified by the target, attempts to run it natively if that script is not defined.
yotta/lib/target.py
def start(self, builddir, program, forward_args): ''' Launch the specified program. Uses the `start` script if specified by the target, attempts to run it natively if that script is not defined. ''' child = None try: prog_path = self.findProgram(builddir, program) if prog_path is None: return start_env, start_vars = self.buildProgEnvAndVars(prog_path, builddir) if self.getScript('start'): cmd = [ os.path.expandvars(string.Template(x).safe_substitute(**start_vars)) for x in self.getScript('start') ] + forward_args else: cmd = shlex.split('./' + prog_path) + forward_args logger.debug('starting program: %s', cmd) child = subprocess.Popen( cmd, cwd = builddir, env = start_env ) child.wait() if child.returncode: return "process exited with status %s" % child.returncode child = None except OSError as e: import errno if e.errno == errno.ENOEXEC: return ("the program %s cannot be run (perhaps your target "+ "needs to define a 'start' script to start it on its " "intended execution target?)") % prog_path finally: if child is not None: _tryTerminate(child)
def start(self, builddir, program, forward_args): ''' Launch the specified program. Uses the `start` script if specified by the target, attempts to run it natively if that script is not defined. ''' child = None try: prog_path = self.findProgram(builddir, program) if prog_path is None: return start_env, start_vars = self.buildProgEnvAndVars(prog_path, builddir) if self.getScript('start'): cmd = [ os.path.expandvars(string.Template(x).safe_substitute(**start_vars)) for x in self.getScript('start') ] + forward_args else: cmd = shlex.split('./' + prog_path) + forward_args logger.debug('starting program: %s', cmd) child = subprocess.Popen( cmd, cwd = builddir, env = start_env ) child.wait() if child.returncode: return "process exited with status %s" % child.returncode child = None except OSError as e: import errno if e.errno == errno.ENOEXEC: return ("the program %s cannot be run (perhaps your target "+ "needs to define a 'start' script to start it on its " "intended execution target?)") % prog_path finally: if child is not None: _tryTerminate(child)
[ "Launch", "the", "specified", "program", ".", "Uses", "the", "start", "script", "if", "specified", "by", "the", "target", "attempts", "to", "run", "it", "natively", "if", "that", "script", "is", "not", "defined", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L609-L645
[ "def", "start", "(", "self", ",", "builddir", ",", "program", ",", "forward_args", ")", ":", "child", "=", "None", "try", ":", "prog_path", "=", "self", ".", "findProgram", "(", "builddir", ",", "program", ")", "if", "prog_path", "is", "None", ":", "return", "start_env", ",", "start_vars", "=", "self", ".", "buildProgEnvAndVars", "(", "prog_path", ",", "builddir", ")", "if", "self", ".", "getScript", "(", "'start'", ")", ":", "cmd", "=", "[", "os", ".", "path", ".", "expandvars", "(", "string", ".", "Template", "(", "x", ")", ".", "safe_substitute", "(", "*", "*", "start_vars", ")", ")", "for", "x", "in", "self", ".", "getScript", "(", "'start'", ")", "]", "+", "forward_args", "else", ":", "cmd", "=", "shlex", ".", "split", "(", "'./'", "+", "prog_path", ")", "+", "forward_args", "logger", ".", "debug", "(", "'starting program: %s'", ",", "cmd", ")", "child", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "cwd", "=", "builddir", ",", "env", "=", "start_env", ")", "child", ".", "wait", "(", ")", "if", "child", ".", "returncode", ":", "return", "\"process exited with status %s\"", "%", "child", ".", "returncode", "child", "=", "None", "except", "OSError", "as", "e", ":", "import", "errno", "if", "e", ".", "errno", "==", "errno", ".", "ENOEXEC", ":", "return", "(", "\"the program %s cannot be run (perhaps your target \"", "+", "\"needs to define a 'start' script to start it on its \"", "\"intended execution target?)\"", ")", "%", "prog_path", "finally", ":", "if", "child", "is", "not", "None", ":", "_tryTerminate", "(", "child", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
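The interesting step in the start record above is how a target's 'start' script is expanded: each element goes through string.Template.safe_substitute (so $program is replaced) and then os.path.expandvars. A runnable demonstration with a made-up script and program path:

import os
import string

start_script = ['test-wrapper', '--timeout', '60', '$program']   # hypothetical script
start_vars   = {'program': 'build/some-target/test/my-test'}

cmd = [os.path.expandvars(string.Template(x).safe_substitute(**start_vars))
       for x in start_script]
print(cmd)   # ['test-wrapper', '--timeout', '60', 'build/some-target/test/my-test']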
valid
DerivedTarget.debug
Launch a debugger for the specified program. Uses the `debug` script if specified by the target, falls back to the `debug` and `debugServer` commands if not. `program` is inserted into the $program variable in commands.
yotta/lib/target.py
def debug(self, builddir, program): ''' Launch a debugger for the specified program. Uses the `debug` script if specified by the target, falls back to the `debug` and `debugServer` commands if not. `program` is inserted into the $program variable in commands. ''' try: signal.signal(signal.SIGINT, _ignoreSignal); if self.getScript('debug') is not None: return self._debugWithScript(builddir, program) elif 'debug' in self.description: logger.warning( 'target %s provides deprecated debug property. It should '+ 'provide script.debug instead.', self.getName() ) return self._debugDeprecated(builddir, program) else: return "Target %s does not specify debug commands" % self finally: # clear the sigint handler signal.signal(signal.SIGINT, signal.SIG_DFL);
def debug(self, builddir, program): ''' Launch a debugger for the specified program. Uses the `debug` script if specified by the target, falls back to the `debug` and `debugServer` commands if not. `program` is inserted into the $program variable in commands. ''' try: signal.signal(signal.SIGINT, _ignoreSignal); if self.getScript('debug') is not None: return self._debugWithScript(builddir, program) elif 'debug' in self.description: logger.warning( 'target %s provides deprecated debug property. It should '+ 'provide script.debug instead.', self.getName() ) return self._debugDeprecated(builddir, program) else: return "Target %s does not specify debug commands" % self finally: # clear the sigint handler signal.signal(signal.SIGINT, signal.SIG_DFL);
[ "Launch", "a", "debugger", "for", "the", "specified", "program", ".", "Uses", "the", "debug", "script", "if", "specified", "by", "the", "target", "falls", "back", "to", "the", "debug", "and", "debugServer", "commands", "if", "not", ".", "program", "is", "inserted", "into", "the", "$program", "variable", "in", "commands", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/target.py#L647-L668
[ "def", "debug", "(", "self", ",", "builddir", ",", "program", ")", ":", "try", ":", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "_ignoreSignal", ")", "if", "self", ".", "getScript", "(", "'debug'", ")", "is", "not", "None", ":", "return", "self", ".", "_debugWithScript", "(", "builddir", ",", "program", ")", "elif", "'debug'", "in", "self", ".", "description", ":", "logger", ".", "warning", "(", "'target %s provides deprecated debug property. It should '", "+", "'provide script.debug instead.'", ",", "self", ".", "getName", "(", ")", ")", "return", "self", ".", "_debugDeprecated", "(", "builddir", ",", "program", ")", "else", ":", "return", "\"Target %s does not specify debug commands\"", "%", "self", "finally", ":", "# clear the sigint handler", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "signal", ".", "SIG_DFL", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
which
look for "program" in PATH (respecting PATHEXT), and return the path to it, or None if it was not found
yotta/lib/fsutils_win.py
def which(program): ''' look for "program" in PATH (respecting PATHEXT), and return the path to it, or None if it was not found ''' # current directory / absolute paths: if os.path.exists(program) and os.access(program, os.X_OK): return program # PATH: for path in os.environ['PATH'].split(os.pathsep): # path variables may be quoted: path = path.strip('"') for ext in os.environ.get('PATHEXT', '').split(os.pathsep): progpath = os.path.join(path, program + ext) if os.path.exists(progpath) and os.access(progpath, os.X_OK): return progpath # not found return None
def which(program): ''' look for "program" in PATH (respecting PATHEXT), and return the path to it, or None if it was not found ''' # current directory / absolute paths: if os.path.exists(program) and os.access(program, os.X_OK): return program # PATH: for path in os.environ['PATH'].split(os.pathsep): # path variables may be quoted: path = path.strip('"') for ext in os.environ.get('PATHEXT', '').split(os.pathsep): progpath = os.path.join(path, program + ext) if os.path.exists(progpath) and os.access(progpath, os.X_OK): return progpath # not found return None
[ "look", "for", "program", "in", "PATH", "(", "respecting", "PATHEXT", ")", "and", "return", "the", "path", "to", "it", "or", "None", "if", "it", "was", "not", "found" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/fsutils_win.py#L57-L73
[ "def", "which", "(", "program", ")", ":", "# current directory / absolute paths:", "if", "os", ".", "path", ".", "exists", "(", "program", ")", "and", "os", ".", "access", "(", "program", ",", "os", ".", "X_OK", ")", ":", "return", "program", "# PATH:", "for", "path", "in", "os", ".", "environ", "[", "'PATH'", "]", ".", "split", "(", "os", ".", "pathsep", ")", ":", "# path variables may be quoted:", "path", "=", "path", ".", "strip", "(", "'\"'", ")", "for", "ext", "in", "os", ".", "environ", ".", "get", "(", "'PATHEXT'", ",", "''", ")", ".", "split", "(", "os", ".", "pathsep", ")", ":", "progpath", "=", "os", ".", "path", ".", "join", "(", "path", ",", "program", "+", "ext", ")", "if", "os", ".", "path", ".", "exists", "(", "progpath", ")", "and", "os", ".", "access", "(", "progpath", ",", "os", ".", "X_OK", ")", ":", "return", "progpath", "# not found", "return", "None" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
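Usage of the which() record above is straightforward; the portable sketch below shows the same PATH walk without the Windows-specific PATHEXT loop (so on Windows the program name would need to include its extension).

import os

def simple_which(program):
    if os.path.exists(program) and os.access(program, os.X_OK):
        return program
    for path in os.environ.get('PATH', '').split(os.pathsep):
        candidate = os.path.join(path.strip('"'), program)
        if os.path.exists(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None

print(simple_which('python3') or simple_which('python'))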
valid
pruneCache
Prune the cache
yotta/lib/access_common.py
def pruneCache(): ''' Prune the cache ''' cache_dir = folders.cacheDirectory() def fullpath(f): return os.path.join(cache_dir, f) def getMTimeSafe(f): # it's possible that another process removed the file before we stat # it, handle this gracefully try: return os.stat(f).st_mtime except FileNotFoundError: import time return time.clock() # ensure cache exists fsutils.mkDirP(cache_dir) max_cached_modules = getMaxCachedModules() for f in sorted( [f for f in os.listdir(cache_dir) if os.path.isfile(fullpath(f)) and not f.endswith('.json') and not f.endswith('.locked') ], key = lambda f: getMTimeSafe(fullpath(f)), reverse = True )[max_cached_modules:]: cache_logger.debug('cleaning up cache file %s', f) removeFromCache(f) cache_logger.debug('cache pruned to %s items', max_cached_modules)
def pruneCache(): ''' Prune the cache ''' cache_dir = folders.cacheDirectory() def fullpath(f): return os.path.join(cache_dir, f) def getMTimeSafe(f): # it's possible that another process removed the file before we stat # it, handle this gracefully try: return os.stat(f).st_mtime except FileNotFoundError: import time return time.clock() # ensure cache exists fsutils.mkDirP(cache_dir) max_cached_modules = getMaxCachedModules() for f in sorted( [f for f in os.listdir(cache_dir) if os.path.isfile(fullpath(f)) and not f.endswith('.json') and not f.endswith('.locked') ], key = lambda f: getMTimeSafe(fullpath(f)), reverse = True )[max_cached_modules:]: cache_logger.debug('cleaning up cache file %s', f) removeFromCache(f) cache_logger.debug('cache pruned to %s items', max_cached_modules)
[ "Prune", "the", "cache" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access_common.py#L112-L137
[ "def", "pruneCache", "(", ")", ":", "cache_dir", "=", "folders", ".", "cacheDirectory", "(", ")", "def", "fullpath", "(", "f", ")", ":", "return", "os", ".", "path", ".", "join", "(", "cache_dir", ",", "f", ")", "def", "getMTimeSafe", "(", "f", ")", ":", "# it's possible that another process removed the file before we stat", "# it, handle this gracefully", "try", ":", "return", "os", ".", "stat", "(", "f", ")", ".", "st_mtime", "except", "FileNotFoundError", ":", "import", "time", "return", "time", ".", "clock", "(", ")", "# ensure cache exists", "fsutils", ".", "mkDirP", "(", "cache_dir", ")", "max_cached_modules", "=", "getMaxCachedModules", "(", ")", "for", "f", "in", "sorted", "(", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "cache_dir", ")", "if", "os", ".", "path", ".", "isfile", "(", "fullpath", "(", "f", ")", ")", "and", "not", "f", ".", "endswith", "(", "'.json'", ")", "and", "not", "f", ".", "endswith", "(", "'.locked'", ")", "]", ",", "key", "=", "lambda", "f", ":", "getMTimeSafe", "(", "fullpath", "(", "f", ")", ")", ",", "reverse", "=", "True", ")", "[", "max_cached_modules", ":", "]", ":", "cache_logger", ".", "debug", "(", "'cleaning up cache file %s'", ",", "f", ")", "removeFromCache", "(", "f", ")", "cache_logger", ".", "debug", "(", "'cache pruned to %s items'", ",", "max_cached_modules", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
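The core rule in the pruneCache record above is "keep the N most recently modified entries, delete the rest". A self-contained sketch using a throwaway directory (the real code also skips .json metadata and .locked files, and tolerates files disappearing underneath it):

import os
import tempfile
import time

cache_dir = tempfile.mkdtemp()
for i in range(5):
    open(os.path.join(cache_dir, 'entry%d' % i), 'w').close()
    time.sleep(0.01)                 # nudge the modification times apart

keep = 2
entries = sorted(os.listdir(cache_dir),
                 key=lambda f: os.stat(os.path.join(cache_dir, f)).st_mtime,
                 reverse=True)
for stale in entries[keep:]:
    os.remove(os.path.join(cache_dir, stale))
print(sorted(os.listdir(cache_dir)))  # only the newest entries remain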
valid
sometimesPruneCache
return decorator to prune cache after calling fn with a probability of p
yotta/lib/access_common.py
def sometimesPruneCache(p): ''' return decorator to prune cache after calling fn with a probability of p''' def decorator(fn): @functools.wraps(fn) def wrapped(*args, **kwargs): r = fn(*args, **kwargs) if random.random() < p: pruneCache() return r return wrapped return decorator
def sometimesPruneCache(p): ''' return decorator to prune cache after calling fn with a probability of p''' def decorator(fn): @functools.wraps(fn) def wrapped(*args, **kwargs): r = fn(*args, **kwargs) if random.random() < p: pruneCache() return r return wrapped return decorator
[ "return", "decorator", "to", "prune", "cache", "after", "calling", "fn", "with", "a", "probability", "of", "p" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access_common.py#L139-L149
[ "def", "sometimesPruneCache", "(", "p", ")", ":", "def", "decorator", "(", "fn", ")", ":", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "r", "=", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "random", ".", "random", "(", ")", "<", "p", ":", "pruneCache", "(", ")", "return", "r", "return", "wrapped", "return", "decorator" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
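The sometimesPruneCache record above is a small decorator-factory pattern: run the wrapped function, then with probability p run a housekeeping step. A runnable illustration, with a print stub standing in for pruneCache():

import functools
import random

def prune_stub():
    print('pruning cache...')

def sometimes_prune(p):
    def decorator(fn):
        @functools.wraps(fn)
        def wrapped(*args, **kwargs):
            result = fn(*args, **kwargs)
            if random.random() < p:
                prune_stub()
            return result
        return wrapped
    return decorator

@sometimes_prune(0.5)
def fetch(name):
    return 'downloaded %s' % name

for _ in range(4):
    print(fetch('some-module'))      # pruning runs after roughly half the calls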
valid
unpackFromCache
If the specified cache key exists, unpack the tarball into the specified directory, otherwise raise NotInCache (a KeyError subclass).
yotta/lib/access_common.py
def unpackFromCache(cache_key, to_directory): ''' If the specified cache key exists, unpack the tarball into the specified directory, otherwise raise NotInCache (a KeyError subclass). ''' if cache_key is None: raise NotInCache('"None" is never in cache') cache_key = _encodeCacheKey(cache_key) cache_dir = folders.cacheDirectory() fsutils.mkDirP(cache_dir) path = os.path.join(cache_dir, cache_key) logger.debug('attempt to unpack from cache %s -> %s', path, to_directory) try: unpackFrom(path, to_directory) try: shutil.copy(path + '.json', os.path.join(to_directory, '.yotta_origin.json')) except IOError as e: if e.errno == errno.ENOENT: pass else: raise cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory) return except IOError as e: if e.errno == errno.ENOENT: cache_logger.debug('%s not in cache', cache_key) raise NotInCache('not in cache') except OSError as e: if e.errno == errno.ENOTEMPTY: logger.error('directory %s was not empty: probably simultaneous invocation of yotta! It is likely that downloaded sources are corrupted.') else: raise
def unpackFromCache(cache_key, to_directory): ''' If the specified cache key exists, unpack the tarball into the specified directory, otherwise raise NotInCache (a KeyError subclass). ''' if cache_key is None: raise NotInCache('"None" is never in cache') cache_key = _encodeCacheKey(cache_key) cache_dir = folders.cacheDirectory() fsutils.mkDirP(cache_dir) path = os.path.join(cache_dir, cache_key) logger.debug('attempt to unpack from cache %s -> %s', path, to_directory) try: unpackFrom(path, to_directory) try: shutil.copy(path + '.json', os.path.join(to_directory, '.yotta_origin.json')) except IOError as e: if e.errno == errno.ENOENT: pass else: raise cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory) return except IOError as e: if e.errno == errno.ENOENT: cache_logger.debug('%s not in cache', cache_key) raise NotInCache('not in cache') except OSError as e: if e.errno == errno.ENOTEMPTY: logger.error('directory %s was not empty: probably simultaneous invocation of yotta! It is likely that downloaded sources are corrupted.') else: raise
[ "If", "the", "specified", "cache", "key", "exists", "unpack", "the", "tarball", "into", "the", "specified", "directory", "otherwise", "raise", "NotInCache", "(", "a", "KeyError", "subclass", ")", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access_common.py#L208-L240
[ "def", "unpackFromCache", "(", "cache_key", ",", "to_directory", ")", ":", "if", "cache_key", "is", "None", ":", "raise", "NotInCache", "(", "'\"None\" is never in cache'", ")", "cache_key", "=", "_encodeCacheKey", "(", "cache_key", ")", "cache_dir", "=", "folders", ".", "cacheDirectory", "(", ")", "fsutils", ".", "mkDirP", "(", "cache_dir", ")", "path", "=", "os", ".", "path", ".", "join", "(", "cache_dir", ",", "cache_key", ")", "logger", ".", "debug", "(", "'attempt to unpack from cache %s -> %s'", ",", "path", ",", "to_directory", ")", "try", ":", "unpackFrom", "(", "path", ",", "to_directory", ")", "try", ":", "shutil", ".", "copy", "(", "path", "+", "'.json'", ",", "os", ".", "path", ".", "join", "(", "to_directory", ",", "'.yotta_origin.json'", ")", ")", "except", "IOError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "pass", "else", ":", "raise", "cache_logger", ".", "debug", "(", "'unpacked %s from cache into %s'", ",", "cache_key", ",", "to_directory", ")", "return", "except", "IOError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "cache_logger", ".", "debug", "(", "'%s not in cache'", ",", "cache_key", ")", "raise", "NotInCache", "(", "'not in cache'", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOTEMPTY", ":", "logger", ".", "error", "(", "'directory %s was not empty: probably simultaneous invocation of yotta! It is likely that downloaded sources are corrupted.'", ")", "else", ":", "raise" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
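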
valid
_downloadToCache
Download the specified stream to a temporary cache directory, and returns a cache key that can be used to access/remove the file. You should use either removeFromCache(cache_key) or _moveCachedFile to move the downloaded file to a known key after downloading.
yotta/lib/access_common.py
def _downloadToCache(stream, hashinfo={}, origin_info=dict()): ''' Download the specified stream to a temporary cache directory, and returns a cache key that can be used to access/remove the file. You should use either removeFromCache(cache_key) or _moveCachedFile to move the downloaded file to a known key after downloading. ''' hash_name = None hash_value = None m = None if len(hashinfo): # check for hashes in preferred order. Currently this is just sha256 # (which the registry uses). Initial investigations suggest that github # doesn't return a header with the hash of the file being downloaded. for h in ('sha256',): if h in hashinfo: hash_name = h hash_value = hashinfo[h] m = getattr(hashlib, h)() break if not hash_name: logger.warning('could not find supported hash type in %s', hashinfo) cache_dir = folders.cacheDirectory() fsutils.mkDirP(cache_dir) file_size = 0 (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir, suffix='.locked') with os.fdopen(download_file, 'wb') as f: f.seek(0) for chunk in stream.iter_content(4096): f.write(chunk) if hash_name: m.update(chunk) if hash_name: calculated_hash = m.hexdigest() logger.debug( 'calculated %s hash: %s check against: %s' % ( hash_name, calculated_hash, hash_value ) ) if hash_value and (hash_value != calculated_hash): raise Exception('Hash verification failed.') file_size = f.tell() logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname) f.truncate() extended_origin_info = { 'hash': hashinfo, 'size': file_size } extended_origin_info.update(origin_info) ordered_json.dump(download_fname + '.json', extended_origin_info) return os.path.basename(download_fname)
def _downloadToCache(stream, hashinfo={}, origin_info=dict()): ''' Download the specified stream to a temporary cache directory, and returns a cache key that can be used to access/remove the file. You should use either removeFromCache(cache_key) or _moveCachedFile to move the downloaded file to a known key after downloading. ''' hash_name = None hash_value = None m = None if len(hashinfo): # check for hashes in preferred order. Currently this is just sha256 # (which the registry uses). Initial investigations suggest that github # doesn't return a header with the hash of the file being downloaded. for h in ('sha256',): if h in hashinfo: hash_name = h hash_value = hashinfo[h] m = getattr(hashlib, h)() break if not hash_name: logger.warning('could not find supported hash type in %s', hashinfo) cache_dir = folders.cacheDirectory() fsutils.mkDirP(cache_dir) file_size = 0 (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir, suffix='.locked') with os.fdopen(download_file, 'wb') as f: f.seek(0) for chunk in stream.iter_content(4096): f.write(chunk) if hash_name: m.update(chunk) if hash_name: calculated_hash = m.hexdigest() logger.debug( 'calculated %s hash: %s check against: %s' % ( hash_name, calculated_hash, hash_value ) ) if hash_value and (hash_value != calculated_hash): raise Exception('Hash verification failed.') file_size = f.tell() logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname) f.truncate() extended_origin_info = { 'hash': hashinfo, 'size': file_size } extended_origin_info.update(origin_info) ordered_json.dump(download_fname + '.json', extended_origin_info) return os.path.basename(download_fname)
[ "Download", "the", "specified", "stream", "to", "a", "temporary", "cache", "directory", "and", "returns", "a", "cache", "key", "that", "can", "be", "used", "to", "access", "/", "remove", "the", "file", ".", "You", "should", "use", "either", "removeFromCache", "(", "cache_key", ")", "or", "_moveCachedFile", "to", "move", "the", "downloaded", "file", "to", "a", "known", "key", "after", "downloading", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access_common.py#L242-L297
[ "def", "_downloadToCache", "(", "stream", ",", "hashinfo", "=", "{", "}", ",", "origin_info", "=", "dict", "(", ")", ")", ":", "hash_name", "=", "None", "hash_value", "=", "None", "m", "=", "None", "if", "len", "(", "hashinfo", ")", ":", "# check for hashes in preferred order. Currently this is just sha256", "# (which the registry uses). Initial investigations suggest that github", "# doesn't return a header with the hash of the file being downloaded.", "for", "h", "in", "(", "'sha256'", ",", ")", ":", "if", "h", "in", "hashinfo", ":", "hash_name", "=", "h", "hash_value", "=", "hashinfo", "[", "h", "]", "m", "=", "getattr", "(", "hashlib", ",", "h", ")", "(", ")", "break", "if", "not", "hash_name", ":", "logger", ".", "warning", "(", "'could not find supported hash type in %s'", ",", "hashinfo", ")", "cache_dir", "=", "folders", ".", "cacheDirectory", "(", ")", "fsutils", ".", "mkDirP", "(", "cache_dir", ")", "file_size", "=", "0", "(", "download_file", ",", "download_fname", ")", "=", "tempfile", ".", "mkstemp", "(", "dir", "=", "cache_dir", ",", "suffix", "=", "'.locked'", ")", "with", "os", ".", "fdopen", "(", "download_file", ",", "'wb'", ")", "as", "f", ":", "f", ".", "seek", "(", "0", ")", "for", "chunk", "in", "stream", ".", "iter_content", "(", "4096", ")", ":", "f", ".", "write", "(", "chunk", ")", "if", "hash_name", ":", "m", ".", "update", "(", "chunk", ")", "if", "hash_name", ":", "calculated_hash", "=", "m", ".", "hexdigest", "(", ")", "logger", ".", "debug", "(", "'calculated %s hash: %s check against: %s'", "%", "(", "hash_name", ",", "calculated_hash", ",", "hash_value", ")", ")", "if", "hash_value", "and", "(", "hash_value", "!=", "calculated_hash", ")", ":", "raise", "Exception", "(", "'Hash verification failed.'", ")", "file_size", "=", "f", ".", "tell", "(", ")", "logger", ".", "debug", "(", "'wrote tarfile of size: %s to %s'", ",", "file_size", ",", "download_fname", ")", "f", ".", "truncate", "(", ")", "extended_origin_info", "=", "{", "'hash'", ":", "hashinfo", ",", "'size'", ":", "file_size", "}", "extended_origin_info", ".", "update", "(", "origin_info", ")", "ordered_json", ".", "dump", "(", "download_fname", "+", "'.json'", ",", "extended_origin_info", ")", "return", "os", ".", "path", ".", "basename", "(", "download_fname", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
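The verification step in the _downloadToCache record above can be shown in isolation: stream chunks to a temporary file while feeding a sha256 hash, then compare against the expected digest. In this sketch io.BytesIO stands in for the HTTP response stream, and the expected hash is computed locally rather than supplied by the registry.

import hashlib
import io
import os
import tempfile

payload  = b'pretend this is a module tarball'
expected = hashlib.sha256(payload).hexdigest()
stream   = io.BytesIO(payload)

m = hashlib.sha256()
fd, fname = tempfile.mkstemp(suffix='.locked')
with os.fdopen(fd, 'wb') as f:
    for chunk in iter(lambda: stream.read(4096), b''):
        f.write(chunk)
        m.update(chunk)

if m.hexdigest() != expected:
    raise Exception('Hash verification failed.')
print('wrote %d bytes to %s' % (os.path.getsize(fname), fname))
os.unlink(fname)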
valid
_moveCachedFile
Move a file atomically within the cache: used to make cached files available at known keys, so they can be used by other processes.
yotta/lib/access_common.py
def _moveCachedFile(from_key, to_key): ''' Move a file atomically within the cache: used to make cached files available at known keys, so they can be used by other processes. ''' cache_dir = folders.cacheDirectory() from_path = os.path.join(cache_dir, from_key) to_path = os.path.join(cache_dir, to_key) try: os.rename(from_path, to_path) # if moving the actual file was successful, then try to move the # metadata: os.rename(from_path+'.json', to_path+'.json') except Exception as e: # if the source doesn't exist, or the destination doesn't exist, remove # the file instead. # windows error 183 == file already exists # (be careful not to use WindowsError on non-windows platforms as it # isn't defined) if (isinstance(e, OSError) and e.errno == errno.ENOENT) or \ (isinstance(e, getattr(__builtins__, "WindowsError", type(None))) and e.errno == 183): fsutils.rmF(from_path) else: raise
def _moveCachedFile(from_key, to_key): ''' Move a file atomically within the cache: used to make cached files available at known keys, so they can be used by other processes. ''' cache_dir = folders.cacheDirectory() from_path = os.path.join(cache_dir, from_key) to_path = os.path.join(cache_dir, to_key) try: os.rename(from_path, to_path) # if moving the actual file was successful, then try to move the # metadata: os.rename(from_path+'.json', to_path+'.json') except Exception as e: # if the source doesn't exist, or the destination doesn't exist, remove # the file instead. # windows error 183 == file already exists # (be careful not to use WindowsError on non-windows platforms as it # isn't defined) if (isinstance(e, OSError) and e.errno == errno.ENOENT) or \ (isinstance(e, getattr(__builtins__, "WindowsError", type(None))) and e.errno == 183): fsutils.rmF(from_path) else: raise
[ "Move", "a", "file", "atomically", "within", "the", "cache", ":", "used", "to", "make", "cached", "files", "available", "at", "known", "keys", "so", "they", "can", "be", "used", "by", "other", "processes", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access_common.py#L299-L321
[ "def", "_moveCachedFile", "(", "from_key", ",", "to_key", ")", ":", "cache_dir", "=", "folders", ".", "cacheDirectory", "(", ")", "from_path", "=", "os", ".", "path", ".", "join", "(", "cache_dir", ",", "from_key", ")", "to_path", "=", "os", ".", "path", ".", "join", "(", "cache_dir", ",", "to_key", ")", "try", ":", "os", ".", "rename", "(", "from_path", ",", "to_path", ")", "# if moving the actual file was successful, then try to move the", "# metadata:", "os", ".", "rename", "(", "from_path", "+", "'.json'", ",", "to_path", "+", "'.json'", ")", "except", "Exception", "as", "e", ":", "# if the source doesn't exist, or the destination doesn't exist, remove", "# the file instead.", "# windows error 183 == file already exists", "# (be careful not to use WindowsError on non-windows platforms as it", "# isn't defined)", "if", "(", "isinstance", "(", "e", ",", "OSError", ")", "and", "e", ".", "errno", "==", "errno", ".", "ENOENT", ")", "or", "(", "isinstance", "(", "e", ",", "getattr", "(", "__builtins__", ",", "\"WindowsError\"", ",", "type", "(", "None", ")", ")", ")", "and", "e", ".", "errno", "==", "183", ")", ":", "fsutils", ".", "rmF", "(", "from_path", ")", "else", ":", "raise" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
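A small demonstration of the publish-by-rename idea in the _moveCachedFile record above: the finished download is moved to its final cache key with os.rename (atomic within one filesystem), and a missing source, meaning another process already moved or removed it, is treated as non-fatal. Paths live in a throwaway temporary directory and the key names are placeholders.

import errno
import os
import tempfile

cache_dir = tempfile.mkdtemp()
src = os.path.join(cache_dir, 'tmp1234.locked')       # placeholder temporary name
dst = os.path.join(cache_dir, 'some-known-cache-key')

open(src, 'wb').close()                               # stand-in for a finished download
try:
    os.rename(src, dst)
except OSError as e:
    if e.errno != errno.ENOENT:
        raise                                         # ENOENT: another process won the race
print(os.listdir(cache_dir))                          # ['some-known-cache-key']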
valid
unpackTarballStream
Unpack a response stream that contains a tarball into a directory. If a hash is provided, then it will be used as a cache key (for future requests you can try to retrieve the key value from the cache first, before making the request)
yotta/lib/access_common.py
def unpackTarballStream(stream, into_directory, hash={}, cache_key=None, origin_info=dict()): ''' Unpack a responses stream that contains a tarball into a directory. If a hash is provided, then it will be used as a cache key (for future requests you can try to retrieve the key value from the cache first, before making the request) ''' cache_key = _encodeCacheKey(cache_key) # if the cache is disabled, then use a random cache key even if one was # provided, so that the module is not persisted in the cache and its # temporary download location is a random key: if getMaxCachedModules() == 0: cache_key = None new_cache_key = _downloadToCache(stream, hash, origin_info) unpackFromCache(new_cache_key, into_directory) if cache_key is None: # if we didn't provide a cache key, there's no point in storing the cache removeFromCache(new_cache_key) else: # otherwise make this file available at the known cache key _moveCachedFile(new_cache_key, cache_key)
def unpackTarballStream(stream, into_directory, hash={}, cache_key=None, origin_info=dict()): ''' Unpack a responses stream that contains a tarball into a directory. If a hash is provided, then it will be used as a cache key (for future requests you can try to retrieve the key value from the cache first, before making the request) ''' cache_key = _encodeCacheKey(cache_key) # if the cache is disabled, then use a random cache key even if one was # provided, so that the module is not persisted in the cache and its # temporary download location is a random key: if getMaxCachedModules() == 0: cache_key = None new_cache_key = _downloadToCache(stream, hash, origin_info) unpackFromCache(new_cache_key, into_directory) if cache_key is None: # if we didn't provide a cache key, there's no point in storing the cache removeFromCache(new_cache_key) else: # otherwise make this file available at the known cache key _moveCachedFile(new_cache_key, cache_key)
[ "Unpack", "a", "responses", "stream", "that", "contains", "a", "tarball", "into", "a", "directory", ".", "If", "a", "hash", "is", "provided", "then", "it", "will", "be", "used", "as", "a", "cache", "key", "(", "for", "future", "requests", "you", "can", "try", "to", "retrieve", "the", "key", "value", "from", "the", "cache", "first", "before", "making", "the", "request", ")" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/access_common.py#L324-L346
[ "def", "unpackTarballStream", "(", "stream", ",", "into_directory", ",", "hash", "=", "{", "}", ",", "cache_key", "=", "None", ",", "origin_info", "=", "dict", "(", ")", ")", ":", "cache_key", "=", "_encodeCacheKey", "(", "cache_key", ")", "# if the cache is disabled, then use a random cache key even if one was", "# provided, so that the module is not persisted in the cache and its", "# temporary download location is a random key:", "if", "getMaxCachedModules", "(", ")", "==", "0", ":", "cache_key", "=", "None", "new_cache_key", "=", "_downloadToCache", "(", "stream", ",", "hash", ",", "origin_info", ")", "unpackFromCache", "(", "new_cache_key", ",", "into_directory", ")", "if", "cache_key", "is", "None", ":", "# if we didn't provide a cache key, there's no point in storing the cache", "removeFromCache", "(", "new_cache_key", ")", "else", ":", "# otherwise make this file available at the known cache key", "_moveCachedFile", "(", "new_cache_key", ",", "cache_key", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
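A hedged usage sketch for unpackTarballStream, assuming (this is an assumption, not stated in the record) that the stream argument is a requests response opened with stream=True; the URL and directory below are placeholders:

    import requests
    from yotta.lib import access_common

    response = requests.get('https://example.com/somemodule-1.0.0.tar.gz', stream=True)
    access_common.unpackTarballStream(
        response,
        into_directory='./yotta_modules/somemodule',
        hash={},          # no integrity hash in this sketch
        cache_key=None    # None: do not keep the download under a known cache key
    )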
parseSourceURL
Parse the specified version source URL (or version spec), and return an instance of VersionSource
yotta/lib/sourceparse.py
def parseSourceURL(source_url): ''' Parse the specified version source URL (or version spec), and return an instance of VersionSource ''' name, spec = _getNonRegistryRef(source_url) if spec: return spec try: url_is_spec = version.Spec(source_url) except ValueError: url_is_spec = None if url_is_spec is not None: # if the url is an unadorned version specification (including an empty # string) then the source is the module registry: return VersionSource('registry', '', source_url) raise InvalidVersionSpec("Invalid version specification: \"%s\"" % (source_url))
def parseSourceURL(source_url): ''' Parse the specified version source URL (or version spec), and return an instance of VersionSource ''' name, spec = _getNonRegistryRef(source_url) if spec: return spec try: url_is_spec = version.Spec(source_url) except ValueError: url_is_spec = None if url_is_spec is not None: # if the url is an unadorned version specification (including an empty # string) then the source is the module registry: return VersionSource('registry', '', source_url) raise InvalidVersionSpec("Invalid version specification: \"%s\"" % (source_url))
[ "Parse", "the", "specified", "version", "source", "URL", "(", "or", "version", "spec", ")", "and", "return", "an", "instance", "of", "VersionSource" ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/sourceparse.py#L96-L114
[ "def", "parseSourceURL", "(", "source_url", ")", ":", "name", ",", "spec", "=", "_getNonRegistryRef", "(", "source_url", ")", "if", "spec", ":", "return", "spec", "try", ":", "url_is_spec", "=", "version", ".", "Spec", "(", "source_url", ")", "except", "ValueError", ":", "url_is_spec", "=", "None", "if", "url_is_spec", "is", "not", "None", ":", "# if the url is an unadorned version specification (including an empty", "# string) then the source is the module registry:", "return", "VersionSource", "(", "'registry'", ",", "''", ",", "source_url", ")", "raise", "InvalidVersionSpec", "(", "\"Invalid version specification: \\\"%s\\\"\"", "%", "(", "source_url", ")", ")" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
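Illustrative calls, assuming yotta is importable; the behaviour follows directly from the code in the record:

    from yotta.lib import sourceparse

    vs = sourceparse.parseSourceURL('^0.1.2')   # a bare semver spec resolves to the registry
    vs = sourceparse.parseSourceURL('')         # an empty spec also means "registry, any version"
    try:
        sourceparse.parseSourceURL('not a version spec !')
    except sourceparse.InvalidVersionSpec as e:
        print(e)                                # invalid specs raise InvalidVersionSpec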
parseTargetNameAndSpec
Parse targetname[@versionspec] and return a tuple (target_name_string, version_spec_string). targetname[,versionspec] is also supported (this is how target names and specifications are stored internally, and was the documented way of setting the spec on the commandline) Also accepts raw github version specs (Owner/reponame#whatever), as the name can be deduced from these. Note that the specification split from the name is not validated. If there is no specification (just a target name) passed in, then '*' will be returned as the specification.
yotta/lib/sourceparse.py
def parseTargetNameAndSpec(target_name_and_spec): ''' Parse targetname[@versionspec] and return a tuple (target_name_string, version_spec_string). targetname[,versionspec] is also supported (this is how target names and specifications are stored internally, and was the documented way of setting the spec on the commandline) Also accepts raw github version specs (Owner/reponame#whatever), as the name can be deduced from these. Note that the specification split from the name is not validated. If there is no specification (just a target name) passed in, then '*' will be returned as the specification. ''' import re # fist check if this is a raw github specification that we can get the # target name from: name, spec = _getNonRegistryRef(target_name_and_spec) if name: return name, target_name_and_spec # next split at the first @ or , if any split_at = '@' if target_name_and_spec.find('@') > target_name_and_spec.find(',') and \ ',' in target_name_and_spec: split_at = ',' name = target_name_and_spec.split(split_at)[0] spec = target_name_and_spec[len(name)+1:] name = name.strip() # if there's no specification, return the explicit any-version # specification: if not spec: spec = '*' return name, spec
def parseTargetNameAndSpec(target_name_and_spec): ''' Parse targetname[@versionspec] and return a tuple (target_name_string, version_spec_string). targetname[,versionspec] is also supported (this is how target names and specifications are stored internally, and was the documented way of setting the spec on the commandline) Also accepts raw github version specs (Owner/reponame#whatever), as the name can be deduced from these. Note that the specification split from the name is not validated. If there is no specification (just a target name) passed in, then '*' will be returned as the specification. ''' import re # fist check if this is a raw github specification that we can get the # target name from: name, spec = _getNonRegistryRef(target_name_and_spec) if name: return name, target_name_and_spec # next split at the first @ or , if any split_at = '@' if target_name_and_spec.find('@') > target_name_and_spec.find(',') and \ ',' in target_name_and_spec: split_at = ',' name = target_name_and_spec.split(split_at)[0] spec = target_name_and_spec[len(name)+1:] name = name.strip() # if there's no specification, return the explicit any-version # specification: if not spec: spec = '*' return name, spec
[ "Parse", "targetname", "[", "@versionspec", "]", "and", "return", "a", "tuple", "(", "target_name_string", "version_spec_string", ")", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/sourceparse.py#L128-L165
[ "def", "parseTargetNameAndSpec", "(", "target_name_and_spec", ")", ":", "import", "re", "# fist check if this is a raw github specification that we can get the", "# target name from:", "name", ",", "spec", "=", "_getNonRegistryRef", "(", "target_name_and_spec", ")", "if", "name", ":", "return", "name", ",", "target_name_and_spec", "# next split at the first @ or , if any", "split_at", "=", "'@'", "if", "target_name_and_spec", ".", "find", "(", "'@'", ")", ">", "target_name_and_spec", ".", "find", "(", "','", ")", "and", "','", "in", "target_name_and_spec", ":", "split_at", "=", "','", "name", "=", "target_name_and_spec", ".", "split", "(", "split_at", ")", "[", "0", "]", "spec", "=", "target_name_and_spec", "[", "len", "(", "name", ")", "+", "1", ":", "]", "name", "=", "name", ".", "strip", "(", ")", "# if there's no specification, return the explicit any-version", "# specification:", "if", "not", "spec", ":", "spec", "=", "'*'", "return", "name", ",", "spec" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
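Two calls that follow directly from the record's code, assuming yotta is importable (the target name is a placeholder):

    from yotta.lib import sourceparse

    sourceparse.parseTargetNameAndSpec('frdm-k64f-gcc@^2.0.0')   # -> ('frdm-k64f-gcc', '^2.0.0')
    sourceparse.parseTargetNameAndSpec('frdm-k64f-gcc')          # -> ('frdm-k64f-gcc', '*')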
parseModuleNameAndSpec
Parse modulename[@versionspec] and return a tuple (module_name_string, version_spec_string). Also accepts raw github version specs (Owner/reponame#whatever), as the name can be deduced from these. Note that the specification split from the name is not validated. If there is no specification (just a module name) passed in, then '*' will be returned as the specification.
yotta/lib/sourceparse.py
def parseModuleNameAndSpec(module_name_and_spec): ''' Parse modulename[@versionspec] and return a tuple (module_name_string, version_spec_string). Also accepts raw github version specs (Owner/reponame#whatever), as the name can be deduced from these. Note that the specification split from the name is not validated. If there is no specification (just a module name) passed in, then '*' will be returned as the specification. ''' import re # fist check if this is a raw github specification that we can get the # module name from: name, spec = _getNonRegistryRef(module_name_and_spec) if name: return name, module_name_and_spec # next split at the first @, if any name = module_name_and_spec.split('@')[0] spec = module_name_and_spec[len(name)+1:] name = name.strip() # if there's no specification, return the explicit any-version # specification: if not spec: spec = '*' return name, spec
def parseModuleNameAndSpec(module_name_and_spec): ''' Parse modulename[@versionspec] and return a tuple (module_name_string, version_spec_string). Also accepts raw github version specs (Owner/reponame#whatever), as the name can be deduced from these. Note that the specification split from the name is not validated. If there is no specification (just a module name) passed in, then '*' will be returned as the specification. ''' import re # fist check if this is a raw github specification that we can get the # module name from: name, spec = _getNonRegistryRef(module_name_and_spec) if name: return name, module_name_and_spec # next split at the first @, if any name = module_name_and_spec.split('@')[0] spec = module_name_and_spec[len(name)+1:] name = name.strip() # if there's no specification, return the explicit any-version # specification: if not spec: spec = '*' return name, spec
[ "Parse", "modulename", "[", "@versionspec", "]", "and", "return", "a", "tuple", "(", "module_name_string", "version_spec_string", ")", "." ]
ARMmbed/yotta
python
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/sourceparse.py#L167-L196
[ "def", "parseModuleNameAndSpec", "(", "module_name_and_spec", ")", ":", "import", "re", "# fist check if this is a raw github specification that we can get the", "# module name from:", "name", ",", "spec", "=", "_getNonRegistryRef", "(", "module_name_and_spec", ")", "if", "name", ":", "return", "name", ",", "module_name_and_spec", "# next split at the first @, if any", "name", "=", "module_name_and_spec", ".", "split", "(", "'@'", ")", "[", "0", "]", "spec", "=", "module_name_and_spec", "[", "len", "(", "name", ")", "+", "1", ":", "]", "name", "=", "name", ".", "strip", "(", ")", "# if there's no specification, return the explicit any-version", "# specification:", "if", "not", "spec", ":", "spec", "=", "'*'", "return", "name", ",", "spec" ]
56bc1e56c602fa20307b23fe27518e9cd6c11af1
valid
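The module-name variant behaves the same way (the module name below is a placeholder):

    from yotta.lib import sourceparse

    sourceparse.parseModuleNameAndSpec('simplelog@^0.0.3')   # -> ('simplelog', '^0.0.3')
    sourceparse.parseModuleNameAndSpec('simplelog')          # -> ('simplelog', '*')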
gfit
Fit empirical Bayes prior in the hierarchical model [Efron2014]_. .. math:: mu ~ G, X ~ N(mu, sigma^2) Parameters ---------- X: ndarray A 1D array of observations. sigma: float Noise estimate on X. p: int Number of parameters used to fit G. Default: 5 nbin: int Number of bins used for discrete approximation. Default: 200 unif_fraction: float Fraction of G modeled as "slab". Default: 0.1 Returns ------- A tuple (xvals, g_eta): the discretised grid of values and the estimated prior density on that grid.
forestci/calibration.py
def gfit(X, sigma, p=5, nbin=200, unif_fraction=0.1): """ Fit empirical Bayes prior in the hierarchical model [Efron2014]_. .. math:: mu ~ G, X ~ N(mu, sigma^2) Parameters ---------- X: ndarray A 1D array of observations. sigma: float Noise estimate on X. p: int Number of parameters used to fit G. Default: 5 nbin: int Number of bins used for discrete approximation. Default: 200 unif_fraction: float Fraction of G modeled as "slab". Default: 0.1 Returns ------- An array of the posterior density estimate g. """ min_x = min(min(X) - 2 * np.std(X, ddof=1), 0) max_x = max(max(X) + 2 * np.std(X, ddof=1), np.std(X, ddof=1)) xvals = np.linspace(min_x, max_x, nbin) binw = (max_x - min_x) / (nbin - 1) zero_idx = max(np.where(xvals <= 0)[0]) noise_kernel = norm().pdf(xvals / sigma) * binw / sigma if zero_idx > 0: noise_rotate = noise_kernel[list(np.arange(zero_idx, len(xvals))) + list(np.arange(0, zero_idx))] else: noise_rotate = noise_kernel XX = np.zeros((p, len(xvals)), dtype=np.float) for ind, exp in enumerate(range(1, p+1)): mask = np.ones_like(xvals) mask[np.where(xvals <= 0)[0]] = 0 XX[ind, :] = pow(xvals, exp) * mask XX = XX.T def neg_loglik(eta): mask = np.ones_like(xvals) mask[np.where(xvals <= 0)[0]] = 0 g_eta_raw = np.exp(np.dot(XX, eta)) * mask if ((np.sum(g_eta_raw) == np.inf) | (np.sum(g_eta_raw) <= 100 * np.finfo(np.double).tiny)): return (1000 * (len(X) + sum(eta ** 2))) g_eta_main = g_eta_raw / sum(g_eta_raw) g_eta = ((1 - unif_fraction) * g_eta_main + unif_fraction * mask / sum(mask)) f_eta = fftconvolve(g_eta, noise_rotate, mode='same') return np.sum(np.interp(X, xvals, -np.log(np.maximum(f_eta, 0.0000001)))) eta_hat = minimize(neg_loglik, list(itertools.repeat(-1, p))).x g_eta_raw = np.exp(np.dot(XX, eta_hat)) * mask g_eta_main = g_eta_raw / sum(g_eta_raw) g_eta = ((1 - unif_fraction) * g_eta_main + unif_fraction * mask) / sum(mask) return xvals, g_eta
def gfit(X, sigma, p=5, nbin=200, unif_fraction=0.1): """ Fit empirical Bayes prior in the hierarchical model [Efron2014]_. .. math:: mu ~ G, X ~ N(mu, sigma^2) Parameters ---------- X: ndarray A 1D array of observations. sigma: float Noise estimate on X. p: int Number of parameters used to fit G. Default: 5 nbin: int Number of bins used for discrete approximation. Default: 200 unif_fraction: float Fraction of G modeled as "slab". Default: 0.1 Returns ------- An array of the posterior density estimate g. """ min_x = min(min(X) - 2 * np.std(X, ddof=1), 0) max_x = max(max(X) + 2 * np.std(X, ddof=1), np.std(X, ddof=1)) xvals = np.linspace(min_x, max_x, nbin) binw = (max_x - min_x) / (nbin - 1) zero_idx = max(np.where(xvals <= 0)[0]) noise_kernel = norm().pdf(xvals / sigma) * binw / sigma if zero_idx > 0: noise_rotate = noise_kernel[list(np.arange(zero_idx, len(xvals))) + list(np.arange(0, zero_idx))] else: noise_rotate = noise_kernel XX = np.zeros((p, len(xvals)), dtype=np.float) for ind, exp in enumerate(range(1, p+1)): mask = np.ones_like(xvals) mask[np.where(xvals <= 0)[0]] = 0 XX[ind, :] = pow(xvals, exp) * mask XX = XX.T def neg_loglik(eta): mask = np.ones_like(xvals) mask[np.where(xvals <= 0)[0]] = 0 g_eta_raw = np.exp(np.dot(XX, eta)) * mask if ((np.sum(g_eta_raw) == np.inf) | (np.sum(g_eta_raw) <= 100 * np.finfo(np.double).tiny)): return (1000 * (len(X) + sum(eta ** 2))) g_eta_main = g_eta_raw / sum(g_eta_raw) g_eta = ((1 - unif_fraction) * g_eta_main + unif_fraction * mask / sum(mask)) f_eta = fftconvolve(g_eta, noise_rotate, mode='same') return np.sum(np.interp(X, xvals, -np.log(np.maximum(f_eta, 0.0000001)))) eta_hat = minimize(neg_loglik, list(itertools.repeat(-1, p))).x g_eta_raw = np.exp(np.dot(XX, eta_hat)) * mask g_eta_main = g_eta_raw / sum(g_eta_raw) g_eta = ((1 - unif_fraction) * g_eta_main + unif_fraction * mask) / sum(mask) return xvals, g_eta
[ "Fit", "empirical", "Bayes", "prior", "in", "the", "hierarchical", "model", "[", "Efron2014", "]", "_", "." ]
scikit-learn-contrib/forest-confidence-interval
python
https://github.com/scikit-learn-contrib/forest-confidence-interval/blob/401c63a74a27d775eff0f72b6c20ffd568491fe0/forestci/calibration.py#L35-L106
[ "def", "gfit", "(", "X", ",", "sigma", ",", "p", "=", "5", ",", "nbin", "=", "200", ",", "unif_fraction", "=", "0.1", ")", ":", "min_x", "=", "min", "(", "min", "(", "X", ")", "-", "2", "*", "np", ".", "std", "(", "X", ",", "ddof", "=", "1", ")", ",", "0", ")", "max_x", "=", "max", "(", "max", "(", "X", ")", "+", "2", "*", "np", ".", "std", "(", "X", ",", "ddof", "=", "1", ")", ",", "np", ".", "std", "(", "X", ",", "ddof", "=", "1", ")", ")", "xvals", "=", "np", ".", "linspace", "(", "min_x", ",", "max_x", ",", "nbin", ")", "binw", "=", "(", "max_x", "-", "min_x", ")", "/", "(", "nbin", "-", "1", ")", "zero_idx", "=", "max", "(", "np", ".", "where", "(", "xvals", "<=", "0", ")", "[", "0", "]", ")", "noise_kernel", "=", "norm", "(", ")", ".", "pdf", "(", "xvals", "/", "sigma", ")", "*", "binw", "/", "sigma", "if", "zero_idx", ">", "0", ":", "noise_rotate", "=", "noise_kernel", "[", "list", "(", "np", ".", "arange", "(", "zero_idx", ",", "len", "(", "xvals", ")", ")", ")", "+", "list", "(", "np", ".", "arange", "(", "0", ",", "zero_idx", ")", ")", "]", "else", ":", "noise_rotate", "=", "noise_kernel", "XX", "=", "np", ".", "zeros", "(", "(", "p", ",", "len", "(", "xvals", ")", ")", ",", "dtype", "=", "np", ".", "float", ")", "for", "ind", ",", "exp", "in", "enumerate", "(", "range", "(", "1", ",", "p", "+", "1", ")", ")", ":", "mask", "=", "np", ".", "ones_like", "(", "xvals", ")", "mask", "[", "np", ".", "where", "(", "xvals", "<=", "0", ")", "[", "0", "]", "]", "=", "0", "XX", "[", "ind", ",", ":", "]", "=", "pow", "(", "xvals", ",", "exp", ")", "*", "mask", "XX", "=", "XX", ".", "T", "def", "neg_loglik", "(", "eta", ")", ":", "mask", "=", "np", ".", "ones_like", "(", "xvals", ")", "mask", "[", "np", ".", "where", "(", "xvals", "<=", "0", ")", "[", "0", "]", "]", "=", "0", "g_eta_raw", "=", "np", ".", "exp", "(", "np", ".", "dot", "(", "XX", ",", "eta", ")", ")", "*", "mask", "if", "(", "(", "np", ".", "sum", "(", "g_eta_raw", ")", "==", "np", ".", "inf", ")", "|", "(", "np", ".", "sum", "(", "g_eta_raw", ")", "<=", "100", "*", "np", ".", "finfo", "(", "np", ".", "double", ")", ".", "tiny", ")", ")", ":", "return", "(", "1000", "*", "(", "len", "(", "X", ")", "+", "sum", "(", "eta", "**", "2", ")", ")", ")", "g_eta_main", "=", "g_eta_raw", "/", "sum", "(", "g_eta_raw", ")", "g_eta", "=", "(", "(", "1", "-", "unif_fraction", ")", "*", "g_eta_main", "+", "unif_fraction", "*", "mask", "/", "sum", "(", "mask", ")", ")", "f_eta", "=", "fftconvolve", "(", "g_eta", ",", "noise_rotate", ",", "mode", "=", "'same'", ")", "return", "np", ".", "sum", "(", "np", ".", "interp", "(", "X", ",", "xvals", ",", "-", "np", ".", "log", "(", "np", ".", "maximum", "(", "f_eta", ",", "0.0000001", ")", ")", ")", ")", "eta_hat", "=", "minimize", "(", "neg_loglik", ",", "list", "(", "itertools", ".", "repeat", "(", "-", "1", ",", "p", ")", ")", ")", ".", "x", "g_eta_raw", "=", "np", ".", "exp", "(", "np", ".", "dot", "(", "XX", ",", "eta_hat", ")", ")", "*", "mask", "g_eta_main", "=", "g_eta_raw", "/", "sum", "(", "g_eta_raw", ")", "g_eta", "=", "(", "(", "1", "-", "unif_fraction", ")", "*", "g_eta_main", "+", "unif_fraction", "*", "mask", ")", "/", "sum", "(", "mask", ")", "return", "xvals", ",", "g_eta" ]
401c63a74a27d775eff0f72b6c20ffd568491fe0
valid
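A small sketch of fitting the prior on simulated data; all numbers are illustrative, and note that the record's code uses np.float, which recent NumPy releases have removed, so an older NumPy is assumed:

    import numpy as np
    from forestci.calibration import gfit

    rng = np.random.RandomState(0)
    observed = np.abs(rng.normal(loc=1.0, scale=0.3, size=500))   # toy noisy observations
    xvals, g_eta = gfit(observed, sigma=0.2)
    # xvals is the discretised grid and g_eta the estimated prior weight on that grid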
gbayes
Estimate Bayes posterior with Gaussian noise [Efron2014]_. Parameters ---------- x0: ndarray an observation g_est: tuple of ndarrays the prior (grid, density) pair, as returned by gfit sigma: float noise estimate Returns ------- The posterior estimate E[mu | x0]
forestci/calibration.py
def gbayes(x0, g_est, sigma): """ Estimate Bayes posterior with Gaussian noise [Efron2014]_. Parameters ---------- x0: ndarray an observation g_est: float a prior density, as returned by gfit sigma: int noise estimate Returns ------- An array of the posterior estimate E[mu | x0] """ Kx = norm().pdf((g_est[0] - x0) / sigma) post = Kx * g_est[1] post /= sum(post) return sum(post * g_est[0])
def gbayes(x0, g_est, sigma): """ Estimate Bayes posterior with Gaussian noise [Efron2014]_. Parameters ---------- x0: ndarray an observation g_est: float a prior density, as returned by gfit sigma: int noise estimate Returns ------- An array of the posterior estimate E[mu | x0] """ Kx = norm().pdf((g_est[0] - x0) / sigma) post = Kx * g_est[1] post /= sum(post) return sum(post * g_est[0])
[ "Estimate", "Bayes", "posterior", "with", "Gaussian", "noise", "[", "Efron2014", "]", "_", "." ]
scikit-learn-contrib/forest-confidence-interval
python
https://github.com/scikit-learn-contrib/forest-confidence-interval/blob/401c63a74a27d775eff0f72b6c20ffd568491fe0/forestci/calibration.py#L109-L130
[ "def", "gbayes", "(", "x0", ",", "g_est", ",", "sigma", ")", ":", "Kx", "=", "norm", "(", ")", ".", "pdf", "(", "(", "g_est", "[", "0", "]", "-", "x0", ")", "/", "sigma", ")", "post", "=", "Kx", "*", "g_est", "[", "1", "]", "post", "/=", "sum", "(", "post", ")", "return", "sum", "(", "post", "*", "g_est", "[", "0", "]", ")" ]
401c63a74a27d775eff0f72b6c20ffd568491fe0
valid
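A sketch of using the fitted prior to get a posterior mean for a single observation (all values illustrative):

    import numpy as np
    from forestci.calibration import gfit, gbayes

    rng = np.random.RandomState(1)
    observed = np.abs(rng.normal(1.0, 0.3, size=400))
    g_est = gfit(observed, sigma=0.2)                        # (grid, density) pair used as the prior
    posterior_mean = gbayes(x0=0.8, g_est=g_est, sigma=0.2)
    # posterior_mean approximates E[mu | x = 0.8] under the fitted prior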
calibrateEB
Calibrate noisy variance estimates with empirical Bayes. Parameters ---------- variances: ndarray List of variance estimates. sigma2: float Estimate of the Monte Carlo noise in variances. Returns ------- An array of the calibrated variance estimates
forestci/calibration.py
def calibrateEB(variances, sigma2): """ Calibrate noisy variance estimates with empirical Bayes. Parameters ---------- vars: ndarray List of variance estimates. sigma2: int Estimate of the Monte Carlo noise in vars. Returns ------- An array of the calibrated variance estimates """ if (sigma2 <= 0 or min(variances) == max(variances)): return(np.maximum(variances, 0)) sigma = np.sqrt(sigma2) eb_prior = gfit(variances, sigma) # Set up a partial execution of the function part = functools.partial(gbayes, g_est=eb_prior, sigma=sigma) if len(variances) >= 200: # Interpolate to speed up computations: calib_x = np.percentile(variances, np.arange(0, 102, 2)) calib_y = list(map(part, calib_x)) calib_all = np.interp(variances, calib_x, calib_y) else: calib_all = list(map(part, variances)) return np.asarray(calib_all)
def calibrateEB(variances, sigma2): """ Calibrate noisy variance estimates with empirical Bayes. Parameters ---------- vars: ndarray List of variance estimates. sigma2: int Estimate of the Monte Carlo noise in vars. Returns ------- An array of the calibrated variance estimates """ if (sigma2 <= 0 or min(variances) == max(variances)): return(np.maximum(variances, 0)) sigma = np.sqrt(sigma2) eb_prior = gfit(variances, sigma) # Set up a partial execution of the function part = functools.partial(gbayes, g_est=eb_prior, sigma=sigma) if len(variances) >= 200: # Interpolate to speed up computations: calib_x = np.percentile(variances, np.arange(0, 102, 2)) calib_y = list(map(part, calib_x)) calib_all = np.interp(variances, calib_x, calib_y) else: calib_all = list(map(part, variances)) return np.asarray(calib_all)
[ "Calibrate", "noisy", "variance", "estimates", "with", "empirical", "Bayes", "." ]
scikit-learn-contrib/forest-confidence-interval
python
https://github.com/scikit-learn-contrib/forest-confidence-interval/blob/401c63a74a27d775eff0f72b6c20ffd568491fe0/forestci/calibration.py#L133-L164
[ "def", "calibrateEB", "(", "variances", ",", "sigma2", ")", ":", "if", "(", "sigma2", "<=", "0", "or", "min", "(", "variances", ")", "==", "max", "(", "variances", ")", ")", ":", "return", "(", "np", ".", "maximum", "(", "variances", ",", "0", ")", ")", "sigma", "=", "np", ".", "sqrt", "(", "sigma2", ")", "eb_prior", "=", "gfit", "(", "variances", ",", "sigma", ")", "# Set up a partial execution of the function", "part", "=", "functools", ".", "partial", "(", "gbayes", ",", "g_est", "=", "eb_prior", ",", "sigma", "=", "sigma", ")", "if", "len", "(", "variances", ")", ">=", "200", ":", "# Interpolate to speed up computations:", "calib_x", "=", "np", ".", "percentile", "(", "variances", ",", "np", ".", "arange", "(", "0", ",", "102", ",", "2", ")", ")", "calib_y", "=", "list", "(", "map", "(", "part", ",", "calib_x", ")", ")", "calib_all", "=", "np", ".", "interp", "(", "variances", ",", "calib_x", ",", "calib_y", ")", "else", ":", "calib_all", "=", "list", "(", "map", "(", "part", ",", "variances", ")", ")", "return", "np", ".", "asarray", "(", "calib_all", ")" ]
401c63a74a27d775eff0f72b6c20ffd568491fe0
valid
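calibrateEB ties the two previous helpers together; a self-contained sketch on synthetic variance estimates (values illustrative):

    import numpy as np
    from forestci.calibration import calibrateEB

    rng = np.random.RandomState(2)
    raw_variances = rng.normal(loc=0.5, scale=0.2, size=300)   # a few entries may be negative
    calibrated = calibrateEB(raw_variances, sigma2=0.04)
    # calibrated has the same shape as raw_variances; the empirical Bayes step
    # shrinks the noisy estimates and removes the negative values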
calc_inbag
Derive samples used to create trees in scikit-learn RandomForest objects. Recovers the samples in each tree from the random state of that tree using :func:`forest._generate_sample_indices`. Parameters ---------- n_samples : int The number of samples used to fit the scikit-learn RandomForest object. forest : RandomForest Regressor or Classifier object that is already fit by scikit-learn. Returns ------- Array that records how many times a data point was placed in a tree. Columns are individual trees; rows are training samples, and each entry is the number of times that sample was used in that tree.
forestci/forestci.py
def calc_inbag(n_samples, forest): """ Derive samples used to create trees in scikit-learn RandomForest objects. Recovers the samples in each tree from the random state of that tree using :func:`forest._generate_sample_indices`. Parameters ---------- n_samples : int The number of samples used to fit the scikit-learn RandomForest object. forest : RandomForest Regressor or Classifier object that is already fit by scikit-learn. Returns ------- Array that records how many times a data point was placed in a tree. Columns are individual trees. Rows are the number of times a sample was used in a tree. """ if not forest.bootstrap: e_s = "Cannot calculate the inbag from a forest that has " e_s = " bootstrap=False" raise ValueError(e_s) n_trees = forest.n_estimators inbag = np.zeros((n_samples, n_trees)) sample_idx = [] for t_idx in range(n_trees): sample_idx.append( _generate_sample_indices(forest.estimators_[t_idx].random_state, n_samples)) inbag[:, t_idx] = np.bincount(sample_idx[-1], minlength=n_samples) return inbag
def calc_inbag(n_samples, forest): """ Derive samples used to create trees in scikit-learn RandomForest objects. Recovers the samples in each tree from the random state of that tree using :func:`forest._generate_sample_indices`. Parameters ---------- n_samples : int The number of samples used to fit the scikit-learn RandomForest object. forest : RandomForest Regressor or Classifier object that is already fit by scikit-learn. Returns ------- Array that records how many times a data point was placed in a tree. Columns are individual trees. Rows are the number of times a sample was used in a tree. """ if not forest.bootstrap: e_s = "Cannot calculate the inbag from a forest that has " e_s = " bootstrap=False" raise ValueError(e_s) n_trees = forest.n_estimators inbag = np.zeros((n_samples, n_trees)) sample_idx = [] for t_idx in range(n_trees): sample_idx.append( _generate_sample_indices(forest.estimators_[t_idx].random_state, n_samples)) inbag[:, t_idx] = np.bincount(sample_idx[-1], minlength=n_samples) return inbag
[ "Derive", "samples", "used", "to", "create", "trees", "in", "scikit", "-", "learn", "RandomForest", "objects", "." ]
scikit-learn-contrib/forest-confidence-interval
python
https://github.com/scikit-learn-contrib/forest-confidence-interval/blob/401c63a74a27d775eff0f72b6c20ffd568491fe0/forestci/forestci.py#L32-L67
[ "def", "calc_inbag", "(", "n_samples", ",", "forest", ")", ":", "if", "not", "forest", ".", "bootstrap", ":", "e_s", "=", "\"Cannot calculate the inbag from a forest that has \"", "e_s", "=", "\" bootstrap=False\"", "raise", "ValueError", "(", "e_s", ")", "n_trees", "=", "forest", ".", "n_estimators", "inbag", "=", "np", ".", "zeros", "(", "(", "n_samples", ",", "n_trees", ")", ")", "sample_idx", "=", "[", "]", "for", "t_idx", "in", "range", "(", "n_trees", ")", ":", "sample_idx", ".", "append", "(", "_generate_sample_indices", "(", "forest", ".", "estimators_", "[", "t_idx", "]", ".", "random_state", ",", "n_samples", ")", ")", "inbag", "[", ":", ",", "t_idx", "]", "=", "np", ".", "bincount", "(", "sample_idx", "[", "-", "1", "]", ",", "minlength", "=", "n_samples", ")", "return", "inbag" ]
401c63a74a27d775eff0f72b6c20ffd568491fe0
valid
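A usage sketch, assuming calc_inbag is re-exported at the forestci package level (as in released versions) and a scikit-learn version compatible with this snapshot:

    import numpy as np
    from sklearn.ensemble import RandomForestRegressor
    import forestci as fci

    rng = np.random.RandomState(3)
    X, y = rng.rand(100, 4), rng.rand(100)
    forest = RandomForestRegressor(n_estimators=25, bootstrap=True).fit(X, y)
    inbag = fci.calc_inbag(X.shape[0], forest)
    # inbag.shape == (100, 25); inbag[i, t] counts how often sample i was drawn
    # into tree t's bootstrap sample, and each column sums to the training size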
_core_computation
Helper function that performs the core computation Parameters ---------- X_train : ndarray An array with shape (n_train_sample, n_features). X_test : ndarray An array with shape (n_test_sample, n_features). inbag : ndarray The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. pred_centered : ndarray Centered predictions that are an intermediate result in the computation. memory_constrained: boolean (optional) Whether or not there is a restriction on memory. If False, it is assumed that an ndarray of shape (n_train_sample,n_test_sample) fits in main memory. Setting to True can actually provide a speed up if memory_limit is tuned to the optimal range. memory_limit: int (optional) An upper bound for how much memory the intermediate matrices will take up in Megabytes. This must be provided if memory_constrained=True.
forestci/forestci.py
def _core_computation(X_train, X_test, inbag, pred_centered, n_trees, memory_constrained=False, memory_limit=None, test_mode=False): """ Helper function, that performs the core computation Parameters ---------- X_train : ndarray An array with shape (n_train_sample, n_features). X_test : ndarray An array with shape (n_test_sample, n_features). inbag : ndarray The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. pred_centered : ndarray Centered predictions that are an intermediate result in the computation. memory_constrained: boolean (optional) Whether or not there is a restriction on memory. If False, it is assumed that a ndarry of shape (n_train_sample,n_test_sample) fits in main memory. Setting to True can actually provide a speed up if memory_limit is tuned to the optimal range. memory_limit: int (optional) An upper bound for how much memory the itermediate matrices will take up in Megabytes. This must be provided if memory_constrained=True. """ if not memory_constrained: return np.sum((np.dot(inbag - 1, pred_centered.T) / n_trees) ** 2, 0) if not memory_limit: raise ValueError('If memory_constrained=True, must provide', 'memory_limit.') # Assumes double precision float chunk_size = int((memory_limit * 1e6) / (8.0 * X_train.shape[0])) if chunk_size == 0: min_limit = 8.0 * X_train.shape[0] / 1e6 raise ValueError('memory_limit provided is too small.' + 'For these dimensions, memory_limit must ' + 'be greater than or equal to %.3e' % min_limit) chunk_edges = np.arange(0, X_test.shape[0] + chunk_size, chunk_size) inds = range(X_test.shape[0]) chunks = [inds[chunk_edges[i]:chunk_edges[i+1]] for i in range(len(chunk_edges)-1)] if test_mode: print('Number of chunks: %d' % (len(chunks),)) V_IJ = np.concatenate([ np.sum((np.dot(inbag-1, pred_centered[chunk].T)/n_trees)**2, 0) for chunk in chunks]) return V_IJ
def _core_computation(X_train, X_test, inbag, pred_centered, n_trees, memory_constrained=False, memory_limit=None, test_mode=False): """ Helper function, that performs the core computation Parameters ---------- X_train : ndarray An array with shape (n_train_sample, n_features). X_test : ndarray An array with shape (n_test_sample, n_features). inbag : ndarray The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. pred_centered : ndarray Centered predictions that are an intermediate result in the computation. memory_constrained: boolean (optional) Whether or not there is a restriction on memory. If False, it is assumed that a ndarry of shape (n_train_sample,n_test_sample) fits in main memory. Setting to True can actually provide a speed up if memory_limit is tuned to the optimal range. memory_limit: int (optional) An upper bound for how much memory the itermediate matrices will take up in Megabytes. This must be provided if memory_constrained=True. """ if not memory_constrained: return np.sum((np.dot(inbag - 1, pred_centered.T) / n_trees) ** 2, 0) if not memory_limit: raise ValueError('If memory_constrained=True, must provide', 'memory_limit.') # Assumes double precision float chunk_size = int((memory_limit * 1e6) / (8.0 * X_train.shape[0])) if chunk_size == 0: min_limit = 8.0 * X_train.shape[0] / 1e6 raise ValueError('memory_limit provided is too small.' + 'For these dimensions, memory_limit must ' + 'be greater than or equal to %.3e' % min_limit) chunk_edges = np.arange(0, X_test.shape[0] + chunk_size, chunk_size) inds = range(X_test.shape[0]) chunks = [inds[chunk_edges[i]:chunk_edges[i+1]] for i in range(len(chunk_edges)-1)] if test_mode: print('Number of chunks: %d' % (len(chunks),)) V_IJ = np.concatenate([ np.sum((np.dot(inbag-1, pred_centered[chunk].T)/n_trees)**2, 0) for chunk in chunks]) return V_IJ
[ "Helper", "function", "that", "performs", "the", "core", "computation" ]
scikit-learn-contrib/forest-confidence-interval
python
https://github.com/scikit-learn-contrib/forest-confidence-interval/blob/401c63a74a27d775eff0f72b6c20ffd568491fe0/forestci/forestci.py#L70-L132
[ "def", "_core_computation", "(", "X_train", ",", "X_test", ",", "inbag", ",", "pred_centered", ",", "n_trees", ",", "memory_constrained", "=", "False", ",", "memory_limit", "=", "None", ",", "test_mode", "=", "False", ")", ":", "if", "not", "memory_constrained", ":", "return", "np", ".", "sum", "(", "(", "np", ".", "dot", "(", "inbag", "-", "1", ",", "pred_centered", ".", "T", ")", "/", "n_trees", ")", "**", "2", ",", "0", ")", "if", "not", "memory_limit", ":", "raise", "ValueError", "(", "'If memory_constrained=True, must provide'", ",", "'memory_limit.'", ")", "# Assumes double precision float", "chunk_size", "=", "int", "(", "(", "memory_limit", "*", "1e6", ")", "/", "(", "8.0", "*", "X_train", ".", "shape", "[", "0", "]", ")", ")", "if", "chunk_size", "==", "0", ":", "min_limit", "=", "8.0", "*", "X_train", ".", "shape", "[", "0", "]", "/", "1e6", "raise", "ValueError", "(", "'memory_limit provided is too small.'", "+", "'For these dimensions, memory_limit must '", "+", "'be greater than or equal to %.3e'", "%", "min_limit", ")", "chunk_edges", "=", "np", ".", "arange", "(", "0", ",", "X_test", ".", "shape", "[", "0", "]", "+", "chunk_size", ",", "chunk_size", ")", "inds", "=", "range", "(", "X_test", ".", "shape", "[", "0", "]", ")", "chunks", "=", "[", "inds", "[", "chunk_edges", "[", "i", "]", ":", "chunk_edges", "[", "i", "+", "1", "]", "]", "for", "i", "in", "range", "(", "len", "(", "chunk_edges", ")", "-", "1", ")", "]", "if", "test_mode", ":", "print", "(", "'Number of chunks: %d'", "%", "(", "len", "(", "chunks", ")", ",", ")", ")", "V_IJ", "=", "np", ".", "concatenate", "(", "[", "np", ".", "sum", "(", "(", "np", ".", "dot", "(", "inbag", "-", "1", ",", "pred_centered", "[", "chunk", "]", ".", "T", ")", "/", "n_trees", ")", "**", "2", ",", "0", ")", "for", "chunk", "in", "chunks", "]", ")", "return", "V_IJ" ]
401c63a74a27d775eff0f72b6c20ffd568491fe0
valid
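_core_computation is internal, but its input shapes are easy to see on toy arrays; everything below is synthetic and only the function name comes from the record:

    import numpy as np
    from forestci.forestci import _core_computation

    rng = np.random.RandomState(4)
    n_train, n_test, n_trees = 50, 10, 20
    X_train, X_test = rng.rand(n_train, 3), rng.rand(n_test, 3)
    inbag = rng.multinomial(n_train, [1.0 / n_train] * n_train, size=n_trees).T   # (n_train, n_trees)
    pred_centered = rng.randn(n_test, n_trees)                                    # toy centred per-tree predictions
    V_IJ = _core_computation(X_train, X_test, inbag, pred_centered, n_trees)
    # V_IJ has shape (n_test,): one raw infinitesimal-jackknife variance per test point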
_bias_correction
Helper function that implements bias correction Parameters ---------- V_IJ : ndarray Intermediate result in the computation. inbag : ndarray The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. pred_centered : ndarray Centered predictions that are an intermediate result in the computation. n_trees : int The number of trees in the forest object.
forestci/forestci.py
def _bias_correction(V_IJ, inbag, pred_centered, n_trees): """ Helper functions that implements bias correction Parameters ---------- V_IJ : ndarray Intermediate result in the computation. inbag : ndarray The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. pred_centered : ndarray Centered predictions that are an intermediate result in the computation. n_trees : int The number of trees in the forest object. """ n_train_samples = inbag.shape[0] n_var = np.mean(np.square(inbag[0:n_trees]).mean(axis=1).T.view() - np.square(inbag[0:n_trees].mean(axis=1)).T.view()) boot_var = np.square(pred_centered).sum(axis=1) / n_trees bias_correction = n_train_samples * n_var * boot_var / n_trees V_IJ_unbiased = V_IJ - bias_correction return V_IJ_unbiased
def _bias_correction(V_IJ, inbag, pred_centered, n_trees): """ Helper functions that implements bias correction Parameters ---------- V_IJ : ndarray Intermediate result in the computation. inbag : ndarray The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. pred_centered : ndarray Centered predictions that are an intermediate result in the computation. n_trees : int The number of trees in the forest object. """ n_train_samples = inbag.shape[0] n_var = np.mean(np.square(inbag[0:n_trees]).mean(axis=1).T.view() - np.square(inbag[0:n_trees].mean(axis=1)).T.view()) boot_var = np.square(pred_centered).sum(axis=1) / n_trees bias_correction = n_train_samples * n_var * boot_var / n_trees V_IJ_unbiased = V_IJ - bias_correction return V_IJ_unbiased
[ "Helper", "functions", "that", "implements", "bias", "correction" ]
scikit-learn-contrib/forest-confidence-interval
python
https://github.com/scikit-learn-contrib/forest-confidence-interval/blob/401c63a74a27d775eff0f72b6c20ffd568491fe0/forestci/forestci.py#L135-L164
[ "def", "_bias_correction", "(", "V_IJ", ",", "inbag", ",", "pred_centered", ",", "n_trees", ")", ":", "n_train_samples", "=", "inbag", ".", "shape", "[", "0", "]", "n_var", "=", "np", ".", "mean", "(", "np", ".", "square", "(", "inbag", "[", "0", ":", "n_trees", "]", ")", ".", "mean", "(", "axis", "=", "1", ")", ".", "T", ".", "view", "(", ")", "-", "np", ".", "square", "(", "inbag", "[", "0", ":", "n_trees", "]", ".", "mean", "(", "axis", "=", "1", ")", ")", ".", "T", ".", "view", "(", ")", ")", "boot_var", "=", "np", ".", "square", "(", "pred_centered", ")", ".", "sum", "(", "axis", "=", "1", ")", "/", "n_trees", "bias_correction", "=", "n_train_samples", "*", "n_var", "*", "boot_var", "/", "n_trees", "V_IJ_unbiased", "=", "V_IJ", "-", "bias_correction", "return", "V_IJ_unbiased" ]
401c63a74a27d775eff0f72b6c20ffd568491fe0
valid
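The bias-correction helper plugs in right after the core computation; a self-contained toy run (synthetic data, internal API):

    import numpy as np
    from forestci.forestci import _core_computation, _bias_correction

    rng = np.random.RandomState(5)
    n_train, n_test, n_trees = 40, 8, 15
    X_train, X_test = np.zeros((n_train, 1)), np.zeros((n_test, 1))   # unused when memory_constrained=False
    inbag = rng.multinomial(n_train, [1.0 / n_train] * n_train, size=n_trees).T
    pred_centered = rng.randn(n_test, n_trees)
    V_IJ = _core_computation(X_train, X_test, inbag, pred_centered, n_trees)
    V_IJ_unbiased = _bias_correction(V_IJ, inbag, pred_centered, n_trees)
    # the Monte Carlo bias term is subtracted, so some entries can dip below zero,
    # which is exactly what the calibrateEB step above is meant to repair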
random_forest_error
Calculate error bars from scikit-learn RandomForest estimators. Given a RandomForest regressor or classifier object, this variance can be used to plot error bars for its predictions. Parameters ---------- forest : RandomForest Regressor or Classifier object. X_train : ndarray An array with shape (n_train_sample, n_features). The design matrix for training data. X_test : ndarray An array with shape (n_test_sample, n_features). The design matrix for testing data. inbag : ndarray, optional The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. calibrate: boolean, optional Whether to apply calibration to mitigate Monte Carlo noise. Some variance estimates may be negative due to Monte Carlo effects if the number of trees in the forest is too small. Default: True memory_constrained: boolean, optional Whether or not there is a restriction on memory. If False, it is assumed that an ndarray of shape (n_train_sample,n_test_sample) fits in main memory. Setting to True can actually provide a speed up if memory_limit is tuned to the optimal range. memory_limit: int, optional. An upper bound for how much memory the intermediate matrices will take up in Megabytes. This must be provided if memory_constrained=True. Returns ------- An array with the unbiased sampling variance (V_IJ_unbiased) for a RandomForest object. See Also ---------- :func:`calc_inbag` Notes ----- The calculation of error is based on the infinitesimal jackknife variance, as described in [Wager2014]_ and is a Python implementation of the R code provided at: https://github.com/swager/randomForestCI .. [Wager2014] S. Wager, T. Hastie, B. Efron. "Confidence Intervals for Random Forests: The Jackknife and the Infinitesimal Jackknife", Journal of Machine Learning Research vol. 15, pp. 1625-1651, 2014.
forestci/forestci.py
def random_forest_error(forest, X_train, X_test, inbag=None, calibrate=True, memory_constrained=False, memory_limit=None): """ Calculate error bars from scikit-learn RandomForest estimators. RandomForest is a regressor or classifier object this variance can be used to plot error bars for RandomForest objects Parameters ---------- forest : RandomForest Regressor or Classifier object. X_train : ndarray An array with shape (n_train_sample, n_features). The design matrix for training data. X_test : ndarray An array with shape (n_test_sample, n_features). The design matrix for testing data inbag : ndarray, optional The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. calibrate: boolean, optional Whether to apply calibration to mitigate Monte Carlo noise. Some variance estimates may be negative due to Monte Carlo effects if the number of trees in the forest is too small. To use calibration, Default: True memory_constrained: boolean, optional Whether or not there is a restriction on memory. If False, it is assumed that a ndarry of shape (n_train_sample,n_test_sample) fits in main memory. Setting to True can actually provide a speed up if memory_limit is tuned to the optimal range. memory_limit: int, optional. An upper bound for how much memory the itermediate matrices will take up in Megabytes. This must be provided if memory_constrained=True. Returns ------- An array with the unbiased sampling variance (V_IJ_unbiased) for a RandomForest object. See Also ---------- :func:`calc_inbag` Notes ----- The calculation of error is based on the infinitesimal jackknife variance, as described in [Wager2014]_ and is a Python implementation of the R code provided at: https://github.com/swager/randomForestCI .. [Wager2014] S. Wager, T. Hastie, B. Efron. "Confidence Intervals for Random Forests: The Jackknife and the Infinitesimal Jackknife", Journal of Machine Learning Research vol. 15, pp. 1625-1651, 2014. 
""" if inbag is None: inbag = calc_inbag(X_train.shape[0], forest) pred = np.array([tree.predict(X_test) for tree in forest]).T pred_mean = np.mean(pred, 0) pred_centered = pred - pred_mean n_trees = forest.n_estimators V_IJ = _core_computation(X_train, X_test, inbag, pred_centered, n_trees, memory_constrained, memory_limit) V_IJ_unbiased = _bias_correction(V_IJ, inbag, pred_centered, n_trees) # Correct for cases where resampling is done without replacement: if np.max(inbag) == 1: variance_inflation = 1 / (1 - np.mean(inbag)) ** 2 V_IJ_unbiased *= variance_inflation if not calibrate: return V_IJ_unbiased if V_IJ_unbiased.shape[0] <= 20: print("No calibration with n_samples <= 20") return V_IJ_unbiased if calibrate: calibration_ratio = 2 n_sample = np.ceil(n_trees / calibration_ratio) new_forest = copy.deepcopy(forest) new_forest.estimators_ =\ np.random.permutation(new_forest.estimators_)[:int(n_sample)] new_forest.n_estimators = int(n_sample) results_ss = random_forest_error(new_forest, X_train, X_test, calibrate=False, memory_constrained=memory_constrained, memory_limit=memory_limit) # Use this second set of variance estimates # to estimate scale of Monte Carlo noise sigma2_ss = np.mean((results_ss - V_IJ_unbiased)**2) delta = n_sample / n_trees sigma2 = (delta**2 + (1 - delta)**2) / (2 * (1 - delta)**2) * sigma2_ss # Use Monte Carlo noise scale estimate for empirical Bayes calibration V_IJ_calibrated = calibrateEB(V_IJ_unbiased, sigma2) return V_IJ_calibrated
def random_forest_error(forest, X_train, X_test, inbag=None, calibrate=True, memory_constrained=False, memory_limit=None): """ Calculate error bars from scikit-learn RandomForest estimators. RandomForest is a regressor or classifier object this variance can be used to plot error bars for RandomForest objects Parameters ---------- forest : RandomForest Regressor or Classifier object. X_train : ndarray An array with shape (n_train_sample, n_features). The design matrix for training data. X_test : ndarray An array with shape (n_test_sample, n_features). The design matrix for testing data inbag : ndarray, optional The inbag matrix that fit the data. If set to `None` (default) it will be inferred from the forest. However, this only works for trees for which bootstrapping was set to `True`. That is, if sampling was done with replacement. Otherwise, users need to provide their own inbag matrix. calibrate: boolean, optional Whether to apply calibration to mitigate Monte Carlo noise. Some variance estimates may be negative due to Monte Carlo effects if the number of trees in the forest is too small. To use calibration, Default: True memory_constrained: boolean, optional Whether or not there is a restriction on memory. If False, it is assumed that a ndarry of shape (n_train_sample,n_test_sample) fits in main memory. Setting to True can actually provide a speed up if memory_limit is tuned to the optimal range. memory_limit: int, optional. An upper bound for how much memory the itermediate matrices will take up in Megabytes. This must be provided if memory_constrained=True. Returns ------- An array with the unbiased sampling variance (V_IJ_unbiased) for a RandomForest object. See Also ---------- :func:`calc_inbag` Notes ----- The calculation of error is based on the infinitesimal jackknife variance, as described in [Wager2014]_ and is a Python implementation of the R code provided at: https://github.com/swager/randomForestCI .. [Wager2014] S. Wager, T. Hastie, B. Efron. "Confidence Intervals for Random Forests: The Jackknife and the Infinitesimal Jackknife", Journal of Machine Learning Research vol. 15, pp. 1625-1651, 2014. 
""" if inbag is None: inbag = calc_inbag(X_train.shape[0], forest) pred = np.array([tree.predict(X_test) for tree in forest]).T pred_mean = np.mean(pred, 0) pred_centered = pred - pred_mean n_trees = forest.n_estimators V_IJ = _core_computation(X_train, X_test, inbag, pred_centered, n_trees, memory_constrained, memory_limit) V_IJ_unbiased = _bias_correction(V_IJ, inbag, pred_centered, n_trees) # Correct for cases where resampling is done without replacement: if np.max(inbag) == 1: variance_inflation = 1 / (1 - np.mean(inbag)) ** 2 V_IJ_unbiased *= variance_inflation if not calibrate: return V_IJ_unbiased if V_IJ_unbiased.shape[0] <= 20: print("No calibration with n_samples <= 20") return V_IJ_unbiased if calibrate: calibration_ratio = 2 n_sample = np.ceil(n_trees / calibration_ratio) new_forest = copy.deepcopy(forest) new_forest.estimators_ =\ np.random.permutation(new_forest.estimators_)[:int(n_sample)] new_forest.n_estimators = int(n_sample) results_ss = random_forest_error(new_forest, X_train, X_test, calibrate=False, memory_constrained=memory_constrained, memory_limit=memory_limit) # Use this second set of variance estimates # to estimate scale of Monte Carlo noise sigma2_ss = np.mean((results_ss - V_IJ_unbiased)**2) delta = n_sample / n_trees sigma2 = (delta**2 + (1 - delta)**2) / (2 * (1 - delta)**2) * sigma2_ss # Use Monte Carlo noise scale estimate for empirical Bayes calibration V_IJ_calibrated = calibrateEB(V_IJ_unbiased, sigma2) return V_IJ_calibrated
[ "Calculate", "error", "bars", "from", "scikit", "-", "learn", "RandomForest", "estimators", "." ]
scikit-learn-contrib/forest-confidence-interval
python
https://github.com/scikit-learn-contrib/forest-confidence-interval/blob/401c63a74a27d775eff0f72b6c20ffd568491fe0/forestci/forestci.py#L167-L275
[ "def", "random_forest_error", "(", "forest", ",", "X_train", ",", "X_test", ",", "inbag", "=", "None", ",", "calibrate", "=", "True", ",", "memory_constrained", "=", "False", ",", "memory_limit", "=", "None", ")", ":", "if", "inbag", "is", "None", ":", "inbag", "=", "calc_inbag", "(", "X_train", ".", "shape", "[", "0", "]", ",", "forest", ")", "pred", "=", "np", ".", "array", "(", "[", "tree", ".", "predict", "(", "X_test", ")", "for", "tree", "in", "forest", "]", ")", ".", "T", "pred_mean", "=", "np", ".", "mean", "(", "pred", ",", "0", ")", "pred_centered", "=", "pred", "-", "pred_mean", "n_trees", "=", "forest", ".", "n_estimators", "V_IJ", "=", "_core_computation", "(", "X_train", ",", "X_test", ",", "inbag", ",", "pred_centered", ",", "n_trees", ",", "memory_constrained", ",", "memory_limit", ")", "V_IJ_unbiased", "=", "_bias_correction", "(", "V_IJ", ",", "inbag", ",", "pred_centered", ",", "n_trees", ")", "# Correct for cases where resampling is done without replacement:", "if", "np", ".", "max", "(", "inbag", ")", "==", "1", ":", "variance_inflation", "=", "1", "/", "(", "1", "-", "np", ".", "mean", "(", "inbag", ")", ")", "**", "2", "V_IJ_unbiased", "*=", "variance_inflation", "if", "not", "calibrate", ":", "return", "V_IJ_unbiased", "if", "V_IJ_unbiased", ".", "shape", "[", "0", "]", "<=", "20", ":", "print", "(", "\"No calibration with n_samples <= 20\"", ")", "return", "V_IJ_unbiased", "if", "calibrate", ":", "calibration_ratio", "=", "2", "n_sample", "=", "np", ".", "ceil", "(", "n_trees", "/", "calibration_ratio", ")", "new_forest", "=", "copy", ".", "deepcopy", "(", "forest", ")", "new_forest", ".", "estimators_", "=", "np", ".", "random", ".", "permutation", "(", "new_forest", ".", "estimators_", ")", "[", ":", "int", "(", "n_sample", ")", "]", "new_forest", ".", "n_estimators", "=", "int", "(", "n_sample", ")", "results_ss", "=", "random_forest_error", "(", "new_forest", ",", "X_train", ",", "X_test", ",", "calibrate", "=", "False", ",", "memory_constrained", "=", "memory_constrained", ",", "memory_limit", "=", "memory_limit", ")", "# Use this second set of variance estimates", "# to estimate scale of Monte Carlo noise", "sigma2_ss", "=", "np", ".", "mean", "(", "(", "results_ss", "-", "V_IJ_unbiased", ")", "**", "2", ")", "delta", "=", "n_sample", "/", "n_trees", "sigma2", "=", "(", "delta", "**", "2", "+", "(", "1", "-", "delta", ")", "**", "2", ")", "/", "(", "2", "*", "(", "1", "-", "delta", ")", "**", "2", ")", "*", "sigma2_ss", "# Use Monte Carlo noise scale estimate for empirical Bayes calibration", "V_IJ_calibrated", "=", "calibrateEB", "(", "V_IJ_unbiased", ",", "sigma2", ")", "return", "V_IJ_calibrated" ]
401c63a74a27d775eff0f72b6c20ffd568491fe0
valid
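End-to-end usage of the public entry point, assuming a scikit-learn version compatible with this forestci snapshot (all data synthetic):

    import numpy as np
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.model_selection import train_test_split
    import forestci as fci

    rng = np.random.RandomState(6)
    X = rng.rand(300, 5)
    y = X[:, 0] + 0.1 * rng.randn(300)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    forest = RandomForestRegressor(n_estimators=200, bootstrap=True).fit(X_train, y_train)
    variance = fci.random_forest_error(forest, X_train, X_test)
    error_bars = np.sqrt(variance)   # one-sigma error bars for forest.predict(X_test)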
SSLSatchel.generate_self_signed_certificate
Generates a self-signed certificate for use in an internal development environment for testing SSL pages. http://almostalldigital.wordpress.com/2013/03/07/self-signed-ssl-certificate-for-ec2-load-balancer/
burlap/ssl.py
def generate_self_signed_certificate(self, domain='', r=None): """ Generates a self-signed certificate for use in an internal development environment for testing SSL pages. http://almostalldigital.wordpress.com/2013/03/07/self-signed-ssl-certificate-for-ec2-load-balancer/ """ r = self.local_renderer r.env.domain = domain or r.env.domain assert r.env.domain, 'No SSL domain defined.' role = r or self.genv.ROLE or ALL ssl_dst = 'roles/%s/ssl' % (role,) if not os.path.isdir(ssl_dst): os.makedirs(ssl_dst) r.env.base_dst = '%s/%s' % (ssl_dst, r.env.domain) r.local('openssl req -new -newkey rsa:{ssl_length} ' '-days {ssl_days} -nodes -x509 ' '-subj "/C={ssl_country}/ST={ssl_state}/L={ssl_city}/O={ssl_organization}/CN={ssl_domain}" ' '-keyout {ssl_base_dst}.key -out {ssl_base_dst}.crt')
def generate_self_signed_certificate(self, domain='', r=None): """ Generates a self-signed certificate for use in an internal development environment for testing SSL pages. http://almostalldigital.wordpress.com/2013/03/07/self-signed-ssl-certificate-for-ec2-load-balancer/ """ r = self.local_renderer r.env.domain = domain or r.env.domain assert r.env.domain, 'No SSL domain defined.' role = r or self.genv.ROLE or ALL ssl_dst = 'roles/%s/ssl' % (role,) if not os.path.isdir(ssl_dst): os.makedirs(ssl_dst) r.env.base_dst = '%s/%s' % (ssl_dst, r.env.domain) r.local('openssl req -new -newkey rsa:{ssl_length} ' '-days {ssl_days} -nodes -x509 ' '-subj "/C={ssl_country}/ST={ssl_state}/L={ssl_city}/O={ssl_organization}/CN={ssl_domain}" ' '-keyout {ssl_base_dst}.key -out {ssl_base_dst}.crt')
[ "Generates", "a", "self", "-", "signed", "certificate", "for", "use", "in", "an", "internal", "development", "environment", "for", "testing", "SSL", "pages", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/ssl.py#L35-L53
[ "def", "generate_self_signed_certificate", "(", "self", ",", "domain", "=", "''", ",", "r", "=", "None", ")", ":", "r", "=", "self", ".", "local_renderer", "r", ".", "env", ".", "domain", "=", "domain", "or", "r", ".", "env", ".", "domain", "assert", "r", ".", "env", ".", "domain", ",", "'No SSL domain defined.'", "role", "=", "r", "or", "self", ".", "genv", ".", "ROLE", "or", "ALL", "ssl_dst", "=", "'roles/%s/ssl'", "%", "(", "role", ",", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "ssl_dst", ")", ":", "os", ".", "makedirs", "(", "ssl_dst", ")", "r", ".", "env", ".", "base_dst", "=", "'%s/%s'", "%", "(", "ssl_dst", ",", "r", ".", "env", ".", "domain", ")", "r", ".", "local", "(", "'openssl req -new -newkey rsa:{ssl_length} '", "'-days {ssl_days} -nodes -x509 '", "'-subj \"/C={ssl_country}/ST={ssl_state}/L={ssl_city}/O={ssl_organization}/CN={ssl_domain}\" '", "'-keyout {ssl_base_dst}.key -out {ssl_base_dst}.crt'", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
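The burlap task above wraps a single openssl invocation; a stand-alone sketch of that call with placeholder settings (this is not burlap's API, just the equivalent command expressed with subprocess, and it assumes openssl is on the PATH):

    import os
    import subprocess

    domain = 'dev.example.com'                    # placeholder for ssl_domain
    os.makedirs('roles/all/ssl', exist_ok=True)
    base = 'roles/all/ssl/%s' % domain            # placeholder for ssl_base_dst
    subprocess.check_call([
        'openssl', 'req', '-new', '-newkey', 'rsa:4096',   # rsa:4096 stands in for ssl_length
        '-days', '365', '-nodes', '-x509',                 # 365 stands in for ssl_days
        '-subj', '/C=US/ST=State/L=City/O=Example Org/CN=%s' % domain,
        '-keyout', base + '.key', '-out', base + '.crt',
    ])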
SSLSatchel.generate_csr
Creates a certificate signing request to be submitted to a formal certificate authority to generate a certificate. Note, the provider may say the CSR must be created on the target server, but this is not necessary.
burlap/ssl.py
def generate_csr(self, domain='', r=None): """ Creates a certificate signing request to be submitted to a formal certificate authority to generate a certificate. Note, the provider may say the CSR must be created on the target server, but this is not necessary. """ r = r or self.local_renderer r.env.domain = domain or r.env.domain role = self.genv.ROLE or ALL site = self.genv.SITE or self.genv.default_site print('self.genv.default_site:', self.genv.default_site, file=sys.stderr) print('site.csr0:', site, file=sys.stderr) ssl_dst = 'roles/%s/ssl' % (role,) print('ssl_dst:', ssl_dst) if not os.path.isdir(ssl_dst): os.makedirs(ssl_dst) for site, site_data in self.iter_sites(): print('site.csr1:', site, file=sys.stderr) assert r.env.domain, 'No SSL domain defined.' r.env.ssl_base_dst = '%s/%s' % (ssl_dst, r.env.domain.replace('*.', '')) r.env.ssl_csr_year = date.today().year r.local('openssl req -nodes -newkey rsa:{ssl_length} ' '-subj "/C={ssl_country}/ST={ssl_state}/L={ssl_city}/O={ssl_organization}/CN={ssl_domain}" ' '-keyout {ssl_base_dst}.{ssl_csr_year}.key -out {ssl_base_dst}.{ssl_csr_year}.csr')
def generate_csr(self, domain='', r=None): """ Creates a certificate signing request to be submitted to a formal certificate authority to generate a certificate. Note, the provider may say the CSR must be created on the target server, but this is not necessary. """ r = r or self.local_renderer r.env.domain = domain or r.env.domain role = self.genv.ROLE or ALL site = self.genv.SITE or self.genv.default_site print('self.genv.default_site:', self.genv.default_site, file=sys.stderr) print('site.csr0:', site, file=sys.stderr) ssl_dst = 'roles/%s/ssl' % (role,) print('ssl_dst:', ssl_dst) if not os.path.isdir(ssl_dst): os.makedirs(ssl_dst) for site, site_data in self.iter_sites(): print('site.csr1:', site, file=sys.stderr) assert r.env.domain, 'No SSL domain defined.' r.env.ssl_base_dst = '%s/%s' % (ssl_dst, r.env.domain.replace('*.', '')) r.env.ssl_csr_year = date.today().year r.local('openssl req -nodes -newkey rsa:{ssl_length} ' '-subj "/C={ssl_country}/ST={ssl_state}/L={ssl_city}/O={ssl_organization}/CN={ssl_domain}" ' '-keyout {ssl_base_dst}.{ssl_csr_year}.key -out {ssl_base_dst}.{ssl_csr_year}.csr')
[ "Creates", "a", "certificate", "signing", "request", "to", "be", "submitted", "to", "a", "formal", "certificate", "authority", "to", "generate", "a", "certificate", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/ssl.py#L57-L82
[ "def", "generate_csr", "(", "self", ",", "domain", "=", "''", ",", "r", "=", "None", ")", ":", "r", "=", "r", "or", "self", ".", "local_renderer", "r", ".", "env", ".", "domain", "=", "domain", "or", "r", ".", "env", ".", "domain", "role", "=", "self", ".", "genv", ".", "ROLE", "or", "ALL", "site", "=", "self", ".", "genv", ".", "SITE", "or", "self", ".", "genv", ".", "default_site", "print", "(", "'self.genv.default_site:'", ",", "self", ".", "genv", ".", "default_site", ",", "file", "=", "sys", ".", "stderr", ")", "print", "(", "'site.csr0:'", ",", "site", ",", "file", "=", "sys", ".", "stderr", ")", "ssl_dst", "=", "'roles/%s/ssl'", "%", "(", "role", ",", ")", "print", "(", "'ssl_dst:'", ",", "ssl_dst", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "ssl_dst", ")", ":", "os", ".", "makedirs", "(", "ssl_dst", ")", "for", "site", ",", "site_data", "in", "self", ".", "iter_sites", "(", ")", ":", "print", "(", "'site.csr1:'", ",", "site", ",", "file", "=", "sys", ".", "stderr", ")", "assert", "r", ".", "env", ".", "domain", ",", "'No SSL domain defined.'", "r", ".", "env", ".", "ssl_base_dst", "=", "'%s/%s'", "%", "(", "ssl_dst", ",", "r", ".", "env", ".", "domain", ".", "replace", "(", "'*.'", ",", "''", ")", ")", "r", ".", "env", ".", "ssl_csr_year", "=", "date", ".", "today", "(", ")", ".", "year", "r", ".", "local", "(", "'openssl req -nodes -newkey rsa:{ssl_length} '", "'-subj \"/C={ssl_country}/ST={ssl_state}/L={ssl_city}/O={ssl_organization}/CN={ssl_domain}\" '", "'-keyout {ssl_base_dst}.{ssl_csr_year}.key -out {ssl_base_dst}.{ssl_csr_year}.csr'", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
SSLSatchel.get_expiration_date
Reads the expiration date of a local crt file.
burlap/ssl.py
def get_expiration_date(self, fn): """ Reads the expiration date of a local crt file. """ r = self.local_renderer r.env.crt_fn = fn with hide('running'): ret = r.local('openssl x509 -noout -in {ssl_crt_fn} -dates', capture=True) matches = re.findall('notAfter=(.*?)$', ret, flags=re.IGNORECASE) if matches: return dateutil.parser.parse(matches[0])
def get_expiration_date(self, fn): """ Reads the expiration date of a local crt file. """ r = self.local_renderer r.env.crt_fn = fn with hide('running'): ret = r.local('openssl x509 -noout -in {ssl_crt_fn} -dates', capture=True) matches = re.findall('notAfter=(.*?)$', ret, flags=re.IGNORECASE) if matches: return dateutil.parser.parse(matches[0])
[ "Reads", "the", "expiration", "date", "of", "a", "local", "crt", "file", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/ssl.py#L84-L94
[ "def", "get_expiration_date", "(", "self", ",", "fn", ")", ":", "r", "=", "self", ".", "local_renderer", "r", ".", "env", ".", "crt_fn", "=", "fn", "with", "hide", "(", "'running'", ")", ":", "ret", "=", "r", ".", "local", "(", "'openssl x509 -noout -in {ssl_crt_fn} -dates'", ",", "capture", "=", "True", ")", "matches", "=", "re", ".", "findall", "(", "'notAfter=(.*?)$'", ",", "ret", ",", "flags", "=", "re", ".", "IGNORECASE", ")", "if", "matches", ":", "return", "dateutil", ".", "parser", ".", "parse", "(", "matches", "[", "0", "]", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
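The parsing that get_expiration_date performs can be reproduced locally without Fabric. A rough standalone sketch, assuming openssl and python-dateutil are available; the certificate path in the commented call is a placeholder.

import re
import subprocess

import dateutil.parser

def local_expiration_date(crt_path):
    # Equivalent of `openssl x509 -noout -in <crt> -dates` plus the notAfter regex.
    out = subprocess.check_output(
        ["openssl", "x509", "-noout", "-in", crt_path, "-dates"], text=True)
    matches = re.findall(r"notAfter=(.*?)$", out, flags=re.IGNORECASE | re.MULTILINE)
    if matches:
        return dateutil.parser.parse(matches[0])
    return None

# print(local_expiration_date("roles/all/ssl/dev.example.com.crt"))  # placeholder path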
valid
SSLSatchel.list_expiration_dates
Scans through all local .crt files and displays the expiration dates.
burlap/ssl.py
def list_expiration_dates(self, base='roles/all/ssl'): """ Scans through all local .crt files and displays the expiration dates. """ max_fn_len = 0 max_date_len = 0 data = [] for fn in os.listdir(base): fqfn = os.path.join(base, fn) if not os.path.isfile(fqfn): continue if not fn.endswith('.crt'): continue expiration_date = self.get_expiration_date(fqfn) max_fn_len = max(max_fn_len, len(fn)) max_date_len = max(max_date_len, len(str(expiration_date))) data.append((fn, expiration_date)) print('%s %s %s' % ('Filename'.ljust(max_fn_len), 'Expiration Date'.ljust(max_date_len), 'Expired')) now = datetime.now().replace(tzinfo=pytz.UTC) for fn, dt in sorted(data): if dt is None: expired = '?' elif dt < now: expired = 'YES' else: expired = 'NO' print('%s %s %s' % (fn.ljust(max_fn_len), str(dt).ljust(max_date_len), expired))
def list_expiration_dates(self, base='roles/all/ssl'): """ Scans through all local .crt files and displays the expiration dates. """ max_fn_len = 0 max_date_len = 0 data = [] for fn in os.listdir(base): fqfn = os.path.join(base, fn) if not os.path.isfile(fqfn): continue if not fn.endswith('.crt'): continue expiration_date = self.get_expiration_date(fqfn) max_fn_len = max(max_fn_len, len(fn)) max_date_len = max(max_date_len, len(str(expiration_date))) data.append((fn, expiration_date)) print('%s %s %s' % ('Filename'.ljust(max_fn_len), 'Expiration Date'.ljust(max_date_len), 'Expired')) now = datetime.now().replace(tzinfo=pytz.UTC) for fn, dt in sorted(data): if dt is None: expired = '?' elif dt < now: expired = 'YES' else: expired = 'NO' print('%s %s %s' % (fn.ljust(max_fn_len), str(dt).ljust(max_date_len), expired))
[ "Scans", "through", "all", "local", ".", "crt", "files", "and", "displays", "the", "expiration", "dates", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/ssl.py#L97-L124
[ "def", "list_expiration_dates", "(", "self", ",", "base", "=", "'roles/all/ssl'", ")", ":", "max_fn_len", "=", "0", "max_date_len", "=", "0", "data", "=", "[", "]", "for", "fn", "in", "os", ".", "listdir", "(", "base", ")", ":", "fqfn", "=", "os", ".", "path", ".", "join", "(", "base", ",", "fn", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "fqfn", ")", ":", "continue", "if", "not", "fn", ".", "endswith", "(", "'.crt'", ")", ":", "continue", "expiration_date", "=", "self", ".", "get_expiration_date", "(", "fqfn", ")", "max_fn_len", "=", "max", "(", "max_fn_len", ",", "len", "(", "fn", ")", ")", "max_date_len", "=", "max", "(", "max_date_len", ",", "len", "(", "str", "(", "expiration_date", ")", ")", ")", "data", ".", "append", "(", "(", "fn", ",", "expiration_date", ")", ")", "print", "(", "'%s %s %s'", "%", "(", "'Filename'", ".", "ljust", "(", "max_fn_len", ")", ",", "'Expiration Date'", ".", "ljust", "(", "max_date_len", ")", ",", "'Expired'", ")", ")", "now", "=", "datetime", ".", "now", "(", ")", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "UTC", ")", "for", "fn", ",", "dt", "in", "sorted", "(", "data", ")", ":", "if", "dt", "is", "None", ":", "expired", "=", "'?'", "elif", "dt", "<", "now", ":", "expired", "=", "'YES'", "else", ":", "expired", "=", "'NO'", "print", "(", "'%s %s %s'", "%", "(", "fn", ".", "ljust", "(", "max_fn_len", ")", ",", "str", "(", "dt", ")", ".", "ljust", "(", "max_date_len", ")", ",", "expired", ")", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
SSLSatchel.verify_certificate_chain
Confirms the key, CSR, and certificate files all match.
burlap/ssl.py
def verify_certificate_chain(self, base=None, crt=None, csr=None, key=None): """ Confirms the key, CSR, and certificate files all match. """ from burlap.common import get_verbose, print_fail, print_success r = self.local_renderer if base: crt = base + '.crt' csr = base + '.csr' key = base + '.key' else: assert crt and csr and key, 'If base not provided, crt and csr and key must be given.' assert os.path.isfile(crt) assert os.path.isfile(csr) assert os.path.isfile(key) csr_md5 = r.local('openssl req -noout -modulus -in %s | openssl md5' % csr, capture=True) key_md5 = r.local('openssl rsa -noout -modulus -in %s | openssl md5' % key, capture=True) crt_md5 = r.local('openssl x509 -noout -modulus -in %s | openssl md5' % crt, capture=True) match = crt_md5 == csr_md5 == key_md5 if self.verbose or not match: print('crt:', crt_md5) print('csr:', csr_md5) print('key:', key_md5) if match: print_success('Files look good!') else: print_fail('Files no not match!') raise Exception('Files no not match!')
def verify_certificate_chain(self, base=None, crt=None, csr=None, key=None): """ Confirms the key, CSR, and certificate files all match. """ from burlap.common import get_verbose, print_fail, print_success r = self.local_renderer if base: crt = base + '.crt' csr = base + '.csr' key = base + '.key' else: assert crt and csr and key, 'If base not provided, crt and csr and key must be given.' assert os.path.isfile(crt) assert os.path.isfile(csr) assert os.path.isfile(key) csr_md5 = r.local('openssl req -noout -modulus -in %s | openssl md5' % csr, capture=True) key_md5 = r.local('openssl rsa -noout -modulus -in %s | openssl md5' % key, capture=True) crt_md5 = r.local('openssl x509 -noout -modulus -in %s | openssl md5' % crt, capture=True) match = crt_md5 == csr_md5 == key_md5 if self.verbose or not match: print('crt:', crt_md5) print('csr:', csr_md5) print('key:', key_md5) if match: print_success('Files look good!') else: print_fail('Files no not match!') raise Exception('Files no not match!')
[ "Confirms", "the", "key", "CSR", "and", "certificate", "files", "all", "match", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/ssl.py#L127-L161
[ "def", "verify_certificate_chain", "(", "self", ",", "base", "=", "None", ",", "crt", "=", "None", ",", "csr", "=", "None", ",", "key", "=", "None", ")", ":", "from", "burlap", ".", "common", "import", "get_verbose", ",", "print_fail", ",", "print_success", "r", "=", "self", ".", "local_renderer", "if", "base", ":", "crt", "=", "base", "+", "'.crt'", "csr", "=", "base", "+", "'.csr'", "key", "=", "base", "+", "'.key'", "else", ":", "assert", "crt", "and", "csr", "and", "key", ",", "'If base not provided, crt and csr and key must be given.'", "assert", "os", ".", "path", ".", "isfile", "(", "crt", ")", "assert", "os", ".", "path", ".", "isfile", "(", "csr", ")", "assert", "os", ".", "path", ".", "isfile", "(", "key", ")", "csr_md5", "=", "r", ".", "local", "(", "'openssl req -noout -modulus -in %s | openssl md5'", "%", "csr", ",", "capture", "=", "True", ")", "key_md5", "=", "r", ".", "local", "(", "'openssl rsa -noout -modulus -in %s | openssl md5'", "%", "key", ",", "capture", "=", "True", ")", "crt_md5", "=", "r", ".", "local", "(", "'openssl x509 -noout -modulus -in %s | openssl md5'", "%", "crt", ",", "capture", "=", "True", ")", "match", "=", "crt_md5", "==", "csr_md5", "==", "key_md5", "if", "self", ".", "verbose", "or", "not", "match", ":", "print", "(", "'crt:'", ",", "crt_md5", ")", "print", "(", "'csr:'", ",", "csr_md5", ")", "print", "(", "'key:'", ",", "key_md5", ")", "if", "match", ":", "print_success", "(", "'Files look good!'", ")", "else", ":", "print_fail", "(", "'Files no not match!'", ")", "raise", "Exception", "(", "'Files no not match!'", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
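The check in verify_certificate_chain amounts to comparing the MD5 of the modulus reported by three different openssl subcommands. A small standalone sketch of the same comparison, assuming matching .key/.csr/.crt files exist at a placeholder base path:

import subprocess

def modulus_md5(cmd):
    # e.g. "openssl x509 -noout -modulus -in example.crt | openssl md5"
    return subprocess.check_output(cmd, shell=True, text=True).strip()

base = "roles/all/ssl/dev.example.com"  # placeholder base path
crt = modulus_md5("openssl x509 -noout -modulus -in %s.crt | openssl md5" % base)
csr = modulus_md5("openssl req -noout -modulus -in %s.csr | openssl md5" % base)
key = modulus_md5("openssl rsa -noout -modulus -in %s.key | openssl md5" % base)
print("match" if crt == csr == key else "mismatch", crt, csr, key)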
valid
_get_environ_handler
Dynamically creates a Fabric task for each configuration role.
burlap/__init__.py
def _get_environ_handler(name, d): """ Dynamically creates a Fabric task for each configuration role. """ def func(site=None, **kwargs): from fabric import state # We can't auto-set default_site, because that break tasks that have # to operate over multiple sites. # If a task requires a site, it can pull from default_site as needed. #site = site or d.get('default_site') or env.SITE BURLAP_SHELL_PREFIX = int(os.environ.get('BURLAP_SHELL_PREFIX', '0')) if BURLAP_SHELL_PREFIX: print('#!/bin/bash') print('# Generated with:') print('#') print('# export BURLAP_SHELL_PREFIX=1; export BURLAP_COMMAND_PREFIX=0; fab %s' % (' '.join(sys.argv[1:]),)) print('#') BURLAP_COMMAND_PREFIX = int(os.environ.get('BURLAP_COMMAND_PREFIX', '1')) with_args = [] if not BURLAP_COMMAND_PREFIX: for k in state.output: state.output[k] = False hostname = kwargs.get('hostname') hostname = hostname or kwargs.get('name') hostname = hostname or kwargs.get('hn') hostname = hostname or kwargs.get('h') verbose = int(kwargs.get('verbose', '0')) common.set_verbose(verbose) # Load environment for current role. env.update(env_default) env[common.ROLE] = os.environ[common.ROLE] = name if site: env[common.SITE] = os.environ[common.SITE] = site env.update(d) # Load host retriever. retriever = None if env.hosts_retriever: # Dynamically retrieve hosts. # module_name = '.'.join(env.hosts_retriever.split('.')[:-1]) # func_name = env.hosts_retriever.split('.')[-1] # retriever = getattr(importlib.import_module(module_name), func_name) retriever = common.get_hosts_retriever() if verbose: print('Using retriever:', env.hosts_retriever, retriever) # Load host translator. translator = None if hostname: # Filter hosts list by a specific host name. module_name = '.'.join(env.hostname_translator.split('.')[:-1]) func_name = env.hostname_translator.split('.')[-1] translator = getattr(importlib.import_module(module_name), func_name) # Re-load environment for current role, incase loading # the retriever/translator reset some environment values. env.update(env_default) env[common.ROLE] = os.environ[common.ROLE] = name if site: env[common.SITE] = os.environ[common.SITE] = site env.update(d) # Dynamically retrieve hosts. if env.hosts_retriever: if verbose: print('Building host list with retriever %s...' % env.hosts_retriever) env.hosts = list(retriever(site=site)) if verbose: print('Found hosts:') print(env.hosts) # Filter hosts list by a specific host name. if hostname: _hostname = hostname hostname = translator(hostname=hostname) _hosts = env.hosts env.hosts = [_ for _ in env.hosts if _ == hostname] assert env.hosts, 'Hostname %s does not match any known hosts.' % (_hostname,) if env.is_local is None: if env.hosts: env.is_local = 'localhost' in env.hosts or '127.0.0.1' in env.hosts elif env.host_string: env.is_local = 'localhost' in env.host_string or '127.0.0.1' in env.host_string for cb in common.post_role_load_callbacks: cb() # Ensure satchels don't cache values from previously loaded roles. common.reset_all_satchels() if env.hosts and not env.host_string: env.host_string = env.hosts[0] if verbose: print('Loaded role %s.' % (name,), file=sys.stderr) func.__doc__ = 'Sets enivronment variables for the "%s" role.' % (name,) return func
def _get_environ_handler(name, d): """ Dynamically creates a Fabric task for each configuration role. """ def func(site=None, **kwargs): from fabric import state # We can't auto-set default_site, because that break tasks that have # to operate over multiple sites. # If a task requires a site, it can pull from default_site as needed. #site = site or d.get('default_site') or env.SITE BURLAP_SHELL_PREFIX = int(os.environ.get('BURLAP_SHELL_PREFIX', '0')) if BURLAP_SHELL_PREFIX: print('#!/bin/bash') print('# Generated with:') print('#') print('# export BURLAP_SHELL_PREFIX=1; export BURLAP_COMMAND_PREFIX=0; fab %s' % (' '.join(sys.argv[1:]),)) print('#') BURLAP_COMMAND_PREFIX = int(os.environ.get('BURLAP_COMMAND_PREFIX', '1')) with_args = [] if not BURLAP_COMMAND_PREFIX: for k in state.output: state.output[k] = False hostname = kwargs.get('hostname') hostname = hostname or kwargs.get('name') hostname = hostname or kwargs.get('hn') hostname = hostname or kwargs.get('h') verbose = int(kwargs.get('verbose', '0')) common.set_verbose(verbose) # Load environment for current role. env.update(env_default) env[common.ROLE] = os.environ[common.ROLE] = name if site: env[common.SITE] = os.environ[common.SITE] = site env.update(d) # Load host retriever. retriever = None if env.hosts_retriever: # Dynamically retrieve hosts. # module_name = '.'.join(env.hosts_retriever.split('.')[:-1]) # func_name = env.hosts_retriever.split('.')[-1] # retriever = getattr(importlib.import_module(module_name), func_name) retriever = common.get_hosts_retriever() if verbose: print('Using retriever:', env.hosts_retriever, retriever) # Load host translator. translator = None if hostname: # Filter hosts list by a specific host name. module_name = '.'.join(env.hostname_translator.split('.')[:-1]) func_name = env.hostname_translator.split('.')[-1] translator = getattr(importlib.import_module(module_name), func_name) # Re-load environment for current role, incase loading # the retriever/translator reset some environment values. env.update(env_default) env[common.ROLE] = os.environ[common.ROLE] = name if site: env[common.SITE] = os.environ[common.SITE] = site env.update(d) # Dynamically retrieve hosts. if env.hosts_retriever: if verbose: print('Building host list with retriever %s...' % env.hosts_retriever) env.hosts = list(retriever(site=site)) if verbose: print('Found hosts:') print(env.hosts) # Filter hosts list by a specific host name. if hostname: _hostname = hostname hostname = translator(hostname=hostname) _hosts = env.hosts env.hosts = [_ for _ in env.hosts if _ == hostname] assert env.hosts, 'Hostname %s does not match any known hosts.' % (_hostname,) if env.is_local is None: if env.hosts: env.is_local = 'localhost' in env.hosts or '127.0.0.1' in env.hosts elif env.host_string: env.is_local = 'localhost' in env.host_string or '127.0.0.1' in env.host_string for cb in common.post_role_load_callbacks: cb() # Ensure satchels don't cache values from previously loaded roles. common.reset_all_satchels() if env.hosts and not env.host_string: env.host_string = env.hosts[0] if verbose: print('Loaded role %s.' % (name,), file=sys.stderr) func.__doc__ = 'Sets enivronment variables for the "%s" role.' % (name,) return func
[ "Dynamically", "creates", "a", "Fabric", "task", "for", "each", "configuration", "role", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/__init__.py#L80-L185
[ "def", "_get_environ_handler", "(", "name", ",", "d", ")", ":", "def", "func", "(", "site", "=", "None", ",", "*", "*", "kwargs", ")", ":", "from", "fabric", "import", "state", "# We can't auto-set default_site, because that break tasks that have", "# to operate over multiple sites.", "# If a task requires a site, it can pull from default_site as needed.", "#site = site or d.get('default_site') or env.SITE", "BURLAP_SHELL_PREFIX", "=", "int", "(", "os", ".", "environ", ".", "get", "(", "'BURLAP_SHELL_PREFIX'", ",", "'0'", ")", ")", "if", "BURLAP_SHELL_PREFIX", ":", "print", "(", "'#!/bin/bash'", ")", "print", "(", "'# Generated with:'", ")", "print", "(", "'#'", ")", "print", "(", "'# export BURLAP_SHELL_PREFIX=1; export BURLAP_COMMAND_PREFIX=0; fab %s'", "%", "(", "' '", ".", "join", "(", "sys", ".", "argv", "[", "1", ":", "]", ")", ",", ")", ")", "print", "(", "'#'", ")", "BURLAP_COMMAND_PREFIX", "=", "int", "(", "os", ".", "environ", ".", "get", "(", "'BURLAP_COMMAND_PREFIX'", ",", "'1'", ")", ")", "with_args", "=", "[", "]", "if", "not", "BURLAP_COMMAND_PREFIX", ":", "for", "k", "in", "state", ".", "output", ":", "state", ".", "output", "[", "k", "]", "=", "False", "hostname", "=", "kwargs", ".", "get", "(", "'hostname'", ")", "hostname", "=", "hostname", "or", "kwargs", ".", "get", "(", "'name'", ")", "hostname", "=", "hostname", "or", "kwargs", ".", "get", "(", "'hn'", ")", "hostname", "=", "hostname", "or", "kwargs", ".", "get", "(", "'h'", ")", "verbose", "=", "int", "(", "kwargs", ".", "get", "(", "'verbose'", ",", "'0'", ")", ")", "common", ".", "set_verbose", "(", "verbose", ")", "# Load environment for current role.", "env", ".", "update", "(", "env_default", ")", "env", "[", "common", ".", "ROLE", "]", "=", "os", ".", "environ", "[", "common", ".", "ROLE", "]", "=", "name", "if", "site", ":", "env", "[", "common", ".", "SITE", "]", "=", "os", ".", "environ", "[", "common", ".", "SITE", "]", "=", "site", "env", ".", "update", "(", "d", ")", "# Load host retriever.", "retriever", "=", "None", "if", "env", ".", "hosts_retriever", ":", "# Dynamically retrieve hosts.", "# module_name = '.'.join(env.hosts_retriever.split('.')[:-1])", "# func_name = env.hosts_retriever.split('.')[-1]", "# retriever = getattr(importlib.import_module(module_name), func_name)", "retriever", "=", "common", ".", "get_hosts_retriever", "(", ")", "if", "verbose", ":", "print", "(", "'Using retriever:'", ",", "env", ".", "hosts_retriever", ",", "retriever", ")", "# Load host translator.", "translator", "=", "None", "if", "hostname", ":", "# Filter hosts list by a specific host name.", "module_name", "=", "'.'", ".", "join", "(", "env", ".", "hostname_translator", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "func_name", "=", "env", ".", "hostname_translator", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "translator", "=", "getattr", "(", "importlib", ".", "import_module", "(", "module_name", ")", ",", "func_name", ")", "# Re-load environment for current role, incase loading", "# the retriever/translator reset some environment values.", "env", ".", "update", "(", "env_default", ")", "env", "[", "common", ".", "ROLE", "]", "=", "os", ".", "environ", "[", "common", ".", "ROLE", "]", "=", "name", "if", "site", ":", "env", "[", "common", ".", "SITE", "]", "=", "os", ".", "environ", "[", "common", ".", "SITE", "]", "=", "site", "env", ".", "update", "(", "d", ")", "# Dynamically retrieve hosts.", "if", "env", ".", "hosts_retriever", ":", "if", "verbose", ":", "print", "(", "'Building host 
list with retriever %s...'", "%", "env", ".", "hosts_retriever", ")", "env", ".", "hosts", "=", "list", "(", "retriever", "(", "site", "=", "site", ")", ")", "if", "verbose", ":", "print", "(", "'Found hosts:'", ")", "print", "(", "env", ".", "hosts", ")", "# Filter hosts list by a specific host name.", "if", "hostname", ":", "_hostname", "=", "hostname", "hostname", "=", "translator", "(", "hostname", "=", "hostname", ")", "_hosts", "=", "env", ".", "hosts", "env", ".", "hosts", "=", "[", "_", "for", "_", "in", "env", ".", "hosts", "if", "_", "==", "hostname", "]", "assert", "env", ".", "hosts", ",", "'Hostname %s does not match any known hosts.'", "%", "(", "_hostname", ",", ")", "if", "env", ".", "is_local", "is", "None", ":", "if", "env", ".", "hosts", ":", "env", ".", "is_local", "=", "'localhost'", "in", "env", ".", "hosts", "or", "'127.0.0.1'", "in", "env", ".", "hosts", "elif", "env", ".", "host_string", ":", "env", ".", "is_local", "=", "'localhost'", "in", "env", ".", "host_string", "or", "'127.0.0.1'", "in", "env", ".", "host_string", "for", "cb", "in", "common", ".", "post_role_load_callbacks", ":", "cb", "(", ")", "# Ensure satchels don't cache values from previously loaded roles.", "common", ".", "reset_all_satchels", "(", ")", "if", "env", ".", "hosts", "and", "not", "env", ".", "host_string", ":", "env", ".", "host_string", "=", "env", ".", "hosts", "[", "0", "]", "if", "verbose", ":", "print", "(", "'Loaded role %s.'", "%", "(", "name", ",", ")", ",", "file", "=", "sys", ".", "stderr", ")", "func", ".", "__doc__", "=", "'Sets enivronment variables for the \"%s\" role.'", "%", "(", "name", ",", ")", "return", "func" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
update_merge
Recursively merges two dictionaries. Uses fabric's AttributeDict so you can reference values via dot-notation. e.g. env.value1.value2.value3... http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
burlap/__init__.py
def update_merge(d, u): """ Recursively merges two dictionaries. Uses fabric's AttributeDict so you can reference values via dot-notation. e.g. env.value1.value2.value3... http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth """ import collections for k, v in u.items(): if isinstance(v, collections.Mapping): r = update_merge(d.get(k, dict()), v) d[k] = r else: d[k] = u[k] return d
def update_merge(d, u): """ Recursively merges two dictionaries. Uses fabric's AttributeDict so you can reference values via dot-notation. e.g. env.value1.value2.value3... http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth """ import collections for k, v in u.items(): if isinstance(v, collections.Mapping): r = update_merge(d.get(k, dict()), v) d[k] = r else: d[k] = u[k] return d
[ "Recursively", "merges", "two", "dictionaries", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/__init__.py#L187-L203
[ "def", "update_merge", "(", "d", ",", "u", ")", ":", "import", "collections", "for", "k", ",", "v", "in", "u", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "collections", ".", "Mapping", ")", ":", "r", "=", "update_merge", "(", "d", ".", "get", "(", "k", ",", "dict", "(", ")", ")", ",", "v", ")", "d", "[", "k", "]", "=", "r", "else", ":", "d", "[", "k", "]", "=", "u", "[", "k", "]", "return", "d" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
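Because update_merge is plain Python, its behaviour is easy to demonstrate in isolation. A minimal sketch of the same deep-merge idea follows; note that collections.Mapping has moved to collections.abc on modern interpreters, which is the one adjustment made here.

from collections.abc import Mapping

def deep_merge(d, u):
    # Recursively merge u into d, descending into nested mappings.
    for k, v in u.items():
        if isinstance(v, Mapping):
            d[k] = deep_merge(d.get(k, {}), v)
        else:
            d[k] = v
    return d

settings = {"db": {"host": "localhost", "port": 5432}}
override = {"db": {"port": 5433}, "debug": True}
print(deep_merge(settings, override))
# -> {'db': {'host': 'localhost', 'port': 5433}, 'debug': True}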
valid
check_version
Compares the local version against the latest official version on PyPI and displays a warning message if a newer release is available. This check can be disabled by setting the environment variable BURLAP_CHECK_VERSION=0.
burlap/__init__.py
def check_version(): """ Compares the local version against the latest official version on PyPI and displays a warning message if a newer release is available. This check can be disabled by setting the environment variable BURLAP_CHECK_VERSION=0. """ global CHECK_VERSION if not CHECK_VERSION: return # Ensure we only check once in this process. CHECK_VERSION = 0 # Lookup most recent remote version. from six.moves.urllib.request import urlopen try: response = urlopen("https://pypi.org/pypi/burlap/json") data = json.loads(response.read().decode()) remote_release = sorted(tuple(map(int, _.split('.'))) for _ in data['releases'].keys())[-1] remote_release_str = '.'.join(map(str, remote_release)) local_release = VERSION local_release_str = '.'.join(map(str, local_release)) # Display warning. if remote_release > local_release: print('\033[93m') print("You are using burlap version %s, however version %s is available." % (local_release_str, remote_release_str)) print("You should consider upgrading via the 'pip install --upgrade burlap' command.") print('\033[0m') except Exception as exc: print('\033[93m') print("Unable to check for updated burlap version: %s" % exc) print('\033[0m')
def check_version(): """ Compares the local version against the latest official version on PyPI and displays a warning message if a newer release is available. This check can be disabled by setting the environment variable BURLAP_CHECK_VERSION=0. """ global CHECK_VERSION if not CHECK_VERSION: return # Ensure we only check once in this process. CHECK_VERSION = 0 # Lookup most recent remote version. from six.moves.urllib.request import urlopen try: response = urlopen("https://pypi.org/pypi/burlap/json") data = json.loads(response.read().decode()) remote_release = sorted(tuple(map(int, _.split('.'))) for _ in data['releases'].keys())[-1] remote_release_str = '.'.join(map(str, remote_release)) local_release = VERSION local_release_str = '.'.join(map(str, local_release)) # Display warning. if remote_release > local_release: print('\033[93m') print("You are using burlap version %s, however version %s is available." % (local_release_str, remote_release_str)) print("You should consider upgrading via the 'pip install --upgrade burlap' command.") print('\033[0m') except Exception as exc: print('\033[93m') print("Unable to check for updated burlap version: %s" % exc) print('\033[0m')
[ "Compares", "the", "local", "version", "against", "the", "latest", "official", "version", "on", "PyPI", "and", "displays", "a", "warning", "message", "if", "a", "newer", "release", "is", "available", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/__init__.py#L287-L316
[ "def", "check_version", "(", ")", ":", "global", "CHECK_VERSION", "if", "not", "CHECK_VERSION", ":", "return", "# Ensure we only check once in this process.", "CHECK_VERSION", "=", "0", "# Lookup most recent remote version.", "from", "six", ".", "moves", ".", "urllib", ".", "request", "import", "urlopen", "try", ":", "response", "=", "urlopen", "(", "\"https://pypi.org/pypi/burlap/json\"", ")", "data", "=", "json", ".", "loads", "(", "response", ".", "read", "(", ")", ".", "decode", "(", ")", ")", "remote_release", "=", "sorted", "(", "tuple", "(", "map", "(", "int", ",", "_", ".", "split", "(", "'.'", ")", ")", ")", "for", "_", "in", "data", "[", "'releases'", "]", ".", "keys", "(", ")", ")", "[", "-", "1", "]", "remote_release_str", "=", "'.'", ".", "join", "(", "map", "(", "str", ",", "remote_release", ")", ")", "local_release", "=", "VERSION", "local_release_str", "=", "'.'", ".", "join", "(", "map", "(", "str", ",", "local_release", ")", ")", "# Display warning.", "if", "remote_release", ">", "local_release", ":", "print", "(", "'\\033[93m'", ")", "print", "(", "\"You are using burlap version %s, however version %s is available.\"", "%", "(", "local_release_str", ",", "remote_release_str", ")", ")", "print", "(", "\"You should consider upgrading via the 'pip install --upgrade burlap' command.\"", ")", "print", "(", "'\\033[0m'", ")", "except", "Exception", "as", "exc", ":", "print", "(", "'\\033[93m'", ")", "print", "(", "\"Unable to check for updated burlap version: %s\"", "%", "exc", ")", "print", "(", "'\\033[0m'", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
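The comparison in check_version works by turning dotted release strings into integer tuples so they sort numerically rather than lexically. A rough standalone sketch of the same lookup; it assumes network access to pypi.org, a hypothetical local VERSION tuple, and purely numeric dotted release strings (a non-numeric tag would make int() raise).

import json
from urllib.request import urlopen

VERSION = (3, 58, 0)  # hypothetical local version

with urlopen("https://pypi.org/pypi/burlap/json") as response:
    data = json.loads(response.read().decode())

# Convert "1.2.10" -> (1, 2, 10) so releases sort numerically.
latest = sorted(tuple(map(int, rel.split("."))) for rel in data["releases"])[-1]
if latest > VERSION:
    print("Newer release available: %s" % ".".join(map(str, latest)))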
valid
populate_fabfile
Automatically includes all submodules and role selectors in the top-level fabfile using spooky-scary black magic. This allows us to avoid manually declaring imports for every module, e.g. import burlap.pip import burlap.vm import burlap... which has the added benefit of allowing us to manually call the commands without typing "burlap". This is solely for convenience. If not needed, it can be disabled by specifying the environment variable: export BURLAP_POPULATE_STACK=0
burlap/__init__.py
def populate_fabfile(): """ Automatically includes all submodules and role selectors in the top-level fabfile using spooky-scary black magic. This allows us to avoid manually declaring imports for every module, e.g. import burlap.pip import burlap.vm import burlap... which has the added benefit of allowing us to manually call the commands without typing "burlap". This is soley for convenience. If not needed, it can be disabled by specifying the environment variable: export BURLAP_POPULATE_STACK=0 """ stack = inspect.stack() fab_frame = None for frame_obj, script_fn, line, _, _, _ in stack: if 'fabfile.py' in script_fn: fab_frame = frame_obj break if not fab_frame: return try: locals_ = fab_frame.f_locals for module_name, module in sub_modules.items(): locals_[module_name] = module for role_name, role_func in role_commands.items(): assert role_name not in sub_modules, \ ('The role %s conflicts with a built-in submodule. ' 'Please choose a different name.') % (role_name) locals_[role_name] = role_func locals_['common'] = common # Put all debug commands into the global namespace. # for _debug_name in debug.debug.get_tasks(): # print('_debug_name:', _debug_name) locals_['shell'] = shell#debug.debug.shell # Put all virtual satchels in the global namespace so Fabric can find them. for _module_alias in common.post_import_modules: exec("import %s" % _module_alias) # pylint: disable=exec-used locals_[_module_alias] = locals()[_module_alias] finally: del stack
def populate_fabfile(): """ Automatically includes all submodules and role selectors in the top-level fabfile using spooky-scary black magic. This allows us to avoid manually declaring imports for every module, e.g. import burlap.pip import burlap.vm import burlap... which has the added benefit of allowing us to manually call the commands without typing "burlap". This is soley for convenience. If not needed, it can be disabled by specifying the environment variable: export BURLAP_POPULATE_STACK=0 """ stack = inspect.stack() fab_frame = None for frame_obj, script_fn, line, _, _, _ in stack: if 'fabfile.py' in script_fn: fab_frame = frame_obj break if not fab_frame: return try: locals_ = fab_frame.f_locals for module_name, module in sub_modules.items(): locals_[module_name] = module for role_name, role_func in role_commands.items(): assert role_name not in sub_modules, \ ('The role %s conflicts with a built-in submodule. ' 'Please choose a different name.') % (role_name) locals_[role_name] = role_func locals_['common'] = common # Put all debug commands into the global namespace. # for _debug_name in debug.debug.get_tasks(): # print('_debug_name:', _debug_name) locals_['shell'] = shell#debug.debug.shell # Put all virtual satchels in the global namespace so Fabric can find them. for _module_alias in common.post_import_modules: exec("import %s" % _module_alias) # pylint: disable=exec-used locals_[_module_alias] = locals()[_module_alias] finally: del stack
[ "Automatically", "includes", "all", "submodules", "and", "role", "selectors", "in", "the", "top", "-", "level", "fabfile", "using", "spooky", "-", "scary", "black", "magic", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/__init__.py#L321-L372
[ "def", "populate_fabfile", "(", ")", ":", "stack", "=", "inspect", ".", "stack", "(", ")", "fab_frame", "=", "None", "for", "frame_obj", ",", "script_fn", ",", "line", ",", "_", ",", "_", ",", "_", "in", "stack", ":", "if", "'fabfile.py'", "in", "script_fn", ":", "fab_frame", "=", "frame_obj", "break", "if", "not", "fab_frame", ":", "return", "try", ":", "locals_", "=", "fab_frame", ".", "f_locals", "for", "module_name", ",", "module", "in", "sub_modules", ".", "items", "(", ")", ":", "locals_", "[", "module_name", "]", "=", "module", "for", "role_name", ",", "role_func", "in", "role_commands", ".", "items", "(", ")", ":", "assert", "role_name", "not", "in", "sub_modules", ",", "(", "'The role %s conflicts with a built-in submodule. '", "'Please choose a different name.'", ")", "%", "(", "role_name", ")", "locals_", "[", "role_name", "]", "=", "role_func", "locals_", "[", "'common'", "]", "=", "common", "# Put all debug commands into the global namespace.", "# for _debug_name in debug.debug.get_tasks():", "# print('_debug_name:', _debug_name)", "locals_", "[", "'shell'", "]", "=", "shell", "#debug.debug.shell", "# Put all virtual satchels in the global namespace so Fabric can find them.", "for", "_module_alias", "in", "common", ".", "post_import_modules", ":", "exec", "(", "\"import %s\"", "%", "_module_alias", ")", "# pylint: disable=exec-used", "locals_", "[", "_module_alias", "]", "=", "locals", "(", ")", "[", "_module_alias", "]", "finally", ":", "del", "stack" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
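populate_fabfile works by locating the importing fabfile's frame on the call stack and writing names directly into its namespace. A toy sketch of the same idea is below; it is restricted to module-level frames, where a frame's f_locals really is the module's global dict, and the injected name is purely illustrative (none of this is burlap's API).

import inspect

def inject_into_caller_module(**names):
    # Find the nearest module-level frame on the stack and write names into it.
    # This is only dependable for "<module>" frames, whose f_locals is globals().
    for frame_info in inspect.stack():
        frame = frame_info.frame
        if frame.f_code.co_name == "<module>":
            frame.f_locals.update(names)
            break

inject_into_caller_module(answer=42)
print(answer)  # 42: the name now exists at module scope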
valid
task_or_dryrun
Decorator declaring the wrapped function to be a new-style task. May be invoked as a simple, argument-less decorator (i.e. ``@task``) or with arguments customizing its behavior (e.g. ``@task(alias='myalias')``). Please see the :ref:`new-style task <task-decorator>` documentation for details on how to use this decorator. .. versionchanged:: 1.2 Added the ``alias``, ``aliases``, ``task_class`` and ``default`` keyword arguments. See :ref:`task-decorator-arguments` for details. .. versionchanged:: 1.5 Added the ``name`` keyword argument. .. seealso:: `~fabric.docs.unwrap_tasks`, `~fabric.tasks.WrappedCallableTask`
burlap/decorators.py
def task_or_dryrun(*args, **kwargs): """ Decorator declaring the wrapped function to be a new-style task. May be invoked as a simple, argument-less decorator (i.e. ``@task``) or with arguments customizing its behavior (e.g. ``@task(alias='myalias')``). Please see the :ref:`new-style task <task-decorator>` documentation for details on how to use this decorator. .. versionchanged:: 1.2 Added the ``alias``, ``aliases``, ``task_class`` and ``default`` keyword arguments. See :ref:`task-decorator-arguments` for details. .. versionchanged:: 1.5 Added the ``name`` keyword argument. .. seealso:: `~fabric.docs.unwrap_tasks`, `~fabric.tasks.WrappedCallableTask` """ invoked = bool(not args or kwargs) task_class = kwargs.pop("task_class", WrappedCallableTask) # if invoked: # func, args = args[0], () # else: func, args = args[0], () def wrapper(func): return task_class(func, *args, **kwargs) wrapper.is_task_or_dryrun = True wrapper.wrapped = func return wrapper if invoked else wrapper(func)
def task_or_dryrun(*args, **kwargs): """ Decorator declaring the wrapped function to be a new-style task. May be invoked as a simple, argument-less decorator (i.e. ``@task``) or with arguments customizing its behavior (e.g. ``@task(alias='myalias')``). Please see the :ref:`new-style task <task-decorator>` documentation for details on how to use this decorator. .. versionchanged:: 1.2 Added the ``alias``, ``aliases``, ``task_class`` and ``default`` keyword arguments. See :ref:`task-decorator-arguments` for details. .. versionchanged:: 1.5 Added the ``name`` keyword argument. .. seealso:: `~fabric.docs.unwrap_tasks`, `~fabric.tasks.WrappedCallableTask` """ invoked = bool(not args or kwargs) task_class = kwargs.pop("task_class", WrappedCallableTask) # if invoked: # func, args = args[0], () # else: func, args = args[0], () def wrapper(func): return task_class(func, *args, **kwargs) wrapper.is_task_or_dryrun = True wrapper.wrapped = func return wrapper if invoked else wrapper(func)
[ "Decorator", "declaring", "the", "wrapped", "function", "to", "be", "a", "new", "-", "style", "task", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/decorators.py#L14-L44
[ "def", "task_or_dryrun", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "invoked", "=", "bool", "(", "not", "args", "or", "kwargs", ")", "task_class", "=", "kwargs", ".", "pop", "(", "\"task_class\"", ",", "WrappedCallableTask", ")", "# if invoked:", "# func, args = args[0], ()", "# else:", "func", ",", "args", "=", "args", "[", "0", "]", ",", "(", ")", "def", "wrapper", "(", "func", ")", ":", "return", "task_class", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", "wrapper", ".", "is_task_or_dryrun", "=", "True", "wrapper", ".", "wrapped", "=", "func", "return", "wrapper", "if", "invoked", "else", "wrapper", "(", "func", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
task
Decorator for registering a satchel method as a Fabric task. Can be used like: @task def my_method(self): ... @task(precursors=['other_satchel']) def my_method(self): ...
burlap/decorators.py
def task(*args, **kwargs): """ Decorator for registering a satchel method as a Fabric task. Can be used like: @task def my_method(self): ... @task(precursors=['other_satchel']) def my_method(self): ... """ precursors = kwargs.pop('precursors', None) post_callback = kwargs.pop('post_callback', False) if args and callable(args[0]): # direct decoration, @task return _task(*args) # callable decoration, @task(precursors=['satchel']) def wrapper(meth): if precursors: meth.deploy_before = list(precursors) if post_callback: #from burlap.common import post_callbacks #post_callbacks.append(meth) meth.is_post_callback = True return _task(meth) return wrapper
def task(*args, **kwargs): """ Decorator for registering a satchel method as a Fabric task. Can be used like: @task def my_method(self): ... @task(precursors=['other_satchel']) def my_method(self): ... """ precursors = kwargs.pop('precursors', None) post_callback = kwargs.pop('post_callback', False) if args and callable(args[0]): # direct decoration, @task return _task(*args) # callable decoration, @task(precursors=['satchel']) def wrapper(meth): if precursors: meth.deploy_before = list(precursors) if post_callback: #from burlap.common import post_callbacks #post_callbacks.append(meth) meth.is_post_callback = True return _task(meth) return wrapper
[ "Decorator", "for", "registering", "a", "satchel", "method", "as", "a", "Fabric", "task", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/decorators.py#L68-L98
[ "def", "task", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "precursors", "=", "kwargs", ".", "pop", "(", "'precursors'", ",", "None", ")", "post_callback", "=", "kwargs", ".", "pop", "(", "'post_callback'", ",", "False", ")", "if", "args", "and", "callable", "(", "args", "[", "0", "]", ")", ":", "# direct decoration, @task", "return", "_task", "(", "*", "args", ")", "# callable decoration, @task(precursors=['satchel'])", "def", "wrapper", "(", "meth", ")", ":", "if", "precursors", ":", "meth", ".", "deploy_before", "=", "list", "(", "precursors", ")", "if", "post_callback", ":", "#from burlap.common import post_callbacks", "#post_callbacks.append(meth)", "meth", ".", "is_post_callback", "=", "True", "return", "_task", "(", "meth", ")", "return", "wrapper" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
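The @task decorator above accepts both the bare form and the parameterised form; the `args and callable(args[0])` test is what tells them apart. A generic sketch of that dual-form decorator pattern, using a simple list as a stand-in for Fabric's task registration:

REGISTRY = []  # stand-in for Fabric's task registration

def task(*args, **kwargs):
    precursors = kwargs.pop("precursors", None)

    def decorate(func):
        if precursors:
            func.deploy_before = list(precursors)
        REGISTRY.append(func)
        return func

    # Bare usage: @task passes (func,); parameterised usage: @task(...) passes ().
    if args and callable(args[0]):
        return decorate(args[0])
    return decorate

@task
def deploy(): pass

@task(precursors=["packager"])
def configure(): pass

print([f.__name__ for f in REGISTRY], configure.deploy_before)
# -> ['deploy', 'configure'] ['packager']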
valid
runs_once
A wrapper around Fabric's runs_once() to support our dryrun feature.
burlap/decorators.py
def runs_once(meth): """ A wrapper around Fabric's runs_once() to support our dryrun feature. """ from burlap.common import get_dryrun, runs_once_methods if get_dryrun(): pass else: runs_once_methods.append(meth) _runs_once(meth) return meth
def runs_once(meth): """ A wrapper around Fabric's runs_once() to support our dryrun feature. """ from burlap.common import get_dryrun, runs_once_methods if get_dryrun(): pass else: runs_once_methods.append(meth) _runs_once(meth) return meth
[ "A", "wrapper", "around", "Fabric", "s", "runs_once", "()", "to", "support", "our", "dryrun", "feature", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/decorators.py#L100-L110
[ "def", "runs_once", "(", "meth", ")", ":", "from", "burlap", ".", "common", "import", "get_dryrun", ",", "runs_once_methods", "if", "get_dryrun", "(", ")", ":", "pass", "else", ":", "runs_once_methods", ".", "append", "(", "meth", ")", "_runs_once", "(", "meth", ")", "return", "meth" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
FileSatchel.is_file
Check if a path exists, and is a file.
burlap/files.py
def is_file(self, path, use_sudo=False): """ Check if a path exists, and is a file. """ if self.is_local and not use_sudo: return os.path.isfile(path) else: func = use_sudo and _sudo or _run with self.settings(hide('running', 'warnings'), warn_only=True): return func('[ -f "%(path)s" ]' % locals()).succeeded
def is_file(self, path, use_sudo=False): """ Check if a path exists, and is a file. """ if self.is_local and not use_sudo: return os.path.isfile(path) else: func = use_sudo and _sudo or _run with self.settings(hide('running', 'warnings'), warn_only=True): return func('[ -f "%(path)s" ]' % locals()).succeeded
[ "Check", "if", "a", "path", "exists", "and", "is", "a", "file", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L107-L116
[ "def", "is_file", "(", "self", ",", "path", ",", "use_sudo", "=", "False", ")", ":", "if", "self", ".", "is_local", "and", "not", "use_sudo", ":", "return", "os", ".", "path", ".", "isfile", "(", "path", ")", "else", ":", "func", "=", "use_sudo", "and", "_sudo", "or", "_run", "with", "self", ".", "settings", "(", "hide", "(", "'running'", ",", "'warnings'", ")", ",", "warn_only", "=", "True", ")", ":", "return", "func", "(", "'[ -f \"%(path)s\" ]'", "%", "locals", "(", ")", ")", ".", "succeeded" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
FileSatchel.is_dir
Check if a path exists, and is a directory.
burlap/files.py
def is_dir(self, path, use_sudo=False): """ Check if a path exists, and is a directory. """ if self.is_local and not use_sudo: return os.path.isdir(path) else: func = use_sudo and _sudo or _run with self.settings(hide('running', 'warnings'), warn_only=True): return func('[ -d "%(path)s" ]' % locals()).succeeded
def is_dir(self, path, use_sudo=False): """ Check if a path exists, and is a directory. """ if self.is_local and not use_sudo: return os.path.isdir(path) else: func = use_sudo and _sudo or _run with self.settings(hide('running', 'warnings'), warn_only=True): return func('[ -d "%(path)s" ]' % locals()).succeeded
[ "Check", "if", "a", "path", "exists", "and", "is", "a", "directory", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L119-L128
[ "def", "is_dir", "(", "self", ",", "path", ",", "use_sudo", "=", "False", ")", ":", "if", "self", ".", "is_local", "and", "not", "use_sudo", ":", "return", "os", ".", "path", ".", "isdir", "(", "path", ")", "else", ":", "func", "=", "use_sudo", "and", "_sudo", "or", "_run", "with", "self", ".", "settings", "(", "hide", "(", "'running'", ",", "'warnings'", ")", ",", "warn_only", "=", "True", ")", ":", "return", "func", "(", "'[ -d \"%(path)s\" ]'", "%", "locals", "(", ")", ")", ".", "succeeded" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
FileSatchel.is_link
Check if a path exists, and is a symbolic link.
burlap/files.py
def is_link(self, path, use_sudo=False): """ Check if a path exists, and is a symbolic link. """ func = use_sudo and _sudo or _run with self.settings(hide('running', 'warnings'), warn_only=True): return func('[ -L "%(path)s" ]' % locals()).succeeded
def is_link(self, path, use_sudo=False): """ Check if a path exists, and is a symbolic link. """ func = use_sudo and _sudo or _run with self.settings(hide('running', 'warnings'), warn_only=True): return func('[ -L "%(path)s" ]' % locals()).succeeded
[ "Check", "if", "a", "path", "exists", "and", "is", "a", "symbolic", "link", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L131-L137
[ "def", "is_link", "(", "self", ",", "path", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "use_sudo", "and", "_sudo", "or", "_run", "with", "self", ".", "settings", "(", "hide", "(", "'running'", ",", "'warnings'", ")", ",", "warn_only", "=", "True", ")", ":", "return", "func", "(", "'[ -L \"%(path)s\" ]'", "%", "locals", "(", ")", ")", ".", "succeeded" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
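is_file, is_dir and is_link all reduce to the shell's test command run on the target host, with the exit status giving the answer. A local stand-in using subprocess shows the same checks (the paths are placeholders); over SSH the command strings would be identical.

import subprocess

def shell_test(flag, path):
    # Mirrors `[ -f path ]`, `[ -d path ]`, `[ -L path ]`: exit status 0 means true.
    return subprocess.call(["test", flag, path]) == 0

print(shell_test("-f", "/etc/hostname"))   # regular file?
print(shell_test("-d", "/etc"))            # directory?
print(shell_test("-L", "/etc/localtime"))  # symbolic link (often true on Linux)?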
valid
FileSatchel.get_owner
Get the owner name of a file or directory.
burlap/files.py
def get_owner(self, path, use_sudo=False): """ Get the owner name of a file or directory. """ func = use_sudo and run_as_root or self.run # I'd prefer to use quiet=True, but that's not supported with older # versions of Fabric. with self.settings(hide('running', 'stdout'), warn_only=True): result = func('stat -c %%U "%(path)s"' % locals()) if result.failed and 'stat: illegal option' in result: # Try the BSD version of stat return func('stat -f %%Su "%(path)s"' % locals()) return result
def get_owner(self, path, use_sudo=False): """ Get the owner name of a file or directory. """ func = use_sudo and run_as_root or self.run # I'd prefer to use quiet=True, but that's not supported with older # versions of Fabric. with self.settings(hide('running', 'stdout'), warn_only=True): result = func('stat -c %%U "%(path)s"' % locals()) if result.failed and 'stat: illegal option' in result: # Try the BSD version of stat return func('stat -f %%Su "%(path)s"' % locals()) return result
[ "Get", "the", "owner", "name", "of", "a", "file", "or", "directory", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L140-L152
[ "def", "get_owner", "(", "self", ",", "path", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "use_sudo", "and", "run_as_root", "or", "self", ".", "run", "# I'd prefer to use quiet=True, but that's not supported with older", "# versions of Fabric.", "with", "self", ".", "settings", "(", "hide", "(", "'running'", ",", "'stdout'", ")", ",", "warn_only", "=", "True", ")", ":", "result", "=", "func", "(", "'stat -c %%U \"%(path)s\"'", "%", "locals", "(", ")", ")", "if", "result", ".", "failed", "and", "'stat: illegal option'", "in", "result", ":", "# Try the BSD version of stat", "return", "func", "(", "'stat -f %%Su \"%(path)s\"'", "%", "locals", "(", ")", ")", "return", "result" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
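get_owner leans on stat, trying the GNU coreutils flags first and falling back to the BSD/macOS spelling when they are rejected. A standalone sketch of the same fallback (the path is a placeholder); for purely local use, os.stat plus the pwd module would avoid shelling out entirely.

import subprocess

def file_owner(path):
    # GNU coreutils form first (stat -c %U), then the BSD/macOS form (stat -f %Su).
    for cmd in (["stat", "-c", "%U", path], ["stat", "-f", "%Su", path]):
        proc = subprocess.run(cmd, capture_output=True, text=True)
        if proc.returncode == 0:
            return proc.stdout.strip()
    return None

print(file_owner("/etc/hostname"))  # e.g. "root"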
valid
FileSatchel.umask
Get the user's umask. Returns a string such as ``'0002'``, representing the user's umask as an octal number. If `use_sudo` is `True`, this function returns root's umask.
burlap/files.py
def umask(self, use_sudo=False): """ Get the user's umask. Returns a string such as ``'0002'``, representing the user's umask as an octal number. If `use_sudo` is `True`, this function returns root's umask. """ func = use_sudo and run_as_root or self.run return func('umask')
def umask(self, use_sudo=False): """ Get the user's umask. Returns a string such as ``'0002'``, representing the user's umask as an octal number. If `use_sudo` is `True`, this function returns root's umask. """ func = use_sudo and run_as_root or self.run return func('umask')
[ "Get", "the", "user", "s", "umask", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L188-L198
[ "def", "umask", "(", "self", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "use_sudo", "and", "run_as_root", "or", "self", ".", "run", "return", "func", "(", "'umask'", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
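Reading the umask from Python has a small wrinkle: os.umask both sets and returns the mask, so the usual idiom installs a throwaway value and immediately restores the original. A tiny local sketch (the remote variant above simply runs the shell's umask builtin instead):

import os

current = os.umask(0)   # os.umask returns the previous mask while installing the new one
os.umask(current)       # put the original mask straight back
print(oct(current))     # e.g. 0o022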
valid
FileSatchel.upload_template
Upload a template file. This is a wrapper around :func:`fabric.contrib.files.upload_template` that adds some extra parameters. If ``mkdir`` is True, then the remote directory will be created, as the current user or as ``user`` if specified. If ``chown`` is True, then it will ensure that the current user (or ``user`` if specified) is the owner of the remote file.
burlap/files.py
def upload_template(self, filename, destination, context=None, use_jinja=False, template_dir=None, use_sudo=False, backup=True, mirror_local_mode=False, mode=None, mkdir=False, chown=False, user=None): """ Upload a template file. This is a wrapper around :func:`fabric.contrib.files.upload_template` that adds some extra parameters. If ``mkdir`` is True, then the remote directory will be created, as the current user or as ``user`` if specified. If ``chown`` is True, then it will ensure that the current user (or ``user`` if specified) is the owner of the remote file. """ if mkdir: remote_dir = os.path.dirname(destination) if use_sudo: self.sudo('mkdir -p %s' % quote(remote_dir), user=user) else: self.run('mkdir -p %s' % quote(remote_dir)) if not self.dryrun: _upload_template( filename=filename, destination=destination, context=context, use_jinja=use_jinja, template_dir=template_dir, use_sudo=use_sudo, backup=backup, mirror_local_mode=mirror_local_mode, mode=mode, ) if chown: if user is None: user = self.genv.user run_as_root('chown %s: %s' % (user, quote(destination)))
def upload_template(self, filename, destination, context=None, use_jinja=False, template_dir=None, use_sudo=False, backup=True, mirror_local_mode=False, mode=None, mkdir=False, chown=False, user=None): """ Upload a template file. This is a wrapper around :func:`fabric.contrib.files.upload_template` that adds some extra parameters. If ``mkdir`` is True, then the remote directory will be created, as the current user or as ``user`` if specified. If ``chown`` is True, then it will ensure that the current user (or ``user`` if specified) is the owner of the remote file. """ if mkdir: remote_dir = os.path.dirname(destination) if use_sudo: self.sudo('mkdir -p %s' % quote(remote_dir), user=user) else: self.run('mkdir -p %s' % quote(remote_dir)) if not self.dryrun: _upload_template( filename=filename, destination=destination, context=context, use_jinja=use_jinja, template_dir=template_dir, use_sudo=use_sudo, backup=backup, mirror_local_mode=mirror_local_mode, mode=mode, ) if chown: if user is None: user = self.genv.user run_as_root('chown %s: %s' % (user, quote(destination)))
[ "Upload", "a", "template", "file", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L201-L241
[ "def", "upload_template", "(", "self", ",", "filename", ",", "destination", ",", "context", "=", "None", ",", "use_jinja", "=", "False", ",", "template_dir", "=", "None", ",", "use_sudo", "=", "False", ",", "backup", "=", "True", ",", "mirror_local_mode", "=", "False", ",", "mode", "=", "None", ",", "mkdir", "=", "False", ",", "chown", "=", "False", ",", "user", "=", "None", ")", ":", "if", "mkdir", ":", "remote_dir", "=", "os", ".", "path", ".", "dirname", "(", "destination", ")", "if", "use_sudo", ":", "self", ".", "sudo", "(", "'mkdir -p %s'", "%", "quote", "(", "remote_dir", ")", ",", "user", "=", "user", ")", "else", ":", "self", ".", "run", "(", "'mkdir -p %s'", "%", "quote", "(", "remote_dir", ")", ")", "if", "not", "self", ".", "dryrun", ":", "_upload_template", "(", "filename", "=", "filename", ",", "destination", "=", "destination", ",", "context", "=", "context", ",", "use_jinja", "=", "use_jinja", ",", "template_dir", "=", "template_dir", ",", "use_sudo", "=", "use_sudo", ",", "backup", "=", "backup", ",", "mirror_local_mode", "=", "mirror_local_mode", ",", "mode", "=", "mode", ",", ")", "if", "chown", ":", "if", "user", "is", "None", ":", "user", "=", "self", ".", "genv", ".", "user", "run_as_root", "(", "'chown %s: %s'", "%", "(", "user", ",", "quote", "(", "destination", ")", ")", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
FileSatchel.md5sum
Compute the MD5 sum of a file.
burlap/files.py
def md5sum(self, filename, use_sudo=False): """ Compute the MD5 sum of a file. """ func = use_sudo and run_as_root or self.run with self.settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True): # Linux (LSB) if exists(u'/usr/bin/md5sum'): res = func(u'/usr/bin/md5sum %(filename)s' % locals()) # BSD / OS X elif exists(u'/sbin/md5'): res = func(u'/sbin/md5 -r %(filename)s' % locals()) # SmartOS Joyent build elif exists(u'/opt/local/gnu/bin/md5sum'): res = func(u'/opt/local/gnu/bin/md5sum %(filename)s' % locals()) # SmartOS Joyent build # (the former doesn't exist, at least on joyent_20130222T000747Z) elif exists(u'/opt/local/bin/md5sum'): res = func(u'/opt/local/bin/md5sum %(filename)s' % locals()) # Try to find ``md5sum`` or ``md5`` on ``$PATH`` or abort else: md5sum = func(u'which md5sum') md5 = func(u'which md5') if exists(md5sum): res = func('%(md5sum)s %(filename)s' % locals()) elif exists(md5): res = func('%(md5)s %(filename)s' % locals()) else: abort('No MD5 utility was found on this system.') if res.succeeded: _md5sum = res else: warn(res) _md5sum = None if isinstance(_md5sum, six.string_types): _md5sum = _md5sum.strip().split('\n')[-1].split()[0] return _md5sum
def md5sum(self, filename, use_sudo=False): """ Compute the MD5 sum of a file. """ func = use_sudo and run_as_root or self.run with self.settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True): # Linux (LSB) if exists(u'/usr/bin/md5sum'): res = func(u'/usr/bin/md5sum %(filename)s' % locals()) # BSD / OS X elif exists(u'/sbin/md5'): res = func(u'/sbin/md5 -r %(filename)s' % locals()) # SmartOS Joyent build elif exists(u'/opt/local/gnu/bin/md5sum'): res = func(u'/opt/local/gnu/bin/md5sum %(filename)s' % locals()) # SmartOS Joyent build # (the former doesn't exist, at least on joyent_20130222T000747Z) elif exists(u'/opt/local/bin/md5sum'): res = func(u'/opt/local/bin/md5sum %(filename)s' % locals()) # Try to find ``md5sum`` or ``md5`` on ``$PATH`` or abort else: md5sum = func(u'which md5sum') md5 = func(u'which md5') if exists(md5sum): res = func('%(md5sum)s %(filename)s' % locals()) elif exists(md5): res = func('%(md5)s %(filename)s' % locals()) else: abort('No MD5 utility was found on this system.') if res.succeeded: _md5sum = res else: warn(res) _md5sum = None if isinstance(_md5sum, six.string_types): _md5sum = _md5sum.strip().split('\n')[-1].split()[0] return _md5sum
[ "Compute", "the", "MD5", "sum", "of", "a", "file", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L244-L283
[ "def", "md5sum", "(", "self", ",", "filename", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "use_sudo", "and", "run_as_root", "or", "self", ".", "run", "with", "self", ".", "settings", "(", "hide", "(", "'running'", ",", "'stdout'", ",", "'stderr'", ",", "'warnings'", ")", ",", "warn_only", "=", "True", ")", ":", "# Linux (LSB)", "if", "exists", "(", "u'/usr/bin/md5sum'", ")", ":", "res", "=", "func", "(", "u'/usr/bin/md5sum %(filename)s'", "%", "locals", "(", ")", ")", "# BSD / OS X", "elif", "exists", "(", "u'/sbin/md5'", ")", ":", "res", "=", "func", "(", "u'/sbin/md5 -r %(filename)s'", "%", "locals", "(", ")", ")", "# SmartOS Joyent build", "elif", "exists", "(", "u'/opt/local/gnu/bin/md5sum'", ")", ":", "res", "=", "func", "(", "u'/opt/local/gnu/bin/md5sum %(filename)s'", "%", "locals", "(", ")", ")", "# SmartOS Joyent build", "# (the former doesn't exist, at least on joyent_20130222T000747Z)", "elif", "exists", "(", "u'/opt/local/bin/md5sum'", ")", ":", "res", "=", "func", "(", "u'/opt/local/bin/md5sum %(filename)s'", "%", "locals", "(", ")", ")", "# Try to find ``md5sum`` or ``md5`` on ``$PATH`` or abort", "else", ":", "md5sum", "=", "func", "(", "u'which md5sum'", ")", "md5", "=", "func", "(", "u'which md5'", ")", "if", "exists", "(", "md5sum", ")", ":", "res", "=", "func", "(", "'%(md5sum)s %(filename)s'", "%", "locals", "(", ")", ")", "elif", "exists", "(", "md5", ")", ":", "res", "=", "func", "(", "'%(md5)s %(filename)s'", "%", "locals", "(", ")", ")", "else", ":", "abort", "(", "'No MD5 utility was found on this system.'", ")", "if", "res", ".", "succeeded", ":", "_md5sum", "=", "res", "else", ":", "warn", "(", "res", ")", "_md5sum", "=", "None", "if", "isinstance", "(", "_md5sum", ",", "six", ".", "string_types", ")", ":", "_md5sum", "=", "_md5sum", ".", "strip", "(", ")", ".", "split", "(", "'\\n'", ")", "[", "-", "1", "]", ".", "split", "(", ")", "[", "0", "]", "return", "_md5sum" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
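A minimal usage sketch for the md5sum method recorded above; the file_satchel instance name and the remote path are illustrative assumptions.

# Hypothetical call: fetch the remote file's MD5 and compare it locally.
remote_hash = file_satchel.md5sum('/etc/hosts', use_sudo=True)
if remote_hash is not None:
    print('Remote /etc/hosts MD5:', remote_hash)  # e.g. compare against a locally computed hash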
valid
FileSatchel.uncommented_lines
Get the lines of a remote file, ignoring empty or commented ones
burlap/files.py
def uncommented_lines(self, filename, use_sudo=False):
    """
    Get the lines of a remote file, ignoring empty or commented ones
    """
    func = run_as_root if use_sudo else self.run
    res = func('cat %s' % quote(filename), quiet=True)
    if res.succeeded:
        return [line for line in res.splitlines() if line and not line.startswith('#')]
    return []
def uncommented_lines(self, filename, use_sudo=False):
    """
    Get the lines of a remote file, ignoring empty or commented ones
    """
    func = run_as_root if use_sudo else self.run
    res = func('cat %s' % quote(filename), quiet=True)
    if res.succeeded:
        return [line for line in res.splitlines() if line and not line.startswith('#')]
    return []
[ "Get", "the", "lines", "of", "a", "remote", "file", "ignoring", "empty", "or", "commented", "ones" ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L286-L294
[ "def", "uncommented_lines", "(", "self", ",", "filename", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "run_as_root", "if", "use_sudo", "else", "self", ".", "run", "res", "=", "func", "(", "'cat %s'", "%", "quote", "(", "filename", ")", ",", "quiet", "=", "True", ")", "if", "res", ".", "succeeded", ":", "return", "[", "line", "for", "line", "in", "res", ".", "splitlines", "(", ")", "if", "line", "and", "not", "line", ".", "startswith", "(", "'#'", ")", "]", "return", "[", "]" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
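A short usage sketch for the uncommented_lines method above; the instance name and config path are illustrative assumptions.

# Hypothetical call: read only the active (non-blank, non-comment) lines of a config file.
lines = file_satchel.uncommented_lines('/etc/ssh/sshd_config', use_sudo=True)
for line in lines:
    print(line)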
valid
FileSatchel.getmtime
Return the time of last modification of path.
The return value is a number giving the number of seconds since the epoch

Same as :py:func:`os.path.getmtime()`
burlap/files.py
def getmtime(self, path, use_sudo=False):
    """
    Return the time of last modification of path.
    The return value is a number giving the number of seconds since the epoch

    Same as :py:func:`os.path.getmtime()`
    """
    func = use_sudo and run_as_root or self.run
    with self.settings(hide('running', 'stdout')):
        return int(func('stat -c %%Y "%(path)s" ' % locals()).strip())
def getmtime(self, path, use_sudo=False):
    """
    Return the time of last modification of path.
    The return value is a number giving the number of seconds since the epoch

    Same as :py:func:`os.path.getmtime()`
    """
    func = use_sudo and run_as_root or self.run
    with self.settings(hide('running', 'stdout')):
        return int(func('stat -c %%Y "%(path)s" ' % locals()).strip())
[ "Return", "the", "time", "of", "last", "modification", "of", "path", ".", "The", "return", "value", "is", "a", "number", "giving", "the", "number", "of", "seconds", "since", "the", "epoch" ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L297-L306
[ "def", "getmtime", "(", "self", ",", "path", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "use_sudo", "and", "run_as_root", "or", "self", ".", "run", "with", "self", ".", "settings", "(", "hide", "(", "'running'", ",", "'stdout'", ")", ")", ":", "return", "int", "(", "func", "(", "'stat -c %%Y \"%(path)s\" '", "%", "locals", "(", ")", ")", ".", "strip", "(", ")", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
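A small usage sketch for the getmtime method above; the instance name, path and one-hour threshold are illustrative assumptions.

# Hypothetical call: check whether a remote file was modified in the last hour.
import time

mtime = file_satchel.getmtime('/var/log/syslog', use_sudo=True)
if time.time() - mtime < 3600:
    print('modified within the last hour')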
valid
FileSatchel.copy
Copy a file or directory
burlap/files.py
def copy(self, source, destination, recursive=False, use_sudo=False):
    """
    Copy a file or directory
    """
    func = use_sudo and run_as_root or self.run
    options = '-r ' if recursive else ''
    func('/bin/cp {0}{1} {2}'.format(options, quote(source), quote(destination)))
def copy(self, source, destination, recursive=False, use_sudo=False):
    """
    Copy a file or directory
    """
    func = use_sudo and run_as_root or self.run
    options = '-r ' if recursive else ''
    func('/bin/cp {0}{1} {2}'.format(options, quote(source), quote(destination)))
[ "Copy", "a", "file", "or", "directory" ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L309-L315
[ "def", "copy", "(", "self", ",", "source", ",", "destination", ",", "recursive", "=", "False", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "use_sudo", "and", "run_as_root", "or", "self", ".", "run", "options", "=", "'-r '", "if", "recursive", "else", "''", "func", "(", "'/bin/cp {0}{1} {2}'", ".", "format", "(", "options", ",", "quote", "(", "source", ")", ",", "quote", "(", "destination", ")", ")", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
FileSatchel.move
Move a file or directory
burlap/files.py
def move(self, source, destination, use_sudo=False):
    """
    Move a file or directory
    """
    func = use_sudo and run_as_root or self.run
    func('/bin/mv {0} {1}'.format(quote(source), quote(destination)))
def move(self, source, destination, use_sudo=False):
    """
    Move a file or directory
    """
    func = use_sudo and run_as_root or self.run
    func('/bin/mv {0} {1}'.format(quote(source), quote(destination)))
[ "Move", "a", "file", "or", "directory" ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L318-L323
[ "def", "move", "(", "self", ",", "source", ",", "destination", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "use_sudo", "and", "run_as_root", "or", "self", ".", "run", "func", "(", "'/bin/mv {0} {1}'", ".", "format", "(", "quote", "(", "source", ")", ",", "quote", "(", "destination", ")", ")", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
valid
FileSatchel.remove
Remove a file or directory
burlap/files.py
def remove(self, path, recursive=False, use_sudo=False):
    """
    Remove a file or directory
    """
    func = use_sudo and run_as_root or self.run
    options = '-r ' if recursive else ''
    func('/bin/rm {0}{1}'.format(options, quote(path)))
def remove(self, path, recursive=False, use_sudo=False):
    """
    Remove a file or directory
    """
    func = use_sudo and run_as_root or self.run
    options = '-r ' if recursive else ''
    func('/bin/rm {0}{1}'.format(options, quote(path)))
[ "Remove", "a", "file", "or", "directory" ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L334-L340
[ "def", "remove", "(", "self", ",", "path", ",", "recursive", "=", "False", ",", "use_sudo", "=", "False", ")", ":", "func", "=", "use_sudo", "and", "run_as_root", "or", "self", ".", "run", "options", "=", "'-r '", "if", "recursive", "else", "''", "func", "(", "'/bin/rm {0}{1}'", ".", "format", "(", "options", ",", "quote", "(", "path", ")", ")", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
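A short sketch combining the copy, move and remove helpers recorded above (thin wrappers around /bin/cp, /bin/mv and /bin/rm); the file_satchel instance name and paths are illustrative assumptions.

# Hypothetical sequence: back up a config, swap in a staged file, clean a build directory.
file_satchel.copy('/etc/nginx/nginx.conf', '/tmp/nginx.conf.bak', use_sudo=True)
file_satchel.move('/tmp/staged.conf', '/etc/nginx/nginx.conf', use_sudo=True)
file_satchel.remove('/tmp/build', recursive=True, use_sudo=True)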
valid
FileSatchel.require
Require a file to exist and have specific contents and properties.

You can provide either:

- *contents*: the required contents of the file::

    from fabtools import require

    require.file('/tmp/hello.txt', contents='Hello, world')

- *source*: the local path of a file to upload::

    from fabtools import require

    require.file('/tmp/hello.txt', source='files/hello.txt')

- *url*: the URL of a file to download (*path* is then optional)::

    from fabric.api import cd
    from fabtools import require

    with cd('tmp'):
        require.file(url='http://example.com/files/hello.txt')

If *verify_remote* is ``True`` (the default), then an MD5 comparison will be used to check whether the remote file is the same as the source. If this is ``False``, the file will be assumed to be the same if it is present. This is useful for very large files, where generating an MD5 sum may take a while.

When providing either the *contents* or the *source* parameter, Fabric's ``put`` function will be used to upload the file to the remote host. When ``use_sudo`` is ``True``, the file will first be uploaded to a temporary directory, then moved to its final location. The default temporary directory is ``/tmp``, but can be overridden with the *temp_dir* parameter. If *temp_dir* is an empty string, then the user's home directory will be used.

If `use_sudo` is `True`, then the remote file will be owned by root, and its mode will reflect root's default *umask*. The optional *owner*, *group* and *mode* parameters can be used to override these properties.

.. note:: This function can be accessed directly from the ``fabtools.require`` module for convenience.
burlap/files.py
def require(self, path=None, contents=None, source=None, url=None, md5=None,
            use_sudo=False, owner=None, group='', mode=None, verify_remote=True,
            temp_dir='/tmp'):
    """
    Require a file to exist and have specific contents and properties.

    You can provide either:

    - *contents*: the required contents of the file::

        from fabtools import require

        require.file('/tmp/hello.txt', contents='Hello, world')

    - *source*: the local path of a file to upload::

        from fabtools import require

        require.file('/tmp/hello.txt', source='files/hello.txt')

    - *url*: the URL of a file to download (*path* is then optional)::

        from fabric.api import cd
        from fabtools import require

        with cd('tmp'):
            require.file(url='http://example.com/files/hello.txt')

    If *verify_remote* is ``True`` (the default), then an MD5 comparison will be used to check whether the remote file is the same as the source. If this is ``False``, the file will be assumed to be the same if it is present. This is useful for very large files, where generating an MD5 sum may take a while.

    When providing either the *contents* or the *source* parameter, Fabric's ``put`` function will be used to upload the file to the remote host. When ``use_sudo`` is ``True``, the file will first be uploaded to a temporary directory, then moved to its final location. The default temporary directory is ``/tmp``, but can be overridden with the *temp_dir* parameter. If *temp_dir* is an empty string, then the user's home directory will be used.

    If `use_sudo` is `True`, then the remote file will be owned by root, and its mode will reflect root's default *umask*. The optional *owner*, *group* and *mode* parameters can be used to override these properties.

    .. note:: This function can be accessed directly from the ``fabtools.require`` module for convenience.
    """
    func = use_sudo and run_as_root or self.run

    # 1) Only a path is given
    if path and not (contents or source or url):
        assert path
        if not self.is_file(path):
            func('touch "%(path)s"' % locals())

    # 2) A URL is specified (path is optional)
    elif url:
        if not path:
            path = os.path.basename(urlparse(url).path)

        if not self.is_file(path) or md5 and self.md5sum(path) != md5:
            func('wget --progress=dot:mega "%(url)s" -O "%(path)s"' % locals())

    # 3) A local filename, or a content string, is specified
    else:
        if source:
            assert not contents
            t = None
        else:
            fd, source = mkstemp()
            t = os.fdopen(fd, 'w')
            t.write(contents)
            t.close()

        if verify_remote:
            # Avoid reading the whole file into memory at once
            digest = hashlib.md5()
            f = open(source, 'rb')
            try:
                while True:
                    d = f.read(BLOCKSIZE)
                    if not d:
                        break
                    digest.update(d)
            finally:
                f.close()
        else:
            digest = None

        if (not self.is_file(path, use_sudo=use_sudo) or
                (verify_remote and
                    self.md5sum(path, use_sudo=use_sudo) != digest.hexdigest())):
            with self.settings(hide('running')):
                self.put(local_path=source, remote_path=path, use_sudo=use_sudo, temp_dir=temp_dir)

        if t is not None:
            os.unlink(source)

    # Ensure correct owner
    if use_sudo and owner is None:
        owner = 'root'
    if (owner and self.get_owner(path, use_sudo) != owner) or \
       (group and self.get_group(path, use_sudo) != group):
        func('chown %(owner)s:%(group)s "%(path)s"' % locals())

    # Ensure correct mode
    if use_sudo and mode is None:
        mode = oct(0o666 & ~int(self.umask(use_sudo=True), base=8))
    if mode and self.get_mode(path, use_sudo) != mode:
        func('chmod %(mode)s "%(path)s"' % locals())
def require(self, path=None, contents=None, source=None, url=None, md5=None,
            use_sudo=False, owner=None, group='', mode=None, verify_remote=True,
            temp_dir='/tmp'):
    """
    Require a file to exist and have specific contents and properties.

    You can provide either:

    - *contents*: the required contents of the file::

        from fabtools import require

        require.file('/tmp/hello.txt', contents='Hello, world')

    - *source*: the local path of a file to upload::

        from fabtools import require

        require.file('/tmp/hello.txt', source='files/hello.txt')

    - *url*: the URL of a file to download (*path* is then optional)::

        from fabric.api import cd
        from fabtools import require

        with cd('tmp'):
            require.file(url='http://example.com/files/hello.txt')

    If *verify_remote* is ``True`` (the default), then an MD5 comparison will be used to check whether the remote file is the same as the source. If this is ``False``, the file will be assumed to be the same if it is present. This is useful for very large files, where generating an MD5 sum may take a while.

    When providing either the *contents* or the *source* parameter, Fabric's ``put`` function will be used to upload the file to the remote host. When ``use_sudo`` is ``True``, the file will first be uploaded to a temporary directory, then moved to its final location. The default temporary directory is ``/tmp``, but can be overridden with the *temp_dir* parameter. If *temp_dir* is an empty string, then the user's home directory will be used.

    If `use_sudo` is `True`, then the remote file will be owned by root, and its mode will reflect root's default *umask*. The optional *owner*, *group* and *mode* parameters can be used to override these properties.

    .. note:: This function can be accessed directly from the ``fabtools.require`` module for convenience.
    """
    func = use_sudo and run_as_root or self.run

    # 1) Only a path is given
    if path and not (contents or source or url):
        assert path
        if not self.is_file(path):
            func('touch "%(path)s"' % locals())

    # 2) A URL is specified (path is optional)
    elif url:
        if not path:
            path = os.path.basename(urlparse(url).path)

        if not self.is_file(path) or md5 and self.md5sum(path) != md5:
            func('wget --progress=dot:mega "%(url)s" -O "%(path)s"' % locals())

    # 3) A local filename, or a content string, is specified
    else:
        if source:
            assert not contents
            t = None
        else:
            fd, source = mkstemp()
            t = os.fdopen(fd, 'w')
            t.write(contents)
            t.close()

        if verify_remote:
            # Avoid reading the whole file into memory at once
            digest = hashlib.md5()
            f = open(source, 'rb')
            try:
                while True:
                    d = f.read(BLOCKSIZE)
                    if not d:
                        break
                    digest.update(d)
            finally:
                f.close()
        else:
            digest = None

        if (not self.is_file(path, use_sudo=use_sudo) or
                (verify_remote and
                    self.md5sum(path, use_sudo=use_sudo) != digest.hexdigest())):
            with self.settings(hide('running')):
                self.put(local_path=source, remote_path=path, use_sudo=use_sudo, temp_dir=temp_dir)

        if t is not None:
            os.unlink(source)

    # Ensure correct owner
    if use_sudo and owner is None:
        owner = 'root'
    if (owner and self.get_owner(path, use_sudo) != owner) or \
       (group and self.get_group(path, use_sudo) != group):
        func('chown %(owner)s:%(group)s "%(path)s"' % locals())

    # Ensure correct mode
    if use_sudo and mode is None:
        mode = oct(0o666 & ~int(self.umask(use_sudo=True), base=8))
    if mode and self.get_mode(path, use_sudo) != mode:
        func('chmod %(mode)s "%(path)s"' % locals())
[ "Require", "a", "file", "to", "exist", "and", "have", "specific", "contents", "and", "properties", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/files.py#L352-L464
[ "def", "require", "(", "self", ",", "path", "=", "None", ",", "contents", "=", "None", ",", "source", "=", "None", ",", "url", "=", "None", ",", "md5", "=", "None", ",", "use_sudo", "=", "False", ",", "owner", "=", "None", ",", "group", "=", "''", ",", "mode", "=", "None", ",", "verify_remote", "=", "True", ",", "temp_dir", "=", "'/tmp'", ")", ":", "func", "=", "use_sudo", "and", "run_as_root", "or", "self", ".", "run", "# 1) Only a path is given", "if", "path", "and", "not", "(", "contents", "or", "source", "or", "url", ")", ":", "assert", "path", "if", "not", "self", ".", "is_file", "(", "path", ")", ":", "func", "(", "'touch \"%(path)s\"'", "%", "locals", "(", ")", ")", "# 2) A URL is specified (path is optional)", "elif", "url", ":", "if", "not", "path", ":", "path", "=", "os", ".", "path", ".", "basename", "(", "urlparse", "(", "url", ")", ".", "path", ")", "if", "not", "self", ".", "is_file", "(", "path", ")", "or", "md5", "and", "self", ".", "md5sum", "(", "path", ")", "!=", "md5", ":", "func", "(", "'wget --progress=dot:mega \"%(url)s\" -O \"%(path)s\"'", "%", "locals", "(", ")", ")", "# 3) A local filename, or a content string, is specified", "else", ":", "if", "source", ":", "assert", "not", "contents", "t", "=", "None", "else", ":", "fd", ",", "source", "=", "mkstemp", "(", ")", "t", "=", "os", ".", "fdopen", "(", "fd", ",", "'w'", ")", "t", ".", "write", "(", "contents", ")", "t", ".", "close", "(", ")", "if", "verify_remote", ":", "# Avoid reading the whole file into memory at once", "digest", "=", "hashlib", ".", "md5", "(", ")", "f", "=", "open", "(", "source", ",", "'rb'", ")", "try", ":", "while", "True", ":", "d", "=", "f", ".", "read", "(", "BLOCKSIZE", ")", "if", "not", "d", ":", "break", "digest", ".", "update", "(", "d", ")", "finally", ":", "f", ".", "close", "(", ")", "else", ":", "digest", "=", "None", "if", "(", "not", "self", ".", "is_file", "(", "path", ",", "use_sudo", "=", "use_sudo", ")", "or", "(", "verify_remote", "and", "self", ".", "md5sum", "(", "path", ",", "use_sudo", "=", "use_sudo", ")", "!=", "digest", ".", "hexdigest", "(", ")", ")", ")", ":", "with", "self", ".", "settings", "(", "hide", "(", "'running'", ")", ")", ":", "self", ".", "put", "(", "local_path", "=", "source", ",", "remote_path", "=", "path", ",", "use_sudo", "=", "use_sudo", ",", "temp_dir", "=", "temp_dir", ")", "if", "t", "is", "not", "None", ":", "os", ".", "unlink", "(", "source", ")", "# Ensure correct owner", "if", "use_sudo", "and", "owner", "is", "None", ":", "owner", "=", "'root'", "if", "(", "owner", "and", "self", ".", "get_owner", "(", "path", ",", "use_sudo", ")", "!=", "owner", ")", "or", "(", "group", "and", "self", ".", "get_group", "(", "path", ",", "use_sudo", ")", "!=", "group", ")", ":", "func", "(", "'chown %(owner)s:%(group)s \"%(path)s\"'", "%", "locals", "(", ")", ")", "# Ensure correct mode", "if", "use_sudo", "and", "mode", "is", "None", ":", "mode", "=", "oct", "(", "0o666", "&", "~", "int", "(", "self", ".", "umask", "(", "use_sudo", "=", "True", ")", ",", "base", "=", "8", ")", ")", "if", "mode", "and", "self", ".", "get_mode", "(", "path", ",", "use_sudo", ")", "!=", "mode", ":", "func", "(", "'chmod %(mode)s \"%(path)s\"'", "%", "locals", "(", ")", ")" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
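A usage sketch mirroring the three modes described in the require docstring above; the file_satchel instance name, local paths and URL are illustrative assumptions.

# Hypothetical calls covering the contents / source / url modes.
# 1) Ensure a file exists with literal contents.
file_satchel.require('/tmp/hello.txt', contents='Hello, world', use_sudo=True)
# 2) Upload a local file, verified via MD5 on the remote side, with explicit owner and mode.
file_satchel.require('/etc/motd', source='files/motd', owner='root', mode='644', use_sudo=True)
# 3) Download from a URL; path defaults to the URL's basename.
file_satchel.require(url='http://example.com/files/hello.txt')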
valid
SeleniumSatchel.check_for_change
Determines if a new release has been made.
burlap/selenium.py
def check_for_change(self):
    """
    Determines if a new release has been made.
    """
    r = self.local_renderer
    lm = self.last_manifest
    last_fingerprint = lm.fingerprint
    current_fingerprint = self.get_target_geckodriver_version_number()
    self.vprint('last_fingerprint:', last_fingerprint)
    self.vprint('current_fingerprint:', current_fingerprint)
    if last_fingerprint != current_fingerprint:
        print('A new release is available. %s' % self.get_most_recent_version())
        return True
    print('No updates found.')
    return False
def check_for_change(self):
    """
    Determines if a new release has been made.
    """
    r = self.local_renderer
    lm = self.last_manifest
    last_fingerprint = lm.fingerprint
    current_fingerprint = self.get_target_geckodriver_version_number()
    self.vprint('last_fingerprint:', last_fingerprint)
    self.vprint('current_fingerprint:', current_fingerprint)
    if last_fingerprint != current_fingerprint:
        print('A new release is available. %s' % self.get_most_recent_version())
        return True
    print('No updates found.')
    return False
[ "Determines", "if", "a", "new", "release", "has", "been", "made", "." ]
chrisspen/burlap
python
https://github.com/chrisspen/burlap/blob/a92b0a8e5206850bb777c74af8421ea8b33779bd/burlap/selenium.py#L73-L87
[ "def", "check_for_change", "(", "self", ")", ":", "r", "=", "self", ".", "local_renderer", "lm", "=", "self", ".", "last_manifest", "last_fingerprint", "=", "lm", ".", "fingerprint", "current_fingerprint", "=", "self", ".", "get_target_geckodriver_version_number", "(", ")", "self", ".", "vprint", "(", "'last_fingerprint:'", ",", "last_fingerprint", ")", "self", ".", "vprint", "(", "'current_fingerprint:'", ",", "current_fingerprint", ")", "if", "last_fingerprint", "!=", "current_fingerprint", ":", "print", "(", "'A new release is available. %s'", "%", "self", ".", "get_most_recent_version", "(", ")", ")", "return", "True", "print", "(", "'No updates found.'", ")", "return", "False" ]
a92b0a8e5206850bb777c74af8421ea8b33779bd
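A brief usage sketch for the check_for_change method above; the selenium_satchel instance name and the follow-up configure() step are illustrative assumptions, not taken from the record.

# Hypothetical use in a deploy script: only act when the geckodriver version
# recorded in the last manifest differs from the currently targeted version.
if selenium_satchel.check_for_change():
    selenium_satchel.configure()  # assumed follow-up step to install the new release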