Dataset schema (one row per function):

| Column | Type / stats |
|---|---|
| partition | string, 3 classes |
| func_name | string, 1–134 chars |
| docstring | string, 1–46.9k chars |
| path | string, 4–223 chars |
| original_string | string, 75–104k chars |
| code | string, 75–104k chars |
| docstring_tokens | list, 1–1.97k items |
| repo | string, 7–55 chars |
| language | string, 1 class |
| url | string, 87–315 chars |
| code_tokens | list, 19–28.4k items |
| sha | string, 40 chars |

In this dump, `original_string` is identical to `code`, and `docstring_tokens`/`code_tokens` are token-level copies of `docstring` and `code`, so each entry below is shown once with its metadata.
**proxy_settings_module** · `siteprefs/toolbox.py` · partition: valid

Replaces a settings module with a Module proxy to intercept access to settings.

:param int depth: Frame count to go backward.

```python
def proxy_settings_module(depth=3):
    """Replaces a settings module with a Module proxy to intercept
    access to settings.

    :param int depth: Frame count to go backward.
    """
    proxies = []

    modules = sys.modules
    module_name = get_frame_locals(depth)['__name__']

    module_real = modules[module_name]

    for name, locals_dict in traverse_local_prefs(depth):
        value = locals_dict[name]
        if isinstance(value, PrefProxy):
            proxies.append(name)

    new_module = type(module_name, (ModuleType, ModuleProxy), {})(module_name)  # ModuleProxy
    new_module.bind(module_real, proxies)

    modules[module_name] = new_module
```
Repo: idlesign/django-siteprefs · Language: python · URL: https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L222-L246 · SHA: 3d6bf5e64220fe921468a36fce68e15d7947cf92
**register_prefs** · `siteprefs/toolbox.py` · partition: valid

Registers preferences that should be handled by siteprefs. Expects preferences as *args. Use keyword arguments to batch apply params supported by ``PrefProxy`` to all preferences not constructed by ``pref`` and ``pref_group``.

Batch kwargs:

:param str|unicode help_text: Field help text.
:param bool static: Leave this preference static (do not store in DB).
:param bool readonly: Make this field read only.
:param bool swap_settings_module: Whether to automatically replace settings module with a special ``ProxyModule`` object to access dynamic values of settings transparently (so not to bother with calling ``.value`` of ``PrefProxy`` object).

```python
def register_prefs(*args, **kwargs):
    """Registers preferences that should be handled by siteprefs.

    Expects preferences as *args.

    Use keyword arguments to batch apply params supported by
    ``PrefProxy`` to all preferences not constructed by ``pref`` and ``pref_group``.

    Batch kwargs:

    :param str|unicode help_text: Field help text.
    :param bool static: Leave this preference static (do not store in DB).
    :param bool readonly: Make this field read only.
    :param bool swap_settings_module: Whether to automatically replace settings module
        with a special ``ProxyModule`` object to access dynamic values of settings
        transparently (so not to bother with calling ``.value`` of ``PrefProxy`` object).
    """
    swap_settings_module = bool(kwargs.get('swap_settings_module', True))

    if __PATCHED_LOCALS_SENTINEL not in get_frame_locals(2):
        raise SitePrefsException('Please call `patch_locals()` right before the `register_prefs()`.')

    bind_proxy(args, **kwargs)

    unpatch_locals()

    swap_settings_module and proxy_settings_module()
```
Repo: idlesign/django-siteprefs · Language: python · URL: https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L249-L279 · SHA: 3d6bf5e64220fe921468a36fce68e15d7947cf92
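A minimal usage sketch of the calling pattern the sentinel check implies (`patch_locals()` must run immediately before `register_prefs()`); the setting name and value here are hypothetical, not from the repository:

```python
# settings.py of a Django app using django-siteprefs (hypothetical example).
from siteprefs.toolbox import patch_locals, pref, register_prefs

MY_OPTION = 42  # hypothetical module-level setting to expose as a preference

patch_locals()  # must be called right before register_prefs(), per the sentinel check
register_prefs(
    pref(MY_OPTION, verbose_name='My option', static=False),
)
```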
**pref_group** · `siteprefs/toolbox.py` · partition: valid

Marks preferences group.

:param str|unicode title: Group title.
:param list|tuple prefs: Preferences to group.
:param str|unicode help_text: Field help text.
:param bool static: Leave this preference static (do not store in DB).
:param bool readonly: Make this field read only.

```python
def pref_group(title, prefs, help_text='', static=True, readonly=False):
    """Marks preferences group.

    :param str|unicode title: Group title.
    :param list|tuple prefs: Preferences to group.
    :param str|unicode help_text: Field help text.
    :param bool static: Leave this preference static (do not store in DB).
    :param bool readonly: Make this field read only.
    """
    bind_proxy(prefs, title, help_text=help_text, static=static, readonly=readonly)

    for proxy in prefs:  # For preferences already marked by pref().
        if isinstance(proxy, PrefProxy):
            proxy.category = title
```
Repo: idlesign/django-siteprefs · Language: python · URL: https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L282-L300 · SHA: 3d6bf5e64220fe921468a36fce68e15d7947cf92
**pref** · `siteprefs/toolbox.py` · partition: valid

Marks a preference.

:param preference: Preference variable.
:param Field field: Django model field to represent this preference.
:param str|unicode verbose_name: Field verbose name.
:param str|unicode help_text: Field help text.
:param bool static: Leave this preference static (do not store in DB).
:param bool readonly: Make this field read only.
:rtype: PrefProxy|None

```python
def pref(preference, field=None, verbose_name=None, help_text='', static=True, readonly=False):
    """Marks a preference.

    :param preference: Preference variable.
    :param Field field: Django model field to represent this preference.
    :param str|unicode verbose_name: Field verbose name.
    :param str|unicode help_text: Field help text.
    :param bool static: Leave this preference static (do not store in DB).
    :param bool readonly: Make this field read only.
    :rtype: PrefProxy|None
    """
    try:
        bound = bind_proxy(
            (preference,),
            field=field,
            verbose_name=verbose_name,
            help_text=help_text,
            static=static,
            readonly=readonly,
        )
        return bound[0]
    except IndexError:
        return
```
Repo: idlesign/django-siteprefs · Language: python · URL: https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L303-L332 · SHA: 3d6bf5e64220fe921468a36fce68e15d7947cf92
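How `pref()` and `pref_group()` compose, as a hedged sketch: the nesting of `pref_group` inside `register_prefs` follows the project's usage pattern as best I can infer from these signatures, and the setting names are placeholders:

```python
# Hypothetical grouping sketch for django-siteprefs.
from siteprefs.toolbox import patch_locals, pref, pref_group, register_prefs

TIMEOUT = 30   # placeholder settings
RETRIES = 3

patch_locals()
register_prefs(
    pref_group(
        'Networking',
        (
            pref(TIMEOUT, verbose_name='Request timeout'),
            pref(RETRIES, verbose_name='Retry count'),
        ),
        static=False,  # batch-applied to the grouped preferences
    ),
)
```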
**ModuleProxy.bind** · `siteprefs/toolbox.py` · partition: valid

:param ModuleType module:
:param list prefs: Preference names. Just to speed up __getattr__.

```python
def bind(self, module, prefs):
    """
    :param ModuleType module:
    :param list prefs: Preference names. Just to speed up __getattr__.
    """
    self._module = module
    self._prefs = set(prefs)
```
Repo: idlesign/django-siteprefs · Language: python · URL: https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L203-L209 · SHA: 3d6bf5e64220fe921468a36fce68e15d7947cf92
**generate_versionwarning_data_json** · `versionwarning/signals.py` · partition: valid

Generate the ``versionwarning-data.json`` file. This file is included in the output and read by the AJAX request when accessing the documentation, and is used to compare the live versions with the current one. Besides, this file contains metadata about the project, the API to use and the banner itself.

```python
def generate_versionwarning_data_json(app, config=None, **kwargs):
    """
    Generate the ``versionwarning-data.json`` file.

    This file is included in the output and read by the AJAX request when
    accessing the documentation, and is used to compare the live versions with
    the current one.

    Besides, this file contains metadata about the project, the API to use and
    the banner itself.
    """
    # In Sphinx >= 1.8 we use the ``config-inited`` signal, which comes with the
    # ``config`` object; in Sphinx < 1.8 we use the ``builder-inited`` signal,
    # which doesn't have the ``config`` object, so we take it from the ``app``.
    config = config or kwargs.pop('config', None)
    if config is None:
        config = app.config

    if config.versionwarning_project_version in config.versionwarning_messages:
        custom = True
        message = config.versionwarning_messages.get(config.versionwarning_project_version)
    else:
        custom = False
        message = config.versionwarning_default_message

    banner_html = config.versionwarning_banner_html.format(
        id_div=config.versionwarning_banner_id_div,
        banner_title=config.versionwarning_banner_title,
        message=message.format(
            **{config.versionwarning_message_placeholder: '<a href="#"></a>'},
        ),
        admonition_type=config.versionwarning_admonition_type,
    )

    data = json.dumps({
        'meta': {
            'api_url': config.versionwarning_api_url,
        },
        'banner': {
            'html': banner_html,
            'id_div': config.versionwarning_banner_id_div,
            'body_selector': config.versionwarning_body_selector,
            'custom': custom,
        },
        'project': {
            'slug': config.versionwarning_project_slug,
        },
        'version': {
            'slug': config.versionwarning_project_version,
        },
    }, indent=4)

    data_path = os.path.join(STATIC_PATH, 'data')
    if not os.path.exists(data_path):
        os.mkdir(data_path)

    with open(os.path.join(data_path, JSON_DATA_FILENAME), 'w') as f:
        f.write(data)

    # Add the path where the ``versionwarning-data.json`` file and
    # ``versionwarning.js`` are saved.
    config.html_static_path.append(STATIC_PATH)
```
Repo: humitos/sphinx-version-warning · Language: python · URL: https://github.com/humitos/sphinx-version-warning/blob/fa6e48eb1dc66f8deea2328ba6f069bf6a808713/versionwarning/signals.py#L11-L73 · SHA: fa6e48eb1dc66f8deea2328ba6f069bf6a808713
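Since the function is written as a Sphinx event handler (its `app, config=None, **kwargs` signature accepts both signal shapes the comment describes), a minimal wiring sketch might look like the following; this `setup()` is an assumption for illustration, not the extension's actual registration code:

```python
# Hypothetical extension wiring (assumption: not the project's actual setup code).
import sphinx

def setup(app):
    if sphinx.version_info >= (1, 8):
        # config-inited handlers receive (app, config).
        app.connect('config-inited', generate_versionwarning_data_json)
    else:
        # builder-inited handlers only receive app; config is read from app.config.
        app.connect('builder-inited', generate_versionwarning_data_json)
    return {'parallel_read_safe': True}
```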
**objective** · `descent/objectives.py` · partition: valid

Gives objective functions a number of dimensions and parameter range.

Parameters: ``param_scales : (int, int)``, scale (std. dev.) for choosing each parameter; ``xstar : array_like``, optimal parameters.

```python
def objective(param_scales=(1, 1), xstar=None, seed=None):
    """Gives objective functions a number of dimensions and parameter range

    Parameters
    ----------
    param_scales : (int, int)
        Scale (std. dev.) for choosing each parameter

    xstar : array_like
        Optimal parameters

    seed : int, optional
        Seed for the random parameter initialization
    """
    ndim = len(param_scales)

    def decorator(func):

        @wraps(func)
        def wrapper(theta):
            return func(theta)

        def param_init():
            np.random.seed(seed)
            return np.random.randn(ndim,) * np.array(param_scales)

        wrapper.ndim = ndim
        wrapper.param_init = param_init
        wrapper.xstar = xstar

        return wrapper

    return decorator
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L11-L40 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
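A sketch of how the decorator is meant to be used, judging from the attributes it attaches (`ndim`, `param_init`, `xstar`); the quadratic objective here is hypothetical, and `objective`/`np` are assumed in scope from the module above:

```python
import numpy as np

@objective(param_scales=(1.0, 1.0), xstar=np.zeros(2), seed=0)
def quadratic(theta):
    """Hypothetical bowl-shaped test objective: returns (value, gradient)."""
    obj = 0.5 * float(np.sum(theta ** 2))
    grad = theta
    return obj, grad

theta0 = quadratic.param_init()   # random start, scaled by param_scales
obj, grad = quadratic(theta0)     # same (objective, gradient) contract as the functions below
print(quadratic.ndim, quadratic.xstar)
```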
**doublewell** · `descent/objectives.py` · partition: valid

Pointwise minimum of two quadratic bowls.

```python
def doublewell(theta):
    """Pointwise minimum of two quadratic bowls"""
    k0, k1, depth = 0.01, 100, 0.5
    shallow = 0.5 * k0 * theta ** 2 + depth
    deep = 0.5 * k1 * theta ** 2
    obj = float(np.minimum(shallow, deep))
    grad = np.where(deep < shallow, k1 * theta, k0 * theta)
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L44-L51 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**rosenbrock** · `descent/objectives.py` · partition: valid

Objective and gradient for the Rosenbrock function.

```python
def rosenbrock(theta):
    """Objective and gradient for the Rosenbrock function"""
    x, y = theta

    obj = (1 - x)**2 + 100 * (y - x**2)**2

    grad = np.zeros(2)
    grad[0] = 2 * x - 400 * (x * y - x**3) - 2
    grad[1] = 200 * (y - x**2)

    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L55-L63 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
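All of these test objectives share the `(objective, gradient)` return contract, so a plain gradient-descent loop exercises any of them. A minimal sketch; the step size and iteration count are illustrative guesses, not tuned values:

```python
import numpy as np

theta = np.array([-1.0, 1.5])    # arbitrary starting point
lr = 1e-3                        # illustrative step size

for _ in range(20000):
    obj, grad = rosenbrock(theta)
    theta = theta - lr * grad    # vanilla gradient descent step

print(obj, theta)  # should drift toward the minimum at (1, 1), slowly for plain GD
```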
**matyas** · `descent/objectives.py` · partition: valid

Matyas function.

```python
def matyas(theta):
    """Matyas function"""
    x, y = theta
    obj = 0.26 * (x ** 2 + y ** 2) - 0.48 * x * y
    grad = np.array([0.52 * x - 0.48 * y, 0.52 * y - 0.48 * x])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L73-L78 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**beale** · `descent/objectives.py` · partition: valid

Beale's function.

```python
def beale(theta):
    """Beale's function"""
    x, y = theta
    A = 1.5 - x + x * y
    B = 2.25 - x + x * y**2
    C = 2.625 - x + x * y**3
    obj = A ** 2 + B ** 2 + C ** 2
    grad = np.array([
        2 * A * (y - 1) + 2 * B * (y ** 2 - 1) + 2 * C * (y ** 3 - 1),
        2 * A * x + 4 * B * x * y + 6 * C * x * y ** 2
    ])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L82-L93 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**booth** · `descent/objectives.py` · partition: valid

Booth's function.

```python
def booth(theta):
    """Booth's function"""
    x, y = theta
    A = x + 2 * y - 7
    B = 2 * x + y - 5
    obj = A**2 + B**2
    grad = np.array([2 * A + 4 * B, 4 * A + 2 * B])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L97-L105 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**mccormick** · `descent/objectives.py` · partition: valid

McCormick function.

```python
def mccormick(theta):
    """McCormick function"""
    x, y = theta
    obj = np.sin(x + y) + (x - y)**2 - 1.5 * x + 2.5 * y + 1
    grad = np.array([np.cos(x + y) + 2 * (x - y) - 1.5,
                     np.cos(x + y) - 2 * (x - y) + 2.5])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L109-L115 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**camel** · `descent/objectives.py` · partition: valid

Three-hump camel function.

```python
def camel(theta):
    """Three-hump camel function"""
    x, y = theta
    obj = 2 * x ** 2 - 1.05 * x ** 4 + x ** 6 / 6 + x * y + y ** 2
    grad = np.array([
        4 * x - 4.2 * x ** 3 + x ** 5 + y,
        x + 2 * y
    ])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L119-L127 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**michalewicz** · `descent/objectives.py` · partition: valid

Michalewicz function.

```python
def michalewicz(theta):
    """Michalewicz function"""
    x, y = theta
    obj = - np.sin(x) * np.sin(x ** 2 / np.pi) ** 20 - \
        np.sin(y) * np.sin(2 * y ** 2 / np.pi) ** 20
    grad = np.array([
        - np.cos(x) * np.sin(x ** 2 / np.pi) ** 20 - (40 / np.pi) * x *
        np.sin(x) * np.sin(x ** 2 / np.pi) ** 19 * np.cos(x ** 2 / np.pi),
        - np.cos(y) * np.sin(2 * y ** 2 / np.pi) ** 20 - (80 / np.pi) * y * np.sin(y) *
        np.sin(2 * y ** 2 / np.pi) ** 19 * np.cos(2 * y ** 2 / np.pi),
    ])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L131-L144 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**bohachevsky1** · `descent/objectives.py` · partition: valid

One of the Bohachevsky functions.

```python
def bohachevsky1(theta):
    """One of the Bohachevsky functions"""
    x, y = theta
    obj = x ** 2 + 2 * y ** 2 - 0.3 * np.cos(3 * np.pi * x) - 0.4 * np.cos(4 * np.pi * y) + 0.7
    grad = np.array([
        2 * x + 0.3 * np.sin(3 * np.pi * x) * 3 * np.pi,
        4 * y + 0.4 * np.sin(4 * np.pi * y) * 4 * np.pi,
    ])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L148-L156 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**zakharov** · `descent/objectives.py` · partition: valid

Zakharov function.

```python
def zakharov(theta):
    """Zakharov function"""
    x, y = theta
    obj = x ** 2 + y ** 2 + (0.5 * x + y) ** 2 + (0.5 * x + y) ** 4
    grad = np.array([
        2.5 * x + y + 2 * (0.5 * x + y) ** 3,
        4 * y + x + 4 * (0.5 * x + y) ** 3,
    ])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L160-L168 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**dixon_price** · `descent/objectives.py` · partition: valid

Dixon-Price function.

```python
def dixon_price(theta):
    """Dixon-Price function"""
    x, y = theta
    obj = (x - 1) ** 2 + 2 * (2 * y ** 2 - x) ** 2
    grad = np.array([
        2 * x - 2 - 4 * (2 * y ** 2 - x),
        16 * (2 * y ** 2 - x) * y,
    ])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L172-L180 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**goldstein_price** · `descent/objectives.py` · partition: valid

Goldstein-Price function.

```python
def goldstein_price(theta):
    """Goldstein-Price function"""
    x, y = theta
    # Note: the textbook Goldstein-Price form ends its second factor with
    # 27 * y ** 2; this implementation uses 27 * x ** 2, and its gradient
    # below is consistent with that variant (12 * x**2 + 27 * x**2 combine
    # into the 39 * x**2 factors).
    obj = (1 + (x + y + 1) ** 2 * (19 - 14 * x + 3 * x ** 2 - 14 * y + 6 * x * y + 3 * y ** 2)) * \
        (30 + (2 * x - 3 * y) ** 2 *
         (18 - 32 * x + 12 * x ** 2 + 48 * y - 36 * x * y + 27 * x ** 2))
    grad = np.array([
        ((2 * x - 3 * y)**2 * (78 * x - 36 * y - 32) + (8 * x - 12 * y) *
         (39 * x**2 - 36 * x * y - 32 * x + 48 * y + 18)) *
        ((x + y + 1)**2 * (3 * x**2 + 6 * x * y - 14 * x + 3 * y**2 - 14 * y + 19) + 1) +
        ((2 * x - 3 * y)**2 * (39 * x**2 - 36 * x * y - 32 * x + 48 * y + 18) + 30) *
        ((x + y + 1)**2 *
         (6 * x + 6 * y - 14) + (2 * x + 2 * y + 2) *
         (3 * x**2 + 6 * x * y - 14 * x + 3 * y**2 - 14 * y + 19)),
        ((-36 * x + 48) * (2 * x - 3 * y)**2 + (-12 * x + 18 * y) *
         (39 * x**2 - 36 * x * y - 32 * x + 48 * y + 18)) *
        ((x + y + 1)**2 * (3 * x**2 + 6 * x * y - 14 * x + 3 * y**2 - 14 * y + 19) + 1) +
        ((2 * x - 3 * y)**2 * (39 * x**2 - 36 * x * y - 32 * x + 48 * y + 18) + 30) *
        ((x + y + 1)**2 * (6 * x + 6 * y - 14) + (2 * x + 2 * y + 2) *
         (3 * x**2 + 6 * x * y - 14 * x + 3 * y**2 - 14 * y + 19)),
    ])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L184-L205 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
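With hand-derived gradients this long, a central-difference check is the natural sanity test. This helper is a sketch I'm adding for illustration, not part of the repository:

```python
import numpy as np

def grad_check(f, theta, eps=1e-6):
    """Compare an analytic gradient against central differences (hypothetical helper)."""
    _, grad = f(theta)
    num = np.zeros_like(np.asarray(grad, dtype=float))
    for i in range(theta.size):
        e = np.zeros_like(theta)
        e[i] = eps
        # f returns (objective, gradient); index [0] takes the objective value.
        num[i] = (f(theta + e)[0] - f(theta - e)[0]) / (2 * eps)
    return np.max(np.abs(grad - num))

print(grad_check(goldstein_price, np.array([0.3, -0.7])))  # near zero if grad matches obj
```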
**styblinski_tang** · `descent/objectives.py` · partition: valid

Styblinski-Tang function.

```python
def styblinski_tang(theta):
    """Styblinski-Tang function"""
    x, y = theta
    obj = 0.5 * (x ** 4 - 16 * x ** 2 + 5 * x + y ** 4 - 16 * y ** 2 + 5 * y)
    grad = np.array([
        2 * x ** 3 - 16 * x + 2.5,
        2 * y ** 3 - 16 * y + 2.5,
    ])
    return obj, grad
```
Repo: nirum/descent · Language: python · URL: https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/objectives.py#L209-L217 · SHA: 074c8452f15a0da638668a4fe139fde06ccfae7f
**S3Connection.get_all_buckets** · `mimicdb/s3/connection.py` · partition: valid

Return a list of buckets in MimicDB.

:param boolean force: If true, API call is forced to S3

```python
def get_all_buckets(self, *args, **kwargs):
    """Return a list of buckets in MimicDB.

    :param boolean force: If true, API call is forced to S3
    """
    if kwargs.pop('force', None):
        buckets = super(S3Connection, self).get_all_buckets(*args, **kwargs)

        for bucket in buckets:
            mimicdb.backend.sadd(tpl.connection, bucket.name)

        return buckets

    return [Bucket(self, bucket) for bucket in mimicdb.backend.smembers(tpl.connection)]
```
Repo: nathancahill/mimicdb · Language: python · URL: https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/connection.py#L20-L33 · SHA: 9d0e8ebcba31d937f73752f9b88e5a4fec860765
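The `force` flag is the common thread through these wrappers: without it, reads are served from the Redis-backed sets instead of S3. A hedged usage sketch; the import path is inferred from the file layout above, and the credentials and bucket name are placeholders:

```python
# Hypothetical session with MimicDB's boto-compatible wrappers.
from mimicdb.s3.connection import S3Connection

conn = S3Connection('ACCESS_KEY', 'SECRET_KEY')  # placeholder credentials

buckets = conn.get_all_buckets()          # served from the MimicDB set, no S3 call
fresh = conn.get_all_buckets(force=True)  # hits S3 and repopulates the set

bucket = conn.get_bucket('my-bucket')     # raises S3ResponseError(404) if not in the set
```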
**S3Connection.get_bucket** · `mimicdb/s3/connection.py` · partition: valid

Return a bucket from MimicDB if it exists. Raise an S3ResponseError if the bucket does not exist and validate is passed.

:param boolean force: If true, API call is forced to S3

```python
def get_bucket(self, bucket_name, validate=True, headers=None, force=None):
    """Return a bucket from MimicDB if it exists. Raise an
    S3ResponseError if the bucket does not exist and validate is passed.

    :param boolean force: If true, API call is forced to S3
    """
    if force:
        bucket = super(S3Connection, self).get_bucket(bucket_name, validate, headers)
        mimicdb.backend.sadd(tpl.connection, bucket.name)
        return bucket

    if mimicdb.backend.sismember(tpl.connection, bucket_name):
        return Bucket(self, bucket_name)
    else:
        if validate:
            raise S3ResponseError(404, 'NoSuchBucket')
        else:
            return Bucket(self, bucket_name)
```
Repo: nathancahill/mimicdb · Language: python · URL: https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/connection.py#L35-L52 · SHA: 9d0e8ebcba31d937f73752f9b88e5a4fec860765
**S3Connection.create_bucket** · `mimicdb/s3/connection.py` · partition: valid

Add the bucket to MimicDB after successful creation.

```python
def create_bucket(self, *args, **kwargs):
    """Add the bucket to MimicDB after successful creation.
    """
    bucket = super(S3Connection, self).create_bucket(*args, **kwargs)

    if bucket:
        mimicdb.backend.sadd(tpl.connection, bucket.name)

    return bucket
```
Repo: nathancahill/mimicdb · Language: python · URL: https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/connection.py#L54-L62 · SHA: 9d0e8ebcba31d937f73752f9b88e5a4fec860765
**S3Connection.delete_bucket** · `mimicdb/s3/connection.py` · partition: valid

Delete the bucket on S3 before removing it from MimicDB. If the delete fails (usually because the bucket is not empty), do not remove the bucket from the set.

```python
def delete_bucket(self, *args, **kwargs):
    """Delete the bucket on S3 before removing it from MimicDB.

    If the delete fails (usually because the bucket is not empty), do
    not remove the bucket from the set.
    """
    super(S3Connection, self).delete_bucket(*args, **kwargs)

    bucket = kwargs.get('bucket_name', args[0] if args else None)

    if bucket:
        mimicdb.backend.srem(tpl.connection, bucket)
```
Repo: nathancahill/mimicdb · Language: python · URL: https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/connection.py#L64-L74 · SHA: 9d0e8ebcba31d937f73752f9b88e5a4fec860765
**S3Connection.sync** · `mimicdb/s3/connection.py` · partition: valid

Sync either a list of buckets or the entire connection. Force all API calls to S3 and populate the database with the current state of S3.

:param \*string \*buckets: Buckets to sync

```python
def sync(self, *buckets):
    """Sync either a list of buckets or the entire connection.

    Force all API calls to S3 and populate the database with the current
    state of S3.

    :param \*string \*buckets: Buckets to sync
    """
    if buckets:
        for _bucket in buckets:
            for key in mimicdb.backend.smembers(tpl.bucket % _bucket):
                mimicdb.backend.delete(tpl.key % (_bucket, key))

            mimicdb.backend.delete(tpl.bucket % _bucket)

            bucket = self.get_bucket(_bucket, force=True)

            for key in bucket.list(force=True):
                mimicdb.backend.sadd(tpl.bucket % bucket.name, key.name)
                mimicdb.backend.hmset(tpl.key % (bucket.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
    else:
        for bucket in mimicdb.backend.smembers(tpl.connection):
            for key in mimicdb.backend.smembers(tpl.bucket % bucket):
                mimicdb.backend.delete(tpl.key % (bucket, key))

            mimicdb.backend.delete(tpl.bucket % bucket)

        for bucket in self.get_all_buckets(force=True):
            for key in bucket.list(force=True):
                mimicdb.backend.sadd(tpl.bucket % bucket.name, key.name)
                mimicdb.backend.hmset(tpl.key % (bucket.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
```
Repo: nathancahill/mimicdb · Language: python · URL: https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/connection.py#L76-L106 · SHA: 9d0e8ebcba31d937f73752f9b88e5a4fec860765
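When `sync` matters: after out-of-band changes to S3, the cached sets go stale, and a sync rebuilds them from a forced listing. A short sketch, reusing the hypothetical `conn` from the earlier example (bucket name is a placeholder):

```python
conn.sync('my-bucket')  # re-list one bucket from S3 and rebuild its key set and hashes
conn.sync()             # or resync every bucket known to the connection
```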
**Bucket.get_key** · `mimicdb/s3/bucket.py` · partition: valid

Return the key from MimicDB.

:param boolean force: If true, API call is forced to S3

```python
def get_key(self, *args, **kwargs):
    """Return the key from MimicDB.

    :param boolean force: If true, API call is forced to S3
    """
    if kwargs.pop('force', None):
        headers = kwargs.get('headers', {})
        headers['force'] = True
        kwargs['headers'] = headers

    return super(Bucket, self).get_key(*args, **kwargs)
```
|
def get_key(self, *args, **kwargs):
"""Return the key from MimicDB.
:param boolean force: If true, API call is forced to S3
"""
if kwargs.pop('force', None):
headers = kwargs.get('headers', {})
headers['force'] = True
kwargs['headers'] = headers
return super(Bucket, self).get_key(*args, **kwargs)
|
[
"Return",
"the",
"key",
"from",
"MimicDB",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/bucket.py#L26-L36
|
[
"def",
"get_key",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"kwargs",
".",
"pop",
"(",
"'force'",
",",
"None",
")",
":",
"headers",
"=",
"kwargs",
".",
"get",
"(",
"'headers'",
",",
"{",
"}",
")",
"headers",
"[",
"'force'",
"]",
"=",
"True",
"kwargs",
"[",
"'headers'",
"]",
"=",
"headers",
"return",
"super",
"(",
"Bucket",
",",
"self",
")",
".",
"get_key",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
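The pattern in Bucket.get_key — pop a force kwarg and smuggle it through the request headers so the boto call actually hits S3 — recurs in the bucket methods below. A hypothetical usage sketch (connection setup assumed, and not shown in this record):

# bucket = conn.get_bucket('photos')           # conn: a MimicDB S3 connection
# key = bucket.get_key('cat.jpg')              # answered from the Redis cache
# key = bucket.get_key('cat.jpg', force=True)  # round-trips to S3, then recaches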
|
valid
|
Bucket._get_key_internal
|
Return None if key is not in the bucket set.
Pass 'force' in the headers to check S3 for the key, and after fetching
the key from S3, save the metadata and key to the bucket set.
|
mimicdb/s3/bucket.py
|
def _get_key_internal(self, *args, **kwargs):
"""Return None if key is not in the bucket set.
Pass 'force' in the headers to check S3 for the key, and after fetching
the key from S3, save the metadata and key to the bucket set.
"""
if args[1] is not None and 'force' in args[1]:
key, res = super(Bucket, self)._get_key_internal(*args, **kwargs)
if key:
mimicdb.backend.sadd(tpl.bucket % self.name, key.name)
mimicdb.backend.hmset(tpl.key % (self.name, key.name),
dict(size=key.size,
md5=key.etag.strip('"')))
return key, res
key = None
if mimicdb.backend.sismember(tpl.bucket % self.name, args[0]):
key = Key(self)
key.name = args[0]
return key, None
|
def _get_key_internal(self, *args, **kwargs):
"""Return None if key is not in the bucket set.
Pass 'force' in the headers to check S3 for the key, and after fetching
the key from S3, save the metadata and key to the bucket set.
"""
if args[1] is not None and 'force' in args[1]:
key, res = super(Bucket, self)._get_key_internal(*args, **kwargs)
if key:
mimicdb.backend.sadd(tpl.bucket % self.name, key.name)
mimicdb.backend.hmset(tpl.key % (self.name, key.name),
dict(size=key.size,
md5=key.etag.strip('"')))
return key, res
key = None
if mimicdb.backend.sismember(tpl.bucket % self.name, args[0]):
key = Key(self)
key.name = args[0]
return key, None
|
[
"Return",
"None",
"if",
"key",
"is",
"not",
"in",
"the",
"bucket",
"set",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/bucket.py#L38-L60
|
[
"def",
"_get_key_internal",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"args",
"[",
"1",
"]",
"is",
"not",
"None",
"and",
"'force'",
"in",
"args",
"[",
"1",
"]",
":",
"key",
",",
"res",
"=",
"super",
"(",
"Bucket",
",",
"self",
")",
".",
"_get_key_internal",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"key",
":",
"mimicdb",
".",
"backend",
".",
"sadd",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
",",
"key",
".",
"name",
")",
"mimicdb",
".",
"backend",
".",
"hmset",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"name",
",",
"key",
".",
"name",
")",
",",
"dict",
"(",
"size",
"=",
"key",
".",
"size",
",",
"md5",
"=",
"key",
".",
"etag",
".",
"strip",
"(",
"'\"'",
")",
")",
")",
"return",
"key",
",",
"res",
"key",
"=",
"None",
"if",
"mimicdb",
".",
"backend",
".",
"sismember",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
",",
"args",
"[",
"0",
"]",
")",
":",
"key",
"=",
"Key",
"(",
"self",
")",
"key",
".",
"name",
"=",
"args",
"[",
"0",
"]",
"return",
"key",
",",
"None"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
Bucket.get_all_keys
|
Return a list of keys from MimicDB.
:param boolean force: If true, API call is forced to S3
|
mimicdb/s3/bucket.py
|
def get_all_keys(self, *args, **kwargs):
"""Return a list of keys from MimicDB.
:param boolean force: If true, API call is forced to S3
"""
if kwargs.pop('force', None):
headers = kwargs.get('headers', args[0] if len(args) else None) or dict()
headers['force'] = True
kwargs['headers'] = headers
return super(Bucket, self).get_all_keys(*args, **kwargs)
|
def get_all_keys(self, *args, **kwargs):
"""Return a list of keys from MimicDB.
:param boolean force: If true, API call is forced to S3
"""
if kwargs.pop('force', None):
headers = kwargs.get('headers', args[0] if len(args) else None) or dict()
headers['force'] = True
kwargs['headers'] = headers
return super(Bucket, self).get_all_keys(*args, **kwargs)
|
[
"Return",
"a",
"list",
"of",
"keys",
"from",
"MimicDB",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/bucket.py#L62-L72
|
[
"def",
"get_all_keys",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"kwargs",
".",
"pop",
"(",
"'force'",
",",
"None",
")",
":",
"headers",
"=",
"kwargs",
".",
"get",
"(",
"'headers'",
",",
"args",
"[",
"0",
"]",
"if",
"len",
"(",
"args",
")",
"else",
"None",
")",
"or",
"dict",
"(",
")",
"headers",
"[",
"'force'",
"]",
"=",
"True",
"kwargs",
"[",
"'headers'",
"]",
"=",
"headers",
"return",
"super",
"(",
"Bucket",
",",
"self",
")",
".",
"get_all_keys",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
Bucket.delete_keys
|
Remove each key or key name in an iterable from the bucket set.
|
mimicdb/s3/bucket.py
|
def delete_keys(self, *args, **kwargs):
"""Remove each key or key name in an iterable from the bucket set.
"""
ikeys = iter(kwargs.get('keys', args[0] if args else []))
while True:
try:
key = ikeys.next()
except StopIteration:
break
if isinstance(key, basestring):
mimicdb.backend.srem(tpl.bucket % self.name, key)
mimicdb.backend.delete(tpl.key % (self.name, key))
elif isinstance(key, BotoKey) or isinstance(key, Key):
mimicdb.backend.srem(tpl.bucket % self.name, key.name)
mimicdb.backend.delete(tpl.key % (self.name, key.name))
return super(Bucket, self).delete_keys(*args, **kwargs)
|
def delete_keys(self, *args, **kwargs):
"""Remove each key or key name in an iterable from the bucket set.
"""
ikeys = iter(kwargs.get('keys', args[0] if args else []))
while True:
try:
key = ikeys.next()
except StopIteration:
break
if isinstance(key, basestring):
mimicdb.backend.srem(tpl.bucket % self.name, key)
mimicdb.backend.delete(tpl.key % (self.name, key))
elif isinstance(key, BotoKey) or isinstance(key, Key):
mimicdb.backend.srem(tpl.bucket % self.name, key.name)
mimicdb.backend.delete(tpl.key % (self.name, key.name))
return super(Bucket, self).delete_keys(*args, **kwargs)
|
[
"Remove",
"each",
"key",
"or",
"key",
"name",
"in",
"an",
"iterable",
"from",
"the",
"bucket",
"set",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/bucket.py#L74-L92
|
[
"def",
"delete_keys",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"ikeys",
"=",
"iter",
"(",
"kwargs",
".",
"get",
"(",
"'keys'",
",",
"args",
"[",
"0",
"]",
"if",
"args",
"else",
"[",
"]",
")",
")",
"while",
"True",
":",
"try",
":",
"key",
"=",
"ikeys",
".",
"next",
"(",
")",
"except",
"StopIteration",
":",
"break",
"if",
"isinstance",
"(",
"key",
",",
"basestring",
")",
":",
"mimicdb",
".",
"backend",
".",
"srem",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
",",
"key",
")",
"mimicdb",
".",
"backend",
".",
"delete",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"name",
",",
"key",
")",
")",
"elif",
"isinstance",
"(",
"key",
",",
"BotoKey",
")",
"or",
"isinstance",
"(",
"key",
",",
"Key",
")",
":",
"mimicdb",
".",
"backend",
".",
"srem",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
",",
"key",
".",
"name",
")",
"mimicdb",
".",
"backend",
".",
"delete",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"name",
",",
"key",
".",
"name",
")",
")",
"return",
"super",
"(",
"Bucket",
",",
"self",
")",
".",
"delete_keys",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
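Note that ikeys.next() and basestring pin delete_keys to Python 2. A sketch of the same loop in Python 3 idiom — a hypothetical port, not code from the repository:

# Python 3 rendering of the loop above; self, mimicdb, and tpl are the
# names used in the record.
for key in kwargs.get('keys', args[0] if args else []):
    name = key if isinstance(key, str) else key.name
    mimicdb.backend.srem(tpl.bucket % self.name, name)
    mimicdb.backend.delete(tpl.key % (self.name, name))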
|
valid
|
Bucket._delete_key_internal
|
Remove key name from bucket set.
|
mimicdb/s3/bucket.py
|
def _delete_key_internal(self, *args, **kwargs):
"""Remove key name from bucket set.
"""
mimicdb.backend.srem(tpl.bucket % self.name, args[0])
mimicdb.backend.delete(tpl.key % (self.name, args[0]))
return super(Bucket, self)._delete_key_internal(*args, **kwargs)
|
def _delete_key_internal(self, *args, **kwargs):
"""Remove key name from bucket set.
"""
mimicdb.backend.srem(tpl.bucket % self.name, args[0])
mimicdb.backend.delete(tpl.key % (self.name, args[0]))
return super(Bucket, self)._delete_key_internal(*args, **kwargs)
|
[
"Remove",
"key",
"name",
"from",
"bucket",
"set",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/bucket.py#L94-L100
|
[
"def",
"_delete_key_internal",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"mimicdb",
".",
"backend",
".",
"srem",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
",",
"args",
"[",
"0",
"]",
")",
"mimicdb",
".",
"backend",
".",
"delete",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"name",
",",
"args",
"[",
"0",
"]",
")",
")",
"return",
"super",
"(",
"Bucket",
",",
"self",
")",
".",
"_delete_key_internal",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
Bucket.list
|
Return an iterable of keys from MimicDB.
:param boolean force: If true, API call is forced to S3
|
mimicdb/s3/bucket.py
|
def list(self, *args, **kwargs):
"""Return an iterable of keys from MimicDB.
:param boolean force: If true, API call is forced to S3
"""
if kwargs.pop('force', None):
headers = kwargs.get('headers', args[4] if len(args) > 4 else None) or dict()
headers['force'] = True
kwargs['headers'] = headers
for key in super(Bucket, self).list(*args, **kwargs):
yield key
else:
prefix = kwargs.get('prefix', args[0] if args else '')
for key in mimicdb.backend.smembers(tpl.bucket % self.name):
if key.startswith(prefix):
k = Key(self, key)
meta = mimicdb.backend.hgetall(tpl.key % (self.name, key))
if meta:
k._load_meta(meta['size'], meta['md5'])
yield k
|
def list(self, *args, **kwargs):
"""Return an iterable of keys from MimicDB.
:param boolean force: If true, API call is forced to S3
"""
if kwargs.pop('force', None):
headers = kwargs.get('headers', args[4] if len(args) > 4 else None) or dict()
headers['force'] = True
kwargs['headers'] = headers
for key in super(Bucket, self).list(*args, **kwargs):
yield key
else:
prefix = kwargs.get('prefix', args[0] if args else '')
for key in mimicdb.backend.smembers(tpl.bucket % self.name):
if key.startswith(prefix):
k = Key(self, key)
meta = mimicdb.backend.hgetall(tpl.key % (self.name, key))
if meta:
k._load_meta(meta['size'], meta['md5'])
yield k
|
[
"Return",
"an",
"iterable",
"of",
"keys",
"from",
"MimicDB",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/bucket.py#L102-L127
|
[
"def",
"list",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"kwargs",
".",
"pop",
"(",
"'force'",
",",
"None",
")",
":",
"headers",
"=",
"kwargs",
".",
"get",
"(",
"'headers'",
",",
"args",
"[",
"4",
"]",
"if",
"len",
"(",
"args",
")",
">",
"4",
"else",
"None",
")",
"or",
"dict",
"(",
")",
"headers",
"[",
"'force'",
"]",
"=",
"True",
"kwargs",
"[",
"'headers'",
"]",
"=",
"headers",
"for",
"key",
"in",
"super",
"(",
"Bucket",
",",
"self",
")",
".",
"list",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"yield",
"key",
"else",
":",
"prefix",
"=",
"kwargs",
".",
"get",
"(",
"'prefix'",
",",
"args",
"[",
"0",
"]",
"if",
"args",
"else",
"''",
")",
"for",
"key",
"in",
"mimicdb",
".",
"backend",
".",
"smembers",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
")",
":",
"if",
"key",
".",
"startswith",
"(",
"prefix",
")",
":",
"k",
"=",
"Key",
"(",
"self",
",",
"key",
")",
"meta",
"=",
"mimicdb",
".",
"backend",
".",
"hgetall",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"name",
",",
"key",
")",
")",
"if",
"meta",
":",
"k",
".",
"_load_meta",
"(",
"meta",
"[",
"'size'",
"]",
",",
"meta",
"[",
"'md5'",
"]",
")",
"yield",
"k"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
Bucket._get_all
|
If 'force' is in the headers, retrieve the list of keys from S3.
Otherwise, use the list() function to retrieve the keys from MimicDB.
|
mimicdb/s3/bucket.py
|
def _get_all(self, *args, **kwargs):
"""If 'force' is in the headers, retrieve the list of keys from S3.
Otherwise, use the list() function to retrieve the keys from MimicDB.
"""
headers = kwargs.get('headers', args[2] if len(args) > 2 else None) or dict()
if 'force' in headers:
keys = super(Bucket, self)._get_all(*args, **kwargs)
for key in keys:
mimicdb.backend.sadd(tpl.bucket % self.name, key.name)
mimicdb.backend.hmset(tpl.key % (self.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
key.name = key.name
return keys
prefix = kwargs.get('prefix', '')
return list(self.list(prefix=prefix))
|
def _get_all(self, *args, **kwargs):
"""If 'force' is in the headers, retrieve the list of keys from S3.
Otherwise, use the list() function to retrieve the keys from MimicDB.
"""
headers = kwargs.get('headers', args[2] if len(args) > 2 else None) or dict()
if 'force' in headers:
keys = super(Bucket, self)._get_all(*args, **kwargs)
for key in keys:
mimicdb.backend.sadd(tpl.bucket % self.name, key.name)
mimicdb.backend.hmset(tpl.key % (self.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
key.name = key.name
return keys
prefix = kwargs.get('prefix', '')
return list(self.list(prefix=prefix))
|
[
"If",
"force",
"is",
"in",
"the",
"headers",
"retrieve",
"the",
"list",
"of",
"keys",
"from",
"S3",
".",
"Otherwise",
"use",
"the",
"list",
"()",
"function",
"to",
"retrieve",
"the",
"keys",
"from",
"MimicDB",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/bucket.py#L129-L148
|
[
"def",
"_get_all",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"headers",
"=",
"kwargs",
".",
"get",
"(",
"'headers'",
",",
"args",
"[",
"2",
"]",
"if",
"len",
"(",
"args",
")",
">",
"2",
"else",
"None",
")",
"or",
"dict",
"(",
")",
"if",
"'force'",
"in",
"headers",
":",
"keys",
"=",
"super",
"(",
"Bucket",
",",
"self",
")",
".",
"_get_all",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"for",
"key",
"in",
"keys",
":",
"mimicdb",
".",
"backend",
".",
"sadd",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
",",
"key",
".",
"name",
")",
"mimicdb",
".",
"backend",
".",
"hmset",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"name",
",",
"key",
".",
"name",
")",
",",
"dict",
"(",
"size",
"=",
"key",
".",
"size",
",",
"md5",
"=",
"key",
".",
"etag",
".",
"strip",
"(",
"'\"'",
")",
")",
")",
"key",
".",
"name",
"=",
"key",
".",
"name",
"return",
"keys",
"prefix",
"=",
"kwargs",
".",
"get",
"(",
"'prefix'",
",",
"''",
")",
"return",
"list",
"(",
"self",
".",
"list",
"(",
"prefix",
"=",
"prefix",
")",
")"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
Bucket.sync
|
Sync a bucket.
Force all API calls to S3 and populate the database with the current state of S3.
|
mimicdb/s3/bucket.py
|
def sync(self):
"""Sync a bucket.
Force all API calls to S3 and populate the database with the current state of S3.
"""
for key in mimicdb.backend.smembers(tpl.bucket % self.name):
mimicdb.backend.delete(tpl.key % (self.name, key))
mimicdb.backend.delete(tpl.bucket % self.name)
mimicdb.backend.sadd(tpl.connection, self.name)
for key in self.list(force=True):
mimicdb.backend.sadd(tpl.bucket % self.name, key.name)
mimicdb.backend.hmset(tpl.key % (self.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
|
def sync(self):
"""Sync a bucket.
Force all API calls to S3 and populate the database with the current state of S3.
"""
for key in mimicdb.backend.smembers(tpl.bucket % self.name):
mimicdb.backend.delete(tpl.key % (self.name, key))
mimicdb.backend.delete(tpl.bucket % self.name)
mimicdb.backend.sadd(tpl.connection, self.name)
for key in self.list(force=True):
mimicdb.backend.sadd(tpl.bucket % self.name, key.name)
mimicdb.backend.hmset(tpl.key % (self.name, key.name), dict(size=key.size, md5=key.etag.strip('"')))
|
[
"Sync",
"a",
"bucket",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/bucket.py#L150-L163
|
[
"def",
"sync",
"(",
"self",
")",
":",
"for",
"key",
"in",
"mimicdb",
".",
"backend",
".",
"smembers",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
")",
":",
"mimicdb",
".",
"backend",
".",
"delete",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"name",
",",
"key",
")",
")",
"mimicdb",
".",
"backend",
".",
"delete",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
")",
"mimicdb",
".",
"backend",
".",
"sadd",
"(",
"tpl",
".",
"connection",
",",
"self",
".",
"name",
")",
"for",
"key",
"in",
"self",
".",
"list",
"(",
"force",
"=",
"True",
")",
":",
"mimicdb",
".",
"backend",
".",
"sadd",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"name",
",",
"key",
".",
"name",
")",
"mimicdb",
".",
"backend",
".",
"hmset",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"name",
",",
"key",
".",
"name",
")",
",",
"dict",
"(",
"size",
"=",
"key",
".",
"size",
",",
"md5",
"=",
"key",
".",
"etag",
".",
"strip",
"(",
"'\"'",
")",
")",
")"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
nucnorm
|
Nuclear norm
Parameters
----------
penalty : float
nuclear norm penalty hyperparameter
newshape : tuple, optional
Desired shape of the parameters to apply the nuclear norm to. The given
parameters are reshaped to an array with this shape, or not reshaped if
the value of newshape is None. (Default: None)
|
descent/proxops.py
|
def nucnorm(x, rho, penalty, newshape=None):
"""
Nuclear norm
Parameters
----------
penalty : float
nuclear norm penalty hyperparameter
newshape : tuple, optional
Desired shape of the parameters to apply the nuclear norm to. The given
parameters are reshaped to an array with this shape, or not reshaped if
the value of newshape is None. (Default: None)
"""
orig_shape = x.shape
if newshape is not None:
x = x.reshape(newshape)
u, s, v = np.linalg.svd(x, full_matrices=False)
sthr = np.maximum(s - (penalty / rho), 0)
return np.linalg.multi_dot((u, np.diag(sthr), v)).reshape(orig_shape)
|
def nucnorm(x, rho, penalty, newshape=None):
"""
Nuclear norm
Parameters
----------
penalty : float
nuclear norm penalty hyperparameter
newshape : tuple, optional
Desired shape of the parameters to apply the nuclear norm to. The given
parameters are reshaped to an array with this shape, or not reshaped if
the value of newshape is None. (Default: None)
"""
orig_shape = x.shape
if newshape is not None:
x = x.reshape(newshape)
u, s, v = np.linalg.svd(x, full_matrices=False)
sthr = np.maximum(s - (penalty / rho), 0)
return np.linalg.multi_dot((u, np.diag(sthr), v)).reshape(orig_shape)
|
[
"Nuclear",
"norm"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/proxops.py#L62-L85
|
[
"def",
"nucnorm",
"(",
"x",
",",
"rho",
",",
"penalty",
",",
"newshape",
"=",
"None",
")",
":",
"orig_shape",
"=",
"x",
".",
"shape",
"if",
"newshape",
"is",
"not",
"None",
":",
"x",
"=",
"x",
".",
"reshape",
"(",
"newshape",
")",
"u",
",",
"s",
",",
"v",
"=",
"np",
".",
"linalg",
".",
"svd",
"(",
"x",
",",
"full_matrices",
"=",
"False",
")",
"sthr",
"=",
"np",
".",
"maximum",
"(",
"s",
"-",
"(",
"penalty",
"/",
"rho",
")",
",",
"0",
")",
"return",
"np",
".",
"linalg",
".",
"multi_dot",
"(",
"(",
"u",
",",
"np",
".",
"diag",
"(",
"sthr",
")",
",",
"v",
")",
")",
".",
"reshape",
"(",
"orig_shape",
")"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
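nucnorm is singular-value soft thresholding: shrink each singular value by penalty/rho, clip at zero, and reassemble. A self-contained NumPy check of that recipe, independent of the descent package:

import numpy as np

rng = np.random.RandomState(0)
x = rng.randn(5, 4)
u, s, v = np.linalg.svd(x, full_matrices=False)
sthr = np.maximum(s - 0.5, 0)        # penalty / rho = 0.5
x_prox = u @ np.diag(sthr) @ v
# every singular value shrinks by exactly 0.5, or hits zero
print(s)
print(np.linalg.svd(x_prox, compute_uv=False))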
|
valid
|
sparse
|
Proximal operator for the l1-norm: soft thresholding
Parameters
----------
penalty : float
Strength or weight on the l1-norm
|
descent/proxops.py
|
def sparse(x, rho, penalty):
"""
Proximal operator for the l1-norm: soft thresholding
Parameters
----------
penalty : float
Strength or weight on the l1-norm
"""
lmbda = penalty / rho
return (x - lmbda) * (x >= lmbda) + (x + lmbda) * (x <= -lmbda)
|
def sparse(x, rho, penalty):
"""
Proximal operator for the l1-norm: soft thresholding
Parameters
----------
penalty : float
Strength or weight on the l1-norm
"""
lmbda = penalty / rho
return (x - lmbda) * (x >= lmbda) + (x + lmbda) * (x <= -lmbda)
|
[
"Proximal",
"operator",
"for",
"the",
"l1",
"-",
"norm",
":",
"soft",
"thresholding"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/proxops.py#L89-L100
|
[
"def",
"sparse",
"(",
"x",
",",
"rho",
",",
"penalty",
")",
":",
"lmbda",
"=",
"penalty",
"/",
"rho",
"return",
"(",
"x",
"-",
"lmbda",
")",
"*",
"(",
"x",
">=",
"lmbda",
")",
"+",
"(",
"x",
"+",
"lmbda",
")",
"*",
"(",
"x",
"<=",
"-",
"lmbda",
")"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
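With lambda = penalty / rho, the two masked terms implement elementwise soft thresholding: entries inside [-lambda, lambda] map to zero and the rest shrink toward zero by lambda. A quick standalone check:

import numpy as np

x = np.array([-3., -0.5, 0., 0.5, 3.])
lmbda = 1.0
out = (x - lmbda) * (x >= lmbda) + (x + lmbda) * (x <= -lmbda)
print(out)   # [-2.  0.  0.  0.  2.]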
|
valid
|
lbfgs
|
Minimize the proximal operator of a given objective using L-BFGS
Parameters
----------
f_df : function
Returns the objective and gradient of the function to minimize
maxiter : int
Maximum number of L-BFGS iterations
|
descent/proxops.py
|
def lbfgs(x, rho, f_df, maxiter=20):
"""
Minimize the proximal operator of a given objective using L-BFGS
Parameters
----------
f_df : function
Returns the objective and gradient of the function to minimize
maxiter : int
Maximum number of L-BFGS iterations
"""
def f_df_augmented(theta):
f, df = f_df(theta)
obj = f + (rho / 2.) * np.linalg.norm(theta - x) ** 2
grad = df + rho * (theta - x)
return obj, grad
res = scipy_minimize(f_df_augmented, x, jac=True, method='L-BFGS-B',
options={'maxiter': maxiter, 'disp': False})
return res.x
|
def lbfgs(x, rho, f_df, maxiter=20):
"""
Minimize the proximal operator of a given objective using L-BFGS
Parameters
----------
f_df : function
Returns the objective and gradient of the function to minimize
maxiter : int
Maximum number of L-BFGS iterations
"""
def f_df_augmented(theta):
f, df = f_df(theta)
obj = f + (rho / 2.) * np.linalg.norm(theta - x) ** 2
grad = df + rho * (theta - x)
return obj, grad
res = scipy_minimize(f_df_augmented, x, jac=True, method='L-BFGS-B',
options={'maxiter': maxiter, 'disp': False})
return res.x
|
[
"Minimize",
"the",
"proximal",
"operator",
"of",
"a",
"given",
"objective",
"using",
"L",
"-",
"BFGS"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/proxops.py#L141-L163
|
[
"def",
"lbfgs",
"(",
"x",
",",
"rho",
",",
"f_df",
",",
"maxiter",
"=",
"20",
")",
":",
"def",
"f_df_augmented",
"(",
"theta",
")",
":",
"f",
",",
"df",
"=",
"f_df",
"(",
"theta",
")",
"obj",
"=",
"f",
"+",
"(",
"rho",
"/",
"2.",
")",
"*",
"np",
".",
"linalg",
".",
"norm",
"(",
"theta",
"-",
"x",
")",
"**",
"2",
"grad",
"=",
"df",
"+",
"rho",
"*",
"(",
"theta",
"-",
"x",
")",
"return",
"obj",
",",
"grad",
"res",
"=",
"scipy_minimize",
"(",
"f_df_augmented",
",",
"x",
",",
"jac",
"=",
"True",
",",
"method",
"=",
"'L-BFGS-B'",
",",
"options",
"=",
"{",
"'maxiter'",
":",
"maxiter",
",",
"'disp'",
":",
"False",
"}",
")",
"return",
"res",
".",
"x"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
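The augmented objective adds the quadratic penalty (rho / 2) * ||theta - x||**2, so the minimizer is the proximal point of x. For f(theta) = 0.5 * ||theta||**2 that point has the closed form rho * x / (1 + rho), which gives an easy correctness check; the sketch below mirrors the record's construction:

import numpy as np
from scipy.optimize import minimize as scipy_minimize

def f_df(theta):
    return 0.5 * np.sum(theta ** 2), theta

x, rho = np.array([2.0, -4.0]), 1.0

def f_df_augmented(theta):
    f, df = f_df(theta)
    return f + (rho / 2.) * np.linalg.norm(theta - x) ** 2, df + rho * (theta - x)

res = scipy_minimize(f_df_augmented, x, jac=True, method='L-BFGS-B')
print(res.x, rho * x / (1 + rho))   # both approximately [ 1. -2.]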
|
valid
|
smooth
|
Applies a smoothing operator along one dimension.
Currently only accepts a matrix as input.
Parameters
----------
penalty : float
axis : int, optional
Axis along which to apply the smoothing (Default: 0)
newshape : tuple, optional
Desired shape of the parameters to apply the nuclear norm to. The given
parameters are reshaped to an array with this shape, or not reshaped if
the value of newshape is None. (Default: None)
|
descent/proxops.py
|
def smooth(x, rho, penalty, axis=0, newshape=None):
"""
Applies a smoothing operator along one dimension.
Currently only accepts a matrix as input.
Parameters
----------
penalty : float
axis : int, optional
Axis along which to apply the smoothing (Default: 0)
newshape : tuple, optional
Desired shape of the parameters to apply the nuclear norm to. The given
parameters are reshaped to an array with this shape, or not reshaped if
the value of newshape is None. (Default: None)
"""
orig_shape = x.shape
if newshape is not None:
x = x.reshape(newshape)
# Apply Laplacian smoothing (l2 norm on the parameters multiplied by
# the laplacian)
n = x.shape[axis]
lap_op = spdiags([(2 + rho / penalty) * np.ones(n),
-1 * np.ones(n), -1 * np.ones(n)],
[0, -1, 1], n, n, format='csc')
A = penalty * lap_op
b = rho * np.rollaxis(x, axis, 0)
return np.rollaxis(spsolve(A, b), axis, 0).reshape(orig_shape)
|
def smooth(x, rho, penalty, axis=0, newshape=None):
"""
Applies a smoothing operator along one dimension.
Currently only accepts a matrix as input.
Parameters
----------
penalty : float
axis : int, optional
Axis along which to apply the smoothing (Default: 0)
newshape : tuple, optional
Desired shape of the parameters to apply the nuclear norm to. The given
parameters are reshaped to an array with this shape, or not reshaped if
the value of newshape is None. (Default: None)
"""
orig_shape = x.shape
if newshape is not None:
x = x.reshape(newshape)
# Apply Laplacian smoothing (l2 norm on the parameters multiplied by
# the laplacian)
n = x.shape[axis]
lap_op = spdiags([(2 + rho / penalty) * np.ones(n),
-1 * np.ones(n), -1 * np.ones(n)],
[0, -1, 1], n, n, format='csc')
A = penalty * lap_op
b = rho * np.rollaxis(x, axis, 0)
return np.rollaxis(spsolve(A, b), axis, 0).reshape(orig_shape)
|
[
"Applies",
"a",
"smoothing",
"operator",
"along",
"one",
"dimension"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/proxops.py#L186-L219
|
[
"def",
"smooth",
"(",
"x",
",",
"rho",
",",
"penalty",
",",
"axis",
"=",
"0",
",",
"newshape",
"=",
"None",
")",
":",
"orig_shape",
"=",
"x",
".",
"shape",
"if",
"newshape",
"is",
"not",
"None",
":",
"x",
"=",
"x",
".",
"reshape",
"(",
"newshape",
")",
"# Apply Laplacian smoothing (l2 norm on the parameters multiplied by",
"# the laplacian)",
"n",
"=",
"x",
".",
"shape",
"[",
"axis",
"]",
"lap_op",
"=",
"spdiags",
"(",
"[",
"(",
"2",
"+",
"rho",
"/",
"penalty",
")",
"*",
"np",
".",
"ones",
"(",
"n",
")",
",",
"-",
"1",
"*",
"np",
".",
"ones",
"(",
"n",
")",
",",
"-",
"1",
"*",
"np",
".",
"ones",
"(",
"n",
")",
"]",
",",
"[",
"0",
",",
"-",
"1",
",",
"1",
"]",
",",
"n",
",",
"n",
",",
"format",
"=",
"'csc'",
")",
"A",
"=",
"penalty",
"*",
"lap_op",
"b",
"=",
"rho",
"*",
"np",
".",
"rollaxis",
"(",
"x",
",",
"axis",
",",
"0",
")",
"return",
"np",
".",
"rollaxis",
"(",
"spsolve",
"(",
"A",
",",
"b",
")",
",",
"axis",
",",
"0",
")",
".",
"reshape",
"(",
"orig_shape",
")"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
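The operator smooth() inverts is tridiagonal — (2 + rho / penalty) on the diagonal, -1 on both off-diagonals — so each slice along axis is smoothed by a single sparse solve. Printing a small case makes the structure visible:

import numpy as np
from scipy.sparse import spdiags

n, rho, penalty = 4, 1.0, 1.0
lap_op = spdiags([(2 + rho / penalty) * np.ones(n),
                  -np.ones(n), -np.ones(n)],
                 [0, -1, 1], n, n, format='csc')
print((penalty * lap_op).toarray())
# [[ 3. -1.  0.  0.]
#  [-1.  3. -1.  0.]
#  [ 0. -1.  3. -1.]
#  [ 0.  0. -1.  3.]]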
|
valid
|
sdcone
|
Projection onto the semidefinite cone
|
descent/proxops.py
|
def sdcone(x, rho):
"""Projection onto the semidefinite cone"""
U, V = np.linalg.eigh(x)
return V.dot(np.diag(np.maximum(U, 0)).dot(V.T))
|
def sdcone(x, rho):
"""Projection onto the semidefinite cone"""
U, V = np.linalg.eigh(x)
return V.dot(np.diag(np.maximum(U, 0)).dot(V.T))
|
[
"Projection",
"onto",
"the",
"semidefinite",
"cone"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/proxops.py#L223-L226
|
[
"def",
"sdcone",
"(",
"x",
",",
"rho",
")",
":",
"U",
",",
"V",
"=",
"np",
".",
"linalg",
".",
"eigh",
"(",
"x",
")",
"return",
"V",
".",
"dot",
"(",
"np",
".",
"diag",
"(",
"np",
".",
"maximum",
"(",
"U",
",",
"0",
")",
")",
".",
"dot",
"(",
"V",
".",
"T",
")",
")"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
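A two-by-two check of the semidefinite projection: the negative eigenvalue is clipped at zero before reassembly. (The record names the eigenvalues U and the eigenvectors V, the reverse of NumPy's usual w, v convention.)

import numpy as np

x = np.array([[1., 2.], [2., 1.]])   # eigenvalues -1 and 3
U, V = np.linalg.eigh(x)
proj = V.dot(np.diag(np.maximum(U, 0)).dot(V.T))
print(np.linalg.eigvalsh(proj))      # [0. 3.] up to rounding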
|
valid
|
simplex
|
Projection onto the probability simplex
http://arxiv.org/pdf/1309.1541v1.pdf
|
descent/proxops.py
|
def simplex(x, rho):
"""
Projection onto the probability simplex
http://arxiv.org/pdf/1309.1541v1.pdf
"""
# sort the elements in descending order
u = np.flipud(np.sort(x.ravel()))
lambdas = (1 - np.cumsum(u)) / (1. + np.arange(u.size))
ix = np.where(u + lambdas > 0)[0].max()
return np.maximum(x + lambdas[ix], 0)
|
def simplex(x, rho):
"""
Projection onto the probability simplex
http://arxiv.org/pdf/1309.1541v1.pdf
"""
# sort the elements in descending order
u = np.flipud(np.sort(x.ravel()))
lambdas = (1 - np.cumsum(u)) / (1. + np.arange(u.size))
ix = np.where(u + lambdas > 0)[0].max()
return np.maximum(x + lambdas[ix], 0)
|
[
"Projection",
"onto",
"the",
"probability",
"simplex"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/proxops.py#L236-L247
|
[
"def",
"simplex",
"(",
"x",
",",
"rho",
")",
":",
"# sort the elements in descending order",
"u",
"=",
"np",
".",
"flipud",
"(",
"np",
".",
"sort",
"(",
"x",
".",
"ravel",
"(",
")",
")",
")",
"lambdas",
"=",
"(",
"1",
"-",
"np",
".",
"cumsum",
"(",
"u",
")",
")",
"/",
"(",
"1.",
"+",
"np",
".",
"arange",
"(",
"u",
".",
"size",
")",
")",
"ix",
"=",
"np",
".",
"where",
"(",
"u",
"+",
"lambdas",
">",
"0",
")",
"[",
"0",
"]",
".",
"max",
"(",
")",
"return",
"np",
".",
"maximum",
"(",
"x",
"+",
"lambdas",
"[",
"ix",
"]",
",",
"0",
")"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
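Following the referenced algorithm, u is the input sorted in descending order, lambdas are the candidate shifts, and the largest index with u + lambdas > 0 selects the shift that lands the result on the simplex. A standalone check that the output is a valid probability vector:

import numpy as np

x = np.array([0.5, 1.2, -0.3])
u = np.flipud(np.sort(x.ravel()))
lambdas = (1 - np.cumsum(u)) / (1. + np.arange(u.size))
ix = np.where(u + lambdas > 0)[0].max()
p = np.maximum(x + lambdas[ix], 0)
print(p, p.sum())   # [0.15 0.85 0.  ] 1.0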
|
valid
|
columns
|
Applies a proximal operator to the columns of a matrix
|
descent/proxops.py
|
def columns(x, rho, proxop):
"""Applies a proximal operator to the columns of a matrix"""
xnext = np.zeros_like(x)
for ix in range(x.shape[1]):
xnext[:, ix] = proxop(x[:, ix], rho)
return xnext
|
def columns(x, rho, proxop):
"""Applies a proximal operator to the columns of a matrix"""
xnext = np.zeros_like(x)
for ix in range(x.shape[1]):
xnext[:, ix] = proxop(x[:, ix], rho)
return xnext
|
[
"Applies",
"a",
"proximal",
"operator",
"to",
"the",
"columns",
"of",
"a",
"matrix"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/proxops.py#L251-L259
|
[
"def",
"columns",
"(",
"x",
",",
"rho",
",",
"proxop",
")",
":",
"xnext",
"=",
"np",
".",
"zeros_like",
"(",
"x",
")",
"for",
"ix",
"in",
"range",
"(",
"x",
".",
"shape",
"[",
"1",
"]",
")",
":",
"xnext",
"[",
":",
",",
"ix",
"]",
"=",
"proxop",
"(",
"x",
"[",
":",
",",
"ix",
"]",
",",
"rho",
")",
"return",
"xnext"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
|
valid
|
fantope
|
Projection onto the fantope [1]_
.. [1] Vu, Vincent Q., et al. "Fantope projection and selection: A
near-optimal convex relaxation of sparse PCA." Advances in
neural information processing systems. 2013.
|
descent/proxops.py
|
def fantope(x, rho, dim, tol=1e-4):
"""
Projection onto the fantope [1]_
.. [1] Vu, Vincent Q., et al. "Fantope projection and selection: A
near-optimal convex relaxation of sparse PCA." Advances in
neural information processing systems. 2013.
"""
U, V = np.linalg.eigh(x)
minval, maxval = np.maximum(U.min(), 0), np.maximum(U.max(), 20 * dim)
while True:
theta = 0.5 * (maxval + minval)
thr_eigvals = np.minimum(np.maximum((U - theta), 0), 1)
constraint = np.sum(thr_eigvals)
if np.abs(constraint - dim) <= tol:
break
elif constraint < dim:
maxval = theta
elif constraint > dim:
minval = theta
else:
break
return np.linalg.multi_dot((V, np.diag(thr_eigvals), V.T))
|
def fantope(x, rho, dim, tol=1e-4):
"""
Projection onto the fantope [1]_
.. [1] Vu, Vincent Q., et al. "Fantope projection and selection: A
near-optimal convex relaxation of sparse PCA." Advances in
neural information processing systems. 2013.
"""
U, V = np.linalg.eigh(x)
minval, maxval = np.maximum(U.min(), 0), np.maximum(U.max(), 20 * dim)
while True:
theta = 0.5 * (maxval + minval)
thr_eigvals = np.minimum(np.maximum((U - theta), 0), 1)
constraint = np.sum(thr_eigvals)
if np.abs(constraint - dim) <= tol:
break
elif constraint < dim:
maxval = theta
elif constraint > dim:
minval = theta
else:
break
return np.linalg.multi_dot((V, np.diag(thr_eigvals), V.T))
|
[
"Projection",
"onto",
"the",
"fantope",
"[",
"1",
"]",
"_"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/proxops.py#L269-L300
|
[
"def",
"fantope",
"(",
"x",
",",
"rho",
",",
"dim",
",",
"tol",
"=",
"1e-4",
")",
":",
"U",
",",
"V",
"=",
"np",
".",
"linalg",
".",
"eigh",
"(",
"x",
")",
"minval",
",",
"maxval",
"=",
"np",
".",
"maximum",
"(",
"U",
".",
"min",
"(",
")",
",",
"0",
")",
",",
"np",
".",
"maximum",
"(",
"U",
".",
"max",
"(",
")",
",",
"20",
"*",
"dim",
")",
"while",
"True",
":",
"theta",
"=",
"0.5",
"*",
"(",
"maxval",
"+",
"minval",
")",
"thr_eigvals",
"=",
"np",
".",
"minimum",
"(",
"np",
".",
"maximum",
"(",
"(",
"U",
"-",
"theta",
")",
",",
"0",
")",
",",
"1",
")",
"constraint",
"=",
"np",
".",
"sum",
"(",
"thr_eigvals",
")",
"if",
"np",
".",
"abs",
"(",
"constraint",
"-",
"dim",
")",
"<=",
"tol",
":",
"break",
"elif",
"constraint",
"<",
"dim",
":",
"maxval",
"=",
"theta",
"elif",
"constraint",
">",
"dim",
":",
"minval",
"=",
"theta",
"else",
":",
"break",
"return",
"np",
".",
"linalg",
".",
"multi_dot",
"(",
"(",
"V",
",",
"np",
".",
"diag",
"(",
"thr_eigvals",
")",
",",
"V",
".",
"T",
")",
")"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
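The bisection hunts for the eigenvalue threshold theta at which the clipped spectrum sums to dim, so the trace of the projection should land within tol of the target. A sketch of that invariant, assuming descent.proxops is importable:

import numpy as np
from descent.proxops import fantope

rng = np.random.RandomState(0)
a = rng.randn(6, 6)
x = (a + a.T) / 2                 # symmetric input
proj = fantope(x, rho=1.0, dim=2)
print(np.trace(proj))             # approximately 2, within tol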
|
valid
|
gradient_optimizer
|
Turns a coroutine into a gradient based optimizer.
|
descent/main.py
|
def gradient_optimizer(coro):
"""Turns a coroutine into a gradient based optimizer."""
class GradientOptimizer(Optimizer):
@wraps(coro)
def __init__(self, *args, **kwargs):
self.algorithm = coro(*args, **kwargs)
self.algorithm.send(None)
self.operators = []
def set_transform(self, func):
self.transform = compose(destruct, func, self.restruct)
def minimize(self, f_df, x0, display=sys.stdout, maxiter=1e3):
self.display = display
self.theta = x0
# setup
xk = self.algorithm.send(destruct(x0).copy())
store = defaultdict(list)
runtimes = []
if len(self.operators) == 0:
self.operators = [proxops.identity()]
# setup
obj, grad = wrap(f_df, x0)
transform = compose(destruct, *reversed(self.operators), self.restruct)
self.optional_print(tp.header(['Iteration', 'Objective', '||Grad||', 'Runtime']))
try:
for k in count():
# setup
tstart = perf_counter()
f = obj(xk)
df = grad(xk)
xk = transform(self.algorithm.send(df))
runtimes.append(perf_counter() - tstart)
store['f'].append(f)
# Update display
self.optional_print(tp.row([k,
f,
np.linalg.norm(destruct(df)),
tp.humantime(runtimes[-1])]))
if k >= maxiter:
break
except KeyboardInterrupt:
pass
self.optional_print(tp.bottom(4))
# cleanup
self.optional_print(u'\u279b Final objective: {}'.format(store['f'][-1]))
self.optional_print(u'\u279b Total runtime: {}'.format(tp.humantime(sum(runtimes))))
self.optional_print(u'\u279b Per iteration runtime: {} +/- {}'.format(
tp.humantime(np.mean(runtimes)),
tp.humantime(np.std(runtimes)),
))
# result
return OptimizeResult({
'x': self.restruct(xk),
'f': f,
'df': self.restruct(df),
'k': k,
'obj': np.array(store['f']),
})
return GradientOptimizer
|
def gradient_optimizer(coro):
"""Turns a coroutine into a gradient based optimizer."""
class GradientOptimizer(Optimizer):
@wraps(coro)
def __init__(self, *args, **kwargs):
self.algorithm = coro(*args, **kwargs)
self.algorithm.send(None)
self.operators = []
def set_transform(self, func):
self.transform = compose(destruct, func, self.restruct)
def minimize(self, f_df, x0, display=sys.stdout, maxiter=1e3):
self.display = display
self.theta = x0
# setup
xk = self.algorithm.send(destruct(x0).copy())
store = defaultdict(list)
runtimes = []
if len(self.operators) == 0:
self.operators = [proxops.identity()]
# setup
obj, grad = wrap(f_df, x0)
transform = compose(destruct, *reversed(self.operators), self.restruct)
self.optional_print(tp.header(['Iteration', 'Objective', '||Grad||', 'Runtime']))
try:
for k in count():
# setup
tstart = perf_counter()
f = obj(xk)
df = grad(xk)
xk = transform(self.algorithm.send(df))
runtimes.append(perf_counter() - tstart)
store['f'].append(f)
# Update display
self.optional_print(tp.row([k,
f,
np.linalg.norm(destruct(df)),
tp.humantime(runtimes[-1])]))
if k >= maxiter:
break
except KeyboardInterrupt:
pass
self.optional_print(tp.bottom(4))
# cleanup
self.optional_print(u'\u279b Final objective: {}'.format(store['f'][-1]))
self.optional_print(u'\u279b Total runtime: {}'.format(tp.humantime(sum(runtimes))))
self.optional_print(u'\u279b Per iteration runtime: {} +/- {}'.format(
tp.humantime(np.mean(runtimes)),
tp.humantime(np.std(runtimes)),
))
# result
return OptimizeResult({
'x': self.restruct(xk),
'f': f,
'df': self.restruct(df),
'k': k,
'obj': np.array(store['f']),
})
return GradientOptimizer
|
[
"Turns",
"a",
"coroutine",
"into",
"a",
"gradient",
"based",
"optimizer",
"."
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/main.py#L121-L194
|
[
"def",
"gradient_optimizer",
"(",
"coro",
")",
":",
"class",
"GradientOptimizer",
"(",
"Optimizer",
")",
":",
"@",
"wraps",
"(",
"coro",
")",
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"algorithm",
"=",
"coro",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"algorithm",
".",
"send",
"(",
"None",
")",
"self",
".",
"operators",
"=",
"[",
"]",
"def",
"set_transform",
"(",
"self",
",",
"func",
")",
":",
"self",
".",
"transform",
"=",
"compose",
"(",
"destruct",
",",
"func",
",",
"self",
".",
"restruct",
")",
"def",
"minimize",
"(",
"self",
",",
"f_df",
",",
"x0",
",",
"display",
"=",
"sys",
".",
"stdout",
",",
"maxiter",
"=",
"1e3",
")",
":",
"self",
".",
"display",
"=",
"display",
"self",
".",
"theta",
"=",
"x0",
"# setup",
"xk",
"=",
"self",
".",
"algorithm",
".",
"send",
"(",
"destruct",
"(",
"x0",
")",
".",
"copy",
"(",
")",
")",
"store",
"=",
"defaultdict",
"(",
"list",
")",
"runtimes",
"=",
"[",
"]",
"if",
"len",
"(",
"self",
".",
"operators",
")",
"==",
"0",
":",
"self",
".",
"operators",
"=",
"[",
"proxops",
".",
"identity",
"(",
")",
"]",
"# setup",
"obj",
",",
"grad",
"=",
"wrap",
"(",
"f_df",
",",
"x0",
")",
"transform",
"=",
"compose",
"(",
"destruct",
",",
"*",
"reversed",
"(",
"self",
".",
"operators",
")",
",",
"self",
".",
"restruct",
")",
"self",
".",
"optional_print",
"(",
"tp",
".",
"header",
"(",
"[",
"'Iteration'",
",",
"'Objective'",
",",
"'||Grad||'",
",",
"'Runtime'",
"]",
")",
")",
"try",
":",
"for",
"k",
"in",
"count",
"(",
")",
":",
"# setup",
"tstart",
"=",
"perf_counter",
"(",
")",
"f",
"=",
"obj",
"(",
"xk",
")",
"df",
"=",
"grad",
"(",
"xk",
")",
"xk",
"=",
"transform",
"(",
"self",
".",
"algorithm",
".",
"send",
"(",
"df",
")",
")",
"runtimes",
".",
"append",
"(",
"perf_counter",
"(",
")",
"-",
"tstart",
")",
"store",
"[",
"'f'",
"]",
".",
"append",
"(",
"f",
")",
"# Update display",
"self",
".",
"optional_print",
"(",
"tp",
".",
"row",
"(",
"[",
"k",
",",
"f",
",",
"np",
".",
"linalg",
".",
"norm",
"(",
"destruct",
"(",
"df",
")",
")",
",",
"tp",
".",
"humantime",
"(",
"runtimes",
"[",
"-",
"1",
"]",
")",
"]",
")",
")",
"if",
"k",
">=",
"maxiter",
":",
"break",
"except",
"KeyboardInterrupt",
":",
"pass",
"self",
".",
"optional_print",
"(",
"tp",
".",
"bottom",
"(",
"4",
")",
")",
"# cleanup",
"self",
".",
"optional_print",
"(",
"u'\\u279b Final objective: {}'",
".",
"format",
"(",
"store",
"[",
"'f'",
"]",
"[",
"-",
"1",
"]",
")",
")",
"self",
".",
"optional_print",
"(",
"u'\\u279b Total runtime: {}'",
".",
"format",
"(",
"tp",
".",
"humantime",
"(",
"sum",
"(",
"runtimes",
")",
")",
")",
")",
"self",
".",
"optional_print",
"(",
"u'\\u279b Per iteration runtime: {} +/- {}'",
".",
"format",
"(",
"tp",
".",
"humantime",
"(",
"np",
".",
"mean",
"(",
"runtimes",
")",
")",
",",
"tp",
".",
"humantime",
"(",
"np",
".",
"std",
"(",
"runtimes",
")",
")",
",",
")",
")",
"# result",
"return",
"OptimizeResult",
"(",
"{",
"'x'",
":",
"self",
".",
"restruct",
"(",
"xk",
")",
",",
"'f'",
":",
"f",
",",
"'df'",
":",
"self",
".",
"restruct",
"(",
"df",
")",
",",
"'k'",
":",
"k",
",",
"'obj'",
":",
"np",
".",
"array",
"(",
"store",
"[",
"'f'",
"]",
")",
",",
"}",
")",
"return",
"GradientOptimizer"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
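The wrapper primes the coroutine with send(None), delivers the initial iterate on the first send inside minimize, and thereafter trades gradients for iterates. A hypothetical coroutine written against that protocol (gdm and lr are illustrative names, not part of the record):

from descent.main import gradient_optimizer

@gradient_optimizer
def gdm(lr=1e-2):
    xk = yield               # the first send delivers the initial iterate
    while True:
        grad = yield xk      # emit the iterate, receive the next gradient
        xk = xk - lr * grad

# opt = gdm(lr=0.1)
# result = opt.minimize(f_df, x0, maxiter=100)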
|
valid
|
Optimizer.add
|
Adds a proximal operator to the list of operators
|
descent/main.py
|
def add(self, operator, *args):
"""Adds a proximal operator to the list of operators"""
if isinstance(operator, str):
op = getattr(proxops, operator)(*args)
elif isinstance(operator, proxops.ProximalOperatorBaseClass):
op = operator
else:
raise ValueError("operator must be a string or a subclass of ProximalOperator")
self.operators.append(op)
return self
|
def add(self, operator, *args):
"""Adds a proximal operator to the list of operators"""
if isinstance(operator, str):
op = getattr(proxops, operator)(*args)
elif isinstance(operator, proxops.ProximalOperatorBaseClass):
op = operator
else:
raise ValueError("operator must be a string or a subclass of ProximalOperator")
self.operators.append(op)
return self
|
[
"Adds",
"a",
"proximal",
"operator",
"to",
"the",
"list",
"of",
"operators"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/main.py#L29-L40
|
[
"def",
"add",
"(",
"self",
",",
"operator",
",",
"*",
"args",
")",
":",
"if",
"isinstance",
"(",
"operator",
",",
"str",
")",
":",
"op",
"=",
"getattr",
"(",
"proxops",
",",
"operator",
")",
"(",
"*",
"args",
")",
"elif",
"isinstance",
"(",
"operator",
",",
"proxops",
".",
"ProximalOperatorBaseClass",
")",
":",
"op",
"=",
"operator",
"else",
":",
"raise",
"ValueError",
"(",
"\"operator must be a string or a subclass of ProximalOperator\"",
")",
"self",
".",
"operators",
".",
"append",
"(",
"op",
")",
"return",
"self"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
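Since add returns self, operators chain; a string is resolved against descent.proxops and instantiated with the trailing args, while existing operator instances pass through the isinstance check. A hypothetical chained call, with opt and the penalty values purely illustrative:

# opt.add('sparse', 1.0)                  # getattr(proxops, 'sparse')(1.0)
# opt.add('nucnorm', 0.5).add(custom_op)  # chaining via the returned self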
|
valid
|
Key._load_meta
|
Set key attributes to retrieved metadata. Might be extended in the
future to support more attributes.
|
mimicdb/s3/key.py
|
def _load_meta(self, size, md5):
"""Set key attributes to retrived metadata. Might be extended in the
future to support more attributes.
"""
if not hasattr(self, 'local_hashes'):
self.local_hashes = {}
self.size = int(size)
if (re.match('^[a-fA-F0-9]{32}$', md5)):
self.md5 = md5
|
def _load_meta(self, size, md5):
"""Set key attributes to retrived metadata. Might be extended in the
future to support more attributes.
"""
if not hasattr(self, 'local_hashes'):
self.local_hashes = {}
self.size = int(size)
if (re.match('^[a-fA-F0-9]{32}$', md5)):
self.md5 = md5
|
[
"Set",
"key",
"attributes",
"to",
"retrived",
"metadata",
".",
"Might",
"be",
"extended",
"in",
"the",
"future",
"to",
"support",
"more",
"attributes",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/key.py#L32-L42
|
[
"def",
"_load_meta",
"(",
"self",
",",
"size",
",",
"md5",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'local_hashes'",
")",
":",
"self",
".",
"local_hashes",
"=",
"{",
"}",
"self",
".",
"size",
"=",
"int",
"(",
"size",
")",
"if",
"(",
"re",
".",
"match",
"(",
"'^[a-fA-F0-9]{32}$'",
",",
"md5",
")",
")",
":",
"self",
".",
"md5",
"=",
"md5"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
Key.name
|
Key name can be set by Key.key or Key.name. Key.key sets Key.name
internally, so just handle this property. When changing the key
name, try to load its metadata from MimicDB. If it's not available,
the key hasn't been uploaded, downloaded or synced so don't add it to
the bucket set (it also might have just been deleted,
see boto.s3.bucket.py#785)
|
mimicdb/s3/key.py
|
def name(self, value):
"""Key name can be set by Key.key or Key.name. Key.key sets Key.name
internally, so just handle this property. When changing the key
name, try to load its metadata from MimicDB. If it's not available,
the key hasn't been uploaded, downloaded or synced so don't add it to
the bucket set (it also might have just been deleted,
see boto.s3.bucket.py#785)
"""
self._name = value
if value:
meta = mimicdb.backend.hgetall(tpl.key % (self.bucket.name, value))
if meta:
mimicdb.backend.sadd(tpl.bucket % self.bucket.name, value)
self._load_meta(meta['size'], meta['md5'])
|
def name(self, value):
"""Key name can be set by Key.key or Key.name. Key.key sets Key.name
internally, so just handle this property. When changing the key
name, try to load its metadata from MimicDB. If it's not available,
the key hasn't been uploaded, downloaded or synced so don't add it to
the bucket set (it also might have just been deleted,
see boto.s3.bucket.py#785)
"""
self._name = value
if value:
meta = mimicdb.backend.hgetall(tpl.key % (self.bucket.name, value))
if meta:
mimicdb.backend.sadd(tpl.bucket % self.bucket.name, value)
self._load_meta(meta['size'], meta['md5'])
|
[
"Key",
"name",
"can",
"be",
"set",
"by",
"Key",
".",
"key",
"or",
"Key",
".",
"name",
".",
"Key",
".",
"key",
"sets",
"Key",
".",
"name",
"internally",
"so",
"just",
"handle",
"this",
"property",
".",
"When",
"changing",
"the",
"key",
"name",
"try",
"to",
"load",
"it",
"s",
"metadata",
"from",
"MimicDB",
".",
"If",
"it",
"s",
"not",
"available",
"the",
"key",
"hasn",
"t",
"been",
"uploaded",
"downloaded",
"or",
"synced",
"so",
"don",
"t",
"add",
"it",
"to",
"the",
"bucket",
"set",
"(",
"it",
"also",
"might",
"have",
"just",
"been",
"deleted",
"see",
"boto",
".",
"s3",
".",
"bucket",
".",
"py#785",
")"
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/key.py#L49-L64
|
[
"def",
"name",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"_name",
"=",
"value",
"if",
"value",
":",
"meta",
"=",
"mimicdb",
".",
"backend",
".",
"hgetall",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"bucket",
".",
"name",
",",
"value",
")",
")",
"if",
"meta",
":",
"mimicdb",
".",
"backend",
".",
"sadd",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"bucket",
".",
"name",
",",
"value",
")",
"self",
".",
"_load_meta",
"(",
"meta",
"[",
"'size'",
"]",
",",
"meta",
"[",
"'md5'",
"]",
")"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
Key._send_file_internal
|
Called internally for any type of upload. After upload finishes,
make sure the key is in the bucket set and save the metadata.
|
mimicdb/s3/key.py
|
def _send_file_internal(self, *args, **kwargs):
"""Called internally for any type of upload. After upload finishes,
make sure the key is in the bucket set and save the metadata.
"""
super(Key, self)._send_file_internal(*args, **kwargs)
mimicdb.backend.sadd(tpl.bucket % self.bucket.name, self.name)
mimicdb.backend.hmset(tpl.key % (self.bucket.name, self.name),
dict(size=self.size, md5=self.md5))
|
def _send_file_internal(self, *args, **kwargs):
"""Called internally for any type of upload. After upload finishes,
make sure the key is in the bucket set and save the metadata.
"""
super(Key, self)._send_file_internal(*args, **kwargs)
mimicdb.backend.sadd(tpl.bucket % self.bucket.name, self.name)
mimicdb.backend.hmset(tpl.key % (self.bucket.name, self.name),
dict(size=self.size, md5=self.md5))
|
[
"Called",
"internally",
"for",
"any",
"type",
"of",
"upload",
".",
"After",
"upload",
"finishes",
"make",
"sure",
"the",
"key",
"is",
"in",
"the",
"bucket",
"set",
"and",
"save",
"the",
"metadata",
"."
] |
nathancahill/mimicdb
|
python
|
https://github.com/nathancahill/mimicdb/blob/9d0e8ebcba31d937f73752f9b88e5a4fec860765/mimicdb/s3/key.py#L66-L74
|
[
"def",
"_send_file_internal",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"Key",
",",
"self",
")",
".",
"_send_file_internal",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"mimicdb",
".",
"backend",
".",
"sadd",
"(",
"tpl",
".",
"bucket",
"%",
"self",
".",
"bucket",
".",
"name",
",",
"self",
".",
"name",
")",
"mimicdb",
".",
"backend",
".",
"hmset",
"(",
"tpl",
".",
"key",
"%",
"(",
"self",
".",
"bucket",
".",
"name",
",",
"self",
".",
"name",
")",
",",
"dict",
"(",
"size",
"=",
"self",
".",
"size",
",",
"md5",
"=",
"self",
".",
"md5",
")",
")"
] |
9d0e8ebcba31d937f73752f9b88e5a4fec860765
|
valid
|
wrap
|
Memoizes an objective + gradient function, and splits it into
two functions that return just the objective and gradient, respectively.
Parameters
----------
f_df : function
Must be unary (takes a single argument)
xref : list, dict, or array_like
The form of the parameters
size : int, optional
Size of the cache (Default=1)
|
descent/utils.py
|
def wrap(f_df, xref, size=1):
"""
Memoizes an objective + gradient function, and splits it into
two functions that return just the objective and gradient, respectively.
Parameters
----------
f_df : function
Must be unary (takes a single argument)
xref : list, dict, or array_like
The form of the parameters
size : int, optional
Size of the cache (Default=1)
"""
memoized_f_df = lrucache(lambda x: f_df(restruct(x, xref)), size)
objective = compose(first, memoized_f_df)
gradient = compose(destruct, second, memoized_f_df)
return objective, gradient
|
def wrap(f_df, xref, size=1):
"""
Memoizes an objective + gradient function, and splits it into
two functions that return just the objective and gradient, respectively.
Parameters
----------
f_df : function
Must be unary (takes a single argument)
xref : list, dict, or array_like
The form of the parameters
size : int, optional
Size of the cache (Default=1)
"""
memoized_f_df = lrucache(lambda x: f_df(restruct(x, xref)), size)
objective = compose(first, memoized_f_df)
gradient = compose(destruct, second, memoized_f_df)
return objective, gradient
|
[
"Memoizes",
"an",
"objective",
"+",
"gradient",
"function",
"and",
"splits",
"it",
"into",
"two",
"functions",
"that",
"return",
"just",
"the",
"objective",
"and",
"gradient",
"respectively",
"."
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/utils.py#L17-L36
|
[
"def",
"wrap",
"(",
"f_df",
",",
"xref",
",",
"size",
"=",
"1",
")",
":",
"memoized_f_df",
"=",
"lrucache",
"(",
"lambda",
"x",
":",
"f_df",
"(",
"restruct",
"(",
"x",
",",
"xref",
")",
")",
",",
"size",
")",
"objective",
"=",
"compose",
"(",
"first",
",",
"memoized_f_df",
")",
"gradient",
"=",
"compose",
"(",
"destruct",
",",
"second",
",",
"memoized_f_df",
")",
"return",
"objective",
",",
"gradient"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
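A small check of the split, assuming wrap is importable from descent.utils as in the record: both returned callables consume the flat parameter vector, and the size-1 cache means evaluating obj and grad at the same point costs one call to f_df.

import numpy as np
from descent.utils import wrap

def f_df(x):
    return np.sum(x ** 2), 2 * x

obj, grad = wrap(f_df, np.zeros(3))
x = np.array([1., 2., 3.])
print(obj(x))    # 14.0
print(grad(x))   # [2. 4. 6.]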
|
valid
|
docstring
|
Decorates a function with the given docstring
Parameters
----------
docstr : string
|
descent/utils.py
|
def docstring(docstr):
"""
Decorates a function with the given docstring
Parameters
----------
docstr : string
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.__doc__ = docstr
return wrapper
return decorator
|
def docstring(docstr):
"""
Decorates a function with the given docstring
Parameters
----------
docstr : string
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.__doc__ = docstr
return wrapper
return decorator
|
[
"Decorates",
"a",
"function",
"with",
"the",
"given",
"docstring"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/utils.py#L39-L53
|
[
"def",
"docstring",
"(",
"docstr",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"wrapper",
".",
"__doc__",
"=",
"docstr",
"return",
"wrapper",
"return",
"decorator"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
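A minimal usage check: the decorator forwards calls untouched and only swaps in the supplied docstring (increment is an illustrative name):

from descent.utils import docstring

@docstring("Adds one to x.")
def increment(x):
    return x + 1

print(increment(1))        # 2
print(increment.__doc__)   # Adds one to x.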
|
valid
|
lrucache
|
A simple implementation of a least recently used (LRU) cache.
Memoizes the recent calls of a computationally intensive function.
Parameters
----------
func : function
Must be unary (takes a single argument)
size : int
The size of the cache (number of previous calls to store)
|
descent/utils.py
|
def lrucache(func, size):
"""
A simple implementation of a least recently used (LRU) cache.
Memoizes the recent calls of a computationally intensive function.
Parameters
----------
func : function
Must be unary (takes a single argument)
size : int
The size of the cache (number of previous calls to store)
"""
if size == 0:
return func
elif size < 0:
raise ValueError("size argument must be a positive integer")
# this only works for unary functions
if not is_arity(1, func):
raise ValueError("The function must be unary (take a single argument)")
# initialize the cache
cache = OrderedDict()
def wrapper(x):
if not(type(x) is np.ndarray):
raise ValueError("Input must be an ndarray")
# hash the input, using tostring for small and repr for large arrays
if x.size <= 1e4:
key = hash(x.tostring())
else:
key = hash(repr(x))
# if the key is not in the cache, evaluate the function
if key not in cache:
# clear space if necessary (keeps the most recent keys)
if len(cache) >= size:
cache.popitem(last=False)
# store the new value in the cache
cache[key] = func(x)
return cache[key]
return wrapper
|
def lrucache(func, size):
"""
A simple implementation of a least recently used (LRU) cache.
Memoizes the recent calls of a computationally intensive function.
Parameters
----------
func : function
Must be unary (takes a single argument)
size : int
The size of the cache (number of previous calls to store)
"""
if size == 0:
return func
elif size < 0:
raise ValueError("size argument must be a positive integer")
# this only works for unary functions
if not is_arity(1, func):
raise ValueError("The function must be unary (take a single argument)")
# initialize the cache
cache = OrderedDict()
def wrapper(x):
if not(type(x) is np.ndarray):
raise ValueError("Input must be an ndarray")
# hash the input, using tostring for small and repr for large arrays
if x.size <= 1e4:
key = hash(x.tostring())
else:
key = hash(repr(x))
# if the key is not in the cache, evaluate the function
if key not in cache:
# clear space if necessary (keeps the most recent keys)
if len(cache) >= size:
cache.popitem(last=False)
# store the new value in the cache
cache[key] = func(x)
return cache[key]
return wrapper
|
[
"A",
"simple",
"implementation",
"of",
"a",
"least",
"recently",
"used",
"(",
"LRU",
")",
"cache",
".",
"Memoizes",
"the",
"recent",
"calls",
"of",
"a",
"computationally",
"intensive",
"function",
"."
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/utils.py#L56-L104
|
[
"def",
"lrucache",
"(",
"func",
",",
"size",
")",
":",
"if",
"size",
"==",
"0",
":",
"return",
"func",
"elif",
"size",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"\"size argument must be a positive integer\"",
")",
"# this only works for unary functions",
"if",
"not",
"is_arity",
"(",
"1",
",",
"func",
")",
":",
"raise",
"ValueError",
"(",
"\"The function must be unary (take a single argument)\"",
")",
"# initialize the cache",
"cache",
"=",
"OrderedDict",
"(",
")",
"def",
"wrapper",
"(",
"x",
")",
":",
"if",
"not",
"(",
"type",
"(",
"x",
")",
"is",
"np",
".",
"ndarray",
")",
":",
"raise",
"ValueError",
"(",
"\"Input must be an ndarray\"",
")",
"# hash the input, using tostring for small and repr for large arrays",
"if",
"x",
".",
"size",
"<=",
"1e4",
":",
"key",
"=",
"hash",
"(",
"x",
".",
"tostring",
"(",
")",
")",
"else",
":",
"key",
"=",
"hash",
"(",
"repr",
"(",
"x",
")",
")",
"# if the key is not in the cache, evalute the function",
"if",
"key",
"not",
"in",
"cache",
":",
"# clear space if necessary (keeps the most recent keys)",
"if",
"len",
"(",
"cache",
")",
">=",
"size",
":",
"cache",
".",
"popitem",
"(",
"last",
"=",
"False",
")",
"# store the new value in the cache",
"cache",
"[",
"key",
"]",
"=",
"func",
"(",
"x",
")",
"return",
"cache",
"[",
"key",
"]",
"return",
"wrapper"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
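A quick demonstration of the cache under the record's own semantics. Since inputs are hashed via ndarray.tostring, this sketch assumes a NumPy version where that method still exists:

import numpy as np
from descent.utils import lrucache

calls = []
def expensive(x):
    calls.append(1)
    return x.sum()

cached = lrucache(expensive, size=1)
x = np.arange(3.0)
cached(x); cached(x)    # the second call is a cache hit
cached(np.ones(3))      # the size-1 cache evicts the old entry and recomputes
print(len(calls))       # 2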
|
valid
|
check_grad
|
Compares the numerical gradient to the analytic gradient
Parameters
----------
f_df : function
The analytic objective and gradient function to check
xref : array_like
Parameter values to check the gradient at
stepsize : float, optional
Stepsize for the numerical gradient. Too big and this will poorly estimate the gradient.
Too small and you will run into precision issues (default: 1e-6)
tol : float, optional
Tolerance to use when coloring correct/incorrect gradients (default: 1e-6)
width : int, optional
Width of the table columns (default: 15)
style : string, optional
Style of the printed table, see tableprint for a list of styles (default: 'round')
|
descent/utils.py
|
def check_grad(f_df, xref, stepsize=1e-6, tol=1e-6, width=15, style='round', out=sys.stdout):
"""
Compares the numerical gradient to the analytic gradient
Parameters
----------
f_df : function
The analytic objective and gradient function to check
xref : array_like
Parameter values to check the gradient at
stepsize : float, optional
Stepsize for the numerical gradient. Too big and this will poorly estimate the gradient.
Too small and you will run into precision issues (default: 1e-6)
tol : float, optional
Tolerance to use when coloring correct/incorrect gradients (default: 1e-6)
width : int, optional
Width of the table columns (default: 15)
style : string, optional
Style of the printed table, see tableprint for a list of styles (default: 'round')
"""
CORRECT = u'\x1b[32m\N{CHECK MARK}\x1b[0m'
INCORRECT = u'\x1b[31m\N{BALLOT X}\x1b[0m'
obj, grad = wrap(f_df, xref, size=0)
x0 = destruct(xref)
df = grad(x0)
# header
out.write(tp.header(["Numerical", "Analytic", "Error"], width=width, style=style) + "\n")
out.flush()
# helper function to parse a number
def parse_error(error):
# colors
failure = "\033[91m"
passing = "\033[92m"
warning = "\033[93m"
end = "\033[0m"
base = "{}{:0.3e}{}"
# correct
if error < 0.1 * tol:
return base.format(passing, error, end)
# warning
elif error < tol:
return base.format(warning, error, end)
# failure
else:
return base.format(failure, error, end)
# check each dimension
num_errors = 0
for j in range(x0.size):
# take a small step in one dimension
dx = np.zeros(x0.size)
dx[j] = stepsize
# compute the centered difference formula
df_approx = (obj(x0 + dx) - obj(x0 - dx)) / (2 * stepsize)
df_analytic = df[j]
# absolute error
abs_error = np.linalg.norm(df_approx - df_analytic)
# relative error
error = abs_error if np.allclose(abs_error, 0) else abs_error / \
(np.linalg.norm(df_analytic) + np.linalg.norm(df_approx))
num_errors += error >= tol
errstr = CORRECT if error < tol else INCORRECT
out.write(tp.row([df_approx, df_analytic, parse_error(error) + ' ' + errstr],
width=width, style=style) + "\n")
out.flush()
out.write(tp.bottom(3, width=width, style=style) + "\n")
return num_errors
|
def check_grad(f_df, xref, stepsize=1e-6, tol=1e-6, width=15, style='round', out=sys.stdout):
"""
Compares the numerical gradient to the analytic gradient
Parameters
----------
f_df : function
The analytic objective and gradient function to check
xref : array_like
Parameter values to check the gradient at
stepsize : float, optional
Stepsize for the numerical gradient. Too big and this will poorly estimate the gradient.
Too small and you will run into precision issues (default: 1e-6)
tol : float, optional
Tolerance to use when coloring correct/incorrect gradients (default: 1e-6)
width : int, optional
Width of the table columns (default: 15)
style : string, optional
Style of the printed table, see tableprint for a list of styles (default: 'round')
"""
CORRECT = u'\x1b[32m\N{CHECK MARK}\x1b[0m'
INCORRECT = u'\x1b[31m\N{BALLOT X}\x1b[0m'
obj, grad = wrap(f_df, xref, size=0)
x0 = destruct(xref)
df = grad(x0)
# header
out.write(tp.header(["Numerical", "Analytic", "Error"], width=width, style=style) + "\n")
out.flush()
# helper function to parse a number
def parse_error(error):
# colors
failure = "\033[91m"
passing = "\033[92m"
warning = "\033[93m"
end = "\033[0m"
base = "{}{:0.3e}{}"
# correct
if error < 0.1 * tol:
return base.format(passing, error, end)
# warning
elif error < tol:
return base.format(warning, error, end)
# failure
else:
return base.format(failure, error, end)
# check each dimension
num_errors = 0
for j in range(x0.size):
# take a small step in one dimension
dx = np.zeros(x0.size)
dx[j] = stepsize
# compute the centered difference formula
df_approx = (obj(x0 + dx) - obj(x0 - dx)) / (2 * stepsize)
df_analytic = df[j]
# absolute error
abs_error = np.linalg.norm(df_approx - df_analytic)
# relative error
error = abs_error if np.allclose(abs_error, 0) else abs_error / \
(np.linalg.norm(df_analytic) + np.linalg.norm(df_approx))
num_errors += error >= tol
errstr = CORRECT if error < tol else INCORRECT
out.write(tp.row([df_approx, df_analytic, parse_error(error) + ' ' + errstr],
width=width, style=style) + "\n")
out.flush()
out.write(tp.bottom(3, width=width, style=style) + "\n")
return num_errors
|
[
"Compares",
"the",
"numerical",
"gradient",
"to",
"the",
"analytic",
"gradient"
] |
nirum/descent
|
python
|
https://github.com/nirum/descent/blob/074c8452f15a0da638668a4fe139fde06ccfae7f/descent/utils.py#L107-L191
|
[
"def",
"check_grad",
"(",
"f_df",
",",
"xref",
",",
"stepsize",
"=",
"1e-6",
",",
"tol",
"=",
"1e-6",
",",
"width",
"=",
"15",
",",
"style",
"=",
"'round'",
",",
"out",
"=",
"sys",
".",
"stdout",
")",
":",
"CORRECT",
"=",
"u'\\x1b[32m\\N{CHECK MARK}\\x1b[0m'",
"INCORRECT",
"=",
"u'\\x1b[31m\\N{BALLOT X}\\x1b[0m'",
"obj",
",",
"grad",
"=",
"wrap",
"(",
"f_df",
",",
"xref",
",",
"size",
"=",
"0",
")",
"x0",
"=",
"destruct",
"(",
"xref",
")",
"df",
"=",
"grad",
"(",
"x0",
")",
"# header",
"out",
".",
"write",
"(",
"tp",
".",
"header",
"(",
"[",
"\"Numerical\"",
",",
"\"Analytic\"",
",",
"\"Error\"",
"]",
",",
"width",
"=",
"width",
",",
"style",
"=",
"style",
")",
"+",
"\"\\n\"",
")",
"out",
".",
"flush",
"(",
")",
"# helper function to parse a number",
"def",
"parse_error",
"(",
"number",
")",
":",
"# colors",
"failure",
"=",
"\"\\033[91m\"",
"passing",
"=",
"\"\\033[92m\"",
"warning",
"=",
"\"\\033[93m\"",
"end",
"=",
"\"\\033[0m\"",
"base",
"=",
"\"{}{:0.3e}{}\"",
"# correct",
"if",
"error",
"<",
"0.1",
"*",
"tol",
":",
"return",
"base",
".",
"format",
"(",
"passing",
",",
"error",
",",
"end",
")",
"# warning",
"elif",
"error",
"<",
"tol",
":",
"return",
"base",
".",
"format",
"(",
"warning",
",",
"error",
",",
"end",
")",
"# failure",
"else",
":",
"return",
"base",
".",
"format",
"(",
"failure",
",",
"error",
",",
"end",
")",
"# check each dimension",
"num_errors",
"=",
"0",
"for",
"j",
"in",
"range",
"(",
"x0",
".",
"size",
")",
":",
"# take a small step in one dimension",
"dx",
"=",
"np",
".",
"zeros",
"(",
"x0",
".",
"size",
")",
"dx",
"[",
"j",
"]",
"=",
"stepsize",
"# compute the centered difference formula",
"df_approx",
"=",
"(",
"obj",
"(",
"x0",
"+",
"dx",
")",
"-",
"obj",
"(",
"x0",
"-",
"dx",
")",
")",
"/",
"(",
"2",
"*",
"stepsize",
")",
"df_analytic",
"=",
"df",
"[",
"j",
"]",
"# absolute error",
"abs_error",
"=",
"np",
".",
"linalg",
".",
"norm",
"(",
"df_approx",
"-",
"df_analytic",
")",
"# relative error",
"error",
"=",
"abs_error",
"if",
"np",
".",
"allclose",
"(",
"abs_error",
",",
"0",
")",
"else",
"abs_error",
"/",
"(",
"np",
".",
"linalg",
".",
"norm",
"(",
"df_analytic",
")",
"+",
"np",
".",
"linalg",
".",
"norm",
"(",
"df_approx",
")",
")",
"num_errors",
"+=",
"error",
">=",
"tol",
"errstr",
"=",
"CORRECT",
"if",
"error",
"<",
"tol",
"else",
"INCORRECT",
"out",
".",
"write",
"(",
"tp",
".",
"row",
"(",
"[",
"df_approx",
",",
"df_analytic",
",",
"parse_error",
"(",
"error",
")",
"+",
"' '",
"+",
"errstr",
"]",
",",
"width",
"=",
"width",
",",
"style",
"=",
"style",
")",
"+",
"\"\\n\"",
")",
"out",
".",
"flush",
"(",
")",
"out",
".",
"write",
"(",
"tp",
".",
"bottom",
"(",
"3",
",",
"width",
"=",
"width",
",",
"style",
"=",
"style",
")",
"+",
"\"\\n\"",
")",
"return",
"num_errors"
] |
074c8452f15a0da638668a4fe139fde06ccfae7f
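A usage sketch for check_grad above (hedged: it assumes descent's wrap/destruct helpers accept a plain ndarray for xref):

import numpy as np
from descent.utils import check_grad

# f(x) = 0.5 * ||x||^2 has gradient x, so every row should pass the check
f_df = lambda x: (0.5 * np.sum(x ** 2), x)
num_errors = check_grad(f_df, np.random.randn(5))
print(num_errors, "dimension(s) exceeded the tolerance")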
|
valid
|
RegressionQualityValidator.evaluate
|
Evaluate the files against the regression quality rules.
|
dgitcore/contrib/validators/regression_quality.py
|
def evaluate(self, repo, spec, args):
"""
Evaluate the files against the regression quality rules.
"""
status = []
# Do we have to do anything at all?
if len(spec['files']) == 0:
return status
with cd(repo.rootdir):
rules = None
if 'rules-files' in spec and len(spec['rules-files']) > 0:
rulesfiles = spec['rules-files']
rules = {}
for f in rulesfiles:
d = json.loads(open(f).read())
rules.update(d)
elif 'rules' in spec:
rules = {
'inline': spec['rules']
}
if rules is None or len(rules) == 0:
print("Regression quality validation has been enabled but no rules file has been specified")
print("Example: { 'min-r2': 0.25 }. Put this either in file or in dgit.json")
raise InvalidParameters("Regression quality checking rules missing")
files = dict([(f, open(f).read()) for f in spec['files']])
for r in rules:
if 'min-r2' not in rules[r]:
continue
minr2 = float(rules[r]['min-r2'])
for f in files:
match = re.search(r"R-squared:\s+(\d.\d+)", files[f])
if match is None:
status.append({
'target': f,
'validator': self.name,
'description': self.description,
'rules': r,
'status': "ERROR",
'message': "Invalid model output"
})
else:
r2 = match.group(1)
r2 = float(r2)
if r2 > minr2:
status.append({
'target': f,
'validator': self.name,
'description': self.description,
'rules': r,
'status': "OK",
'message': "Acceptable R2"
})
else:
status.append({
'target': f,
'validator': self.name,
'description': self.description,
'rules': r,
'status': "ERROR",
'message': "R2 is too low"
})
return status
|
def evaluate(self, repo, spec, args):
"""
Evaluate the files against the regression quality rules.
"""
status = []
# Do we have to do anything at all?
if len(spec['files']) == 0:
return status
with cd(repo.rootdir):
rules = None
if 'rules-files' in spec and len(spec['rules-files']) > 0:
rulesfiles = spec['rules-files']
rules = {}
for f in rulesfiles:
d = json.loads(open(f).read())
rules.update(d)
elif 'rules' in spec:
rules = {
'inline': spec['rules']
}
if rules is None or len(rules) == 0:
print("Regression quality validation has been enabled but no rules file has been specified")
print("Example: { 'min-r2': 0.25 }. Put this either in file or in dgit.json")
raise InvalidParameters("Regression quality checking rules missing")
files = dict([(f, open(f).read()) for f in spec['files']])
for r in rules:
if 'min-r2' not in rules[r]:
continue
minr2 = float(rules[r]['min-r2'])
for f in files:
match = re.search(r"R-squared:\s+(\d.\d+)", files[f])
if match is None:
status.append({
'target': f,
'validator': self.name,
'description': self.description,
'rules': r,
'status': "ERROR",
'message': "Invalid model output"
})
else:
r2 = match.group(1)
r2 = float(r2)
if r2 > minr2:
status.append({
'target': f,
'validator': self.name,
'description': self.description,
'rules': r,
'status': "OK",
'message': "Acceptable R2"
})
else:
status.append({
'target': f,
'validator': self.name,
'description': self.description,
'rules': r,
'status': "ERROR",
'message': "R2 is too low"
})
return status
|
[
"Evaluate",
"the",
"files",
"identified",
"for",
"checksum",
"."
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/contrib/validators/regression_quality.py#L54-L123
|
[
"def",
"evaluate",
"(",
"self",
",",
"repo",
",",
"spec",
",",
"args",
")",
":",
"status",
"=",
"[",
"]",
"# Do we have to any thing at all? ",
"if",
"len",
"(",
"spec",
"[",
"'files'",
"]",
")",
"==",
"0",
":",
"return",
"status",
"with",
"cd",
"(",
"repo",
".",
"rootdir",
")",
":",
"rules",
"=",
"None",
"if",
"'rules-files'",
"in",
"spec",
"and",
"len",
"(",
"spec",
"[",
"'rules-files'",
"]",
")",
">",
"0",
":",
"rulesfiles",
"=",
"spec",
"[",
"'rules-files'",
"]",
"rules",
"=",
"{",
"}",
"for",
"f",
"in",
"rulesfiles",
":",
"d",
"=",
"json",
".",
"loads",
"(",
"open",
"(",
"f",
")",
".",
"read",
"(",
")",
")",
"rules",
".",
"update",
"(",
"d",
")",
"elif",
"'rules'",
"in",
"spec",
":",
"rules",
"=",
"{",
"'inline'",
":",
"spec",
"[",
"'rules'",
"]",
"}",
"if",
"rules",
"is",
"None",
"or",
"len",
"(",
"rules",
")",
"==",
"0",
":",
"print",
"(",
"\"Regression quality validation has been enabled but no rules file has been specified\"",
")",
"print",
"(",
"\"Example: { 'min-r2': 0.25 }. Put this either in file or in dgit.json\"",
")",
"raise",
"InvalidParameters",
"(",
"\"Regression quality checking rules missing\"",
")",
"files",
"=",
"dict",
"(",
"[",
"(",
"f",
",",
"open",
"(",
"f",
")",
".",
"read",
"(",
")",
")",
"for",
"f",
"in",
"spec",
"[",
"'files'",
"]",
"]",
")",
"for",
"r",
"in",
"rules",
":",
"if",
"'min-r2'",
"not",
"in",
"rules",
"[",
"r",
"]",
":",
"continue",
"minr2",
"=",
"float",
"(",
"rules",
"[",
"r",
"]",
"[",
"'min-r2'",
"]",
")",
"for",
"f",
"in",
"files",
":",
"match",
"=",
"re",
".",
"search",
"(",
"r\"R-squared:\\s+(\\d.\\d+)\"",
",",
"files",
"[",
"f",
"]",
")",
"if",
"match",
"is",
"None",
":",
"status",
".",
"append",
"(",
"{",
"'target'",
":",
"f",
",",
"'validator'",
":",
"self",
".",
"name",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'rules'",
":",
"r",
",",
"'status'",
":",
"\"ERROR\"",
",",
"'message'",
":",
"\"Invalid model output\"",
"}",
")",
"else",
":",
"r2",
"=",
"match",
".",
"group",
"(",
"1",
")",
"r2",
"=",
"float",
"(",
"r2",
")",
"if",
"r2",
">",
"minr2",
":",
"status",
".",
"append",
"(",
"{",
"'target'",
":",
"f",
",",
"'validator'",
":",
"self",
".",
"name",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'rules'",
":",
"r",
",",
"'status'",
":",
"\"OK\"",
",",
"'message'",
":",
"\"Acceptable R2\"",
"}",
")",
"else",
":",
"status",
".",
"append",
"(",
"{",
"'target'",
":",
"f",
",",
"'validator'",
":",
"self",
".",
"name",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'rules'",
":",
"r",
",",
"'status'",
":",
"\"ERROR\"",
",",
"'message'",
":",
"\"R2 is too low\"",
"}",
")",
"return",
"status"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
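An illustrative spec for the validator above (file name and threshold are hypothetical); rules can be inlined under 'rules', in which case evaluate wraps them as rules['inline'], or loaded from JSON files listed under 'rules-files':

spec = {
    'files': ['model-summary.txt'],  # outputs containing an "R-squared: ..." line
    'rules': {'min-r2': 0.25},       # wrapped as {'inline': {'min-r2': 0.25}}
}
# status = validator.evaluate(repo, spec, args)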
|
valid
|
MetadataValidator.evaluate
|
Check the integrity of the datapackage.json
|
dgitcore/contrib/validators/metadata_validator.py
|
def evaluate(self, repo, spec, args):
"""
Check the integrity of the datapackage.json
"""
status = []
with cd(repo.rootdir):
files = spec.get('files', ['*'])
resource_files = repo.find_matching_files(files)
files = glob2.glob("**/*")
disk_files = [f for f in files if os.path.isfile(f) and f != "datapackage.json"]
allfiles = list(set(resource_files + disk_files))
allfiles.sort()
for f in allfiles:
if f in resource_files and f in disk_files:
r = repo.get_resource(f)
coded_sha256 = r['sha256']
computed_sha256 = compute_sha256(f)
if computed_sha256 != coded_sha256:
status.append({
'target': f,
'rules': "",
'validator': self.name,
'description': self.description,
'status': 'ERROR',
'message': "Mismatch in checksum on disk and in datapackage.json"
})
else:
status.append({
'target': f,
'rules': "",
'validator': self.name,
'description': self.description,
'status': 'OK',
'message': ""
})
elif f in resource_files:
status.append({
'target': f,
'rules': "",
'validator': self.name,
'description': self.description,
'status': 'ERROR',
'message': "In datapackage.json but not in repo"
})
else:
status.append({
'target': f,
'rules': "",
'validator': self.name,
'description': self.description,
'status': 'ERROR',
'message': "In repo but not in datapackage.json"
})
return status
|
def evaluate(self, repo, spec, args):
"""
Check the integrity of the datapackage.json
"""
status = []
with cd(repo.rootdir):
files = spec.get('files', ['*'])
resource_files = repo.find_matching_files(files)
files = glob2.glob("**/*")
disk_files = [f for f in files if os.path.isfile(f) and f != "datapackage.json"]
allfiles = list(set(resource_files + disk_files))
allfiles.sort()
for f in allfiles:
if f in resource_files and f in disk_files:
r = repo.get_resource(f)
coded_sha256 = r['sha256']
computed_sha256 = compute_sha256(f)
if computed_sha256 != coded_sha256:
status.append({
'target': f,
'rules': "",
'validator': self.name,
'description': self.description,
'status': 'ERROR',
'message': "Mismatch in checksum on disk and in datapackage.json"
})
else:
status.append({
'target': f,
'rules': "",
'validator': self.name,
'description': self.description,
'status': 'OK',
'message': ""
})
elif f in resource_files:
status.append({
'target': f,
'rules': "",
'validator': self.name,
'description': self.description,
'status': 'ERROR',
'message': "In datapackage.json but not in repo"
})
else:
status.append({
'target': f,
'rules': "",
'validator': self.name,
'description': self.description,
'status': 'ERROR',
'message': "In repo but not in datapackage.json"
})
return status
|
[
"Check",
"the",
"integrity",
"of",
"the",
"datapackage",
".",
"json"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/contrib/validators/metadata_validator.py#L48-L106
|
[
"def",
"evaluate",
"(",
"self",
",",
"repo",
",",
"spec",
",",
"args",
")",
":",
"status",
"=",
"[",
"]",
"with",
"cd",
"(",
"repo",
".",
"rootdir",
")",
":",
"files",
"=",
"spec",
".",
"get",
"(",
"'files'",
",",
"[",
"'*'",
"]",
")",
"resource_files",
"=",
"repo",
".",
"find_matching_files",
"(",
"files",
")",
"files",
"=",
"glob2",
".",
"glob",
"(",
"\"**/*\"",
")",
"disk_files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"f",
")",
"and",
"f",
"!=",
"\"datapackage.json\"",
"]",
"allfiles",
"=",
"list",
"(",
"set",
"(",
"resource_files",
"+",
"disk_files",
")",
")",
"allfiles",
".",
"sort",
"(",
")",
"for",
"f",
"in",
"allfiles",
":",
"if",
"f",
"in",
"resource_files",
"and",
"f",
"in",
"disk_files",
":",
"r",
"=",
"repo",
".",
"get_resource",
"(",
"f",
")",
"coded_sha256",
"=",
"r",
"[",
"'sha256'",
"]",
"computed_sha256",
"=",
"compute_sha256",
"(",
"f",
")",
"if",
"computed_sha256",
"!=",
"coded_sha256",
":",
"status",
".",
"append",
"(",
"{",
"'target'",
":",
"f",
",",
"'rules'",
":",
"\"\"",
",",
"'validator'",
":",
"self",
".",
"name",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'status'",
":",
"'ERROR'",
",",
"'message'",
":",
"\"Mismatch in checksum on disk and in datapackage.json\"",
"}",
")",
"else",
":",
"status",
".",
"append",
"(",
"{",
"'target'",
":",
"f",
",",
"'rules'",
":",
"\"\"",
",",
"'validator'",
":",
"self",
".",
"name",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'status'",
":",
"'OK'",
",",
"'message'",
":",
"\"\"",
"}",
")",
"elif",
"f",
"in",
"resource_files",
":",
"status",
".",
"append",
"(",
"{",
"'target'",
":",
"f",
",",
"'rules'",
":",
"\"\"",
",",
"'validator'",
":",
"self",
".",
"name",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'status'",
":",
"'ERROR'",
",",
"'message'",
":",
"\"In datapackage.json but not in repo\"",
"}",
")",
"else",
":",
"status",
".",
"append",
"(",
"{",
"'target'",
":",
"f",
",",
"'rules'",
":",
"\"\"",
",",
"'validator'",
":",
"self",
".",
"name",
",",
"'description'",
":",
"self",
".",
"description",
",",
"'status'",
":",
"'ERROR'",
",",
"'message'",
":",
"\"In repo but not in datapackage.json\"",
"}",
")",
"return",
"status"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
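Each status entry appended above has the same shape; a small sketch of consuming the result (the evaluate call is hypothetical, the key names are taken from the code):

for entry in validator.evaluate(repo, spec, args):
    if entry['status'] == 'ERROR':
        print(entry['target'], '->', entry['message'])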
|
valid
|
TableRepresentation.read_file
|
Guess the filetype and read the file into row sets
|
dgitcore/contrib/representations/tableformat.py
|
def read_file(self, filename):
"""
Guess the filetype and read the file into row sets
"""
#print("Reading file", filename)
try:
fh = open(filename, 'rb')
table_set = any_tableset(fh) # guess the type...
except Exception:
#traceback.print_exc()
# Cannot find the schema.
table_set = None
return table_set
|
def read_file(self, filename):
"""
Guess the filetype and read the file into row sets
"""
#print("Reading file", filename)
try:
fh = open(filename, 'rb')
table_set = any_tableset(fh) # guess the type...
except Exception:
#traceback.print_exc()
# Cannot find the schema.
table_set = None
return table_set
|
[
"Guess",
"the",
"filetype",
"and",
"read",
"the",
"file",
"into",
"row",
"sets"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/contrib/representations/tableformat.py#L42-L56
|
[
"def",
"read_file",
"(",
"self",
",",
"filename",
")",
":",
"#print(\"Reading file\", filename)",
"try",
":",
"fh",
"=",
"open",
"(",
"filename",
",",
"'rb'",
")",
"table_set",
"=",
"any_tableset",
"(",
"fh",
")",
"# guess the type...",
"except",
":",
"#traceback.print_exc()",
"# Cannot find the schema.",
"table_set",
"=",
"None",
"return",
"table_set"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
TableRepresentation.get_schema
|
Guess schema using messytables
|
dgitcore/contrib/representations/tableformat.py
|
def get_schema(self, filename):
"""
Guess schema using messytables
"""
table_set = self.read_file(filename)
# Have I been able to read the filename
if table_set is None:
return []
# Get the first table as rowset
row_set = table_set.tables[0]
offset, headers = headers_guess(row_set.sample)
row_set.register_processor(headers_processor(headers))
row_set.register_processor(offset_processor(offset + 1))
types = type_guess(row_set.sample, strict=True)
# Get a sample as well..
sample = next(row_set.sample)
clean = lambda v: str(v) if not isinstance(v, str) else v
schema = []
for i, h in enumerate(headers):
schema.append([h,
str(types[i]),
clean(sample[i].value)])
return schema
|
def get_schema(self, filename):
"""
Guess schema using messytables
"""
table_set = self.read_file(filename)
# Have I been able to read the filename
if table_set is None:
return []
# Get the first table as rowset
row_set = table_set.tables[0]
offset, headers = headers_guess(row_set.sample)
row_set.register_processor(headers_processor(headers))
row_set.register_processor(offset_processor(offset + 1))
types = type_guess(row_set.sample, strict=True)
# Get a sample as well..
sample = next(row_set.sample)
clean = lambda v: str(v) if not isinstance(v, str) else v
schema = []
for i, h in enumerate(headers):
schema.append([h,
str(types[i]),
clean(sample[i].value)])
return schema
|
[
"Guess",
"schema",
"using",
"messytables"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/contrib/representations/tableformat.py#L58-L86
|
[
"def",
"get_schema",
"(",
"self",
",",
"filename",
")",
":",
"table_set",
"=",
"self",
".",
"read_file",
"(",
"filename",
")",
"# Have I been able to read the filename",
"if",
"table_set",
"is",
"None",
":",
"return",
"[",
"]",
"# Get the first table as rowset",
"row_set",
"=",
"table_set",
".",
"tables",
"[",
"0",
"]",
"offset",
",",
"headers",
"=",
"headers_guess",
"(",
"row_set",
".",
"sample",
")",
"row_set",
".",
"register_processor",
"(",
"headers_processor",
"(",
"headers",
")",
")",
"row_set",
".",
"register_processor",
"(",
"offset_processor",
"(",
"offset",
"+",
"1",
")",
")",
"types",
"=",
"type_guess",
"(",
"row_set",
".",
"sample",
",",
"strict",
"=",
"True",
")",
"# Get a sample as well..",
"sample",
"=",
"next",
"(",
"row_set",
".",
"sample",
")",
"clean",
"=",
"lambda",
"v",
":",
"str",
"(",
"v",
")",
"if",
"not",
"isinstance",
"(",
"v",
",",
"str",
")",
"else",
"v",
"schema",
"=",
"[",
"]",
"for",
"i",
",",
"h",
"in",
"enumerate",
"(",
"headers",
")",
":",
"schema",
".",
"append",
"(",
"[",
"h",
",",
"str",
"(",
"types",
"[",
"i",
"]",
")",
",",
"clean",
"(",
"sample",
"[",
"i",
"]",
".",
"value",
")",
"]",
")",
"return",
"schema"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
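A usage sketch for get_schema above (the CSV path is hypothetical; headers_guess, type_guess and the row processors come from the messytables package, which must be installed):

rep = TableRepresentation()
for name, coltype, sample in rep.get_schema('data/survey.csv'):
    print(name, coltype, sample)  # header, guessed type, first sample value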
|
valid
|
int2fin_reference
|
Calculates a checksum for a Finnish national reference number
|
holviapi/utils.py
|
def int2fin_reference(n):
"""Calculates a checksum for a Finnish national reference number"""
checksum = 10 - (sum([int(c) * i for c, i in zip(str(n)[::-1], it.cycle((7, 3, 1)))]) % 10)
if checksum == 10:
checksum = 0
return "%s%s" % (n, checksum)
|
def int2fin_reference(n):
"""Calculates a checksum for a Finnish national reference number"""
checksum = 10 - (sum([int(c) * i for c, i in zip(str(n)[::-1], it.cycle((7, 3, 1)))]) % 10)
if checksum == 10:
checksum = 0
return "%s%s" % (n, checksum)
|
[
"Calculates",
"a",
"checksum",
"for",
"a",
"Finnish",
"national",
"reference",
"number"
] |
rambo/python-holviapi
|
python
|
https://github.com/rambo/python-holviapi/blob/f57f44e7b0a1030786aafd6f387114abb546bb32/holviapi/utils.py#L154-L159
|
[
"def",
"int2fin_reference",
"(",
"n",
")",
":",
"checksum",
"=",
"10",
"-",
"(",
"sum",
"(",
"[",
"int",
"(",
"c",
")",
"*",
"i",
"for",
"c",
",",
"i",
"in",
"zip",
"(",
"str",
"(",
"n",
")",
"[",
":",
":",
"-",
"1",
"]",
",",
"it",
".",
"cycle",
"(",
"(",
"7",
",",
"3",
",",
"1",
")",
")",
")",
"]",
")",
"%",
"10",
")",
"if",
"checksum",
"==",
"10",
":",
"checksum",
"=",
"0",
"return",
"\"%s%s\"",
"%",
"(",
"n",
",",
"checksum",
")"
] |
f57f44e7b0a1030786aafd6f387114abb546bb32
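A worked example of the 7-3-1 weighting above: for n = 1234 the reversed digits 4, 3, 2, 1 carry weights 7, 3, 1, 7, the weighted sum is 28 + 9 + 2 + 7 = 46, and the check digit is 10 - (46 % 10) = 4:

>>> int2fin_reference(1234)
'12344'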
|
valid
|
iso_reference_valid_char
|
Helper to make sure the given character is valid for a reference number
|
holviapi/utils.py
|
def iso_reference_valid_char(c, raise_error=True):
"""Helper to make sure the given character is valid for a reference number"""
if c in ISO_REFERENCE_VALID:
return True
if raise_error:
raise ValueError("'%s' is not in '%s'" % (c, ISO_REFERENCE_VALID))
return False
|
def iso_reference_valid_char(c, raise_error=True):
"""Helper to make sure the given character is valid for a reference number"""
if c in ISO_REFERENCE_VALID:
return True
if raise_error:
raise ValueError("'%s' is not in '%s'" % (c, ISO_REFERENCE_VALID))
return False
|
[
"Helper",
"to",
"make",
"sure",
"the",
"given",
"character",
"is",
"valid",
"for",
"a",
"reference",
"number"
] |
rambo/python-holviapi
|
python
|
https://github.com/rambo/python-holviapi/blob/f57f44e7b0a1030786aafd6f387114abb546bb32/holviapi/utils.py#L167-L173
|
[
"def",
"iso_reference_valid_char",
"(",
"c",
",",
"raise_error",
"=",
"True",
")",
":",
"if",
"c",
"in",
"ISO_REFERENCE_VALID",
":",
"return",
"True",
"if",
"raise_error",
":",
"raise",
"ValueError",
"(",
"\"'%s' is not in '%s'\"",
"%",
"(",
"c",
",",
"ISO_REFERENCE_VALID",
")",
")",
"return",
"False"
] |
f57f44e7b0a1030786aafd6f387114abb546bb32
|
valid
|
iso_reference_str2int
|
Creates the huge number from an alphanumeric ISO reference
|
holviapi/utils.py
|
def iso_reference_str2int(n):
"""Creates the huge number from ISO alphanumeric ISO reference"""
n = n.upper()
numbers = []
for c in n:
iso_reference_valid_char(c)
if c in ISO_REFERENCE_VALID_NUMERIC:
numbers.append(c)
else:
numbers.append(str(iso_reference_char2int(c)))
return int(''.join(numbers))
|
def iso_reference_str2int(n):
"""Creates the huge number from ISO alphanumeric ISO reference"""
n = n.upper()
numbers = []
for c in n:
iso_reference_valid_char(c)
if c in ISO_REFERENCE_VALID_NUMERIC:
numbers.append(c)
else:
numbers.append(str(iso_reference_char2int(c)))
return int(''.join(numbers))
|
[
"Creates",
"the",
"huge",
"number",
"from",
"ISO",
"alphanumeric",
"ISO",
"reference"
] |
rambo/python-holviapi
|
python
|
https://github.com/rambo/python-holviapi/blob/f57f44e7b0a1030786aafd6f387114abb546bb32/holviapi/utils.py#L182-L192
|
[
"def",
"iso_reference_str2int",
"(",
"n",
")",
":",
"n",
"=",
"n",
".",
"upper",
"(",
")",
"numbers",
"=",
"[",
"]",
"for",
"c",
"in",
"n",
":",
"iso_reference_valid_char",
"(",
"c",
")",
"if",
"c",
"in",
"ISO_REFERENCE_VALID_NUMERIC",
":",
"numbers",
".",
"append",
"(",
"c",
")",
"else",
":",
"numbers",
".",
"append",
"(",
"str",
"(",
"iso_reference_char2int",
"(",
"c",
")",
")",
")",
"return",
"int",
"(",
"''",
".",
"join",
"(",
"numbers",
")",
")"
] |
f57f44e7b0a1030786aafd6f387114abb546bb32
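A sketch of the conversion above, assuming iso_reference_char2int uses the standard ISO 7064 mapping A=10 .. Z=35 (so R becomes 27 and F becomes 15):

>>> iso_reference_str2int('RF18')
271518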
|
valid
|
iso_reference_isvalid
|
Validates ISO reference number
|
holviapi/utils.py
|
def iso_reference_isvalid(ref):
"""Validates ISO reference number"""
ref = str(ref)
cs_source = ref[4:] + ref[:4]
return (iso_reference_str2int(cs_source) % 97) == 1
|
def iso_reference_isvalid(ref):
"""Validates ISO reference number"""
ref = str(ref)
cs_source = ref[4:] + ref[:4]
return (iso_reference_str2int(cs_source) % 97) == 1
|
[
"Validates",
"ISO",
"reference",
"number"
] |
rambo/python-holviapi
|
python
|
https://github.com/rambo/python-holviapi/blob/f57f44e7b0a1030786aafd6f387114abb546bb32/holviapi/utils.py#L209-L213
|
[
"def",
"iso_reference_isvalid",
"(",
"ref",
")",
":",
"ref",
"=",
"str",
"(",
"ref",
")",
"cs_source",
"=",
"ref",
"[",
"4",
":",
"]",
"+",
"ref",
"[",
":",
"4",
"]",
"return",
"(",
"iso_reference_str2int",
"(",
"cs_source",
")",
"%",
"97",
")",
"==",
"1"
] |
f57f44e7b0a1030786aafd6f387114abb546bb32
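A check against a reference commonly used in ISO 11649 examples: moving the first four characters to the end and converting the letters yields 539007547034271518, and 539007547034271518 % 97 == 1:

>>> iso_reference_isvalid('RF18539007547034')
True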
|
valid
|
barcode
|
Calculates virtual barcode for IBAN account number and ISO reference
Arguments:
iban {string} -- IBAN formed account number
reference {string} -- ISO 11649 creditor reference
amount {decimal.Decimal} -- Amount in euros, 0.01 - 999999.99
due {datetime.date} -- due date
|
holviapi/utils.py
|
def barcode(iban, reference, amount, due=None):
"""Calculates virtual barcode for IBAN account number and ISO reference
Arguments:
iban {string} -- IBAN formed account number
reference {string} -- ISO 11649 creditor reference
amount {decimal.Decimal} -- Amount in euros, 0.01 - 999999.99
due {datetime.date} -- due date
"""
iban = iban.replace(' ', '')
reference = reference.replace(' ', '')
if reference.startswith('RF'):
version = 5
else:
version = 4
if version == 5:
reference = reference[2:] # strip the RF prefix; zero-pad after the check digits if needed
if len(reference) < 23:
reference = reference[:2] + ("0" * (23 - len(reference))) + reference[2:]
elif version == 4:
reference = reference.zfill(20)
if not iban.startswith('FI'):
raise BarcodeException('Barcodes can be printed only for IBANs starting with FI')
iban = iban[2:]
amount = "%08d" % (amount.quantize(Decimal('.01')).shift(2).to_integral_value())
if len(amount) != 8:
raise BarcodeException("Barcode payment amount must be less than 1000000.00")
if due:
due = due.strftime("%y%m%d")
else:
due = "000000"
if version == 4:
barcode = "%s%s%s000%s%s" % (version, iban, amount, reference, due)
elif version == 5:
barcode = "%s%s%s%s%s" % (version, iban, amount, reference, due)
return barcode
|
def barcode(iban, reference, amount, due=None):
"""Calculates virtual barcode for IBAN account number and ISO reference
Arguments:
iban {string} -- IBAN formed account number
reference {string} -- ISO 11649 creditor reference
amount {decimal.Decimal} -- Amount in euros, 0.01 - 999999.99
due {datetime.date} -- due date
"""
iban = iban.replace(' ', '')
reference = reference.replace(' ', '')
if reference.startswith('RF'):
version = 5
else:
version = 4
if version == 5:
reference = reference[2:] # strip the RF prefix; zero-pad after the check digits if needed
if len(reference) < 23:
reference = reference[:2] + ("0" * (23 - len(reference))) + reference[2:]
elif version == 4:
reference = reference.zfill(20)
if not iban.startswith('FI'):
raise BarcodeException('Barcodes can be printed only for IBANs starting with FI')
iban = iban[2:]
amount = "%08d" % (amount.quantize(Decimal('.01')).shift(2).to_integral_value())
if len(amount) != 8:
raise BarcodeException("Barcode payment amount must be less than 1000000.00")
if due:
due = due.strftime("%y%m%d")
else:
due = "000000"
if version == 4:
barcode = "%s%s%s000%s%s" % (version, iban, amount, reference, due)
elif version == 5:
barcode = "%s%s%s%s%s" % (version, iban, amount, reference, due)
return barcode
|
[
"Calculates",
"virtual",
"barcode",
"for",
"IBAN",
"account",
"number",
"and",
"ISO",
"reference"
] |
rambo/python-holviapi
|
python
|
https://github.com/rambo/python-holviapi/blob/f57f44e7b0a1030786aafd6f387114abb546bb32/holviapi/utils.py#L220-L263
|
[
"def",
"barcode",
"(",
"iban",
",",
"reference",
",",
"amount",
",",
"due",
"=",
"None",
")",
":",
"iban",
"=",
"iban",
".",
"replace",
"(",
"' '",
",",
"''",
")",
"reference",
"=",
"reference",
".",
"replace",
"(",
"' '",
",",
"''",
")",
"if",
"reference",
".",
"startswith",
"(",
"'RF'",
")",
":",
"version",
"=",
"5",
"else",
":",
"version",
"=",
"4",
"if",
"version",
"==",
"5",
":",
"reference",
"=",
"reference",
"[",
"2",
":",
"]",
"# test RF and add 00 where needed",
"if",
"len",
"(",
"reference",
")",
"<",
"23",
":",
"reference",
"=",
"reference",
"[",
":",
"2",
"]",
"+",
"(",
"\"0\"",
"*",
"(",
"23",
"-",
"len",
"(",
"reference",
")",
")",
")",
"+",
"reference",
"[",
"2",
":",
"]",
"elif",
"version",
"==",
"4",
":",
"reference",
"=",
"reference",
".",
"zfill",
"(",
"20",
")",
"if",
"not",
"iban",
".",
"startswith",
"(",
"'FI'",
")",
":",
"raise",
"BarcodeException",
"(",
"'Barcodes can be printed only for IBANs starting with FI'",
")",
"iban",
"=",
"iban",
"[",
"2",
":",
"]",
"amount",
"=",
"\"%08d\"",
"%",
"(",
"amount",
".",
"quantize",
"(",
"Decimal",
"(",
"'.01'",
")",
")",
".",
"shift",
"(",
"2",
")",
".",
"to_integral_value",
"(",
")",
")",
"if",
"len",
"(",
"amount",
")",
"!=",
"8",
":",
"raise",
"BarcodeException",
"(",
"\"Barcode payment amount must be less than 1000000.00\"",
")",
"if",
"due",
":",
"due",
"=",
"due",
".",
"strftime",
"(",
"\"%y%m%d\"",
")",
"else",
":",
"due",
"=",
"\"000000\"",
"if",
"version",
"==",
"4",
":",
"barcode",
"=",
"\"%s%s%s000%s%s\"",
"%",
"(",
"version",
",",
"iban",
",",
"amount",
",",
"reference",
",",
"due",
")",
"elif",
"version",
"==",
"5",
":",
"barcode",
"=",
"\"%s%s%s%s%s\"",
"%",
"(",
"version",
",",
"iban",
",",
"amount",
",",
"reference",
",",
"due",
")",
"return",
"barcode"
] |
f57f44e7b0a1030786aafd6f387114abb546bb32
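A usage sketch for barcode above (the IBAN and reference are commonly cited Finnish examples; the amount must be a Decimal):

from decimal import Decimal
from datetime import date

code = barcode('FI21 1234 5600 0007 85',
               'RF18 5390 0754 7034',
               Decimal('100.20'),
               due=date(2025, 1, 31))
# version 5: '5' + 16-digit account + '00010020' + zero-padded reference + '250131'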
|
valid
|
add_file_normal
|
Add a normal file including its source
|
dgitcore/datasets/files.py
|
def add_file_normal(f, targetdir, generator, script, source):
"""
Add a normal file including its source
"""
basename = os.path.basename(f)
if targetdir != ".":
relativepath = os.path.join(targetdir, basename)
else:
relativepath = basename
relpath = os.path.relpath(f, os.getcwd())
filetype = 'data'
if script:
filetype = 'script'
if generator:
filetype = 'generator'
update = OrderedDict([
('type', filetype),
('generator', generator),
('relativepath', relativepath),
('content', ""),
('source', source),
('localfullpath', f),
('localrelativepath', relpath)
])
update = annotate_record(update)
return (basename, update)
|
def add_file_normal(f, targetdir, generator, script, source):
"""
Add a normal file including its source
"""
basename = os.path.basename(f)
if targetdir != ".":
relativepath = os.path.join(targetdir, basename)
else:
relativepath = basename
relpath = os.path.relpath(f, os.getcwd())
filetype = 'data'
if script:
filetype = 'script'
if generator:
filetype = 'generator'
update = OrderedDict([
('type', filetype),
('generator', generator),
('relativepath', relativepath),
('content', ""),
('source', source),
('localfullpath', f),
('localrelativepath', relpath)
])
update = annotate_record(update)
return (basename, update)
|
[
"Add",
"a",
"normal",
"file",
"including",
"its",
"source"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/files.py#L66-L96
|
[
"def",
"add_file_normal",
"(",
"f",
",",
"targetdir",
",",
"generator",
",",
"script",
",",
"source",
")",
":",
"basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
"if",
"targetdir",
"!=",
"\".\"",
":",
"relativepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"targetdir",
",",
"basename",
")",
"else",
":",
"relativepath",
"=",
"basename",
"relpath",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"f",
",",
"os",
".",
"getcwd",
"(",
")",
")",
"filetype",
"=",
"'data'",
"if",
"script",
":",
"filetype",
"=",
"'script'",
"if",
"generator",
":",
"filetype",
"=",
"'generator'",
"update",
"=",
"OrderedDict",
"(",
"[",
"(",
"'type'",
",",
"filetype",
")",
",",
"(",
"'generator'",
",",
"generator",
")",
",",
"(",
"'relativepath'",
",",
"relativepath",
")",
",",
"(",
"'content'",
",",
"\"\"",
")",
",",
"(",
"'source'",
",",
"source",
")",
",",
"(",
"'localfullpath'",
",",
"f",
")",
",",
"(",
"'localrelativepath'",
",",
"relpath",
")",
"]",
")",
"update",
"=",
"annotate_record",
"(",
"update",
")",
"return",
"(",
"basename",
",",
"update",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
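A sketch of the record produced above (path and source URL are hypothetical):

basename, record = add_file_normal('data/raw.csv', 'inputs',
                                   generator=False, script=False,
                                   source='https://example.com/raw.csv')
# basename == 'raw.csv'; record['relativepath'] == 'inputs/raw.csv'; record['type'] == 'data'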
|
valid
|
extract_files
|
Extract the files to be added based on the includes
|
dgitcore/datasets/files.py
|
def extract_files(filename, includes):
"""
Extract the files to be added based on the includes
"""
# Load the execution strace log
lines = open(filename).readlines()
# Extract only open files - whether for read or write. You often
# want to capture the json/ini configuration file as well
files = {}
lines = [l.strip() for l in lines if 'open(' in l]
for l in lines:
# Check both these formats...
# 20826 open("/usr/lib/locale/locale-archive", O_RDONLY|O_CLOEXEC) = 3
#[28940] access(b'/etc/ld.so.nohwcap', F_OK) = -2 (No such file or directory)
matchedfile = re.search('open\([b]["\'](.+?)["\']', l)
if matchedfile is None:
matchedfile = re.search('open\("(.+?)\"', l)
if matchedfile is None:
continue
matchedfile = matchedfile.group(1)
if os.path.exists(matchedfile) and os.path.isfile(matchedfile):
#print("Looking at ", matchedfile)
# Check what action is being performed on these
action = 'input' if 'O_RDONLY' in l else 'output'
matchedfile = os.path.relpath(matchedfile, ".")
#print("Matched file's relative path", matchedfile)
for i in includes:
if fnmatch.fnmatch(matchedfile, i):
# Exclude python libraries
if 'site-packages' in matchedfile:
continue
if matchedfile not in files:
files[matchedfile] = [action]
else:
if action not in files[matchedfile]:
files[matchedfile].append(action)
# A single file may be opened and closed multiple times
if len(files) == 0:
print("No input or output files found that match pattern")
return []
print('We captured files that matched the pattern you specified.')
print('Please select files to keep (press ENTER)')
# Let the user have the final say on which files must be included.
filenames = list(files.keys())
filenames.sort()
with tempfile.NamedTemporaryFile(suffix=".tmp") as temp:
temp.write(yaml.dump(filenames, default_flow_style=False).encode('utf-8'))
temp.flush()
EDITOR = os.environ.get('EDITOR','/usr/bin/vi')
subprocess.call("%s %s" %(EDITOR,temp.name), shell=True)
temp.seek(0)
data = temp.read()
selected = yaml.safe_load(data)
print("You selected", len(selected), "file(s)")
if len(selected) == 0:
return []
# Get the action corresponding to the selected files
filenames = [f for f in filenames if f in selected]
# Now we know the list of files. Where should they go?
print('Please select target locations for the various directories we found')
print('Please make sure you do not delete any rows or edit the keys.')
input('(press ENTER)')
prefixes = {}
for f in filenames:
dirname = os.path.dirname(f)
if dirname == "":
dirname = "."
prefixes[dirname] = dirname
while True:
with tempfile.NamedTemporaryFile(suffix=".tmp") as temp:
temp.write(yaml.dump(prefixes, default_flow_style=False).encode('utf-8'))
temp.flush()
EDITOR = os.environ.get('EDITOR','/usr/bin/vi')
subprocess.call("%s %s" %(EDITOR,temp.name), shell=True)
temp.seek(0)
data = temp.read()
try:
revised = yaml.safe_load(data)
except Exception as e:
revised = {}
#print(list(revised.keys()))
#print(list(prefixes.keys()))
if set(list(revised.keys())) == set(list(prefixes.keys())):
prefixes = revised
break
else:
print("Could not process edited file. Either some rows are missing or entry has YAML syntax errors")
input("Press ENTER to continue")
# Add the root directory back
if "." in prefixes:
prefixes[""] = prefixes["."]
result = []
ts = datetime.now().isoformat()
for f in filenames:
relativepath = prefixes[os.path.dirname(f)]
if relativepath == ".":
relativepath = os.path.basename(f)
else:
relativepath = os.path.join(relativepath, os.path.basename(f))
result.append(OrderedDict([
('relativepath', relativepath),
('type', 'run-output'),
('actions', files[f]),
('mimetypes', mimetypes.guess_type(f)[0]),
('content', open(f).read(512)),
('sha256', compute_sha256(f)),
('ts', ts),
('localrelativepath', os.path.relpath(f, ".")),
('localfullpath', os.path.abspath(f)),
]))
print(json.dumps(result, indent=4))
return result
|
def extract_files(filename, includes):
"""
Extract the files to be added based on the includes
"""
# Load the execution strace log
lines = open(filename).readlines()
# Extract only open files - whether for read or write. You often
# want to capture the json/ini configuration file as well
files = {}
lines = [l.strip() for l in lines if 'open(' in l]
for l in lines:
# Check both these formats...
# 20826 open("/usr/lib/locale/locale-archive", O_RDONLY|O_CLOEXEC) = 3
#[28940] access(b'/etc/ld.so.nohwcap', F_OK) = -2 (No such file or directory)
matchedfile = re.search('open\([b]["\'](.+?)["\']', l)
if matchedfile is None:
matchedfile = re.search('open\("(.+?)\"', l)
if matchedfile is None:
continue
matchedfile = matchedfile.group(1)
if os.path.exists(matchedfile) and os.path.isfile(matchedfile):
#print("Looking at ", matchedfile)
# Check what action is being performed on these
action = 'input' if 'O_RDONLY' in l else 'output'
matchedfile = os.path.relpath(matchedfile, ".")
#print("Matched file's relative path", matchedfile)
for i in includes:
if fnmatch.fnmatch(matchedfile, i):
# Exclude python libraries
if 'site-packages' in matchedfile:
continue
if matchedfile not in files:
files[matchedfile] = [action]
else:
if action not in files[matchedfile]:
files[matchedfile].append(action)
# A single file may be opened and closed multiple times
if len(files) == 0:
print("No input or output files found that match pattern")
return []
print('We captured files that matched the pattern you specified.')
print('Please select files to keep (press ENTER)')
# Let the user have the final say on which files must be included.
filenames = list(files.keys())
filenames.sort()
with tempfile.NamedTemporaryFile(suffix=".tmp") as temp:
temp.write(yaml.dump(filenames, default_flow_style=False).encode('utf-8'))
temp.flush()
EDITOR = os.environ.get('EDITOR','/usr/bin/vi')
subprocess.call("%s %s" %(EDITOR,temp.name), shell=True)
temp.seek(0)
data = temp.read()
selected = yaml.safe_load(data)
print("You selected", len(selected), "file(s)")
if len(selected) == 0:
return []
# Get the action corresponding to the selected files
filenames = [f for f in filenames if f in selected]
# Now we know the list of files. Where should they go?
print('Please select target locations for the various directories we found')
print('Please make sure you do not delete any rows or edit the keys.')
input('(press ENTER)')
prefixes = {}
for f in filenames:
dirname = os.path.dirname(f)
if dirname == "":
dirname = "."
prefixes[dirname] = dirname
while True:
with tempfile.NamedTemporaryFile(suffix=".tmp") as temp:
temp.write(yaml.dump(prefixes, default_flow_style=False).encode('utf-8'))
temp.flush()
EDITOR = os.environ.get('EDITOR','/usr/bin/vi')
subprocess.call("%s %s" %(EDITOR,temp.name), shell=True)
temp.seek(0)
data = temp.read()
try:
revised = yaml.safe_load(data)
except Exception as e:
revised = {}
#print(list(revised.keys()))
#print(list(prefixes.keys()))
if set(list(revised.keys())) == set(list(prefixes.keys())):
prefixes = revised
break
else:
print("Could not process edited file. Either some rows are missing or entry has YAML syntax errors")
input("Press ENTER to continue")
# Add the root directory back
if "." in prefixes:
prefixes[""] = prefixes["."]
result = []
ts = datetime.now().isoformat()
for f in filenames:
relativepath = prefixes[os.path.dirname(f)]
if relativepath == ".":
relativepath = os.path.basename(f)
else:
relativepath = os.path.join(relativepath, os.path.basename(f))
result.append(OrderedDict([
('relativepath', relativepath),
('type', 'run-output'),
('actions', files[f]),
('mimetypes', mimetypes.guess_type(f)[0]),
('content', open(f).read(512)),
('sha256', compute_sha256(f)),
('ts', ts),
('localrelativepath', os.path.relpath(f, ".")),
('localfullpath', os.path.abspath(f)),
]))
print(json.dumps(result, indent=4))
return result
|
[
"Extract",
"the",
"files",
"to",
"be",
"added",
"based",
"on",
"the",
"includes"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/files.py#L132-L267
|
[
"def",
"extract_files",
"(",
"filename",
",",
"includes",
")",
":",
"# Load the execution strace log",
"lines",
"=",
"open",
"(",
"filename",
")",
".",
"readlines",
"(",
")",
"# Extract only open files - whether for read or write. You often",
"# want to capture the json/ini configuration file as well",
"files",
"=",
"{",
"}",
"lines",
"=",
"[",
"l",
".",
"strip",
"(",
")",
"for",
"l",
"in",
"lines",
"if",
"'open('",
"in",
"l",
"]",
"for",
"l",
"in",
"lines",
":",
"# Check both these formats...",
"# 20826 open(\"/usr/lib/locale/locale-archive\", O_RDONLY|O_CLOEXEC) = 3",
"#[28940] access(b'/etc/ld.so.nohwcap', F_OK) = -2 (No such file or directory)",
"matchedfile",
"=",
"re",
".",
"search",
"(",
"'open\\([b][\"\\'](.+?)[\"\\']'",
",",
"l",
")",
"if",
"matchedfile",
"is",
"None",
":",
"matchedfile",
"=",
"re",
".",
"search",
"(",
"'open\\(\"(.+?)\\\"'",
",",
"l",
")",
"if",
"matchedfile",
"is",
"None",
":",
"continue",
"matchedfile",
"=",
"matchedfile",
".",
"group",
"(",
"1",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"matchedfile",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"matchedfile",
")",
":",
"#print(\"Looking at \", matchedfile)",
"# Check what action is being performed on these",
"action",
"=",
"'input'",
"if",
"'O_RDONLY'",
"in",
"l",
"else",
"'output'",
"matchedfile",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"matchedfile",
",",
"\".\"",
")",
"#print(\"Matched file's relative path\", matchedfile)",
"for",
"i",
"in",
"includes",
":",
"if",
"fnmatch",
".",
"fnmatch",
"(",
"matchedfile",
",",
"i",
")",
":",
"# Exclude python libraries",
"if",
"'site-packages'",
"in",
"matchedfile",
":",
"continue",
"if",
"matchedfile",
"not",
"in",
"files",
":",
"files",
"[",
"matchedfile",
"]",
"=",
"[",
"action",
"]",
"else",
":",
"if",
"action",
"not",
"in",
"files",
"[",
"matchedfile",
"]",
":",
"files",
"[",
"matchedfile",
"]",
".",
"append",
"(",
"action",
")",
"# A single file may be opened and closed multiple times",
"if",
"len",
"(",
"files",
")",
"==",
"0",
":",
"print",
"(",
"\"No input or output files found that match pattern\"",
")",
"return",
"[",
"]",
"print",
"(",
"'We captured files that matched the pattern you specified.'",
")",
"print",
"(",
"'Please select files to keep (press ENTER)'",
")",
"# Let the user have the final say on which files must be included.",
"filenames",
"=",
"list",
"(",
"files",
".",
"keys",
"(",
")",
")",
"filenames",
".",
"sort",
"(",
")",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"suffix",
"=",
"\".tmp\"",
")",
"as",
"temp",
":",
"temp",
".",
"write",
"(",
"yaml",
".",
"dump",
"(",
"filenames",
",",
"default_flow_style",
"=",
"False",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"temp",
".",
"flush",
"(",
")",
"EDITOR",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'EDITOR'",
",",
"'/usr/bin/vi'",
")",
"subprocess",
".",
"call",
"(",
"\"%s %s\"",
"%",
"(",
"EDITOR",
",",
"temp",
".",
"name",
")",
",",
"shell",
"=",
"True",
")",
"temp",
".",
"seek",
"(",
"0",
")",
"data",
"=",
"temp",
".",
"read",
"(",
")",
"selected",
"=",
"yaml",
".",
"load",
"(",
"data",
")",
"print",
"(",
"\"You selected\"",
",",
"len",
"(",
"selected",
")",
",",
"\"file(s)\"",
")",
"if",
"len",
"(",
"selected",
")",
"==",
"0",
":",
"return",
"[",
"]",
"# Get the action corresponding to the selected files",
"filenames",
"=",
"[",
"f",
"for",
"f",
"in",
"filenames",
"if",
"f",
"in",
"selected",
"]",
"# Now we know the list of files. Where should they go?",
"print",
"(",
"'Please select target locations for the various directories we found'",
")",
"print",
"(",
"'Please make sure you do not delete any rows or edit the keys.'",
")",
"input",
"(",
"'(press ENTER)'",
")",
"prefixes",
"=",
"{",
"}",
"for",
"f",
"in",
"filenames",
":",
"dirname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"f",
")",
"if",
"dirname",
"==",
"\"\"",
":",
"dirname",
"=",
"\".\"",
"prefixes",
"[",
"dirname",
"]",
"=",
"dirname",
"while",
"True",
":",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"suffix",
"=",
"\".tmp\"",
")",
"as",
"temp",
":",
"temp",
".",
"write",
"(",
"yaml",
".",
"dump",
"(",
"prefixes",
",",
"default_flow_style",
"=",
"False",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"temp",
".",
"flush",
"(",
")",
"EDITOR",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'EDITOR'",
",",
"'/usr/bin/vi'",
")",
"subprocess",
".",
"call",
"(",
"\"%s %s\"",
"%",
"(",
"EDITOR",
",",
"temp",
".",
"name",
")",
",",
"shell",
"=",
"True",
")",
"temp",
".",
"seek",
"(",
"0",
")",
"data",
"=",
"temp",
".",
"read",
"(",
")",
"try",
":",
"revised",
"=",
"yaml",
".",
"load",
"(",
"data",
")",
"except",
"Exception",
"as",
"e",
":",
"revised",
"=",
"{",
"}",
"#print(list(revised.keys()))",
"#print(list(prefixes.keys()))",
"if",
"set",
"(",
"list",
"(",
"revised",
".",
"keys",
"(",
")",
")",
")",
"==",
"set",
"(",
"list",
"(",
"prefixes",
".",
"keys",
"(",
")",
")",
")",
":",
"prefixes",
"=",
"revised",
"break",
"else",
":",
"print",
"(",
"\"Could not process edited file. Either some rows are missing or entry has YAML syntax errors\"",
")",
"input",
"(",
"\"Press ENTER to continue\"",
")",
"# Add the root directory back",
"if",
"\".\"",
"in",
"prefixes",
":",
"prefixes",
"[",
"\"\"",
"]",
"=",
"prefixes",
"[",
"\".\"",
"]",
"result",
"=",
"[",
"]",
"ts",
"=",
"datetime",
".",
"now",
"(",
")",
".",
"isoformat",
"(",
")",
"for",
"f",
"in",
"filenames",
":",
"relativepath",
"=",
"prefixes",
"[",
"os",
".",
"path",
".",
"dirname",
"(",
"f",
")",
"]",
"if",
"relativepath",
"==",
"\".\"",
":",
"relativepath",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
"else",
":",
"relativepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"relativepath",
",",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
")",
"result",
".",
"append",
"(",
"OrderedDict",
"(",
"[",
"(",
"'relativepath'",
",",
"relativepath",
")",
",",
"(",
"'type'",
",",
"'run-output'",
")",
",",
"(",
"'actions'",
",",
"files",
"[",
"f",
"]",
")",
",",
"(",
"'mimetypes'",
",",
"mimetypes",
".",
"guess_type",
"(",
"f",
")",
"[",
"0",
"]",
")",
",",
"(",
"'content'",
",",
"open",
"(",
"f",
")",
".",
"read",
"(",
"512",
")",
")",
",",
"(",
"'sha256'",
",",
"compute_sha256",
"(",
"f",
")",
")",
",",
"(",
"'ts'",
",",
"ts",
")",
",",
"(",
"'localrelativepath'",
",",
"os",
".",
"path",
".",
"relpath",
"(",
"f",
",",
"\".\"",
")",
")",
",",
"(",
"'localfullpath'",
",",
"os",
".",
"path",
".",
"abspath",
"(",
"f",
")",
")",
",",
"]",
")",
")",
"print",
"(",
"json",
".",
"dumps",
"(",
"result",
",",
"indent",
"=",
"4",
")",
")",
"return",
"result"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
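A usage sketch for extract_files above (log path and patterns are hypothetical); note that the function is interactive: it opens $EDITOR twice so the user can prune the file list and remap target directories:

files = extract_files('strace.out.txt', includes=['*.csv', 'config/*.json'])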
|
valid
|
run_executable
|
Run the executable and capture the input and output...
|
dgitcore/datasets/files.py
|
def run_executable(repo, args, includes):
"""
Run the executable and capture the input and output...
"""
# Get platform information
mgr = plugins_get_mgr()
repomgr = mgr.get(what='instrumentation', name='platform')
platform_metadata = repomgr.get_metadata()
print("Obtaining Commit Information")
(executable, commiturl) = \
find_executable_commitpath(repo, args)
# Create a local directory
tmpdir = tempfile.mkdtemp()
# Construct the strace command
print("Running the command")
strace_filename = os.path.join(tmpdir,'strace.out.txt')
cmd = ["strace.py", "-f", "-o", strace_filename,
"-s", "1024", "-q", "--"] + args
# Run the command
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate()
# Capture the stdout/stderr
stdout = os.path.join(tmpdir, 'stdout.log.txt')
with open(stdout, 'w') as fd:
fd.write(out.decode('utf-8'))
stderr = os.path.join(tmpdir, 'stderr.log.txt')
with open(stderr, 'w') as fd:
fd.write(err.decode('utf-8'))
# Check the strace output
files = extract_files(strace_filename, includes)
# Now insert the execution metadata
execution_metadata = {
'likelyexecutable': executable,
'commitpath': commiturl,
'args': args,
}
execution_metadata.update(platform_metadata)
for i in range(len(files)):
files[i]['execution_metadata'] = execution_metadata
return files
|
def run_executable(repo, args, includes):
"""
Run the executable and capture the input and output...
"""
# Get platform information
mgr = plugins_get_mgr()
repomgr = mgr.get(what='instrumentation', name='platform')
platform_metadata = repomgr.get_metadata()
print("Obtaining Commit Information")
(executable, commiturl) = \
find_executable_commitpath(repo, args)
# Create a local directory
tmpdir = tempfile.mkdtemp()
# Construct the strace command
print("Running the command")
strace_filename = os.path.join(tmpdir,'strace.out.txt')
cmd = ["strace.py", "-f", "-o", strace_filename,
"-s", "1024", "-q", "--"] + args
# Run the command
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate()
# Capture the stdout/stderr
stdout = os.path.join(tmpdir, 'stdout.log.txt')
with open(stdout, 'w') as fd:
fd.write(out.decode('utf-8'))
stderr = os.path.join(tmpdir, 'stderr.log.txt')
with open(stderr, 'w') as fd:
fd.write(err.decode('utf-8'))
# Check the strace output
files = extract_files(strace_filename, includes)
# Now insert the execution metadata
execution_metadata = {
'likelyexecutable': executable,
'commitpath': commiturl,
'args': args,
}
execution_metadata.update(platform_metadata)
for i in range(len(files)):
files[i]['execution_metadata'] = execution_metadata
return files
|
[
"Run",
"the",
"executable",
"and",
"capture",
"the",
"input",
"and",
"output",
"..."
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/files.py#L290-L344
|
[
"def",
"run_executable",
"(",
"repo",
",",
"args",
",",
"includes",
")",
":",
"# Get platform information",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"repomgr",
"=",
"mgr",
".",
"get",
"(",
"what",
"=",
"'instrumentation'",
",",
"name",
"=",
"'platform'",
")",
"platform_metadata",
"=",
"repomgr",
".",
"get_metadata",
"(",
")",
"print",
"(",
"\"Obtaining Commit Information\"",
")",
"(",
"executable",
",",
"commiturl",
")",
"=",
"find_executable_commitpath",
"(",
"repo",
",",
"args",
")",
"# Create a local directory",
"tmpdir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"# Construct the strace command",
"print",
"(",
"\"Running the command\"",
")",
"strace_filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"'strace.out.txt'",
")",
"cmd",
"=",
"[",
"\"strace.py\"",
",",
"\"-f\"",
",",
"\"-o\"",
",",
"strace_filename",
",",
"\"-s\"",
",",
"\"1024\"",
",",
"\"-q\"",
",",
"\"--\"",
"]",
"+",
"args",
"# Run the command",
"p",
"=",
"subprocess",
".",
"Popen",
"(",
"cmd",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
")",
"out",
",",
"err",
"=",
"p",
".",
"communicate",
"(",
")",
"# Capture the stdout/stderr",
"stdout",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"'stdout.log.txt'",
")",
"with",
"open",
"(",
"stdout",
",",
"'w'",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"out",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"stderr",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"'stderr.log.txt'",
")",
"with",
"open",
"(",
"stderr",
",",
"'w'",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"err",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"# Check the strace output",
"files",
"=",
"extract_files",
"(",
"strace_filename",
",",
"includes",
")",
"# Now insert the execution metadata",
"execution_metadata",
"=",
"{",
"'likelyexecutable'",
":",
"executable",
",",
"'commitpath'",
":",
"commiturl",
",",
"'args'",
":",
"args",
",",
"}",
"execution_metadata",
".",
"update",
"(",
"platform_metadata",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"files",
")",
")",
":",
"files",
"[",
"i",
"]",
"[",
"'execution_metadata'",
"]",
"=",
"execution_metadata",
"return",
"files"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
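A usage sketch for run_executable above (command and patterns are hypothetical; it shells out to strace.py, so that wrapper must be on PATH):

files = run_executable(repo,
                       ['python', 'analyze.py', '--input', 'raw.csv'],
                       includes=['*.csv'])
# every returned record carries execution_metadata with the platform details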
|
valid
|
add
|
Add files to the repository by explicitly specifying them or by
specifying a pattern over files accessed during execution of an
executable.
Parameters
----------
repo: Repository
args: files or command line
(a) If simply adding files, then the list of files that must
be added (including any additional arguments to be passed to
git)
(b) If files to be added are an output of a command line, then
args is the command line
targetdir: Target directory to store the files
execute: Args are not files to be added but scripts that must be run.
includes: patterns used to select files to add
script: Is this a script?
generator: Is this a generator?
source: Link to the original source of the data
|
dgitcore/datasets/files.py
|
def add(repo, args, targetdir,
execute=False, generator=False,
includes=[], script=False,
source=None):
"""
Add files to the repository by explicitly specifying them or by
specifying a pattern over files accessed during execution of an
executable.
Parameters
----------
repo: Repository
args: files or command line
(a) If simply adding files, then the list of files that must
be added (including any additional arguments to be passed to
git)
(b) If files to be added are an output of a command line, then
args is the command line
targetdir: Target directory to store the files
execute: Args are not files to be added but scripts that must be run.
includes: patterns used to select files to add
script: Is this a script?
generator: Is this a generator?
source: Link to the original source of the data
"""
# Gather the files...
if not execute:
files = add_files(args=args,
targetdir=targetdir,
source=source,
script=script,
generator=generator)
else:
files = run_executable(repo, args, includes)
if files is None or len(files) == 0:
return repo
# Update the repo package but with only those that have changed.
filtered_files = []
package = repo.package
for h in files:
found = False
for i, r in enumerate(package['resources']):
if h['relativepath'] == r['relativepath']:
found = True
if h['sha256'] == r['sha256']:
change = False
for attr in ['source']:
if h[attr] != r[attr]:
r[attr] = h[attr]
change = True
if change:
filtered_files.append(h)
continue
else:
filtered_files.append(h)
package['resources'][i] = h
break
if not found:
filtered_files.append(h)
package['resources'].append(h)
if len(filtered_files) == 0:
return 0
# Copy the files
repo.manager.add_files(repo, filtered_files)
# Write to disk...
rootdir = repo.rootdir
with cd(rootdir):
datapath = "datapackage.json"
with open(datapath, 'w') as fd:
fd.write(json.dumps(package, indent=4))
return len(filtered_files)
|
def add(repo, args, targetdir,
execute=False, generator=False,
includes=[], script=False,
source=None):
"""
Add files to the repository by explicitly specifying them or by
specifying a pattern over files accessed during execution of an
executable.
Parameters
----------
repo: Repository
args: files or command line
(a) If simply adding files, then the list of files that must
be added (including any additional arguments to be passed to
git)
(b) If files to be added are an output of a command line, then
args is the command lined
targetdir: Target directory to store the files
execute: Args are not files to be added but scripts that must be run.
includes: patterns used to select files to
script: Is this a script?
generator: Is this a generator
source: Link to the original source of the data
"""
# Gather the files...
if not execute:
files = add_files(args=args,
targetdir=targetdir,
source=source,
script=script,
generator=generator)
else:
files = run_executable(repo, args, includes)
if files is None or len(files) == 0:
return repo
# Update the repo package but with only those that have changed.
filtered_files = []
package = repo.package
for h in files:
found = False
for i, r in enumerate(package['resources']):
if h['relativepath'] == r['relativepath']:
found = True
if h['sha256'] == r['sha256']:
change = False
for attr in ['source']:
if h[attr] != r[attr]:
r[attr] = h[attr]
change = True
if change:
filtered_files.append(h)
continue
else:
filtered_files.append(h)
package['resources'][i] = h
break
if not found:
filtered_files.append(h)
package['resources'].append(h)
if len(filtered_files) == 0:
return 0
# Copy the files
repo.manager.add_files(repo, filtered_files)
# Write to disk...
rootdir = repo.rootdir
with cd(rootdir):
datapath = "datapackage.json"
with open(datapath, 'w') as fd:
fd.write(json.dumps(package, indent=4))
return len(filtered_files)
|
[
"Add",
"files",
"to",
"the",
"repository",
"by",
"explicitly",
"specifying",
"them",
"or",
"by",
"specifying",
"a",
"pattern",
"over",
"files",
"accessed",
"during",
"execution",
"of",
"an",
"executable",
"."
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/files.py#L349-L431
|
[
"def",
"add",
"(",
"repo",
",",
"args",
",",
"targetdir",
",",
"execute",
"=",
"False",
",",
"generator",
"=",
"False",
",",
"includes",
"=",
"[",
"]",
",",
"script",
"=",
"False",
",",
"source",
"=",
"None",
")",
":",
"# Gather the files...",
"if",
"not",
"execute",
":",
"files",
"=",
"add_files",
"(",
"args",
"=",
"args",
",",
"targetdir",
"=",
"targetdir",
",",
"source",
"=",
"source",
",",
"script",
"=",
"script",
",",
"generator",
"=",
"generator",
")",
"else",
":",
"files",
"=",
"run_executable",
"(",
"repo",
",",
"args",
",",
"includes",
")",
"if",
"files",
"is",
"None",
"or",
"len",
"(",
"files",
")",
"==",
"0",
":",
"return",
"repo",
"# Update the repo package but with only those that have changed.",
"filtered_files",
"=",
"[",
"]",
"package",
"=",
"repo",
".",
"package",
"for",
"h",
"in",
"files",
":",
"found",
"=",
"False",
"for",
"i",
",",
"r",
"in",
"enumerate",
"(",
"package",
"[",
"'resources'",
"]",
")",
":",
"if",
"h",
"[",
"'relativepath'",
"]",
"==",
"r",
"[",
"'relativepath'",
"]",
":",
"found",
"=",
"True",
"if",
"h",
"[",
"'sha256'",
"]",
"==",
"r",
"[",
"'sha256'",
"]",
":",
"change",
"=",
"False",
"for",
"attr",
"in",
"[",
"'source'",
"]",
":",
"if",
"h",
"[",
"attr",
"]",
"!=",
"r",
"[",
"attr",
"]",
":",
"r",
"[",
"attr",
"]",
"=",
"h",
"[",
"attr",
"]",
"change",
"=",
"True",
"if",
"change",
":",
"filtered_files",
".",
"append",
"(",
"h",
")",
"continue",
"else",
":",
"filtered_files",
".",
"append",
"(",
"h",
")",
"package",
"[",
"'resources'",
"]",
"[",
"i",
"]",
"=",
"h",
"break",
"if",
"not",
"found",
":",
"filtered_files",
".",
"append",
"(",
"h",
")",
"package",
"[",
"'resources'",
"]",
".",
"append",
"(",
"h",
")",
"if",
"len",
"(",
"filtered_files",
")",
"==",
"0",
":",
"return",
"0",
"# Copy the files",
"repo",
".",
"manager",
".",
"add_files",
"(",
"repo",
",",
"filtered_files",
")",
"# Write to disk...",
"rootdir",
"=",
"repo",
".",
"rootdir",
"with",
"cd",
"(",
"rootdir",
")",
":",
"datapath",
"=",
"\"datapackage.json\"",
"with",
"open",
"(",
"datapath",
",",
"'w'",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"package",
",",
"indent",
"=",
"4",
")",
")",
"return",
"len",
"(",
"filtered_files",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
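A hedged usage sketch for add(). The module paths follow the path columns (dgitcore/datasets/files.py and dgitcore/datasets/common.py); the username, reponame, file names, and command line are hypothetical:

from dgitcore.datasets.common import lookup
from dgitcore.datasets.files import add

repo = lookup(username='alice', reponame='weather')    # hypothetical repo

# (a) args as plain file paths
changed = add(repo, args=['data/jan.csv', 'data/feb.csv'], targetdir='data')

# (b) args as a command line whose output files are selected by includes
changed = add(repo, args=['python', 'generate.py'], targetdir='data',
              execute=True, includes=['*.csv'])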
|
valid
|
Repo.find_matching_files
|
For various actions we need files that match patterns
|
dgitcore/plugins/repomanager.py
|
def find_matching_files(self, includes):
"""
For various actions we need files that match patterns
"""
if len(includes) == 0:
return []
files = [f['relativepath'] for f in self.package['resources']]
includes = r'|'.join([fnmatch.translate(x) for x in includes])
    # Match both the file name as well as the path..
files = [f for f in files if re.match(includes, os.path.basename(f))] + \
[f for f in files if re.match(includes, f)]
files = list(set(files))
return files
|
def find_matching_files(self, includes):
"""
For various actions we need files that match patterns
"""
if len(includes) == 0:
return []
files = [f['relativepath'] for f in self.package['resources']]
includes = r'|'.join([fnmatch.translate(x) for x in includes])
    # Match both the file name as well as the path..
files = [f for f in files if re.match(includes, os.path.basename(f))] + \
[f for f in files if re.match(includes, f)]
files = list(set(files))
return files
|
[
"For",
"various",
"actions",
"we",
"need",
"files",
"that",
"match",
"patterns"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/repomanager.py#L26-L42
|
[
"def",
"find_matching_files",
"(",
"self",
",",
"includes",
")",
":",
"if",
"len",
"(",
"includes",
")",
"==",
"0",
":",
"return",
"[",
"]",
"files",
"=",
"[",
"f",
"[",
"'relativepath'",
"]",
"for",
"f",
"in",
"self",
".",
"package",
"[",
"'resources'",
"]",
"]",
"includes",
"=",
"r'|'",
".",
"join",
"(",
"[",
"fnmatch",
".",
"translate",
"(",
"x",
")",
"for",
"x",
"in",
"includes",
"]",
")",
"# Match both the file name as well the path..",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"re",
".",
"match",
"(",
"includes",
",",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
")",
"]",
"+",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"re",
".",
"match",
"(",
"includes",
",",
"f",
")",
"]",
"files",
"=",
"list",
"(",
"set",
"(",
"files",
")",
")",
"return",
"files"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
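The matching logic above is small enough to reproduce standalone: each glob in includes is compiled via fnmatch.translate, the results are OR-ed into one regex, and both the basename and the full relative path are tested. A self-contained sketch with made-up paths:

import fnmatch
import os
import re

resources = ['data/jan.csv', 'data/feb.csv', 'scripts/clean.py']
includes = ['*.csv']

# OR the translated globs into a single regex, as find_matching_files does
pattern = r'|'.join(fnmatch.translate(x) for x in includes)
matched = [f for f in resources if re.match(pattern, os.path.basename(f))] + \
          [f for f in resources if re.match(pattern, f)]
print(sorted(set(matched)))   # ['data/feb.csv', 'data/jan.csv']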
|
valid
|
Repo.run
|
Run a specific command using the manager
|
dgitcore/plugins/repomanager.py
|
def run(self, cmd, *args):
"""
Run a specific command using the manager
"""
if self.manager is None:
raise Exception("Fatal internal error: Missing repository manager")
if cmd not in dir(self.manager):
raise Exception("Fatal internal error: Invalid command {} being run".format(cmd))
func = getattr(self.manager, cmd)
repo = self
return func(repo, *args)
|
def run(self, cmd, *args):
"""
Run a specific command using the manager
"""
if self.manager is None:
raise Exception("Fatal internal error: Missing repository manager")
if cmd not in dir(self.manager):
raise Exception("Fatal internal error: Invalid command {} being run".format(cmd))
func = getattr(self.manager, cmd)
repo = self
return func(repo, *args)
|
[
"Run",
"a",
"specific",
"command",
"using",
"the",
"manager"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/repomanager.py#L81-L91
|
[
"def",
"run",
"(",
"self",
",",
"cmd",
",",
"*",
"args",
")",
":",
"if",
"self",
".",
"manager",
"is",
"None",
":",
"raise",
"Exception",
"(",
"\"Fatal internal error: Missing repository manager\"",
")",
"if",
"cmd",
"not",
"in",
"dir",
"(",
"self",
".",
"manager",
")",
":",
"raise",
"Exception",
"(",
"\"Fatal internal error: Invalid command {} being run\"",
".",
"format",
"(",
"cmd",
")",
")",
"func",
"=",
"getattr",
"(",
"self",
".",
"manager",
",",
"cmd",
")",
"repo",
"=",
"self",
"return",
"func",
"(",
"repo",
",",
"*",
"args",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
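run() is a thin dispatcher: it validates the command name against dir(self.manager) and forwards via getattr. The same pattern in a minimal, self-contained form (both class names here are invented for illustration):

class EchoManager:
    # stands in for a real repo manager plugin
    def commit(self, repo, args):
        return ('commit', args)

class MiniRepo:
    manager = EchoManager()
    def run(self, cmd, *args):
        # validate the command name, then dispatch via getattr
        if cmd not in dir(self.manager):
            raise Exception("Invalid command {} being run".format(cmd))
        return getattr(self.manager, cmd)(self, *args)

print(MiniRepo().run('commit', ['-a', '-m', 'msg']))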
|
valid
|
Repo.get_resource
|
Get metadata for a given file
|
dgitcore/plugins/repomanager.py
|
def get_resource(self, p):
"""
Get metadata for a given file
"""
for r in self.package['resources']:
if r['relativepath'] == p:
r['localfullpath'] = os.path.join(self.rootdir, p)
return r
raise Exception("Invalid path")
|
def get_resource(self, p):
"""
Get metadata for a given file
"""
for r in self.package['resources']:
if r['relativepath'] == p:
r['localfullpath'] = os.path.join(self.rootdir, p)
return r
raise Exception("Invalid path")
|
[
"Get",
"metadata",
"for",
"a",
"given",
"file"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/repomanager.py#L94-L103
|
[
"def",
"get_resource",
"(",
"self",
",",
"p",
")",
":",
"for",
"r",
"in",
"self",
".",
"package",
"[",
"'resources'",
"]",
":",
"if",
"r",
"[",
"'relativepath'",
"]",
"==",
"p",
":",
"r",
"[",
"'localfullpath'",
"]",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"rootdir",
",",
"p",
")",
"return",
"r",
"raise",
"Exception",
"(",
"\"Invalid path\"",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
RepoManagerBase.lookup
|
Lookup all available repos
|
dgitcore/plugins/repomanager.py
|
def lookup(self, username=None, reponame=None, key=None):
"""
Lookup all available repos
"""
if key is None:
key = self.key(username, reponame)
if key not in self.repos:
raise UnknownRepository()
return self.repos[key]
|
def lookup(self, username=None, reponame=None, key=None):
"""
Lookup all available repos
"""
if key is None:
key = self.key(username, reponame)
if key not in self.repos:
raise UnknownRepository()
return self.repos[key]
|
[
"Lookup",
"all",
"available",
"repos"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/repomanager.py#L163-L172
|
[
"def",
"lookup",
"(",
"self",
",",
"username",
"=",
"None",
",",
"reponame",
"=",
"None",
",",
"key",
"=",
"None",
")",
":",
"if",
"key",
"is",
"None",
":",
"key",
"=",
"self",
".",
"key",
"(",
"username",
",",
"reponame",
")",
"if",
"key",
"not",
"in",
"self",
".",
"repos",
":",
"raise",
"UnknownRepository",
"(",
")",
"return",
"self",
".",
"repos",
"[",
"key",
"]"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
RepoManagerBase.rootdir
|
Working directory for the repo
|
dgitcore/plugins/repomanager.py
|
def rootdir(self, username, reponame, create=True):
"""
Working directory for the repo
"""
path = os.path.join(self.workspace,
'datasets',
username,
reponame)
if create:
try:
os.makedirs(path)
except:
pass
return path
|
def rootdir(self, username, reponame, create=True):
"""
Working directory for the repo
"""
path = os.path.join(self.workspace,
'datasets',
username,
reponame)
if create:
try:
os.makedirs(path)
except:
pass
return path
|
[
"Working",
"directory",
"for",
"the",
"repo"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/repomanager.py#L204-L218
|
[
"def",
"rootdir",
"(",
"self",
",",
"username",
",",
"reponame",
",",
"create",
"=",
"True",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"workspace",
",",
"'datasets'",
",",
"username",
",",
"reponame",
")",
"if",
"create",
":",
"try",
":",
"os",
".",
"makedirs",
"(",
"path",
")",
"except",
":",
"pass",
"return",
"path"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
RepoManagerBase.add
|
Add repo to the internal lookup table...
|
dgitcore/plugins/repomanager.py
|
def add(self, repo):
"""
Add repo to the internal lookup table...
"""
key = self.key(repo.username, repo.reponame)
repo.key = key
self.repos[key] = repo
return key
|
def add(self, repo):
"""
Add repo to the internal lookup table...
"""
key = self.key(repo.username, repo.reponame)
repo.key = key
self.repos[key] = repo
return key
|
[
"Add",
"repo",
"to",
"the",
"internal",
"lookup",
"table",
"..."
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/repomanager.py#L222-L229
|
[
"def",
"add",
"(",
"self",
",",
"repo",
")",
":",
"key",
"=",
"self",
".",
"key",
"(",
"repo",
".",
"username",
",",
"repo",
".",
"reponame",
")",
"repo",
".",
"key",
"=",
"key",
"self",
".",
"repos",
"[",
"key",
"]",
"=",
"repo",
"return",
"key"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
lookup
|
Lookup a repo based on username reponame
|
dgitcore/datasets/common.py
|
def lookup(username, reponame):
"""
Lookup a repo based on username reponame
"""
mgr = plugins_get_mgr()
# XXX This should be generalized to all repo managers.
repomgr = mgr.get(what='repomanager', name='git')
repo = repomgr.lookup(username=username,
reponame=reponame)
return repo
|
def lookup(username, reponame):
"""
Lookup a repo based on username reponame
"""
mgr = plugins_get_mgr()
# XXX This should be generalized to all repo managers.
repomgr = mgr.get(what='repomanager', name='git')
repo = repomgr.lookup(username=username,
reponame=reponame)
return repo
|
[
"Lookup",
"a",
"repo",
"based",
"on",
"username",
"reponame"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L47-L58
|
[
"def",
"lookup",
"(",
"username",
",",
"reponame",
")",
":",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"# XXX This should be generalized to all repo managers.",
"repomgr",
"=",
"mgr",
".",
"get",
"(",
"what",
"=",
"'repomanager'",
",",
"name",
"=",
"'git'",
")",
"repo",
"=",
"repomgr",
".",
"lookup",
"(",
"username",
"=",
"username",
",",
"reponame",
"=",
"reponame",
")",
"return",
"repo"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
list_repos
|
List repos
Parameters
----------
remote: Flag to list remote repositories (not supported yet)
|
dgitcore/datasets/common.py
|
def list_repos(remote=False):
"""
List repos
Parameters
----------
    remote: Flag to list remote repositories (not supported yet)
"""
mgr = plugins_get_mgr()
if not remote:
repomgr = mgr.get(what='repomanager', name='git')
repos = repomgr.get_repo_list()
repos.sort()
return repos
else:
raise Exception("Not supported yet")
|
def list_repos(remote=False):
"""
List repos
Parameters
----------
    remote: Flag to list remote repositories (not supported yet)
"""
mgr = plugins_get_mgr()
if not remote:
repomgr = mgr.get(what='repomanager', name='git')
repos = repomgr.get_repo_list()
repos.sort()
return repos
else:
raise Exception("Not supported yet")
|
[
"List",
"repos"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L60-L77
|
[
"def",
"list_repos",
"(",
"remote",
"=",
"False",
")",
":",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"if",
"not",
"remote",
":",
"repomgr",
"=",
"mgr",
".",
"get",
"(",
"what",
"=",
"'repomanager'",
",",
"name",
"=",
"'git'",
")",
"repos",
"=",
"repomgr",
".",
"get_repo_list",
"(",
")",
"repos",
".",
"sort",
"(",
")",
"return",
"repos",
"else",
":",
"raise",
"Exception",
"(",
"\"Not supported yet\"",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
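A usage sketch, assuming at least one local repo exists. The shape of each returned entry is whatever key format the git repo manager uses, so the loop below stays deliberately generic:

from dgitcore.datasets.common import list_repos

for key in list_repos():    # local listing only; remote=True raises for now
    print(key)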
|
valid
|
shellcmd
|
Run a shell command within the repo's context
Parameters
----------
repo: Repository object
args: Shell command
|
dgitcore/datasets/common.py
|
def shellcmd(repo, args):
"""
Run a shell command within the repo's context
Parameters
----------
repo: Repository object
args: Shell command
"""
with cd(repo.rootdir):
result = run(args)
return result
|
def shellcmd(repo, args):
"""
Run a shell command within the repo's context
Parameters
----------
repo: Repository object
args: Shell command
"""
with cd(repo.rootdir):
result = run(args)
return result
|
[
"Run",
"a",
"shell",
"command",
"within",
"the",
"repo",
"s",
"context"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L84-L96
|
[
"def",
"shellcmd",
"(",
"repo",
",",
"args",
")",
":",
"with",
"cd",
"(",
"repo",
".",
"rootdir",
")",
":",
"result",
"=",
"run",
"(",
"args",
")",
"return",
"result"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
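Usage sketch with a hypothetical repo; whether args should be a list or a string depends on the run() helper that shellcmd delegates to:

from dgitcore.datasets.common import lookup, shellcmd

repo = lookup(username='alice', reponame='weather')    # hypothetical
print(shellcmd(repo, ['git', 'status']))    # executed with cwd = repo.rootdir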
|
valid
|
datapackage_exists
|
Check if the datapackage exists...
|
dgitcore/datasets/common.py
|
def datapackage_exists(repo):
"""
Check if the datapackage exists...
"""
datapath = os.path.join(repo.rootdir, "datapackage.json")
return os.path.exists(datapath)
|
def datapackage_exists(repo):
"""
Check if the datapackage exists...
"""
datapath = os.path.join(repo.rootdir, "datapackage.json")
return os.path.exists(datapath)
|
[
"Check",
"if",
"the",
"datapackage",
"exists",
"..."
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L99-L104
|
[
"def",
"datapackage_exists",
"(",
"repo",
")",
":",
"datapath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"repo",
".",
"rootdir",
",",
"\"datapackage.json\"",
")",
"return",
"os",
".",
"path",
".",
"exists",
"(",
"datapath",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
delete
|
Delete files
Parameters
----------
repo: Repository object
args: Arguments to git command
|
dgitcore/datasets/common.py
|
def delete(repo, args=[]):
"""
Delete files
Parameters
----------
repo: Repository object
args: Arguments to git command
"""
# Remove the files
result = generic_repo_cmd(repo, 'delete', args)
if result['status'] != 'success':
        return result
with cd(repo.rootdir):
package = repo.package
resources = package['resources']
cleaned_resources = []
for r in resources:
relativepath = r['relativepath']
sha256 = r['sha256']
if relativepath not in ['', None]:
if not os.path.exists(relativepath):
# This file does not exist on disk.
print("Skipping", relativepath)
continue
cleaned_resources.append(r)
package['resources'] = cleaned_resources
repo.package = package
with open('datapackage.json', 'w') as fd:
fd.write(json.dumps(repo.package, indent=4))
return {
'status': 'success',
'message': ''
}
|
def delete(repo, args=[]):
"""
Delete files
Parameters
----------
repo: Repository object
args: Arguments to git command
"""
# Remove the files
result = generic_repo_cmd(repo, 'delete', args)
if result['status'] != 'success':
        return result
with cd(repo.rootdir):
package = repo.package
resources = package['resources']
cleaned_resources = []
for r in resources:
relativepath = r['relativepath']
sha256 = r['sha256']
if relativepath not in ['', None]:
if not os.path.exists(relativepath):
# This file does not exist on disk.
print("Skipping", relativepath)
continue
cleaned_resources.append(r)
package['resources'] = cleaned_resources
repo.package = package
with open('datapackage.json', 'w') as fd:
fd.write(json.dumps(repo.package, indent=4))
return {
'status': 'success',
'message': ''
}
|
[
"Delete",
"files"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L248-L289
|
[
"def",
"delete",
"(",
"repo",
",",
"args",
"=",
"[",
"]",
")",
":",
"# Remove the files ",
"result",
"=",
"generic_repo_cmd",
"(",
"repo",
",",
"'delete'",
",",
"args",
")",
"if",
"result",
"[",
"'status'",
"]",
"!=",
"'success'",
":",
"return",
"status",
"with",
"cd",
"(",
"repo",
".",
"rootdir",
")",
":",
"package",
"=",
"repo",
".",
"package",
"resources",
"=",
"package",
"[",
"'resources'",
"]",
"cleaned_resources",
"=",
"[",
"]",
"for",
"r",
"in",
"resources",
":",
"relativepath",
"=",
"r",
"[",
"'relativepath'",
"]",
"sha256",
"=",
"r",
"[",
"'sha256'",
"]",
"if",
"relativepath",
"not",
"in",
"[",
"''",
",",
"None",
"]",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"relativepath",
")",
":",
"# This file does not exist on disk. ",
"print",
"(",
"\"Skipping\"",
",",
"relativepath",
")",
"continue",
"cleaned_resources",
".",
"append",
"(",
"r",
")",
"package",
"[",
"'resources'",
"]",
"=",
"cleaned_resources",
"repo",
".",
"package",
"=",
"package",
"with",
"open",
"(",
"'datapackage.json'",
",",
"'w'",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"repo",
".",
"package",
",",
"indent",
"=",
"4",
")",
")",
"return",
"{",
"'status'",
":",
"'success'",
",",
"'message'",
":",
"''",
"}"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
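A usage sketch; the repo and path are hypothetical:

from dgitcore.datasets.common import lookup, delete

repo = lookup(username='alice', reponame='weather')    # hypothetical
result = delete(repo, args=['data/jan.csv'])
if result['status'] == 'success':
    print("resources pruned and datapackage.json rewritten")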
|
valid
|
bootstrap_datapackage
|
Create the datapackage file..
|
dgitcore/datasets/common.py
|
def bootstrap_datapackage(repo, force=False,
options=None, noinput=False):
"""
Create the datapackage file..
"""
print("Bootstrapping datapackage")
# get the directory
tsprefix = datetime.now().date().isoformat()
# Initial data package json
package = OrderedDict([
('title', ''),
('description', ''),
('username', repo.username),
('reponame', repo.reponame),
('name', str(repo)),
('title', ""),
('description', ""),
('keywords', []),
('resources', []),
('creator', getpass.getuser()),
('createdat', datetime.now().isoformat()),
('remote-url', repo.remoteurl)
])
if options is not None:
package['title'] = options['title']
package['description'] = options['description']
else:
if noinput:
raise IncompleteParameters("Option field with title and description")
for var in ['title', 'description']:
value = ''
while value in ['',None]:
value = input('Your Repo ' + var.title() + ": ")
if len(value) == 0:
print("{} cannot be empty. Please re-enter.".format(var.title()))
package[var] = value
# Now store the package...
(handle, filename) = tempfile.mkstemp()
with open(filename, 'w') as fd:
fd.write(json.dumps(package, indent=4))
repo.package = package
return filename
|
def bootstrap_datapackage(repo, force=False,
options=None, noinput=False):
"""
Create the datapackage file..
"""
print("Bootstrapping datapackage")
# get the directory
tsprefix = datetime.now().date().isoformat()
# Initial data package json
package = OrderedDict([
('title', ''),
('description', ''),
('username', repo.username),
('reponame', repo.reponame),
('name', str(repo)),
('title', ""),
('description', ""),
('keywords', []),
('resources', []),
('creator', getpass.getuser()),
('createdat', datetime.now().isoformat()),
('remote-url', repo.remoteurl)
])
if options is not None:
package['title'] = options['title']
package['description'] = options['description']
else:
if noinput:
raise IncompleteParameters("Option field with title and description")
for var in ['title', 'description']:
value = ''
while value in ['',None]:
value = input('Your Repo ' + var.title() + ": ")
if len(value) == 0:
print("{} cannot be empty. Please re-enter.".format(var.title()))
package[var] = value
# Now store the package...
(handle, filename) = tempfile.mkstemp()
with open(filename, 'w') as fd:
fd.write(json.dumps(package, indent=4))
repo.package = package
return filename
|
[
"Create",
"the",
"datapackage",
"file",
".."
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L297-L348
|
[
"def",
"bootstrap_datapackage",
"(",
"repo",
",",
"force",
"=",
"False",
",",
"options",
"=",
"None",
",",
"noinput",
"=",
"False",
")",
":",
"print",
"(",
"\"Bootstrapping datapackage\"",
")",
"# get the directory",
"tsprefix",
"=",
"datetime",
".",
"now",
"(",
")",
".",
"date",
"(",
")",
".",
"isoformat",
"(",
")",
"# Initial data package json",
"package",
"=",
"OrderedDict",
"(",
"[",
"(",
"'title'",
",",
"''",
")",
",",
"(",
"'description'",
",",
"''",
")",
",",
"(",
"'username'",
",",
"repo",
".",
"username",
")",
",",
"(",
"'reponame'",
",",
"repo",
".",
"reponame",
")",
",",
"(",
"'name'",
",",
"str",
"(",
"repo",
")",
")",
",",
"(",
"'title'",
",",
"\"\"",
")",
",",
"(",
"'description'",
",",
"\"\"",
")",
",",
"(",
"'keywords'",
",",
"[",
"]",
")",
",",
"(",
"'resources'",
",",
"[",
"]",
")",
",",
"(",
"'creator'",
",",
"getpass",
".",
"getuser",
"(",
")",
")",
",",
"(",
"'createdat'",
",",
"datetime",
".",
"now",
"(",
")",
".",
"isoformat",
"(",
")",
")",
",",
"(",
"'remote-url'",
",",
"repo",
".",
"remoteurl",
")",
"]",
")",
"if",
"options",
"is",
"not",
"None",
":",
"package",
"[",
"'title'",
"]",
"=",
"options",
"[",
"'title'",
"]",
"package",
"[",
"'description'",
"]",
"=",
"options",
"[",
"'description'",
"]",
"else",
":",
"if",
"noinput",
":",
"raise",
"IncompleteParameters",
"(",
"\"Option field with title and description\"",
")",
"for",
"var",
"in",
"[",
"'title'",
",",
"'description'",
"]",
":",
"value",
"=",
"''",
"while",
"value",
"in",
"[",
"''",
",",
"None",
"]",
":",
"value",
"=",
"input",
"(",
"'Your Repo '",
"+",
"var",
".",
"title",
"(",
")",
"+",
"\": \"",
")",
"if",
"len",
"(",
"value",
")",
"==",
"0",
":",
"print",
"(",
"\"{} cannot be empty. Please re-enter.\"",
".",
"format",
"(",
"var",
".",
"title",
"(",
")",
")",
")",
"package",
"[",
"var",
"]",
"=",
"value",
"# Now store the package...",
"(",
"handle",
",",
"filename",
")",
"=",
"tempfile",
".",
"mkstemp",
"(",
")",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"package",
",",
"indent",
"=",
"4",
")",
")",
"repo",
".",
"package",
"=",
"package",
"return",
"filename"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
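A non-interactive usage sketch: passing options with both required fields avoids the input() loop, while noinput=True makes the missing-options case fail fast. The repo is assumed to come from lookup(), and the temp file belongs to the caller:

import os
from dgitcore.datasets.common import lookup, bootstrap_datapackage

repo = lookup(username='alice', reponame='weather')    # hypothetical
options = {'title': 'Weather data', 'description': 'Daily station readings'}
filename = bootstrap_datapackage(repo, options=options, noinput=True)
print(open(filename).read()[:100])    # temp file holding the datapackage JSON
os.unlink(filename)                   # caller owns the temp file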
|
valid
|
init
|
Initialize an empty repository with datapackage.json
Parameters
----------
username: Name of the user
reponame: Name of the repo
setup: Specify the 'configuration' (git only, git+s3 backend etc)
force: Force creation of the files
options: Dictionary with content of dgit.json, if available.
noinput: Automatic operation with no human interaction
|
dgitcore/datasets/common.py
|
def init(username, reponame, setup,
force=False, options=None,
noinput=False):
"""
Initialize an empty repository with datapackage.json
Parameters
----------
username: Name of the user
reponame: Name of the repo
setup: Specify the 'configuration' (git only, git+s3 backend etc)
force: Force creation of the files
options: Dictionary with content of dgit.json, if available.
noinput: Automatic operation with no human interaction
"""
mgr = plugins_get_mgr()
repomgr = mgr.get(what='repomanager', name='git')
backendmgr = None
if setup == 'git+s3':
backendmgr = mgr.get(what='backend', name='s3')
repo = repomgr.init(username, reponame, force, backendmgr)
# Now bootstrap the datapackage.json metadata file and copy it in...
# Insert a gitignore with .dgit directory in the repo. This
# directory will be used to store partial results
(handle, gitignore) = tempfile.mkstemp()
with open(gitignore, 'w') as fd:
fd.write(".dgit")
    # Try to bootstrap. If you can't, clean up and return
try:
filename = bootstrap_datapackage(repo, force, options, noinput)
except Exception as e:
repomgr.drop(repo,[])
os.unlink(gitignore)
raise e
repo.run('add_files',
[
{
'relativepath': 'datapackage.json',
'localfullpath': filename,
},
{
'relativepath': '.gitignore',
'localfullpath': gitignore,
},
])
# Cleanup temp files
os.unlink(filename)
os.unlink(gitignore)
args = ['-a', '-m', 'Bootstrapped the repo']
repo.run('commit', args)
return repo
|
def init(username, reponame, setup,
force=False, options=None,
noinput=False):
"""
Initialize an empty repository with datapackage.json
Parameters
----------
username: Name of the user
reponame: Name of the repo
setup: Specify the 'configuration' (git only, git+s3 backend etc)
force: Force creation of the files
options: Dictionary with content of dgit.json, if available.
noinput: Automatic operation with no human interaction
"""
mgr = plugins_get_mgr()
repomgr = mgr.get(what='repomanager', name='git')
backendmgr = None
if setup == 'git+s3':
backendmgr = mgr.get(what='backend', name='s3')
repo = repomgr.init(username, reponame, force, backendmgr)
# Now bootstrap the datapackage.json metadata file and copy it in...
# Insert a gitignore with .dgit directory in the repo. This
# directory will be used to store partial results
(handle, gitignore) = tempfile.mkstemp()
with open(gitignore, 'w') as fd:
fd.write(".dgit")
    # Try to bootstrap. If you can't, clean up and return
try:
filename = bootstrap_datapackage(repo, force, options, noinput)
except Exception as e:
repomgr.drop(repo,[])
os.unlink(gitignore)
raise e
repo.run('add_files',
[
{
'relativepath': 'datapackage.json',
'localfullpath': filename,
},
{
'relativepath': '.gitignore',
'localfullpath': gitignore,
},
])
# Cleanup temp files
os.unlink(filename)
os.unlink(gitignore)
args = ['-a', '-m', 'Bootstrapped the repo']
repo.run('commit', args)
return repo
|
[
"Initialize",
"an",
"empty",
"repository",
"with",
"datapackage",
".",
"json"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L350-L412
|
[
"def",
"init",
"(",
"username",
",",
"reponame",
",",
"setup",
",",
"force",
"=",
"False",
",",
"options",
"=",
"None",
",",
"noinput",
"=",
"False",
")",
":",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"repomgr",
"=",
"mgr",
".",
"get",
"(",
"what",
"=",
"'repomanager'",
",",
"name",
"=",
"'git'",
")",
"backendmgr",
"=",
"None",
"if",
"setup",
"==",
"'git+s3'",
":",
"backendmgr",
"=",
"mgr",
".",
"get",
"(",
"what",
"=",
"'backend'",
",",
"name",
"=",
"'s3'",
")",
"repo",
"=",
"repomgr",
".",
"init",
"(",
"username",
",",
"reponame",
",",
"force",
",",
"backendmgr",
")",
"# Now bootstrap the datapackage.json metadata file and copy it in...",
"# Insert a gitignore with .dgit directory in the repo. This",
"# directory will be used to store partial results",
"(",
"handle",
",",
"gitignore",
")",
"=",
"tempfile",
".",
"mkstemp",
"(",
")",
"with",
"open",
"(",
"gitignore",
",",
"'w'",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"\".dgit\"",
")",
"# Try to bootstrap. If you cant, cleanup and return",
"try",
":",
"filename",
"=",
"bootstrap_datapackage",
"(",
"repo",
",",
"force",
",",
"options",
",",
"noinput",
")",
"except",
"Exception",
"as",
"e",
":",
"repomgr",
".",
"drop",
"(",
"repo",
",",
"[",
"]",
")",
"os",
".",
"unlink",
"(",
"gitignore",
")",
"raise",
"e",
"repo",
".",
"run",
"(",
"'add_files'",
",",
"[",
"{",
"'relativepath'",
":",
"'datapackage.json'",
",",
"'localfullpath'",
":",
"filename",
",",
"}",
",",
"{",
"'relativepath'",
":",
"'.gitignore'",
",",
"'localfullpath'",
":",
"gitignore",
",",
"}",
",",
"]",
")",
"# Cleanup temp files",
"os",
".",
"unlink",
"(",
"filename",
")",
"os",
".",
"unlink",
"(",
"gitignore",
")",
"args",
"=",
"[",
"'-a'",
",",
"'-m'",
",",
"'Bootstrapped the repo'",
"]",
"repo",
".",
"run",
"(",
"'commit'",
",",
"args",
")",
"return",
"repo"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
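Usage sketch with hypothetical names; setup='git+s3' would additionally wire in the S3 backend:

from dgitcore.datasets.common import init

opts = {'title': 'Weather data', 'description': 'Daily station readings'}
repo = init('alice', 'weather', setup='git',
            options=opts, noinput=True)
print(repo.rootdir)    # seeded with datapackage.json and a .gitignore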
|
valid
|
clone
|
Clone a URL. Examples include:
- git@github.com:pingali/dgit.git
- https://github.com/pingali/dgit.git
- s3://mybucket/git/pingali/dgit.git
Parameters
----------
url: URL of the repo
|
dgitcore/datasets/common.py
|
def clone(url):
"""
Clone a URL. Examples include:
- git@github.com:pingali/dgit.git
    - https://github.com/pingali/dgit.git
- s3://mybucket/git/pingali/dgit.git
Parameters
----------
url: URL of the repo
"""
backend = None
backendmgr = None
if url.startswith('s3'):
backendtype = 's3'
elif url.startswith("http") or url.startswith("git"):
backendtype = 'git'
else:
backendtype = None
mgr = plugins_get_mgr()
repomgr = mgr.get(what='repomanager', name='git')
backendmgr = mgr.get(what='backend', name=backendtype)
# print("Testing {} with backend {}".format(url, backendmgr))
if backendmgr is not None and not backendmgr.url_is_valid(url):
raise InvalidParameters("Invalid URL")
key = repomgr.clone(url, backendmgr)
    # Insert a datapackage if it doesn't already exist...
repo = repomgr.lookup(key=key)
if not datapackage_exists(repo):
filename = bootstrap_datapackage(repo)
repo.run('add_files',
[
{
'relativepath': 'datapackage.json',
'localfullpath': filename,
},
])
os.unlink(filename)
args = ['-a', '-m', 'Bootstrapped the repo']
repo.run('commit', args)
return repo
|
def clone(url):
"""
Clone a URL. Examples include:
- git@github.com:pingali/dgit.git
    - https://github.com/pingali/dgit.git
- s3://mybucket/git/pingali/dgit.git
Parameters
----------
url: URL of the repo
"""
backend = None
backendmgr = None
if url.startswith('s3'):
backendtype = 's3'
elif url.startswith("http") or url.startswith("git"):
backendtype = 'git'
else:
backendtype = None
mgr = plugins_get_mgr()
repomgr = mgr.get(what='repomanager', name='git')
backendmgr = mgr.get(what='backend', name=backendtype)
# print("Testing {} with backend {}".format(url, backendmgr))
if backendmgr is not None and not backendmgr.url_is_valid(url):
raise InvalidParameters("Invalid URL")
key = repomgr.clone(url, backendmgr)
    # Insert a datapackage if it doesn't already exist...
repo = repomgr.lookup(key=key)
if not datapackage_exists(repo):
filename = bootstrap_datapackage(repo)
repo.run('add_files',
[
{
'relativepath': 'datapackage.json',
'localfullpath': filename,
},
])
os.unlink(filename)
args = ['-a', '-m', 'Bootstrapped the repo']
repo.run('commit', args)
return repo
|
[
"Clone",
"a",
"URL",
".",
"Examples",
"include",
":"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L414-L462
|
[
"def",
"clone",
"(",
"url",
")",
":",
"backend",
"=",
"None",
"backendmgr",
"=",
"None",
"if",
"url",
".",
"startswith",
"(",
"'s3'",
")",
":",
"backendtype",
"=",
"'s3'",
"elif",
"url",
".",
"startswith",
"(",
"\"http\"",
")",
"or",
"url",
".",
"startswith",
"(",
"\"git\"",
")",
":",
"backendtype",
"=",
"'git'",
"else",
":",
"backendtype",
"=",
"None",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"repomgr",
"=",
"mgr",
".",
"get",
"(",
"what",
"=",
"'repomanager'",
",",
"name",
"=",
"'git'",
")",
"backendmgr",
"=",
"mgr",
".",
"get",
"(",
"what",
"=",
"'backend'",
",",
"name",
"=",
"backendtype",
")",
"# print(\"Testing {} with backend {}\".format(url, backendmgr))",
"if",
"backendmgr",
"is",
"not",
"None",
"and",
"not",
"backendmgr",
".",
"url_is_valid",
"(",
"url",
")",
":",
"raise",
"InvalidParameters",
"(",
"\"Invalid URL\"",
")",
"key",
"=",
"repomgr",
".",
"clone",
"(",
"url",
",",
"backendmgr",
")",
"# Insert a datapackage if it doesnt already exist...",
"repo",
"=",
"repomgr",
".",
"lookup",
"(",
"key",
"=",
"key",
")",
"if",
"not",
"datapackage_exists",
"(",
"repo",
")",
":",
"filename",
"=",
"bootstrap_datapackage",
"(",
"repo",
")",
"repo",
".",
"run",
"(",
"'add_files'",
",",
"[",
"{",
"'relativepath'",
":",
"'datapackage.json'",
",",
"'localfullpath'",
":",
"filename",
",",
"}",
",",
"]",
")",
"os",
".",
"unlink",
"(",
"filename",
")",
"args",
"=",
"[",
"'-a'",
",",
"'-m'",
",",
"'Bootstrapped the repo'",
"]",
"repo",
".",
"run",
"(",
"'commit'",
",",
"args",
")",
"return",
"repo"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
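Usage sketch; the URL prefix ('s3', 'http', or 'git') selects the backend:

from dgitcore.datasets.common import clone

repo = clone("git@github.com:pingali/dgit.git")   # backend inferred from the prefix
print(repo.package['name'])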
|
valid
|
annotate_metadata_data
|
Update metadata with the content of the files
|
dgitcore/datasets/common.py
|
def annotate_metadata_data(repo, task, patterns=["*"], size=0):
"""
Update metadata with the content of the files
"""
mgr = plugins_get_mgr()
keys = mgr.search('representation')['representation']
representations = [mgr.get_by_key('representation', k) for k in keys]
matching_files = repo.find_matching_files(patterns)
package = repo.package
rootdir = repo.rootdir
files = package['resources']
for f in files:
relativepath = f['relativepath']
if relativepath in matching_files:
path = os.path.join(rootdir, relativepath)
if task == 'preview':
print("Adding preview for ", relativepath)
f['content'] = open(path).read()[:size]
elif task == 'schema':
for r in representations:
if r.can_process(path):
print("Adding schema for ", path)
f['schema'] = r.get_schema(path)
break
|
def annotate_metadata_data(repo, task, patterns=["*"], size=0):
"""
Update metadata with the content of the files
"""
mgr = plugins_get_mgr()
keys = mgr.search('representation')['representation']
representations = [mgr.get_by_key('representation', k) for k in keys]
matching_files = repo.find_matching_files(patterns)
package = repo.package
rootdir = repo.rootdir
files = package['resources']
for f in files:
relativepath = f['relativepath']
if relativepath in matching_files:
path = os.path.join(rootdir, relativepath)
if task == 'preview':
print("Adding preview for ", relativepath)
f['content'] = open(path).read()[:size]
elif task == 'schema':
for r in representations:
if r.can_process(path):
print("Adding schema for ", path)
f['schema'] = r.get_schema(path)
break
|
[
"Update",
"metadata",
"with",
"the",
"content",
"of",
"the",
"files"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L470-L495
|
[
"def",
"annotate_metadata_data",
"(",
"repo",
",",
"task",
",",
"patterns",
"=",
"[",
"\"*\"",
"]",
",",
"size",
"=",
"0",
")",
":",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"keys",
"=",
"mgr",
".",
"search",
"(",
"'representation'",
")",
"[",
"'representation'",
"]",
"representations",
"=",
"[",
"mgr",
".",
"get_by_key",
"(",
"'representation'",
",",
"k",
")",
"for",
"k",
"in",
"keys",
"]",
"matching_files",
"=",
"repo",
".",
"find_matching_files",
"(",
"patterns",
")",
"package",
"=",
"repo",
".",
"package",
"rootdir",
"=",
"repo",
".",
"rootdir",
"files",
"=",
"package",
"[",
"'resources'",
"]",
"for",
"f",
"in",
"files",
":",
"relativepath",
"=",
"f",
"[",
"'relativepath'",
"]",
"if",
"relativepath",
"in",
"matching_files",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"rootdir",
",",
"relativepath",
")",
"if",
"task",
"==",
"'preview'",
":",
"print",
"(",
"\"Adding preview for \"",
",",
"relativepath",
")",
"f",
"[",
"'content'",
"]",
"=",
"open",
"(",
"path",
")",
".",
"read",
"(",
")",
"[",
":",
"size",
"]",
"elif",
"task",
"==",
"'schema'",
":",
"for",
"r",
"in",
"representations",
":",
"if",
"r",
".",
"can_process",
"(",
"path",
")",
":",
"print",
"(",
"\"Adding schema for \"",
",",
"path",
")",
"f",
"[",
"'schema'",
"]",
"=",
"r",
".",
"get_schema",
"(",
"path",
")",
"break"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
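Both tasks mutate repo.package in place; a usage sketch with a hypothetical repo:

from dgitcore.datasets.common import lookup, annotate_metadata_data

repo = lookup(username='alice', reponame='weather')    # hypothetical
# Embed the first 512 bytes of every CSV as a preview
annotate_metadata_data(repo, task='preview', patterns=['*.csv'], size=512)
# Attach a schema wherever a representation plugin can process the file
annotate_metadata_data(repo, task='schema')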
|
valid
|
annotate_metadata_code
|
Update metadata with the commit information
|
dgitcore/datasets/common.py
|
def annotate_metadata_code(repo, files):
"""
Update metadata with the commit information
"""
package = repo.package
package['code'] = []
for p in files:
matching_files = glob2.glob("**/{}".format(p))
for f in matching_files:
absf = os.path.abspath(f)
print("Add commit data for {}".format(f))
package['code'].append(OrderedDict([
('script', f),
('permalink', repo.manager.permalink(repo, absf)),
('mimetypes', mimetypes.guess_type(absf)[0]),
('sha256', compute_sha256(absf))
]))
|
def annotate_metadata_code(repo, files):
"""
Update metadata with the commit information
"""
package = repo.package
package['code'] = []
for p in files:
matching_files = glob2.glob("**/{}".format(p))
for f in matching_files:
absf = os.path.abspath(f)
print("Add commit data for {}".format(f))
package['code'].append(OrderedDict([
('script', f),
('permalink', repo.manager.permalink(repo, absf)),
('mimetypes', mimetypes.guess_type(absf)[0]),
('sha256', compute_sha256(absf))
]))
|
[
"Update",
"metadata",
"with",
"the",
"commit",
"information"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L497-L514
|
[
"def",
"annotate_metadata_code",
"(",
"repo",
",",
"files",
")",
":",
"package",
"=",
"repo",
".",
"package",
"package",
"[",
"'code'",
"]",
"=",
"[",
"]",
"for",
"p",
"in",
"files",
":",
"matching_files",
"=",
"glob2",
".",
"glob",
"(",
"\"**/{}\"",
".",
"format",
"(",
"p",
")",
")",
"for",
"f",
"in",
"matching_files",
":",
"absf",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"f",
")",
"print",
"(",
"\"Add commit data for {}\"",
".",
"format",
"(",
"f",
")",
")",
"package",
"[",
"'code'",
"]",
".",
"append",
"(",
"OrderedDict",
"(",
"[",
"(",
"'script'",
",",
"f",
")",
",",
"(",
"'permalink'",
",",
"repo",
".",
"manager",
".",
"permalink",
"(",
"repo",
",",
"absf",
")",
")",
",",
"(",
"'mimetypes'",
",",
"mimetypes",
".",
"guess_type",
"(",
"absf",
")",
"[",
"0",
"]",
")",
",",
"(",
"'sha256'",
",",
"compute_sha256",
"(",
"absf",
")",
")",
"]",
")",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
annotate_metadata_action
|
Update metadata with the action history
|
dgitcore/datasets/common.py
|
def annotate_metadata_action(repo):
"""
Update metadata with the action history
"""
package = repo.package
print("Including history of actions")
with cd(repo.rootdir):
filename = ".dgit/log.json"
if os.path.exists(filename):
history = open(filename).readlines()
actions = []
for a in history:
try:
a = json.loads(a)
for x in ['code']:
if x not in a or a[x] == None:
a[x] = "..."
actions.append(a)
except:
pass
package['actions'] = actions
|
def annotate_metadata_action(repo):
"""
Update metadata with the action history
"""
package = repo.package
print("Including history of actions")
with cd(repo.rootdir):
filename = ".dgit/log.json"
if os.path.exists(filename):
history = open(filename).readlines()
actions = []
for a in history:
try:
a = json.loads(a)
for x in ['code']:
if x not in a or a[x] == None:
a[x] = "..."
actions.append(a)
except:
pass
package['actions'] = actions
|
[
"Update",
"metadata",
"with",
"the",
"action",
"history"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L517-L538
|
[
"def",
"annotate_metadata_action",
"(",
"repo",
")",
":",
"package",
"=",
"repo",
".",
"package",
"print",
"(",
"\"Including history of actions\"",
")",
"with",
"cd",
"(",
"repo",
".",
"rootdir",
")",
":",
"filename",
"=",
"\".dgit/log.json\"",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"history",
"=",
"open",
"(",
"filename",
")",
".",
"readlines",
"(",
")",
"actions",
"=",
"[",
"]",
"for",
"a",
"in",
"history",
":",
"try",
":",
"a",
"=",
"json",
".",
"loads",
"(",
"a",
")",
"for",
"x",
"in",
"[",
"'code'",
"]",
":",
"if",
"x",
"not",
"in",
"a",
"or",
"a",
"[",
"x",
"]",
"==",
"None",
":",
"a",
"[",
"x",
"]",
"=",
"\"...\"",
"actions",
".",
"append",
"(",
"a",
")",
"except",
":",
"pass",
"package",
"[",
"'actions'",
"]",
"=",
"actions"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
annotate_metadata_platform
|
Update metadata with host information
|
dgitcore/datasets/common.py
|
def annotate_metadata_platform(repo):
"""
    Update metadata with host information
"""
print("Added platform information")
package = repo.package
mgr = plugins_get_mgr()
repomgr = mgr.get(what='instrumentation', name='platform')
package['platform'] = repomgr.get_metadata()
|
def annotate_metadata_platform(repo):
"""
    Update metadata with host information
"""
print("Added platform information")
package = repo.package
mgr = plugins_get_mgr()
repomgr = mgr.get(what='instrumentation', name='platform')
package['platform'] = repomgr.get_metadata()
|
[
"Update",
"metadata",
"host",
"information"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L540-L549
|
[
"def",
"annotate_metadata_platform",
"(",
"repo",
")",
":",
"print",
"(",
"\"Added platform information\"",
")",
"package",
"=",
"repo",
".",
"package",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"repomgr",
"=",
"mgr",
".",
"get",
"(",
"what",
"=",
"'instrumentation'",
",",
"name",
"=",
"'platform'",
")",
"package",
"[",
"'platform'",
"]",
"=",
"repomgr",
".",
"get_metadata",
"(",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
annotate_metadata_dependencies
|
Collect information from the dependent repos
|
dgitcore/datasets/common.py
|
def annotate_metadata_dependencies(repo):
"""
    Collect information from the dependent repos
"""
options = repo.options
if 'dependencies' not in options:
print("No dependencies")
return []
repos = []
dependent_repos = options['dependencies']
for d in dependent_repos:
if "/" not in d:
print("Invalid dependency specification")
(username, reponame) = d.split("/")
try:
repos.append(repo.manager.lookup(username, reponame))
except:
print("Repository does not exist. Please create one", d)
package = repo.package
package['dependencies'] = []
for r in repos:
package['dependencies'].append({
'username': r.username,
'reponame': r.reponame,
})
|
def annotate_metadata_dependencies(repo):
"""
    Collect information from the dependent repos
"""
options = repo.options
if 'dependencies' not in options:
print("No dependencies")
return []
repos = []
dependent_repos = options['dependencies']
for d in dependent_repos:
if "/" not in d:
print("Invalid dependency specification")
(username, reponame) = d.split("/")
try:
repos.append(repo.manager.lookup(username, reponame))
except:
print("Repository does not exist. Please create one", d)
package = repo.package
package['dependencies'] = []
for r in repos:
package['dependencies'].append({
'username': r.username,
'reponame': r.reponame,
})
|
[
"Collect",
"information",
"from",
"the",
"dependent",
"repo",
"s"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L574-L602
|
[
"def",
"annotate_metadata_dependencies",
"(",
"repo",
")",
":",
"options",
"=",
"repo",
".",
"options",
"if",
"'dependencies'",
"not",
"in",
"options",
":",
"print",
"(",
"\"No dependencies\"",
")",
"return",
"[",
"]",
"repos",
"=",
"[",
"]",
"dependent_repos",
"=",
"options",
"[",
"'dependencies'",
"]",
"for",
"d",
"in",
"dependent_repos",
":",
"if",
"\"/\"",
"not",
"in",
"d",
":",
"print",
"(",
"\"Invalid dependency specification\"",
")",
"(",
"username",
",",
"reponame",
")",
"=",
"d",
".",
"split",
"(",
"\"/\"",
")",
"try",
":",
"repos",
".",
"append",
"(",
"repo",
".",
"manager",
".",
"lookup",
"(",
"username",
",",
"reponame",
")",
")",
"except",
":",
"print",
"(",
"\"Repository does not exist. Please create one\"",
",",
"d",
")",
"package",
"=",
"repo",
".",
"package",
"package",
"[",
"'dependencies'",
"]",
"=",
"[",
"]",
"for",
"r",
"in",
"repos",
":",
"package",
"[",
"'dependencies'",
"]",
".",
"append",
"(",
"{",
"'username'",
":",
"r",
".",
"username",
",",
"'reponame'",
":",
"r",
".",
"reponame",
",",
"}",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
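The function expects options['dependencies'] to hold 'username/reponame' strings and records only the entries that resolve via the manager. A sketch with hypothetical names:

from dgitcore.datasets.common import lookup, annotate_metadata_dependencies

repo = lookup(username='alice', reponame='weather')    # hypothetical
repo.options['dependencies'] = ['alice/stations', 'alice/sensors']   # hypothetical
annotate_metadata_dependencies(repo)
print(repo.package['dependencies'])
# e.g. [{'username': 'alice', 'reponame': 'stations'}, ...] for repos that resolve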
|
valid
|
post
|
Post to metadata server
Parameters
----------
repo: Repository object (result of lookup)
|
dgitcore/datasets/common.py
|
def post(repo, args=[]):
"""
Post to metadata server
Parameters
----------
repo: Repository object (result of lookup)
"""
mgr = plugins_get_mgr()
keys = mgr.search(what='metadata')
keys = keys['metadata']
if len(keys) == 0:
return
# Incorporate pipeline information...
if 'pipeline' in repo.options:
for name, details in repo.options['pipeline'].items():
patterns = details['files']
matching_files = repo.find_matching_files(patterns)
matching_files.sort()
details['files'] = matching_files
for i, f in enumerate(matching_files):
r = repo.get_resource(f)
if 'pipeline' not in r:
r['pipeline'] = []
r['pipeline'].append(name + " [Step {}]".format(i))
if 'metadata-management' in repo.options:
print("Collecting all the required metadata to post")
metadata = repo.options['metadata-management']
# Add data repo history
if 'include-data-history' in metadata and metadata['include-data-history']:
repo.package['history'] = get_history(repo.rootdir)
# Add action history
if 'include-action-history' in metadata and metadata['include-action-history']:
annotate_metadata_action(repo)
# Add data repo history
if 'include-preview' in metadata:
annotate_metadata_data(repo,
task='preview',
patterns=metadata['include-preview']['files'],
size=metadata['include-preview']['length'])
if (('include-schema' in metadata) and metadata['include-schema']):
annotate_metadata_data(repo, task='schema')
if 'include-code-history' in metadata:
annotate_metadata_code(repo, files=metadata['include-code-history'])
if 'include-platform' in metadata:
annotate_metadata_platform(repo)
if 'include-validation' in metadata:
annotate_metadata_validation(repo)
if 'include-dependencies' in metadata:
annotate_metadata_dependencies(repo)
history = repo.package.get('history',None)
if (('include-tab-diffs' in metadata) and
metadata['include-tab-diffs'] and
history is not None):
annotate_metadata_diffs(repo)
# Insert options as well
repo.package['config'] = repo.options
try:
for k in keys:
# print("Key", k)
metadatamgr = mgr.get_by_key('metadata', k)
url = metadatamgr.url
o = urlparse(url)
print("Posting to ", o.netloc)
response = metadatamgr.post(repo)
if isinstance(response, str):
print("Error while posting:", response)
elif response.status_code in [400]:
content = response.json()
print("Error while posting:")
for k in content:
print(" ", k,"- ", ",".join(content[k]))
except NetworkError as e:
print("Unable to reach metadata server!")
except NetworkInvalidConfiguration as e:
print("Invalid network configuration in the INI file")
print(e.message)
except Exception as e:
print("Could not post. Unknown error")
print(e)
|
def post(repo, args=[]):
"""
Post to metadata server
Parameters
----------
repo: Repository object (result of lookup)
"""
mgr = plugins_get_mgr()
keys = mgr.search(what='metadata')
keys = keys['metadata']
if len(keys) == 0:
return
# Incorporate pipeline information...
if 'pipeline' in repo.options:
for name, details in repo.options['pipeline'].items():
patterns = details['files']
matching_files = repo.find_matching_files(patterns)
matching_files.sort()
details['files'] = matching_files
for i, f in enumerate(matching_files):
r = repo.get_resource(f)
if 'pipeline' not in r:
r['pipeline'] = []
r['pipeline'].append(name + " [Step {}]".format(i))
if 'metadata-management' in repo.options:
print("Collecting all the required metadata to post")
metadata = repo.options['metadata-management']
# Add data repo history
if 'include-data-history' in metadata and metadata['include-data-history']:
repo.package['history'] = get_history(repo.rootdir)
# Add action history
if 'include-action-history' in metadata and metadata['include-action-history']:
annotate_metadata_action(repo)
# Add data repo history
if 'include-preview' in metadata:
annotate_metadata_data(repo,
task='preview',
patterns=metadata['include-preview']['files'],
size=metadata['include-preview']['length'])
if (('include-schema' in metadata) and metadata['include-schema']):
annotate_metadata_data(repo, task='schema')
if 'include-code-history' in metadata:
annotate_metadata_code(repo, files=metadata['include-code-history'])
if 'include-platform' in metadata:
annotate_metadata_platform(repo)
if 'include-validation' in metadata:
annotate_metadata_validation(repo)
if 'include-dependencies' in metadata:
annotate_metadata_dependencies(repo)
history = repo.package.get('history',None)
if (('include-tab-diffs' in metadata) and
metadata['include-tab-diffs'] and
history is not None):
annotate_metadata_diffs(repo)
# Insert options as well
repo.package['config'] = repo.options
try:
for k in keys:
# print("Key", k)
metadatamgr = mgr.get_by_key('metadata', k)
url = metadatamgr.url
o = urlparse(url)
print("Posting to ", o.netloc)
response = metadatamgr.post(repo)
if isinstance(response, str):
print("Error while posting:", response)
elif response.status_code in [400]:
content = response.json()
print("Error while posting:")
for k in content:
print(" ", k,"- ", ",".join(content[k]))
except NetworkError as e:
print("Unable to reach metadata server!")
except NetworkInvalidConfiguration as e:
print("Invalid network configuration in the INI file")
print(e.message)
except Exception as e:
print("Could not post. Unknown error")
print(e)
|
[
"Post",
"to",
"metadata",
"server"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L604-L700
|
[
"def",
"post",
"(",
"repo",
",",
"args",
"=",
"[",
"]",
")",
":",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"keys",
"=",
"mgr",
".",
"search",
"(",
"what",
"=",
"'metadata'",
")",
"keys",
"=",
"keys",
"[",
"'metadata'",
"]",
"if",
"len",
"(",
"keys",
")",
"==",
"0",
":",
"return",
"# Incorporate pipeline information...",
"if",
"'pipeline'",
"in",
"repo",
".",
"options",
":",
"for",
"name",
",",
"details",
"in",
"repo",
".",
"options",
"[",
"'pipeline'",
"]",
".",
"items",
"(",
")",
":",
"patterns",
"=",
"details",
"[",
"'files'",
"]",
"matching_files",
"=",
"repo",
".",
"find_matching_files",
"(",
"patterns",
")",
"matching_files",
".",
"sort",
"(",
")",
"details",
"[",
"'files'",
"]",
"=",
"matching_files",
"for",
"i",
",",
"f",
"in",
"enumerate",
"(",
"matching_files",
")",
":",
"r",
"=",
"repo",
".",
"get_resource",
"(",
"f",
")",
"if",
"'pipeline'",
"not",
"in",
"r",
":",
"r",
"[",
"'pipeline'",
"]",
"=",
"[",
"]",
"r",
"[",
"'pipeline'",
"]",
".",
"append",
"(",
"name",
"+",
"\" [Step {}]\"",
".",
"format",
"(",
"i",
")",
")",
"if",
"'metadata-management'",
"in",
"repo",
".",
"options",
":",
"print",
"(",
"\"Collecting all the required metadata to post\"",
")",
"metadata",
"=",
"repo",
".",
"options",
"[",
"'metadata-management'",
"]",
"# Add data repo history",
"if",
"'include-data-history'",
"in",
"metadata",
"and",
"metadata",
"[",
"'include-data-history'",
"]",
":",
"repo",
".",
"package",
"[",
"'history'",
"]",
"=",
"get_history",
"(",
"repo",
".",
"rootdir",
")",
"# Add action history ",
"if",
"'include-action-history'",
"in",
"metadata",
"and",
"metadata",
"[",
"'include-action-history'",
"]",
":",
"annotate_metadata_action",
"(",
"repo",
")",
"# Add data repo history",
"if",
"'include-preview'",
"in",
"metadata",
":",
"annotate_metadata_data",
"(",
"repo",
",",
"task",
"=",
"'preview'",
",",
"patterns",
"=",
"metadata",
"[",
"'include-preview'",
"]",
"[",
"'files'",
"]",
",",
"size",
"=",
"metadata",
"[",
"'include-preview'",
"]",
"[",
"'length'",
"]",
")",
"if",
"(",
"(",
"'include-schema'",
"in",
"metadata",
")",
"and",
"metadata",
"[",
"'include-schema'",
"]",
")",
":",
"annotate_metadata_data",
"(",
"repo",
",",
"task",
"=",
"'schema'",
")",
"if",
"'include-code-history'",
"in",
"metadata",
":",
"annotate_metadata_code",
"(",
"repo",
",",
"files",
"=",
"metadata",
"[",
"'include-code-history'",
"]",
")",
"if",
"'include-platform'",
"in",
"metadata",
":",
"annotate_metadata_platform",
"(",
"repo",
")",
"if",
"'include-validation'",
"in",
"metadata",
":",
"annotate_metadata_validation",
"(",
"repo",
")",
"if",
"'include-dependencies'",
"in",
"metadata",
":",
"annotate_metadata_dependencies",
"(",
"repo",
")",
"history",
"=",
"repo",
".",
"package",
".",
"get",
"(",
"'history'",
",",
"None",
")",
"if",
"(",
"(",
"'include-tab-diffs'",
"in",
"metadata",
")",
"and",
"metadata",
"[",
"'include-tab-diffs'",
"]",
"and",
"history",
"is",
"not",
"None",
")",
":",
"annotate_metadata_diffs",
"(",
"repo",
")",
"# Insert options as well",
"repo",
".",
"package",
"[",
"'config'",
"]",
"=",
"repo",
".",
"options",
"try",
":",
"for",
"k",
"in",
"keys",
":",
"# print(\"Key\", k)",
"metadatamgr",
"=",
"mgr",
".",
"get_by_key",
"(",
"'metadata'",
",",
"k",
")",
"url",
"=",
"metadatamgr",
".",
"url",
"o",
"=",
"urlparse",
"(",
"url",
")",
"print",
"(",
"\"Posting to \"",
",",
"o",
".",
"netloc",
")",
"response",
"=",
"metadatamgr",
".",
"post",
"(",
"repo",
")",
"if",
"isinstance",
"(",
"response",
",",
"str",
")",
":",
"print",
"(",
"\"Error while posting:\"",
",",
"response",
")",
"elif",
"response",
".",
"status_code",
"in",
"[",
"400",
"]",
":",
"content",
"=",
"response",
".",
"json",
"(",
")",
"print",
"(",
"\"Error while posting:\"",
")",
"for",
"k",
"in",
"content",
":",
"print",
"(",
"\" \"",
",",
"k",
",",
"\"- \"",
",",
"\",\"",
".",
"join",
"(",
"content",
"[",
"k",
"]",
")",
")",
"except",
"NetworkError",
"as",
"e",
":",
"print",
"(",
"\"Unable to reach metadata server!\"",
")",
"except",
"NetworkInvalidConfiguration",
"as",
"e",
":",
"print",
"(",
"\"Invalid network configuration in the INI file\"",
")",
"print",
"(",
"e",
".",
"message",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"\"Could not post. Unknown error\"",
")",
"print",
"(",
"e",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
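Every key below is one that post() reads from the metadata-management options above; the fragment itself is a hypothetical subset of a dgit.json configuration:

from dgitcore.datasets.common import lookup, post

repo = lookup(username='alice', reponame='weather')    # hypothetical
repo.options['metadata-management'] = {
    'include-data-history': True,
    'include-action-history': True,
    'include-preview': {'files': ['*.csv'], 'length': 512},
    'include-schema': True,
    'include-code-history': ['*.py'],
    'include-platform': True,
}
post(repo)    # annotates repo.package, then posts to each metadata plugin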
|
valid
|
plugins_show
|
Show details of available plugins
Parameters
----------
what: Class of plugins e.g., backend
name: Name of the plugin e.g., s3
version: Version of the plugin
details: Should details be shown?
|
dgitcore/plugins/common.py
|
def plugins_show(what=None, name=None, version=None, details=False):
"""
Show details of available plugins
Parameters
----------
what: Class of plugins e.g., backend
name: Name of the plugin e.g., s3
version: Version of the plugin
    details: Should details be shown?
"""
global pluginmgr
return pluginmgr.show(what, name, version, details)
|
def plugins_show(what=None, name=None, version=None, details=False):
"""
Show details of available plugins
Parameters
----------
what: Class of plugins e.g., backend
name: Name of the plugin e.g., s3
version: Version of the plugin
    details: Should details be shown?
"""
global pluginmgr
return pluginmgr.show(what, name, version, details)
|
[
"Show",
"details",
"of",
"available",
"plugins"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/common.py#L252-L265
|
[
"def",
"plugins_show",
"(",
"what",
"=",
"None",
",",
"name",
"=",
"None",
",",
"version",
"=",
"None",
",",
"details",
"=",
"False",
")",
":",
"global",
"pluginmgr",
"return",
"pluginmgr",
".",
"show",
"(",
"what",
",",
"name",
",",
"version",
",",
"details",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
PluginManager.discover_all_plugins
|
Load all plugins from dgit extension
|
dgitcore/plugins/common.py
|
def discover_all_plugins(self):
"""
Load all plugins from dgit extension
"""
for v in pkg_resources.iter_entry_points('dgit.plugins'):
m = v.load()
m.setup(self)
|
def discover_all_plugins(self):
"""
Load all plugins from dgit extension
"""
for v in pkg_resources.iter_entry_points('dgit.plugins'):
m = v.load()
m.setup(self)
|
[
"Load",
"all",
"plugins",
"from",
"dgit",
"extension"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/common.py#L96-L102
|
[
"def",
"discover_all_plugins",
"(",
"self",
")",
":",
"for",
"v",
"in",
"pkg_resources",
".",
"iter_entry_points",
"(",
"'dgit.plugins'",
")",
":",
"m",
"=",
"v",
".",
"load",
"(",
")",
"m",
".",
"setup",
"(",
"self",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
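Discovery rides on setuptools entry points: any installed package that declares a dgit.plugins entry point gets imported and handed the manager. A sketch of a hypothetical extension package:

# setup.py of a hypothetical extension package
from setuptools import setup

setup(
    name='dgit-myplugin',
    version='0.1',
    py_modules=['myplugin'],
    entry_points={
        # discover_all_plugins() imports this module and calls its setup(mgr)
        'dgit.plugins': ['myplugin = myplugin'],
    },
)

The referenced myplugin module then only needs a module-level setup(mgr) hook that registers its plugin objects with the manager it receives.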
valid
|
PluginManager.register
|
Registering a plugin
Params
------
what: Nature of the plugin (backend, instrumentation, repo)
obj: Instance of the plugin
|
dgitcore/plugins/common.py
|
def register(self, what, obj):
"""
Registering a plugin
Params
------
what: Nature of the plugin (backend, instrumentation, repo)
obj: Instance of the plugin
"""
# print("Registering pattern", name, pattern)
name = obj.name
version = obj.version
enable = obj.enable
if enable == 'n':
return
key = Key(name, version)
self.plugins[what][key] = obj
|
def register(self, what, obj):
"""
Registering a plugin
Params
------
what: Nature of the plugin (backend, instrumentation, repo)
obj: Instance of the plugin
"""
# print("Registering pattern", name, pattern)
name = obj.name
version = obj.version
enable = obj.enable
if enable == 'n':
return
key = Key(name, version)
self.plugins[what][key] = obj
|
[
"Registering",
"a",
"plugin"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/common.py#L104-L121
|
[
"def",
"register",
"(",
"self",
",",
"what",
",",
"obj",
")",
":",
"# print(\"Registering pattern\", name, pattern)",
"name",
"=",
"obj",
".",
"name",
"version",
"=",
"obj",
".",
"version",
"enable",
"=",
"obj",
".",
"enable",
"if",
"enable",
"==",
"'n'",
":",
"return",
"key",
"=",
"Key",
"(",
"name",
",",
"version",
")",
"self",
".",
"plugins",
"[",
"what",
"]",
"[",
"key",
"]",
"=",
"obj"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
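A sketch of registering a plugin object directly. Only the three attributes that register() reads matter, so a hypothetical stand-in class is enough:

from dgitcore.plugins.common import plugins_get_mgr   # assumed import path

class DummyBackend:
    """Hypothetical stand-in; register() reads only name, version and enable."""
    name = 'dummy'
    version = 'v0'
    enable = 'y'   # 'n' would make register() return without storing anything

mgr = plugins_get_mgr()
mgr.register('backend', DummyBackend())
# The plugin is now retrievable via mgr.search('backend', name='dummy')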
valid
|
PluginManager.search
|
Search for a plugin
|
dgitcore/plugins/common.py
|
def search(self, what, name=None, version=None):
"""
Search for a plugin
"""
filtered = {}
# The search may for a scan (what is None) or
if what is None:
whats = list(self.plugins.keys())
elif what is not None:
if what not in self.plugins:
raise Exception("Unknown class of plugins")
whats = [what]
for what in whats:
if what not in filtered:
filtered[what] = []
for key in self.plugins[what].keys():
(k_name, k_version) = key
if name is not None and k_name != name:
continue
if version is not None and k_version != version:
continue
if self.plugins[what][key].enable == 'n':
continue
filtered[what].append(key)
# print(filtered)
return filtered
|
def search(self, what, name=None, version=None):
"""
Search for a plugin
"""
filtered = {}
# The search may for a scan (what is None) or
if what is None:
whats = list(self.plugins.keys())
elif what is not None:
if what not in self.plugins:
raise Exception("Unknown class of plugins")
whats = [what]
for what in whats:
if what not in filtered:
filtered[what] = []
for key in self.plugins[what].keys():
(k_name, k_version) = key
if name is not None and k_name != name:
continue
if version is not None and k_version != version:
continue
if self.plugins[what][key].enable == 'n':
continue
filtered[what].append(key)
# print(filtered)
return filtered
|
[
"Search",
"for",
"a",
"plugin"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/common.py#L123-L150
|
[
"def",
"search",
"(",
"self",
",",
"what",
",",
"name",
"=",
"None",
",",
"version",
"=",
"None",
")",
":",
"filtered",
"=",
"{",
"}",
"# The search may for a scan (what is None) or",
"if",
"what",
"is",
"None",
":",
"whats",
"=",
"list",
"(",
"self",
".",
"plugins",
".",
"keys",
"(",
")",
")",
"elif",
"what",
"is",
"not",
"None",
":",
"if",
"what",
"not",
"in",
"self",
".",
"plugins",
":",
"raise",
"Exception",
"(",
"\"Unknown class of plugins\"",
")",
"whats",
"=",
"[",
"what",
"]",
"for",
"what",
"in",
"whats",
":",
"if",
"what",
"not",
"in",
"filtered",
":",
"filtered",
"[",
"what",
"]",
"=",
"[",
"]",
"for",
"key",
"in",
"self",
".",
"plugins",
"[",
"what",
"]",
".",
"keys",
"(",
")",
":",
"(",
"k_name",
",",
"k_version",
")",
"=",
"key",
"if",
"name",
"is",
"not",
"None",
"and",
"k_name",
"!=",
"name",
":",
"continue",
"if",
"version",
"is",
"not",
"None",
"and",
"k_version",
"!=",
"version",
":",
"continue",
"if",
"self",
".",
"plugins",
"[",
"what",
"]",
"[",
"key",
"]",
".",
"enable",
"==",
"'n'",
":",
"continue",
"filtered",
"[",
"what",
"]",
".",
"append",
"(",
"key",
")",
"# print(filtered)",
"return",
"filtered"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
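The return value is a dict keyed by plugin class, each entry holding the surviving (name, version) keys. A usage sketch, assuming the manager accessor used elsewhere in dgit:

mgr = plugins_get_mgr()   # assumed accessor

# Full scan: what=None iterates every plugin class
for what, keys in mgr.search(what=None).items():
    for name, version in keys:   # each key is a (name, version) tuple
        print(what, name, version)

# Narrowed search; an unknown class raises Exception("Unknown class of plugins")
s3_keys = mgr.search('backend', name='s3')['backend']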
valid
|
PluginManager.gather_configs
|
Gather configuration requirements of all plugins
|
dgitcore/plugins/common.py
|
def gather_configs(self):
"""
Gather configuration requirements of all plugins
"""
configs = []
for what in self.order:
for key in self.plugins[what]:
mgr = self.plugins[what][key]
c = mgr.config(what='get')
if c is not None:
c.update({
'description': mgr.description
})
# print("Gathering configuration from ", c)
configs.append(c)
return configs
|
def gather_configs(self):
"""
Gather configuration requirements of all plugins
"""
configs = []
for what in self.order:
for key in self.plugins[what]:
mgr = self.plugins[what][key]
c = mgr.config(what='get')
if c is not None:
c.update({
'description': mgr.description
})
# print("Gathering configuration from ", c)
configs.append(c)
return configs
|
[
"Gather",
"configuration",
"requirements",
"of",
"all",
"plugins"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/common.py#L152-L167
|
[
"def",
"gather_configs",
"(",
"self",
")",
":",
"configs",
"=",
"[",
"]",
"for",
"what",
"in",
"self",
".",
"order",
":",
"for",
"key",
"in",
"self",
".",
"plugins",
"[",
"what",
"]",
":",
"mgr",
"=",
"self",
".",
"plugins",
"[",
"what",
"]",
"[",
"key",
"]",
"c",
"=",
"mgr",
".",
"config",
"(",
"what",
"=",
"'get'",
")",
"if",
"c",
"is",
"not",
"None",
":",
"c",
".",
"update",
"(",
"{",
"'description'",
":",
"mgr",
".",
"description",
"}",
")",
"# print(\"Gathering configuration from \", c)",
"configs",
".",
"append",
"(",
"c",
")",
"return",
"configs"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
valid
|
PluginManager.update_configs
|
Update the configuration of all plugins
|
dgitcore/plugins/common.py
|
def update_configs(self, config):
"""
    Update the configuration of all plugins
"""
for what in self.plugins: # backend, repo etc.
for key in self.plugins[what]: # s3, filesystem etc.
# print("Updating configuration of", what, key)
self.plugins[what][key].config(what='set', params=config)
return
|
def update_configs(self, config):
"""
    Update the configuration of all plugins
"""
for what in self.plugins: # backend, repo etc.
for key in self.plugins[what]: # s3, filesystem etc.
# print("Updating configuration of", what, key)
self.plugins[what][key].config(what='set', params=config)
return
|
[
"Gather",
"configuration",
"requirements",
"of",
"all",
"plugins"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/plugins/common.py#L169-L177
|
[
"def",
"update_configs",
"(",
"self",
",",
"config",
")",
":",
"for",
"what",
"in",
"self",
".",
"plugins",
":",
"# backend, repo etc.",
"for",
"key",
"in",
"self",
".",
"plugins",
"[",
"what",
"]",
":",
"# s3, filesystem etc.",
"# print(\"Updating configuration of\", what, key)",
"self",
".",
"plugins",
"[",
"what",
"]",
"[",
"key",
"]",
".",
"config",
"(",
"what",
"=",
"'set'",
",",
"params",
"=",
"config",
")",
"return"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
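Taken together, gather_configs() and update_configs() form a get/set round trip over every plugin's config() hook. A sketch, with a hypothetical parsed-INI dict:

mgr = plugins_get_mgr()   # assumed accessor

# 'get' phase: each plugin reports its requirements plus its description
for c in mgr.gather_configs():
    print(c.get('description'))

# 'set' phase: push parsed configuration back down to every plugin
parsed = {'s3': {'enable': 'y'}}   # hypothetical content of the INI file
mgr.update_configs(parsed)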
valid
|
BootLoaderThread.run
|
Receives the serial data into the self._raw buffer
:return:
|
booty/comm_thread.py
|
def run(self):
"""
Receives the serial data into the self._raw buffer
:return:
"""
run_once = True
while (run_once or self._threaded) and self.end is False:
self.service_tx_queue()
self.parse_messages()
run_once = False
if self._threaded:
time.sleep(self._timeout)
if self._threaded:
logger.info('bootloader thread complete')
|
def run(self):
"""
Receives the serial data into the self._raw buffer
:return:
"""
run_once = True
while (run_once or self._threaded) and self.end is False:
self.service_tx_queue()
self.parse_messages()
run_once = False
if self._threaded:
time.sleep(self._timeout)
if self._threaded:
logger.info('bootloader thread complete')
|
[
"Receives",
"the",
"serial",
"data",
"into",
"the",
"self",
".",
"_raw",
"buffer",
":",
"return",
":"
] |
slightlynybbled/booty
|
python
|
https://github.com/slightlynybbled/booty/blob/17f13f0bc28ad855a3fab895478c85c57f356a38/booty/comm_thread.py#L330-L346
|
[
"def",
"run",
"(",
"self",
")",
":",
"run_once",
"=",
"True",
"while",
"(",
"run_once",
"or",
"self",
".",
"_threaded",
")",
"and",
"self",
".",
"end",
"is",
"False",
":",
"self",
".",
"service_tx_queue",
"(",
")",
"self",
".",
"parse_messages",
"(",
")",
"run_once",
"=",
"False",
"if",
"self",
".",
"_threaded",
":",
"time",
".",
"sleep",
"(",
"self",
".",
"_timeout",
")",
"if",
"self",
".",
"_threaded",
":",
"logger",
".",
"info",
"(",
"'bootloader thread complete'",
")"
] |
17f13f0bc28ad855a3fab895478c85c57f356a38
|
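The same run() body serves both a daemon thread and a blocking single pass: with _threaded false, the while condition fails after one iteration. A self-contained reduction of that control flow:

import time

class MiniLoop:
    """Hypothetical reduction of BootLoaderThread.run()'s loop logic."""
    def __init__(self, threaded, timeout=0.1):
        self._threaded = threaded
        self._timeout = timeout
        self.end = False

    def run(self):
        run_once = True
        while (run_once or self._threaded) and self.end is False:
            # service_tx_queue() / parse_messages() would run here
            run_once = False
            if self._threaded:
                time.sleep(self._timeout)

MiniLoop(threaded=False).run()   # executes the body exactly once and returns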
valid
|
instantiate
|
Instantiate the validation specification
|
dgitcore/datasets/validation.py
|
def instantiate(repo, validator_name=None, filename=None, rulesfiles=None):
"""
Instantiate the validation specification
"""
default_validators = repo.options.get('validator', {})
validators = {}
if validator_name is not None:
# Handle the case validator is specified..
if validator_name in default_validators:
validators = {
validator_name : default_validators[validator_name]
}
else:
validators = {
validator_name : {
'files': [],
'rules': {},
'rules-files': []
}
}
else:
validators = default_validators
#=========================================
# Insert the file names
#=========================================
if filename is not None:
matching_files = repo.find_matching_files([filename])
if len(matching_files) == 0:
print("Filename could not be found", filename)
raise Exception("Invalid filename pattern")
for v in validators:
validators[v]['files'] = matching_files
else:
# Instantiate the files from the patterns specified
for v in validators:
if 'files' not in validators[v]:
validators[v]['files'] = []
elif len(validators[v]['files']) > 0:
matching_files = repo.find_matching_files(validators[v]['files'])
validators[v]['files'] = matching_files
#=========================================
# Insert the rules files..
#=========================================
if rulesfiles is not None:
# Command lines...
matching_files = repo.find_matching_files([rulesfiles])
if len(matching_files) == 0:
print("Could not find matching rules files ({}) for {}".format(rulesfiles,v))
raise Exception("Invalid rules")
for v in validators:
validators[v]['rules-files'] = matching_files
else:
# Instantiate the files from the patterns specified
for v in validators:
if 'rules-files' not in validators[v]:
validators[v]['rules-files'] = []
else:
rulesfiles = validators[v]['rules-files']
matching_files = repo.find_matching_files(rulesfiles)
validators[v]['rules-files'] = matching_files
return validators
|
def instantiate(repo, validator_name=None, filename=None, rulesfiles=None):
"""
Instantiate the validation specification
"""
default_validators = repo.options.get('validator', {})
validators = {}
if validator_name is not None:
# Handle the case validator is specified..
if validator_name in default_validators:
validators = {
validator_name : default_validators[validator_name]
}
else:
validators = {
validator_name : {
'files': [],
'rules': {},
'rules-files': []
}
}
else:
validators = default_validators
#=========================================
# Insert the file names
#=========================================
if filename is not None:
matching_files = repo.find_matching_files([filename])
if len(matching_files) == 0:
print("Filename could not be found", filename)
raise Exception("Invalid filename pattern")
for v in validators:
validators[v]['files'] = matching_files
else:
# Instantiate the files from the patterns specified
for v in validators:
if 'files' not in validators[v]:
validators[v]['files'] = []
elif len(validators[v]['files']) > 0:
matching_files = repo.find_matching_files(validators[v]['files'])
validators[v]['files'] = matching_files
#=========================================
# Insert the rules files..
#=========================================
if rulesfiles is not None:
# Command lines...
matching_files = repo.find_matching_files([rulesfiles])
if len(matching_files) == 0:
print("Could not find matching rules files ({}) for {}".format(rulesfiles,v))
raise Exception("Invalid rules")
for v in validators:
validators[v]['rules-files'] = matching_files
else:
# Instantiate the files from the patterns specified
for v in validators:
if 'rules-files' not in validators[v]:
validators[v]['rules-files'] = []
else:
rulesfiles = validators[v]['rules-files']
matching_files = repo.find_matching_files(rulesfiles)
validators[v]['rules-files'] = matching_files
return validators
|
[
"Instantiate",
"the",
"validation",
"specification"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/validation.py#L17-L82
|
[
"def",
"instantiate",
"(",
"repo",
",",
"validator_name",
"=",
"None",
",",
"filename",
"=",
"None",
",",
"rulesfiles",
"=",
"None",
")",
":",
"default_validators",
"=",
"repo",
".",
"options",
".",
"get",
"(",
"'validator'",
",",
"{",
"}",
")",
"validators",
"=",
"{",
"}",
"if",
"validator_name",
"is",
"not",
"None",
":",
"# Handle the case validator is specified..",
"if",
"validator_name",
"in",
"default_validators",
":",
"validators",
"=",
"{",
"validator_name",
":",
"default_validators",
"[",
"validator_name",
"]",
"}",
"else",
":",
"validators",
"=",
"{",
"validator_name",
":",
"{",
"'files'",
":",
"[",
"]",
",",
"'rules'",
":",
"{",
"}",
",",
"'rules-files'",
":",
"[",
"]",
"}",
"}",
"else",
":",
"validators",
"=",
"default_validators",
"#=========================================",
"# Insert the file names",
"#=========================================",
"if",
"filename",
"is",
"not",
"None",
":",
"matching_files",
"=",
"repo",
".",
"find_matching_files",
"(",
"[",
"filename",
"]",
")",
"if",
"len",
"(",
"matching_files",
")",
"==",
"0",
":",
"print",
"(",
"\"Filename could not be found\"",
",",
"filename",
")",
"raise",
"Exception",
"(",
"\"Invalid filename pattern\"",
")",
"for",
"v",
"in",
"validators",
":",
"validators",
"[",
"v",
"]",
"[",
"'files'",
"]",
"=",
"matching_files",
"else",
":",
"# Instantiate the files from the patterns specified",
"for",
"v",
"in",
"validators",
":",
"if",
"'files'",
"not",
"in",
"validators",
"[",
"v",
"]",
":",
"validators",
"[",
"v",
"]",
"[",
"'files'",
"]",
"=",
"[",
"]",
"elif",
"len",
"(",
"validators",
"[",
"v",
"]",
"[",
"'files'",
"]",
")",
">",
"0",
":",
"matching_files",
"=",
"repo",
".",
"find_matching_files",
"(",
"validators",
"[",
"v",
"]",
"[",
"'files'",
"]",
")",
"validators",
"[",
"v",
"]",
"[",
"'files'",
"]",
"=",
"matching_files",
"#=========================================",
"# Insert the rules files..",
"#=========================================",
"if",
"rulesfiles",
"is",
"not",
"None",
":",
"# Command lines...",
"matching_files",
"=",
"repo",
".",
"find_matching_files",
"(",
"[",
"rulesfiles",
"]",
")",
"if",
"len",
"(",
"matching_files",
")",
"==",
"0",
":",
"print",
"(",
"\"Could not find matching rules files ({}) for {}\"",
".",
"format",
"(",
"rulesfiles",
",",
"v",
")",
")",
"raise",
"Exception",
"(",
"\"Invalid rules\"",
")",
"for",
"v",
"in",
"validators",
":",
"validators",
"[",
"v",
"]",
"[",
"'rules-files'",
"]",
"=",
"matching_files",
"else",
":",
"# Instantiate the files from the patterns specified",
"for",
"v",
"in",
"validators",
":",
"if",
"'rules-files'",
"not",
"in",
"validators",
"[",
"v",
"]",
":",
"validators",
"[",
"v",
"]",
"[",
"'rules-files'",
"]",
"=",
"[",
"]",
"else",
":",
"rulesfiles",
"=",
"validators",
"[",
"v",
"]",
"[",
"'rules-files'",
"]",
"matching_files",
"=",
"repo",
".",
"find_matching_files",
"(",
"rulesfiles",
")",
"validators",
"[",
"v",
"]",
"[",
"'rules-files'",
"]",
"=",
"matching_files",
"return",
"validators"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
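instantiate() normalises its three inputs into a single dict that the validators later consume; the shape it returns, with hypothetical paths, looks like:

validators = {
    'regression-quality': {                   # validator name
        'files': ['data/sales.csv'],          # expanded via repo.find_matching_files
        'rules': {},
        'rules-files': ['rules/sales.json'],  # likewise expanded
    },
}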
valid
|
validate
|
Validate the content of the files for consistency. Validators can
look as deeply as needed into the files. dgit treats them all as
black boxes.
Parameters
----------
repo: Repository object
validator_name: Name of validator, if any. If none, then all validators specified in dgit.json will be included.
filename: Pattern that specifies files that must be processed by the validators selected. If none, then the default specification in dgit.json is used.
rulesfiles: Pattern specifying the files that have rules that validators will use
show: Print the validation results on the terminal
Returns
-------
status: A list of dictionaries, each with target file processed, rules file applied, status of the validation and any error message.
|
dgitcore/datasets/validation.py
|
def validate(repo,
validator_name=None,
filename=None,
rulesfiles=None,
args=[]):
"""
Validate the content of the files for consistency. Validators can
look as deeply as needed into the files. dgit treats them all as
black boxes.
Parameters
----------
repo: Repository object
validator_name: Name of validator, if any. If none, then all validators specified in dgit.json will be included.
filename: Pattern that specifies files that must be processed by the validators selected. If none, then the default specification in dgit.json is used.
    rulesfiles: Pattern specifying the files that have rules that validators will use
show: Print the validation results on the terminal
Returns
-------
status: A list of dictionaries, each with target file processed, rules file applied, status of the validation and any error message.
"""
mgr = plugins_get_mgr()
# Expand the specification. Now we have full file paths
validator_specs = instantiate(repo, validator_name, filename, rulesfiles)
# Run the validators with rules files...
allresults = []
for v in validator_specs:
keys = mgr.search(what='validator',name=v)['validator']
for k in keys:
validator = mgr.get_by_key('validator', k)
result = validator.evaluate(repo,
validator_specs[v],
args)
allresults.extend(result)
return allresults
|
def validate(repo,
validator_name=None,
filename=None,
rulesfiles=None,
args=[]):
"""
Validate the content of the files for consistency. Validators can
look as deeply as needed into the files. dgit treats them all as
black boxes.
Parameters
----------
repo: Repository object
validator_name: Name of validator, if any. If none, then all validators specified in dgit.json will be included.
filename: Pattern that specifies files that must be processed by the validators selected. If none, then the default specification in dgit.json is used.
    rulesfiles: Pattern specifying the files that have rules that validators will use
show: Print the validation results on the terminal
Returns
-------
status: A list of dictionaries, each with target file processed, rules file applied, status of the validation and any error message.
"""
mgr = plugins_get_mgr()
# Expand the specification. Now we have full file paths
validator_specs = instantiate(repo, validator_name, filename, rulesfiles)
# Run the validators with rules files...
allresults = []
for v in validator_specs:
keys = mgr.search(what='validator',name=v)['validator']
for k in keys:
validator = mgr.get_by_key('validator', k)
result = validator.evaluate(repo,
validator_specs[v],
args)
allresults.extend(result)
return allresults
|
[
"Validate",
"the",
"content",
"of",
"the",
"files",
"for",
"consistency",
".",
"Validators",
"can",
"look",
"as",
"deeply",
"as",
"needed",
"into",
"the",
"files",
".",
"dgit",
"treats",
"them",
"all",
"as",
"black",
"boxes",
"."
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/validation.py#L85-L127
|
[
"def",
"validate",
"(",
"repo",
",",
"validator_name",
"=",
"None",
",",
"filename",
"=",
"None",
",",
"rulesfiles",
"=",
"None",
",",
"args",
"=",
"[",
"]",
")",
":",
"mgr",
"=",
"plugins_get_mgr",
"(",
")",
"# Expand the specification. Now we have full file paths",
"validator_specs",
"=",
"instantiate",
"(",
"repo",
",",
"validator_name",
",",
"filename",
",",
"rulesfiles",
")",
"# Run the validators with rules files...",
"allresults",
"=",
"[",
"]",
"for",
"v",
"in",
"validator_specs",
":",
"keys",
"=",
"mgr",
".",
"search",
"(",
"what",
"=",
"'validator'",
",",
"name",
"=",
"v",
")",
"[",
"'validator'",
"]",
"for",
"k",
"in",
"keys",
":",
"validator",
"=",
"mgr",
".",
"get_by_key",
"(",
"'validator'",
",",
"k",
")",
"result",
"=",
"validator",
".",
"evaluate",
"(",
"repo",
",",
"validator_specs",
"[",
"v",
"]",
",",
"args",
")",
"allresults",
".",
"extend",
"(",
"result",
")",
"return",
"allresults"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
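A sketch of driving validate() end to end; the repo object is assumed to come from dgit's repo lookup machinery:

from dgitcore.datasets.validation import validate

results = validate(repo,   # assumed, obtained elsewhere
                   validator_name='regression-quality',
                   filename='data/*.csv')
for r in results:
    # each entry carries the target file, rules file, status and message
    print(r)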
valid
|
LocalBackend.url_is_valid
|
Check if a URL exists
|
dgitcore/contrib/backends/local.py
|
def url_is_valid(self, url):
"""
Check if a URL exists
"""
# Check if the file system path exists...
if url.startswith("file://"):
url = url.replace("file://","")
return os.path.exists(url)
|
def url_is_valid(self, url):
"""
Check if a URL exists
"""
# Check if the file system path exists...
if url.startswith("file://"):
url = url.replace("file://","")
return os.path.exists(url)
|
[
"Check",
"if",
"a",
"URL",
"exists"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/contrib/backends/local.py#L25-L33
|
[
"def",
"url_is_valid",
"(",
"self",
",",
"url",
")",
":",
"# Check if the file system path exists...",
"if",
"url",
".",
"startswith",
"(",
"\"file://\"",
")",
":",
"url",
"=",
"url",
".",
"replace",
"(",
"\"file://\"",
",",
"\"\"",
")",
"return",
"os",
".",
"path",
".",
"exists",
"(",
"url",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
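The file:// handling is easy to probe standalone. Note that str.replace strips every occurrence of the scheme rather than just the prefix; a stricter variant would slice it off with url[len("file://"):]:

import os

def url_is_valid(url):
    # Same logic as LocalBackend.url_is_valid, lifted out for a demo
    if url.startswith("file://"):
        url = url.replace("file://", "")
    return os.path.exists(url)

print(url_is_valid("file:///tmp"))     # True on most Unix systems
print(url_is_valid("/no/such/path"))   # False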
valid
|
BasicMetadata.post
|
Post to the metadata server
Parameters
----------
repo
|
dgitcore/contrib/metadata/default.py
|
def post(self, repo):
"""
Post to the metadata server
Parameters
----------
repo
"""
datapackage = repo.package
url = self.url
token = self.token
headers = {
'Authorization': 'Token {}'.format(token),
'Content-Type': 'application/json'
}
try:
r = requests.post(url,
data = json.dumps(datapackage),
headers=headers)
return r
except Exception as e:
#print(e)
#traceback.print_exc()
raise NetworkError()
return ""
|
def post(self, repo):
"""
Post to the metadata server
Parameters
----------
repo
"""
datapackage = repo.package
url = self.url
token = self.token
headers = {
'Authorization': 'Token {}'.format(token),
'Content-Type': 'application/json'
}
try:
r = requests.post(url,
data = json.dumps(datapackage),
headers=headers)
return r
except Exception as e:
#print(e)
#traceback.print_exc()
raise NetworkError()
return ""
|
[
"Post",
"to",
"the",
"metadata",
"server"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/contrib/metadata/default.py#L75-L104
|
[
"def",
"post",
"(",
"self",
",",
"repo",
")",
":",
"datapackage",
"=",
"repo",
".",
"package",
"url",
"=",
"self",
".",
"url",
"token",
"=",
"self",
".",
"token",
"headers",
"=",
"{",
"'Authorization'",
":",
"'Token {}'",
".",
"format",
"(",
"token",
")",
",",
"'Content-Type'",
":",
"'application/json'",
"}",
"try",
":",
"r",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"datapackage",
")",
",",
"headers",
"=",
"headers",
")",
"return",
"r",
"except",
"Exception",
"as",
"e",
":",
"#print(e)",
"#traceback.print_exc()",
"raise",
"NetworkError",
"(",
")",
"return",
"\"\""
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
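The server call is a plain token-authenticated JSON POST, reproducible with requests alone (URL and token below are hypothetical). Note that the trailing return "" in the original is unreachable once NetworkError has been raised:

import json
import requests

url = "https://metadata.example.com/api/datapackages/"   # hypothetical
token = "REPLACE_ME"                                     # hypothetical
datapackage = {"title": "demo"}                          # stands in for repo.package

headers = {
    'Authorization': 'Token {}'.format(token),
    'Content-Type': 'application/json'
}
r = requests.post(url, data=json.dumps(datapackage), headers=headers)
print(r.status_code)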
valid
|
get_module_class
|
imports and returns module class from ``path.to.module.Class``
argument
|
pyems/utils.py
|
def get_module_class(class_path):
"""
imports and returns module class from ``path.to.module.Class``
argument
"""
mod_name, cls_name = class_path.rsplit('.', 1)
try:
mod = import_module(mod_name)
except ImportError as ex:
raise EvoStreamException('Error importing module %s: '
'"%s"' % (mod_name, ex))
return getattr(mod, cls_name)
|
def get_module_class(class_path):
"""
imports and returns module class from ``path.to.module.Class``
argument
"""
mod_name, cls_name = class_path.rsplit('.', 1)
try:
mod = import_module(mod_name)
except ImportError as ex:
raise EvoStreamException('Error importing module %s: '
'"%s"' % (mod_name, ex))
return getattr(mod, cls_name)
|
[
"imports",
"and",
"returns",
"module",
"class",
"from",
"path",
".",
"to",
".",
"module",
".",
"Class",
"argument"
] |
tomi77/pyems
|
python
|
https://github.com/tomi77/pyems/blob/8c0748b720d389f19d5226fdcceedc26cd6284ee/pyems/utils.py#L12-L25
|
[
"def",
"get_module_class",
"(",
"class_path",
")",
":",
"mod_name",
",",
"cls_name",
"=",
"class_path",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"try",
":",
"mod",
"=",
"import_module",
"(",
"mod_name",
")",
"except",
"ImportError",
"as",
"ex",
":",
"raise",
"EvoStreamException",
"(",
"'Error importing module %s: '",
"'\"%s\"'",
"%",
"(",
"mod_name",
",",
"ex",
")",
")",
"return",
"getattr",
"(",
"mod",
",",
"cls_name",
")"
] |
8c0748b720d389f19d5226fdcceedc26cd6284ee
|
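The rsplit-then-import pattern is runnable anywhere with a standard-library target:

from importlib import import_module

def get_class(class_path):
    # Same pattern as get_module_class, minus the EvoStream-specific wrapper
    mod_name, cls_name = class_path.rsplit('.', 1)
    return getattr(import_module(mod_name), cls_name)

OrderedDict = get_class('collections.OrderedDict')
print(OrderedDict)   # <class 'collections.OrderedDict'>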
valid
|
find_executable_files
|
Find max 5 executables that are responsible for this repo.
|
dgitcore/datasets/auto.py
|
def find_executable_files():
"""
Find max 5 executables that are responsible for this repo.
"""
files = glob.glob("*") + glob.glob("*/*") + glob.glob('*/*/*')
files = filter(lambda f: os.path.isfile(f), files)
executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH
final = []
for filename in files:
if os.path.isfile(filename):
st = os.stat(filename)
mode = st.st_mode
if mode & executable:
final.append(filename)
if len(final) > 5:
break
return final
|
def find_executable_files():
"""
Find max 5 executables that are responsible for this repo.
"""
files = glob.glob("*") + glob.glob("*/*") + glob.glob('*/*/*')
files = filter(lambda f: os.path.isfile(f), files)
executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH
final = []
for filename in files:
if os.path.isfile(filename):
st = os.stat(filename)
mode = st.st_mode
if mode & executable:
final.append(filename)
if len(final) > 5:
break
return final
|
[
"Find",
"max",
"5",
"executables",
"that",
"are",
"responsible",
"for",
"this",
"repo",
"."
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/auto.py#L18-L34
|
[
"def",
"find_executable_files",
"(",
")",
":",
"files",
"=",
"glob",
".",
"glob",
"(",
"\"*\"",
")",
"+",
"glob",
".",
"glob",
"(",
"\"*/*\"",
")",
"+",
"glob",
".",
"glob",
"(",
"'*/*/*'",
")",
"files",
"=",
"filter",
"(",
"lambda",
"f",
":",
"os",
".",
"path",
".",
"isfile",
"(",
"f",
")",
",",
"files",
")",
"executable",
"=",
"stat",
".",
"S_IEXEC",
"|",
"stat",
".",
"S_IXGRP",
"|",
"stat",
".",
"S_IXOTH",
"final",
"=",
"[",
"]",
"for",
"filename",
"in",
"files",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")",
":",
"st",
"=",
"os",
".",
"stat",
"(",
"filename",
")",
"mode",
"=",
"st",
".",
"st_mode",
"if",
"mode",
"&",
"executable",
":",
"final",
".",
"append",
"(",
"filename",
")",
"if",
"len",
"(",
"final",
")",
">",
"5",
":",
"break",
"return",
"final"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
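The permission test is a plain bitmask over st_mode; a standalone check of a single file (the path is hypothetical). Note also that because the break fires only after a sixth append, the function can in fact return six entries, one more than its docstring promises:

import os
import stat

executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH   # same mask as above

path = "/bin/sh"   # hypothetical file to probe
mode = os.stat(path).st_mode
print(bool(mode & executable))   # True if any execute bit is set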
valid
|
auto_init
|
Initialize a repo-specific configuration file to execute dgit
Parameters
----------
autofile: Repo-specific configuration file (dgit.json)
force_init: Flag to force re-initialization of the configuration file
|
dgitcore/datasets/auto.py
|
def auto_init(autofile, force_init=False):
"""
Initialize a repo-specific configuration file to execute dgit
Parameters
----------
autofile: Repo-specific configuration file (dgit.json)
    force_init: Flag to force re-initialization of the configuration file
"""
if os.path.exists(autofile) and not force_init:
try:
autooptions = json.loads(open(autofile).read())
return autooptions
except:
print("Error in dgit.json configuration file")
traceback.print_exc()
raise Exception("Invalid configuration file")
config = get_config()
pluginmgr = plugins_get_mgr()
print("Repo configuration file missing or corrupted. Creating one")
print("Let us know a few details about your data repository")
# Get the username
username = getpass.getuser()
revised = input("Please specify username [{}]".format(username))
if revised not in ["", None]:
username = revised
# Get the reponame
thisdir = os.path.abspath(os.getcwd())
reponame = os.path.basename(thisdir)
revised = input("Please specify repo name [{}]".format(reponame))
if revised not in ["", None]:
reponame = revised
# Get the default backend URL
keys = pluginmgr.search('backend')
keys = keys['backend']
keys = [k for k in keys if k[0] != "local"]
remoteurl = ""
backend = None
if len(keys) > 0:
backend = pluginmgr.get_by_key('backend', keys[0])
candidate = backend.url(username, reponame)
revised = input("Please specify remote URL [{}]".format(candidate))
if revised in ["", None]:
remoteurl = candidate
else:
remoteurl = revised
# Get title...
title = ""
while title == "":
title = input("One line summary of your repo:")
if title == "":
print("The repo requires a one line summary")
else:
break
# Get description
description = ""
while description == "":
description = input("Detailed description:")
if description == "":
print("The repo requires some text as well")
else:
break
autooptions = OrderedDict([
("username", username),
("reponame", reponame),
("remoteurl", remoteurl),
("title", title),
("description", description),
("working-directory", "."),
('track' ,OrderedDict([
('includes', ['*.csv', '*.tsv', '*.txt','*.json', '*.xls', '*.xlsx', "*.sql", "*.hql"]),
('excludes', ['.git', '.svn', os.path.basename(autofile)]),
])),
('auto-push', False),
('pipeline' ,OrderedDict([])),
('import' ,OrderedDict([
('directory-mapping' ,OrderedDict([
('.', '')
]))
])),
('dependencies' ,OrderedDict([]))
])
# Gather options from each of the enabled plugins
for p in ['validator', 'transformer']:
keys = pluginmgr.search(p)
keys = keys[p]
options = OrderedDict()
for k in keys:
if k.name in options:
continue
mgr = pluginmgr.get_by_key(p, k)
options[k.name] = mgr.autooptions()
autooptions[p] = options
keys = pluginmgr.search('metadata')
keys = keys['metadata']
if len(keys) > 0:
        # => Select domains that should be included.
servers = []
for k in keys:
server = pluginmgr.get_by_key('metadata', k)
server = server.url.split("/")[2]
servers.append(server)
        # Specify what should be included. Some of these should go into
# the metadata modules
autooptions.update(OrderedDict([
('metadata-management', OrderedDict([
('servers', servers),
('include-code-history', find_executable_files()),
('include-preview', OrderedDict([
('length', 512),
('files', ['*.txt', '*.csv', '*.tsv'])
])),
('include-data-history', True),
('include-action-history', True),
('include-validation', True),
('include-dependencies', True),
('include-schema', True),
('include-tab-diffs', True),
('include-platform', True),
]))]))
with open(autofile, 'w') as fd:
fd.write(json.dumps(autooptions, indent=4))
print("")
print("Updated dataset specific config file: {}".format(autofile))
print("Please edit it and rerun dgit auto.")
print("Tip: Consider committing dgit.json to the code repository.")
#if platform.system() == "Linux":
# subprocess.call(["xdg-open", autofile])
sys.exit()
|
def auto_init(autofile, force_init=False):
"""
Initialize a repo-specific configuration file to execute dgit
Parameters
----------
autofile: Repo-specific configuration file (dgit.json)
    force_init: Flag to force re-initialization of the configuration file
"""
if os.path.exists(autofile) and not force_init:
try:
autooptions = json.loads(open(autofile).read())
return autooptions
except:
print("Error in dgit.json configuration file")
traceback.print_exc()
raise Exception("Invalid configuration file")
config = get_config()
pluginmgr = plugins_get_mgr()
print("Repo configuration file missing or corrupted. Creating one")
print("Let us know a few details about your data repository")
# Get the username
username = getpass.getuser()
revised = input("Please specify username [{}]".format(username))
if revised not in ["", None]:
username = revised
# Get the reponame
thisdir = os.path.abspath(os.getcwd())
reponame = os.path.basename(thisdir)
revised = input("Please specify repo name [{}]".format(reponame))
if revised not in ["", None]:
reponame = revised
# Get the default backend URL
keys = pluginmgr.search('backend')
keys = keys['backend']
keys = [k for k in keys if k[0] != "local"]
remoteurl = ""
backend = None
if len(keys) > 0:
backend = pluginmgr.get_by_key('backend', keys[0])
candidate = backend.url(username, reponame)
revised = input("Please specify remote URL [{}]".format(candidate))
if revised in ["", None]:
remoteurl = candidate
else:
remoteurl = revised
# Get title...
title = ""
while title == "":
title = input("One line summary of your repo:")
if title == "":
print("The repo requires a one line summary")
else:
break
# Get description
description = ""
while description == "":
description = input("Detailed description:")
if description == "":
print("The repo requires some text as well")
else:
break
autooptions = OrderedDict([
("username", username),
("reponame", reponame),
("remoteurl", remoteurl),
("title", title),
("description", description),
("working-directory", "."),
('track' ,OrderedDict([
('includes', ['*.csv', '*.tsv', '*.txt','*.json', '*.xls', '*.xlsx', "*.sql", "*.hql"]),
('excludes', ['.git', '.svn', os.path.basename(autofile)]),
])),
('auto-push', False),
('pipeline' ,OrderedDict([])),
('import' ,OrderedDict([
('directory-mapping' ,OrderedDict([
('.', '')
]))
])),
('dependencies' ,OrderedDict([]))
])
# Gather options from each of the enabled plugins
for p in ['validator', 'transformer']:
keys = pluginmgr.search(p)
keys = keys[p]
options = OrderedDict()
for k in keys:
if k.name in options:
continue
mgr = pluginmgr.get_by_key(p, k)
options[k.name] = mgr.autooptions()
autooptions[p] = options
keys = pluginmgr.search('metadata')
keys = keys['metadata']
if len(keys) > 0:
        # => Select domains that should be included.
servers = []
for k in keys:
server = pluginmgr.get_by_key('metadata', k)
server = server.url.split("/")[2]
servers.append(server)
        # Specify what should be included. Some of these should go into
# the metadata modules
autooptions.update(OrderedDict([
('metadata-management', OrderedDict([
('servers', servers),
('include-code-history', find_executable_files()),
('include-preview', OrderedDict([
('length', 512),
('files', ['*.txt', '*.csv', '*.tsv'])
])),
('include-data-history', True),
('include-action-history', True),
('include-validation', True),
('include-dependencies', True),
('include-schema', True),
('include-tab-diffs', True),
('include-platform', True),
]))]))
with open(autofile, 'w') as fd:
fd.write(json.dumps(autooptions, indent=4))
print("")
print("Updated dataset specific config file: {}".format(autofile))
print("Please edit it and rerun dgit auto.")
print("Tip: Consider committing dgit.json to the code repository.")
#if platform.system() == "Linux":
# subprocess.call(["xdg-open", autofile])
sys.exit()
|
[
"Initialize",
"a",
"repo",
"-",
"specific",
"configuration",
"file",
"to",
"execute",
"dgit"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/auto.py#L36-L184
|
[
"def",
"auto_init",
"(",
"autofile",
",",
"force_init",
"=",
"False",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"autofile",
")",
"and",
"not",
"force_init",
":",
"try",
":",
"autooptions",
"=",
"json",
".",
"loads",
"(",
"open",
"(",
"autofile",
")",
".",
"read",
"(",
")",
")",
"return",
"autooptions",
"except",
":",
"print",
"(",
"\"Error in dgit.json configuration file\"",
")",
"traceback",
".",
"print_exc",
"(",
")",
"raise",
"Exception",
"(",
"\"Invalid configuration file\"",
")",
"config",
"=",
"get_config",
"(",
")",
"pluginmgr",
"=",
"plugins_get_mgr",
"(",
")",
"print",
"(",
"\"Repo configuration file missing or corrupted. Creating one\"",
")",
"print",
"(",
"\"Let us know a few details about your data repository\"",
")",
"# Get the username",
"username",
"=",
"getpass",
".",
"getuser",
"(",
")",
"revised",
"=",
"input",
"(",
"\"Please specify username [{}]\"",
".",
"format",
"(",
"username",
")",
")",
"if",
"revised",
"not",
"in",
"[",
"\"\"",
",",
"None",
"]",
":",
"username",
"=",
"revised",
"# Get the reponame",
"thisdir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"getcwd",
"(",
")",
")",
"reponame",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"thisdir",
")",
"revised",
"=",
"input",
"(",
"\"Please specify repo name [{}]\"",
".",
"format",
"(",
"reponame",
")",
")",
"if",
"revised",
"not",
"in",
"[",
"\"\"",
",",
"None",
"]",
":",
"reponame",
"=",
"revised",
"# Get the default backend URL",
"keys",
"=",
"pluginmgr",
".",
"search",
"(",
"'backend'",
")",
"keys",
"=",
"keys",
"[",
"'backend'",
"]",
"keys",
"=",
"[",
"k",
"for",
"k",
"in",
"keys",
"if",
"k",
"[",
"0",
"]",
"!=",
"\"local\"",
"]",
"remoteurl",
"=",
"\"\"",
"backend",
"=",
"None",
"if",
"len",
"(",
"keys",
")",
">",
"0",
":",
"backend",
"=",
"pluginmgr",
".",
"get_by_key",
"(",
"'backend'",
",",
"keys",
"[",
"0",
"]",
")",
"candidate",
"=",
"backend",
".",
"url",
"(",
"username",
",",
"reponame",
")",
"revised",
"=",
"input",
"(",
"\"Please specify remote URL [{}]\"",
".",
"format",
"(",
"candidate",
")",
")",
"if",
"revised",
"in",
"[",
"\"\"",
",",
"None",
"]",
":",
"remoteurl",
"=",
"candidate",
"else",
":",
"remoteurl",
"=",
"revised",
"# Get title...",
"title",
"=",
"\"\"",
"while",
"title",
"==",
"\"\"",
":",
"title",
"=",
"input",
"(",
"\"One line summary of your repo:\"",
")",
"if",
"title",
"==",
"\"\"",
":",
"print",
"(",
"\"The repo requires a one line summary\"",
")",
"else",
":",
"break",
"# Get description",
"description",
"=",
"\"\"",
"while",
"description",
"==",
"\"\"",
":",
"description",
"=",
"input",
"(",
"\"Detailed description:\"",
")",
"if",
"description",
"==",
"\"\"",
":",
"print",
"(",
"\"The repo requires some text as well\"",
")",
"else",
":",
"break",
"autooptions",
"=",
"OrderedDict",
"(",
"[",
"(",
"\"username\"",
",",
"username",
")",
",",
"(",
"\"reponame\"",
",",
"reponame",
")",
",",
"(",
"\"remoteurl\"",
",",
"remoteurl",
")",
",",
"(",
"\"title\"",
",",
"title",
")",
",",
"(",
"\"description\"",
",",
"description",
")",
",",
"(",
"\"working-directory\"",
",",
"\".\"",
")",
",",
"(",
"'track'",
",",
"OrderedDict",
"(",
"[",
"(",
"'includes'",
",",
"[",
"'*.csv'",
",",
"'*.tsv'",
",",
"'*.txt'",
",",
"'*.json'",
",",
"'*.xls'",
",",
"'*.xlsx'",
",",
"\"*.sql\"",
",",
"\"*.hql\"",
"]",
")",
",",
"(",
"'excludes'",
",",
"[",
"'.git'",
",",
"'.svn'",
",",
"os",
".",
"path",
".",
"basename",
"(",
"autofile",
")",
"]",
")",
",",
"]",
")",
")",
",",
"(",
"'auto-push'",
",",
"False",
")",
",",
"(",
"'pipeline'",
",",
"OrderedDict",
"(",
"[",
"]",
")",
")",
",",
"(",
"'import'",
",",
"OrderedDict",
"(",
"[",
"(",
"'directory-mapping'",
",",
"OrderedDict",
"(",
"[",
"(",
"'.'",
",",
"''",
")",
"]",
")",
")",
"]",
")",
")",
",",
"(",
"'dependencies'",
",",
"OrderedDict",
"(",
"[",
"]",
")",
")",
"]",
")",
"# Gather options from each of the enabled plugins",
"for",
"p",
"in",
"[",
"'validator'",
",",
"'transformer'",
"]",
":",
"keys",
"=",
"pluginmgr",
".",
"search",
"(",
"p",
")",
"keys",
"=",
"keys",
"[",
"p",
"]",
"options",
"=",
"OrderedDict",
"(",
")",
"for",
"k",
"in",
"keys",
":",
"if",
"k",
".",
"name",
"in",
"options",
":",
"continue",
"mgr",
"=",
"pluginmgr",
".",
"get_by_key",
"(",
"p",
",",
"k",
")",
"options",
"[",
"k",
".",
"name",
"]",
"=",
"mgr",
".",
"autooptions",
"(",
")",
"autooptions",
"[",
"p",
"]",
"=",
"options",
"keys",
"=",
"pluginmgr",
".",
"search",
"(",
"'metadata'",
")",
"keys",
"=",
"keys",
"[",
"'metadata'",
"]",
"if",
"len",
"(",
"keys",
")",
">",
"0",
":",
"# => Select domains that be included.",
"servers",
"=",
"[",
"]",
"for",
"k",
"in",
"keys",
":",
"server",
"=",
"pluginmgr",
".",
"get_by_key",
"(",
"'metadata'",
",",
"k",
")",
"server",
"=",
"server",
".",
"url",
".",
"split",
"(",
"\"/\"",
")",
"[",
"2",
"]",
"servers",
".",
"append",
"(",
"server",
")",
"# Specify what should be included. Some of these should go ino",
"# the metadata modules",
"autooptions",
".",
"update",
"(",
"OrderedDict",
"(",
"[",
"(",
"'metadata-management'",
",",
"OrderedDict",
"(",
"[",
"(",
"'servers'",
",",
"servers",
")",
",",
"(",
"'include-code-history'",
",",
"find_executable_files",
"(",
")",
")",
",",
"(",
"'include-preview'",
",",
"OrderedDict",
"(",
"[",
"(",
"'length'",
",",
"512",
")",
",",
"(",
"'files'",
",",
"[",
"'*.txt'",
",",
"'*.csv'",
",",
"'*.tsv'",
"]",
")",
"]",
")",
")",
",",
"(",
"'include-data-history'",
",",
"True",
")",
",",
"(",
"'include-action-history'",
",",
"True",
")",
",",
"(",
"'include-validation'",
",",
"True",
")",
",",
"(",
"'include-dependencies'",
",",
"True",
")",
",",
"(",
"'include-schema'",
",",
"True",
")",
",",
"(",
"'include-tab-diffs'",
",",
"True",
")",
",",
"(",
"'include-platform'",
",",
"True",
")",
",",
"]",
")",
")",
"]",
")",
")",
"with",
"open",
"(",
"autofile",
",",
"'w'",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"autooptions",
",",
"indent",
"=",
"4",
")",
")",
"print",
"(",
"\"\"",
")",
"print",
"(",
"\"Updated dataset specific config file: {}\"",
".",
"format",
"(",
"autofile",
")",
")",
"print",
"(",
"\"Please edit it and rerun dgit auto.\"",
")",
"print",
"(",
"\"Tip: Consider committing dgit.json to the code repository.\"",
")",
"#if platform.system() == \"Linux\":",
"# subprocess.call([\"xdg-open\", autofile])",
"sys",
".",
"exit",
"(",
")"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
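The OrderedDict that auto_init() writes serialises to roughly the JSON below; the first five values stand in for the interactive answers, and the plugin and metadata-management sections are omitted for brevity:

{
    "username": "alice",
    "reponame": "sales-data",
    "remoteurl": "s3://bucket/git/alice/sales-data.git",
    "title": "One line summary",
    "description": "Detailed description",
    "working-directory": ".",
    "track": {
        "includes": ["*.csv", "*.tsv", "*.txt", "*.json", "*.xls", "*.xlsx", "*.sql", "*.hql"],
        "excludes": [".git", ".svn", "dgit.json"]
    },
    "auto-push": false,
    "pipeline": {},
    "import": {"directory-mapping": {".": ""}},
    "dependencies": {}
}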
valid
|
auto_get_repo
|
Automatically get repo
Parameters
----------
autooptions: dgit.json content
|
dgitcore/datasets/auto.py
|
def auto_get_repo(autooptions, debug=False):
"""
Automatically get repo
Parameters
----------
autooptions: dgit.json content
"""
# plugin manager
pluginmgr = plugins_get_mgr()
# get the repo manager
repomgr = pluginmgr.get(what='repomanager', name='git')
repo = None
try:
if debug:
print("Looking repo")
repo = repomgr.lookup(username=autooptions['username'],
reponame=autooptions['reponame'])
except:
# Clone the repo
try:
print("Checking and cloning if the dataset exists on backend")
url = autooptions['remoteurl']
if debug:
print("Doesnt exist. trying to clone: {}".format(url))
common_clone(url)
repo = repomgr.lookup(username=autooptions['username'],
reponame=autooptions['reponame'])
if debug:
print("Cloning successful")
except:
# traceback.print_exc()
yes = input("Repo doesnt exist. Should I create one? [yN]")
if yes == 'y':
setup = "git"
if autooptions['remoteurl'].startswith('s3://'):
setup = 'git+s3'
repo = common_init(username=autooptions['username'],
reponame=autooptions['reponame'],
setup=setup,
force=True,
options=autooptions)
if debug:
print("Successfully inited repo")
else:
raise Exception("Cannot load repo")
repo.options = autooptions
return repo
|
def auto_get_repo(autooptions, debug=False):
"""
Automatically get repo
Parameters
----------
autooptions: dgit.json content
"""
# plugin manager
pluginmgr = plugins_get_mgr()
# get the repo manager
repomgr = pluginmgr.get(what='repomanager', name='git')
repo = None
try:
if debug:
print("Looking repo")
repo = repomgr.lookup(username=autooptions['username'],
reponame=autooptions['reponame'])
except:
# Clone the repo
try:
print("Checking and cloning if the dataset exists on backend")
url = autooptions['remoteurl']
if debug:
print("Doesnt exist. trying to clone: {}".format(url))
common_clone(url)
repo = repomgr.lookup(username=autooptions['username'],
reponame=autooptions['reponame'])
if debug:
print("Cloning successful")
except:
# traceback.print_exc()
yes = input("Repo doesnt exist. Should I create one? [yN]")
if yes == 'y':
setup = "git"
if autooptions['remoteurl'].startswith('s3://'):
setup = 'git+s3'
repo = common_init(username=autooptions['username'],
reponame=autooptions['reponame'],
setup=setup,
force=True,
options=autooptions)
if debug:
print("Successfully inited repo")
else:
raise Exception("Cannot load repo")
repo.options = autooptions
return repo
|
[
"Automatically",
"get",
"repo"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/auto.py#L187-L243
|
[
"def",
"auto_get_repo",
"(",
"autooptions",
",",
"debug",
"=",
"False",
")",
":",
"# plugin manager",
"pluginmgr",
"=",
"plugins_get_mgr",
"(",
")",
"# get the repo manager",
"repomgr",
"=",
"pluginmgr",
".",
"get",
"(",
"what",
"=",
"'repomanager'",
",",
"name",
"=",
"'git'",
")",
"repo",
"=",
"None",
"try",
":",
"if",
"debug",
":",
"print",
"(",
"\"Looking repo\"",
")",
"repo",
"=",
"repomgr",
".",
"lookup",
"(",
"username",
"=",
"autooptions",
"[",
"'username'",
"]",
",",
"reponame",
"=",
"autooptions",
"[",
"'reponame'",
"]",
")",
"except",
":",
"# Clone the repo",
"try",
":",
"print",
"(",
"\"Checking and cloning if the dataset exists on backend\"",
")",
"url",
"=",
"autooptions",
"[",
"'remoteurl'",
"]",
"if",
"debug",
":",
"print",
"(",
"\"Doesnt exist. trying to clone: {}\"",
".",
"format",
"(",
"url",
")",
")",
"common_clone",
"(",
"url",
")",
"repo",
"=",
"repomgr",
".",
"lookup",
"(",
"username",
"=",
"autooptions",
"[",
"'username'",
"]",
",",
"reponame",
"=",
"autooptions",
"[",
"'reponame'",
"]",
")",
"if",
"debug",
":",
"print",
"(",
"\"Cloning successful\"",
")",
"except",
":",
"# traceback.print_exc()",
"yes",
"=",
"input",
"(",
"\"Repo doesnt exist. Should I create one? [yN]\"",
")",
"if",
"yes",
"==",
"'y'",
":",
"setup",
"=",
"\"git\"",
"if",
"autooptions",
"[",
"'remoteurl'",
"]",
".",
"startswith",
"(",
"'s3://'",
")",
":",
"setup",
"=",
"'git+s3'",
"repo",
"=",
"common_init",
"(",
"username",
"=",
"autooptions",
"[",
"'username'",
"]",
",",
"reponame",
"=",
"autooptions",
"[",
"'reponame'",
"]",
",",
"setup",
"=",
"setup",
",",
"force",
"=",
"True",
",",
"options",
"=",
"autooptions",
")",
"if",
"debug",
":",
"print",
"(",
"\"Successfully inited repo\"",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Cannot load repo\"",
")",
"repo",
".",
"options",
"=",
"autooptions",
"return",
"repo"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
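The function is a three-stage fallback; condensed, with hypothetical options:

autooptions = {'username': 'alice', 'reponame': 'sales-data',
               'remoteurl': 's3://bucket/git/alice/sales-data.git'}

# 1. repomgr.lookup(...)               -- already materialised locally?
# 2. common_clone(remoteurl); lookup   -- exists on the backend?
# 3. common_init(..., setup='git+s3')  -- create afresh; setup follows the URL scheme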
valid
|
get_files_to_commit
|
Look through the local directory to pick up files to check
|
dgitcore/datasets/auto.py
|
def get_files_to_commit(autooptions):
"""
Look through the local directory to pick up files to check
"""
workingdir = autooptions['working-directory']
includes = autooptions['track']['includes']
excludes = autooptions['track']['excludes']
# transform glob patterns to regular expressions
# print("Includes ", includes)
includes = r'|'.join([fnmatch.translate(x) for x in includes])
excludes = r'|'.join([fnmatch.translate(x) for x in excludes]) or r'$.'
matched_files = []
for root, dirs, files in os.walk(workingdir):
# print("Looking at ", files)
# exclude dirs
# dirs[:] = [os.path.join(root, d) for d in dirs]
dirs[:] = [d for d in dirs if not re.match(excludes, d)]
# exclude/include files
files = [f for f in files if not re.match(excludes, f)]
#print("Files after excludes", files)
#print(includes)
files = [f for f in files if re.match(includes, f)]
#print("Files after includes", files)
files = [os.path.join(root, f) for f in files]
matched_files.extend(files)
return matched_files
|
def get_files_to_commit(autooptions):
"""
Look through the local directory to pick up files to check
"""
workingdir = autooptions['working-directory']
includes = autooptions['track']['includes']
excludes = autooptions['track']['excludes']
# transform glob patterns to regular expressions
# print("Includes ", includes)
includes = r'|'.join([fnmatch.translate(x) for x in includes])
excludes = r'|'.join([fnmatch.translate(x) for x in excludes]) or r'$.'
matched_files = []
for root, dirs, files in os.walk(workingdir):
# print("Looking at ", files)
# exclude dirs
# dirs[:] = [os.path.join(root, d) for d in dirs]
dirs[:] = [d for d in dirs if not re.match(excludes, d)]
# exclude/include files
files = [f for f in files if not re.match(excludes, f)]
#print("Files after excludes", files)
#print(includes)
files = [f for f in files if re.match(includes, f)]
#print("Files after includes", files)
files = [os.path.join(root, f) for f in files]
matched_files.extend(files)
return matched_files
|
[
"Look",
"through",
"the",
"local",
"directory",
"to",
"pick",
"up",
"files",
"to",
"check"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/auto.py#L246-L278
|
[
"def",
"get_files_to_commit",
"(",
"autooptions",
")",
":",
"workingdir",
"=",
"autooptions",
"[",
"'working-directory'",
"]",
"includes",
"=",
"autooptions",
"[",
"'track'",
"]",
"[",
"'includes'",
"]",
"excludes",
"=",
"autooptions",
"[",
"'track'",
"]",
"[",
"'excludes'",
"]",
"# transform glob patterns to regular expressions",
"# print(\"Includes \", includes) ",
"includes",
"=",
"r'|'",
".",
"join",
"(",
"[",
"fnmatch",
".",
"translate",
"(",
"x",
")",
"for",
"x",
"in",
"includes",
"]",
")",
"excludes",
"=",
"r'|'",
".",
"join",
"(",
"[",
"fnmatch",
".",
"translate",
"(",
"x",
")",
"for",
"x",
"in",
"excludes",
"]",
")",
"or",
"r'$.'",
"matched_files",
"=",
"[",
"]",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"workingdir",
")",
":",
"# print(\"Looking at \", files)",
"# exclude dirs",
"# dirs[:] = [os.path.join(root, d) for d in dirs]",
"dirs",
"[",
":",
"]",
"=",
"[",
"d",
"for",
"d",
"in",
"dirs",
"if",
"not",
"re",
".",
"match",
"(",
"excludes",
",",
"d",
")",
"]",
"# exclude/include files",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"not",
"re",
".",
"match",
"(",
"excludes",
",",
"f",
")",
"]",
"#print(\"Files after excludes\", files)",
"#print(includes) ",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"re",
".",
"match",
"(",
"includes",
",",
"f",
")",
"]",
"#print(\"Files after includes\", files) ",
"files",
"=",
"[",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"f",
")",
"for",
"f",
"in",
"files",
"]",
"matched_files",
".",
"extend",
"(",
"files",
")",
"return",
"matched_files"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
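fnmatch.translate is what turns the include/exclude globs into a single alternation regex; a standalone demo. The excludes fallback r'$.' is a regex that can never match, so an empty exclude list cleanly disables exclusion:

import fnmatch
import re

includes = ['*.csv', '*.tsv']
pattern = r'|'.join(fnmatch.translate(x) for x in includes)

print(bool(re.match(pattern, 'sales.csv')))   # True
print(bool(re.match(pattern, 'notes.md')))    # False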
valid
|
auto_add
|
Cleanup the paths and add
|
dgitcore/datasets/auto.py
|
def auto_add(repo, autooptions, files):
"""
Cleanup the paths and add
"""
# Get the mappings and keys.
mapping = { ".": "" }
if (('import' in autooptions) and
('directory-mapping' in autooptions['import'])):
mapping = autooptions['import']['directory-mapping']
# Apply the longest prefix first...
keys = mapping.keys()
keys = sorted(keys, key=lambda k: len(k), reverse=True)
count = 0
params = []
for f in files:
# Find the destination
relativepath = f
for k in keys:
v = mapping[k]
if f.startswith(k + "/"):
#print("Replacing ", k)
relativepath = f.replace(k + "/", v)
break
# Now add to repository
count += files_add(repo=repo,
args=[f],
targetdir=os.path.dirname(relativepath))
return count
|
def auto_add(repo, autooptions, files):
"""
Cleanup the paths and add
"""
# Get the mappings and keys.
mapping = { ".": "" }
if (('import' in autooptions) and
('directory-mapping' in autooptions['import'])):
mapping = autooptions['import']['directory-mapping']
# Apply the longest prefix first...
keys = mapping.keys()
keys = sorted(keys, key=lambda k: len(k), reverse=True)
count = 0
params = []
for f in files:
# Find the destination
relativepath = f
for k in keys:
v = mapping[k]
if f.startswith(k + "/"):
#print("Replacing ", k)
relativepath = f.replace(k + "/", v)
break
# Now add to repository
count += files_add(repo=repo,
args=[f],
targetdir=os.path.dirname(relativepath))
return count
|
[
"Cleanup",
"the",
"paths",
"and",
"add"
] |
pingali/dgit
|
python
|
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/auto.py#L280-L312
|
[
"def",
"auto_add",
"(",
"repo",
",",
"autooptions",
",",
"files",
")",
":",
"# Get the mappings and keys.",
"mapping",
"=",
"{",
"\".\"",
":",
"\"\"",
"}",
"if",
"(",
"(",
"'import'",
"in",
"autooptions",
")",
"and",
"(",
"'directory-mapping'",
"in",
"autooptions",
"[",
"'import'",
"]",
")",
")",
":",
"mapping",
"=",
"autooptions",
"[",
"'import'",
"]",
"[",
"'directory-mapping'",
"]",
"# Apply the longest prefix first...",
"keys",
"=",
"mapping",
".",
"keys",
"(",
")",
"keys",
"=",
"sorted",
"(",
"keys",
",",
"key",
"=",
"lambda",
"k",
":",
"len",
"(",
"k",
")",
",",
"reverse",
"=",
"True",
")",
"count",
"=",
"0",
"params",
"=",
"[",
"]",
"for",
"f",
"in",
"files",
":",
"# Find the destination",
"relativepath",
"=",
"f",
"for",
"k",
"in",
"keys",
":",
"v",
"=",
"mapping",
"[",
"k",
"]",
"if",
"f",
".",
"startswith",
"(",
"k",
"+",
"\"/\"",
")",
":",
"#print(\"Replacing \", k)",
"relativepath",
"=",
"f",
".",
"replace",
"(",
"k",
"+",
"\"/\"",
",",
"v",
")",
"break",
"# Now add to repository",
"count",
"+=",
"files_add",
"(",
"repo",
"=",
"repo",
",",
"args",
"=",
"[",
"f",
"]",
",",
"targetdir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"relativepath",
")",
")",
"return",
"count"
] |
ecde01f40b98f0719dbcfb54452270ed2f86686d
|
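Longest-prefix-first ordering is what keeps nested directory mappings unambiguous; a standalone sketch of the rewrite step (mapping values carry their own trailing slash where one is wanted):

mapping = {'.': '', 'rawdata': 'imports/'}     # hypothetical directory-mapping
keys = sorted(mapping, key=len, reverse=True)  # longest prefix wins

f = 'rawdata/2016/sales.csv'
relativepath = f
for k in keys:
    if f.startswith(k + '/'):
        relativepath = f.replace(k + '/', mapping[k])
        break

print(relativepath)   # imports/2016/sales.csv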