Dataset schema (one row per function):
partition: string (3 classes)
func_name: string (1–134 chars)
docstring: string (1–46.9k chars)
path: string (4–223 chars)
original_string: string (75–104k chars)
code: string (75–104k chars; identical to original_string in these rows)
docstring_tokens: list (1–1.97k items)
repo: string (7–55 chars)
language: string (1 class)
url: string (87–315 chars)
code_tokens: list (19–28.4k items)
sha: string (40 chars)
valid
|
is_ordered
|
Checks to see if a CatalogID has been ordered or not.
Args:
cat_id (str): The catalog ID from the platform catalog.
Returns:
ordered (bool): Whether or not the image has been ordered
|
gbdxtools/images/util/image.py
|
def is_ordered(cat_id):
"""
Checks to see if a CatalogID has been ordered or not.
Args:
cat_id (str): The catalog ID from the platform catalog.
Returns:
ordered (bool): Whether or not the image has been ordered
"""
url = 'https://rda.geobigdata.io/v1/stripMetadata/{}'.format(cat_id)
auth = Auth()
r = _req_with_retries(auth.gbdx_connection, url)
if r is not None:
return r.status_code == 200
return False
|
[
"Checks",
"to",
"see",
"if",
"a",
"CatalogID",
"has",
"been",
"ordered",
"or",
"not",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/util/image.py#L49-L63
|
[
"def",
"is_ordered",
"(",
"cat_id",
")",
":",
"url",
"=",
"'https://rda.geobigdata.io/v1/stripMetadata/{}'",
".",
"format",
"(",
"cat_id",
")",
"auth",
"=",
"Auth",
"(",
")",
"r",
"=",
"_req_with_retries",
"(",
"auth",
".",
"gbdx_connection",
",",
"url",
")",
"if",
"r",
"is",
"not",
"None",
":",
"return",
"r",
".",
"status_code",
"==",
"200",
"return",
"False"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
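A minimal usage sketch for the `is_ordered` helper above, assuming gbdxtools is installed with valid GBDX credentials and that the function is importable from the path shown in the record (`gbdxtools/images/util/image.py`); the catalog ID is a placeholder:

```python
# Sketch only: needs gbdxtools plus configured GBDX credentials.
from gbdxtools.images.util.image import is_ordered  # import path per the record above

cat_id = "YOUR_CATALOG_ID"  # placeholder catalog ID
if is_ordered(cat_id):
    print("Strip is ordered; RDA strip metadata is reachable.")
else:
    print("Strip not ordered yet (or the metadata request failed).")
```

Note that the function also returns False when the retried request yields no response, so a False result is not proof the strip is unordered.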
valid
|
can_acomp
|
Checks to see if a CatalogID can be atmos. compensated or not.
Args:
cat_id (str): The catalog ID from the platform catalog.
Returns:
available (bool): Whether or not the image can be acomp'd
|
gbdxtools/images/util/image.py
|
def can_acomp(cat_id):
"""
Checks to see if a CatalogID can be atmos. compensated or not.
Args:
cat_id (str): The catalog ID from the platform catalog.
Returns:
available (bool): Whether or not the image can be acomp'd
"""
url = 'https://rda.geobigdata.io/v1/stripMetadata/{}/capabilities'.format(cat_id)
auth = Auth()
r = _req_with_retries(auth.gbdx_connection, url)
try:
data = r.json()
return data['acompVersion'] is not None
except:
return False
|
[
"Checks",
"to",
"see",
"if",
"a",
"CatalogID",
"can",
"be",
"atmos",
".",
"compensated",
"or",
"not",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/util/image.py#L65-L81
|
[
"def",
"can_acomp",
"(",
"cat_id",
")",
":",
"url",
"=",
"'https://rda.geobigdata.io/v1/stripMetadata/{}/capabilities'",
".",
"format",
"(",
"cat_id",
")",
"auth",
"=",
"Auth",
"(",
")",
"r",
"=",
"_req_with_retries",
"(",
"auth",
".",
"gbdx_connection",
",",
"url",
")",
"try",
":",
"data",
"=",
"r",
".",
"json",
"(",
")",
"return",
"data",
"[",
"'acompVersion'",
"]",
"is",
"not",
"None",
"except",
":",
"return",
"False"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
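`can_acomp` pairs naturally with `is_ordered` to decide whether an acomp-enabled image load is worth attempting; a hedged sketch under the same assumptions as above:

```python
# Sketch: gate an atmospheric-compensation request on the capabilities check.
from gbdxtools.images.util.image import can_acomp, is_ordered

cat_id = "YOUR_CATALOG_ID"  # placeholder
if is_ordered(cat_id) and can_acomp(cat_id):
    print("Safe to load this catalog ID with acomp enabled.")
```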
valid
|
deprecate_module_attr
|
Return a wrapped object that warns about deprecated accesses
|
gbdxtools/deprecate.py
|
def deprecate_module_attr(mod, deprecated):
"""Return a wrapped object that warns about deprecated accesses"""
deprecated = set(deprecated)
class Wrapper(object):
def __getattr__(self, attr):
if attr in deprecated:
warnings.warn("Property {} is deprecated".format(attr), GBDXDeprecation)
return getattr(mod, attr)
def __setattr__(self, attr, value):
if attr in deprecated:
warnings.warn("Property {} is deprecated".format(attr), GBDXDeprecation)
return setattr(mod, attr, value)
return Wrapper()
|
[
"Return",
"a",
"wrapped",
"object",
"that",
"warns",
"about",
"deprecated",
"accesses"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/deprecate.py#L13-L27
|
[
"def",
"deprecate_module_attr",
"(",
"mod",
",",
"deprecated",
")",
":",
"deprecated",
"=",
"set",
"(",
"deprecated",
")",
"class",
"Wrapper",
"(",
"object",
")",
":",
"def",
"__getattr__",
"(",
"self",
",",
"attr",
")",
":",
"if",
"attr",
"in",
"deprecated",
":",
"warnings",
".",
"warn",
"(",
"\"Property {} is deprecated\"",
".",
"format",
"(",
"attr",
")",
",",
"GBDXDeprecation",
")",
"return",
"getattr",
"(",
"mod",
",",
"attr",
")",
"def",
"__setattr__",
"(",
"self",
",",
"attr",
",",
"value",
")",
":",
"if",
"attr",
"in",
"deprecated",
":",
"warnings",
".",
"warn",
"(",
"\"Property {} is deprecated\"",
".",
"format",
"(",
"attr",
")",
",",
"GBDXDeprecation",
")",
"return",
"setattr",
"(",
"mod",
",",
"attr",
",",
"value",
")",
"return",
"Wrapper",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
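A self-contained sketch of the wrapper in action, assuming `deprecate_module_attr` is importable from `gbdxtools.deprecate` as the path column indicates; the module and attribute names are hypothetical:

```python
# Sketch: wrap a throwaway module so a legacy attribute warns on access.
import types
from gbdxtools.deprecate import deprecate_module_attr

mod = types.ModuleType("demo")   # hypothetical module
mod.old_name = 42                # hypothetical legacy attribute
mod.new_name = 42

wrapped = deprecate_module_attr(mod, ["old_name"])
print(wrapped.new_name)  # plain passthrough, no warning
print(wrapped.old_name)  # issues "Property old_name is deprecated" first
```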
valid
|
PortList.get_matching_multiplex_port
|
Given a name, figure out if a multiplex port prefixes this name and return it. Otherwise return none.
|
gbdxtools/simpleworkflows.py
|
def get_matching_multiplex_port(self,name):
"""
Given a name, figure out if a multiplex port prefixes this name and return it. Otherwise return none.
"""
# short circuit: if the attribute name already exists return none
# if name in self._portnames: return None
# if not len([p for p in self._portnames if name.startswith(p) and name != p]): return None
matching_multiplex_ports = [self.__getattribute__(p) for p in self._portnames
if name.startswith(p)
and name != p
and hasattr(self, p)
and self.__getattribute__(p).is_multiplex
]
for port in matching_multiplex_ports:
return port
return None
|
[
"Given",
"a",
"name",
"figure",
"out",
"if",
"a",
"multiplex",
"port",
"prefixes",
"this",
"name",
"and",
"return",
"it",
".",
"Otherwise",
"return",
"none",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L109-L128
|
[
"def",
"get_matching_multiplex_port",
"(",
"self",
",",
"name",
")",
":",
"# short circuit: if the attribute name already exists return none",
"# if name in self._portnames: return None",
"# if not len([p for p in self._portnames if name.startswith(p) and name != p]): return None",
"matching_multiplex_ports",
"=",
"[",
"self",
".",
"__getattribute__",
"(",
"p",
")",
"for",
"p",
"in",
"self",
".",
"_portnames",
"if",
"name",
".",
"startswith",
"(",
"p",
")",
"and",
"name",
"!=",
"p",
"and",
"hasattr",
"(",
"self",
",",
"p",
")",
"and",
"self",
".",
"__getattribute__",
"(",
"p",
")",
".",
"is_multiplex",
"]",
"for",
"port",
"in",
"matching_multiplex_ports",
":",
"return",
"port",
"return",
"None"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
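The method boils down to a prefix rule: an attribute name like `inputs_part1` resolves to a multiplex port named `inputs`. A standalone sketch of just that rule, with hypothetical port names:

```python
# Standalone sketch of the prefix matching performed above: a name matches a
# multiplex port whose name is a strict prefix of it.
portnames = {"data": False, "inputs": True}  # hypothetical name -> is_multiplex

def matching_multiplex_port(name):
    for p, is_multiplex in portnames.items():
        if name.startswith(p) and name != p and is_multiplex:
            return p
    return None

print(matching_multiplex_port("inputs_part1"))  # -> "inputs"
print(matching_multiplex_port("data"))          # -> None (exact name, not a prefix)
```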
valid
|
Task.set
|
Set input values on task
Args:
arbitrary_keys: values for the keys
Returns:
None
|
gbdxtools/simpleworkflows.py
|
def set(self, **kwargs):
"""
Set input values on task
Args:
arbitrary_keys: values for the keys
Returns:
None
"""
for port_name, port_value in kwargs.items():
# Support both port and port.value
if hasattr(port_value, 'value'):
port_value = port_value.value
self.inputs.__setattr__(port_name, port_value)
|
[
"Set",
"input",
"values",
"on",
"task"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L273-L288
|
[
"def",
"set",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"port_name",
",",
"port_value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"# Support both port and port.value",
"if",
"hasattr",
"(",
"port_value",
",",
"'value'",
")",
":",
"port_value",
"=",
"port_value",
".",
"value",
"self",
".",
"inputs",
".",
"__setattr__",
"(",
"port_name",
",",
"port_value",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
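A usage sketch following the usual gbdxtools pattern of building tasks through an `Interface`; the task type is borrowed from the `stdout` example later in this table, and the port names are placeholders for whatever the registered task actually defines:

```python
# Sketch: set input ports after construction. Needs GBDX credentials;
# task type and port names are illustrative.
from gbdxtools import Interface

gbdx = Interface()
task = gbdx.Task("AOP_Strip_Processor")
task.set(data="s3://bucket/path/to/imagery",  # one port value
         enable_acomp="true")                 # another port; strings are typical
```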
valid
|
Workflow.savedata
|
Save output data from any task in this workflow to S3
Args:
output: Reference task output (e.g. task.outputs.output1).
location (optional): Subfolder under which the output will be saved.
It will be placed under the account directory in gbd-customer-data bucket:
s3://gbd-customer-data/{account_id}/{location}
Leave blank to save to: workflow_output/{workflow_id}/{task_name}/{port_name}
Returns:
None
|
gbdxtools/simpleworkflows.py
|
def savedata(self, output, location=None):
'''
Save output data from any task in this workflow to S3
Args:
output: Reference task output (e.g. task.outputs.output1).
location (optional): Subfolder under which the output will be saved.
It will be placed under the account directory in gbd-customer-data bucket:
s3://gbd-customer-data/{account_id}/{location}
Leave blank to save to: workflow_output/{workflow_id}/{task_name}/{port_name}
Returns:
None
'''
output.persist = True
if location:
output.persist_location = location
|
[
"Save",
"output",
"data",
"from",
"any",
"task",
"in",
"this",
"workflow",
"to",
"S3"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L402-L420
|
[
"def",
"savedata",
"(",
"self",
",",
"output",
",",
"location",
"=",
"None",
")",
":",
"output",
".",
"persist",
"=",
"True",
"if",
"location",
":",
"output",
".",
"persist_location",
"=",
"location"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
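A sketch tying `savedata` to a task output, under the same assumptions as the `Task.set` sketch above (credentials configured; task type, S3 path and folder illustrative):

```python
# Sketch: persist one task output under a custom S3 subfolder.
from gbdxtools import Interface

gbdx = Interface()
task = gbdx.Task("AOP_Strip_Processor", data="s3://bucket/path/to/imagery")
workflow = gbdx.Workflow([task])
workflow.savedata(task.outputs.data, location="my_project/run1")
# expected prefix: s3://gbd-customer-data/{account_id}/my_project/run1
```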
valid
|
Workflow.list_workflow_outputs
|
Get a list of outputs from the workflow that are saved to S3. To get resolved locations call workflow status.
Args:
None
Returns:
list
|
gbdxtools/simpleworkflows.py
|
def list_workflow_outputs(self):
'''
Get a list of outputs from the workflow that are saved to S3. To get resolved locations call workflow status.
Args:
None
Returns:
list
'''
workflow_outputs = []
for task in self.tasks:
for output_port_name in task.outputs._portnames:
if task.outputs.__getattribute__(output_port_name).persist:
workflow_outputs.append(task.name + ':' + output_port_name)
return workflow_outputs
|
[
"Get",
"a",
"list",
"of",
"outputs",
"from",
"the",
"workflow",
"that",
"are",
"saved",
"to",
"S3",
".",
"To",
"get",
"resolved",
"locations",
"call",
"workflow",
"status",
".",
"Args",
":",
"None"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L428-L443
|
[
"def",
"list_workflow_outputs",
"(",
"self",
")",
":",
"workflow_outputs",
"=",
"[",
"]",
"for",
"task",
"in",
"self",
".",
"tasks",
":",
"for",
"output_port_name",
"in",
"task",
".",
"outputs",
".",
"_portnames",
":",
"if",
"task",
".",
"outputs",
".",
"__getattribute__",
"(",
"output_port_name",
")",
".",
"persist",
":",
"workflow_outputs",
".",
"append",
"(",
"task",
".",
"name",
"+",
"':'",
"+",
"output_port_name",
")",
"return",
"workflow_outputs"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
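Continuing the `savedata` sketch, `list_workflow_outputs` then reports each persisted port as a `task_name:port_name` string, and works before the workflow is launched:

```python
# Sketch: assumes `workflow` from the savedata example above.
print(workflow.list_workflow_outputs())
# e.g. ['AOP_Strip_Processor_<generated-suffix>:data']
```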
valid
|
Workflow.generate_workflow_description
|
Generate workflow json for launching the workflow against the gbdx api
Args:
None
Returns:
json string
|
gbdxtools/simpleworkflows.py
|
def generate_workflow_description(self):
'''
Generate workflow json for launching the workflow against the gbdx api
Args:
None
Returns:
json string
'''
if not self.tasks:
raise WorkflowError('Workflow contains no tasks, and cannot be executed.')
self.definition = self.workflow_skeleton()
if self.batch_values:
self.definition["batch_values"] = self.batch_values
all_input_port_values = [t.inputs.__getattribute__(input_port_name).value for t in self.tasks for
input_port_name in t.inputs._portnames]
for task in self.tasks:
# only include multiplex output ports in this task if other tasks refer to them in their inputs.
# 1. find the multiplex output port_names in this task
# 2. see if they are referred to in any other tasks inputs
# 3. If not, exclude them from the workflow_def
output_multiplex_ports_to_exclude = []
multiplex_output_port_names = [portname for portname in task.outputs._portnames if
task.outputs.__getattribute__(portname).is_multiplex]
for p in multiplex_output_port_names:
output_port_reference = 'source:' + task.name + ':' + p
if output_port_reference not in all_input_port_values:
output_multiplex_ports_to_exclude.append(p)
task_def = task.generate_task_workflow_json(
output_multiplex_ports_to_exclude=output_multiplex_ports_to_exclude)
self.definition['tasks'].append(task_def)
if self.callback:
self.definition['callback'] = self.callback
return self.definition
|
[
"Generate",
"workflow",
"json",
"for",
"launching",
"the",
"workflow",
"against",
"the",
"gbdx",
"api"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L445-L485
|
[
"def",
"generate_workflow_description",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"tasks",
":",
"raise",
"WorkflowError",
"(",
"'Workflow contains no tasks, and cannot be executed.'",
")",
"self",
".",
"definition",
"=",
"self",
".",
"workflow_skeleton",
"(",
")",
"if",
"self",
".",
"batch_values",
":",
"self",
".",
"definition",
"[",
"\"batch_values\"",
"]",
"=",
"self",
".",
"batch_values",
"all_input_port_values",
"=",
"[",
"t",
".",
"inputs",
".",
"__getattribute__",
"(",
"input_port_name",
")",
".",
"value",
"for",
"t",
"in",
"self",
".",
"tasks",
"for",
"input_port_name",
"in",
"t",
".",
"inputs",
".",
"_portnames",
"]",
"for",
"task",
"in",
"self",
".",
"tasks",
":",
"# only include multiplex output ports in this task if other tasks refer to them in their inputs.",
"# 1. find the multplex output port_names in this task",
"# 2. see if they are referred to in any other tasks inputs",
"# 3. If not, exclude them from the workflow_def",
"output_multiplex_ports_to_exclude",
"=",
"[",
"]",
"multiplex_output_port_names",
"=",
"[",
"portname",
"for",
"portname",
"in",
"task",
".",
"outputs",
".",
"_portnames",
"if",
"task",
".",
"outputs",
".",
"__getattribute__",
"(",
"portname",
")",
".",
"is_multiplex",
"]",
"for",
"p",
"in",
"multiplex_output_port_names",
":",
"output_port_reference",
"=",
"'source:'",
"+",
"task",
".",
"name",
"+",
"':'",
"+",
"p",
"if",
"output_port_reference",
"not",
"in",
"all_input_port_values",
":",
"output_multiplex_ports_to_exclude",
".",
"append",
"(",
"p",
")",
"task_def",
"=",
"task",
".",
"generate_task_workflow_json",
"(",
"output_multiplex_ports_to_exclude",
"=",
"output_multiplex_ports_to_exclude",
")",
"self",
".",
"definition",
"[",
"'tasks'",
"]",
".",
"append",
"(",
"task_def",
")",
"if",
"self",
".",
"callback",
":",
"self",
".",
"definition",
"[",
"'callback'",
"]",
"=",
"self",
".",
"callback",
"return",
"self",
".",
"definition"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
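Despite the docstring's "json string", the method returns the definition as a Python dict (`self.definition`), so it can be inspected or serialized before launch. A sketch, reusing the `workflow` from the earlier examples:

```python
# Sketch: inspect the workflow definition without launching anything.
import json

definition = workflow.generate_workflow_description()
print(json.dumps(definition, indent=2))  # {"tasks": [...], ...}
```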
valid
|
Workflow.execute
|
Execute the workflow.
Args:
None
Returns:
Workflow_id
|
gbdxtools/simpleworkflows.py
|
def execute(self):
'''
Execute the workflow.
Args:
None
Returns:
Workflow_id
'''
# if not self.tasks:
# raise WorkflowError('Workflow contains no tasks, and cannot be executed.')
# for task in self.tasks:
# self.definition['tasks'].append( task.generate_task_workflow_json() )
self.generate_workflow_description()
# hit batch workflow endpoint if batch values
if self.batch_values:
self.id = self.workflow.launch_batch_workflow(self.definition)
# use regular workflow endpoint if no batch values
else:
self.id = self.workflow.launch(self.definition)
return self.id
|
[
"Execute",
"the",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L487-L513
|
[
"def",
"execute",
"(",
"self",
")",
":",
"# if not self.tasks:",
"# raise WorkflowError('Workflow contains no tasks, and cannot be executed.')",
"# for task in self.tasks:",
"# self.definition['tasks'].append( task.generate_task_workflow_json() )",
"self",
".",
"generate_workflow_description",
"(",
")",
"# hit batch workflow endpoint if batch values",
"if",
"self",
".",
"batch_values",
":",
"self",
".",
"id",
"=",
"self",
".",
"workflow",
".",
"launch_batch_workflow",
"(",
"self",
".",
"definition",
")",
"# use regular workflow endpoint if no batch values",
"else",
":",
"self",
".",
"id",
"=",
"self",
".",
"workflow",
".",
"launch",
"(",
"self",
".",
"definition",
")",
"return",
"self",
".",
"id"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
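An end-to-end launch sketch under the same assumptions (credentials configured; task type, S3 path and folder are placeholders):

```python
# Sketch: build, persist an output, launch, keep the workflow id.
from gbdxtools import Interface

gbdx = Interface()
task = gbdx.Task("AOP_Strip_Processor", data="s3://bucket/path/to/imagery")
workflow = gbdx.Workflow([task])
workflow.savedata(task.outputs.data, location="my_project/run1")
workflow_id = workflow.execute()  # batch endpoint only if batch_values were set
print(workflow_id)
```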
valid
|
Workflow.task_ids
|
Get the task IDs of a running workflow
Args:
None
Returns:
List of task IDs
|
gbdxtools/simpleworkflows.py
|
def task_ids(self):
'''
Get the task IDs of a running workflow
Args:
None
Returns:
List of task IDs
'''
if not self.id:
raise WorkflowError('Workflow is not running. Cannot get task IDs.')
if self.batch_values:
raise NotImplementedError("Query Each Workflow Id within the Batch Workflow for task IDs.")
wf = self.workflow.get(self.id)
return [task['id'] for task in wf['tasks']]
|
[
"Get",
"the",
"task",
"IDs",
"of",
"a",
"running",
"workflow"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L516-L534
|
[
"def",
"task_ids",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"id",
":",
"raise",
"WorkflowError",
"(",
"'Workflow is not running. Cannot get task IDs.'",
")",
"if",
"self",
".",
"batch_values",
":",
"raise",
"NotImplementedError",
"(",
"\"Query Each Workflow Id within the Batch Workflow for task IDs.\"",
")",
"wf",
"=",
"self",
".",
"workflow",
".",
"get",
"(",
"self",
".",
"id",
")",
"return",
"[",
"task",
"[",
"'id'",
"]",
"for",
"task",
"in",
"wf",
"[",
"'tasks'",
"]",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
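A short follow-on sketch. Note that decorators are stripped in this dump, and `task_ids` (like `stdout` below, whose docstring shows attribute-style access) may well be a property in the released library, so both spellings are shown:

```python
# Sketch: assumes `workflow` was launched via execute() above.
ids = workflow.task_ids        # if exposed as a property
# ids = workflow.task_ids()   # if exposed as a plain method
print(ids)                     # e.g. ['4488895771403082552']
```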
valid
|
Workflow.cancel
|
Cancel a running workflow.
Args:
None
Returns:
None
|
gbdxtools/simpleworkflows.py
|
def cancel(self):
'''
Cancel a running workflow.
Args:
None
Returns:
None
'''
if not self.id:
raise WorkflowError('Workflow is not running. Cannot cancel.')
if self.batch_values:
self.workflow.batch_workflow_cancel(self.id)
else:
self.workflow.cancel(self.id)
|
[
"Cancel",
"a",
"running",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L541-L557
|
[
"def",
"cancel",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"id",
":",
"raise",
"WorkflowError",
"(",
"'Workflow is not running. Cannot cancel.'",
")",
"if",
"self",
".",
"batch_values",
":",
"self",
".",
"workflow",
".",
"batch_workflow_cancel",
"(",
"self",
".",
"id",
")",
"else",
":",
"self",
".",
"workflow",
".",
"cancel",
"(",
"self",
".",
"id",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
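And the matching teardown sketch:

```python
# Sketch: cancel a launched workflow; batch workflows route to the batch endpoint.
workflow.cancel()  # raises WorkflowError if execute() was never called
```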
valid
|
Workflow.stdout
|
Get stdout from all the tasks of a workflow.
Returns:
(list): tasks with their stdout
Example:
>>> workflow.stdout
[
{
"id": "4488895771403082552",
"taskType": "AOP_Strip_Processor",
"name": "Task1",
"stdout": "............"
}
]
|
gbdxtools/simpleworkflows.py
|
def stdout(self):
''' Get stdout from all the tasks of a workflow.
Returns:
(list): tasks with their stdout
Example:
>>> workflow.stdout
[
{
"id": "4488895771403082552",
"taskType": "AOP_Strip_Processor",
"name": "Task1",
"stdout": "............"
}
]
'''
if not self.id:
raise WorkflowError('Workflow is not running. Cannot get stdout.')
if self.batch_values:
raise NotImplementedError("Query Each Workflow Id within the Batch Workflow for stdout.")
wf = self.workflow.get(self.id)
stdout_list = []
for task in wf['tasks']:
stdout_list.append(
{
'id': task['id'],
'taskType': task['taskType'],
'name': task['name'],
'stdout': self.workflow.get_stdout(self.id, task['id'])
}
)
return stdout_list
|
[
"Get",
"stdout",
"from",
"all",
"the",
"tasks",
"of",
"a",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L726-L762
|
[
"def",
"stdout",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"id",
":",
"raise",
"WorkflowError",
"(",
"'Workflow is not running. Cannot get stdout.'",
")",
"if",
"self",
".",
"batch_values",
":",
"raise",
"NotImplementedError",
"(",
"\"Query Each Workflow Id within the Batch Workflow for stdout.\"",
")",
"wf",
"=",
"self",
".",
"workflow",
".",
"get",
"(",
"self",
".",
"id",
")",
"stdout_list",
"=",
"[",
"]",
"for",
"task",
"in",
"wf",
"[",
"'tasks'",
"]",
":",
"stdout_list",
".",
"append",
"(",
"{",
"'id'",
":",
"task",
"[",
"'id'",
"]",
",",
"'taskType'",
":",
"task",
"[",
"'taskType'",
"]",
",",
"'name'",
":",
"task",
"[",
"'name'",
"]",
",",
"'stdout'",
":",
"self",
".",
"workflow",
".",
"get_stdout",
"(",
"self",
".",
"id",
",",
"task",
"[",
"'id'",
"]",
")",
"}",
")",
"return",
"stdout_list"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
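The docstring's own example (`>>> workflow.stdout`, no parentheses) implies this is a property whose decorator was stripped in extraction. A sketch that tails each task's log:

```python
# Sketch: assumes `workflow` was launched via execute() above.
for entry in workflow.stdout:
    print(entry["name"], entry["taskType"])
    print(entry["stdout"][-200:])  # last part of each task's log
```

`stderr` (next row) follows the identical pattern with `entry["stderr"]`.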
valid
|
Workflow.stderr
|
Get stderr from all the tasks of a workflow.
Returns:
(list): tasks with their stderr
Example:
>>> workflow.stderr
[
{
"id": "4488895771403082552",
"taskType": "AOP_Strip_Processor",
"name": "Task1",
"stderr": "............"
}
]
|
gbdxtools/simpleworkflows.py
|
def stderr(self):
'''Get stderr from all the tasks of a workflow.
Returns:
(list): tasks with their stderr
Example:
>>> workflow.stderr
[
{
"id": "4488895771403082552",
"taskType": "AOP_Strip_Processor",
"name": "Task1",
"stderr": "............"
}
]
'''
if not self.id:
raise WorkflowError('Workflow is not running. Cannot get stderr.')
if self.batch_values:
raise NotImplementedError("Query Each Workflow Id within the Batch Workflow for stderr.")
wf = self.workflow.get(self.id)
stderr_list = []
for task in wf['tasks']:
stderr_list.append(
{
'id': task['id'],
'taskType': task['taskType'],
'name': task['name'],
'stderr': self.workflow.get_stderr(self.id, task['id'])
}
)
return stderr_list
|
[
"Get",
"stderr",
"from",
"all",
"the",
"tasks",
"of",
"a",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simpleworkflows.py#L769-L806
|
[
"def",
"stderr",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"id",
":",
"raise",
"WorkflowError",
"(",
"'Workflow is not running. Cannot get stderr.'",
")",
"if",
"self",
".",
"batch_values",
":",
"raise",
"NotImplementedError",
"(",
"\"Query Each Workflow Id within the Batch Workflow for stderr.\"",
")",
"wf",
"=",
"self",
".",
"workflow",
".",
"get",
"(",
"self",
".",
"id",
")",
"stderr_list",
"=",
"[",
"]",
"for",
"task",
"in",
"wf",
"[",
"'tasks'",
"]",
":",
"stderr_list",
".",
"append",
"(",
"{",
"'id'",
":",
"task",
"[",
"'id'",
"]",
",",
"'taskType'",
":",
"task",
"[",
"'taskType'",
"]",
",",
"'name'",
":",
"task",
"[",
"'name'",
"]",
",",
"'stderr'",
":",
"self",
".",
"workflow",
".",
"get_stderr",
"(",
"self",
".",
"id",
",",
"task",
"[",
"'id'",
"]",
")",
"}",
")",
"return",
"stderr_list"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
VectorLayer.layers
|
Renders the list of layers to add to the map.
Returns:
layers (list): list of layer entries suitable for use in mapbox-gl 'map.addLayer()' call
|
gbdxtools/vector_layers.py
|
def layers(self):
""" Renders the list of layers to add to the map.
Returns:
layers (list): list of layer entries suitable for use in mapbox-gl 'map.addLayer()' call
"""
layers = [self._layer_def(style) for style in self.styles]
return layers
|
[
"Renders",
"the",
"list",
"of",
"layers",
"to",
"add",
"to",
"the",
"map",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vector_layers.py#L51-L58
|
[
"def",
"layers",
"(",
"self",
")",
":",
"layers",
"=",
"[",
"self",
".",
"_layer_def",
"(",
"style",
")",
"for",
"style",
"in",
"self",
".",
"styles",
"]",
"return",
"layers"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
get_proj
|
Helper method for handling projection codes that are unknown to pyproj
Args:
prj_code (str): an epsg proj code
Returns:
projection: a pyproj projection
|
gbdxtools/rda/util.py
|
def get_proj(prj_code):
"""
Helper method for handling projection codes that are unknown to pyproj
Args:
prj_code (str): an epsg proj code
Returns:
projection: a pyproj projection
"""
if prj_code in CUSTOM_PRJ:
proj = pyproj.Proj(CUSTOM_PRJ[prj_code])
else:
proj = pyproj.Proj(init=prj_code)
return proj
|
[
"Helper",
"method",
"for",
"handling",
"projection",
"codes",
"that",
"are",
"unknown",
"to",
"pyproj"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/rda/util.py#L52-L66
|
[
"def",
"get_proj",
"(",
"prj_code",
")",
":",
"if",
"prj_code",
"in",
"CUSTOM_PRJ",
":",
"proj",
"=",
"pyproj",
".",
"Proj",
"(",
"CUSTOM_PRJ",
"[",
"prj_code",
"]",
")",
"else",
":",
"proj",
"=",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"prj_code",
")",
"return",
"proj"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
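A usage sketch; it assumes the pyproj 1.x-era `Proj(init=...)` semantics that the function itself relies on (newer pyproj deprecates `init=`):

```python
# Sketch: known EPSG codes fall through to pyproj; CUSTOM_PRJ handles the rest.
from gbdxtools.rda.util import get_proj

proj = get_proj("EPSG:3857")
x, y = proj(-105.0, 40.0)  # lon/lat in degrees -> web-mercator metres
print(x, y)
```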
valid
|
preview
|
Show a slippy map preview of the image. Requires IPython.
Args:
image (image): image object to display
zoom (int): zoom level to initialize the map, default is 16
center (list): center coordinates to initialize the map, defaults to center of image
bands (list): bands of image to display, defaults to the image's default RGB bands
|
gbdxtools/rda/util.py
|
def preview(image, **kwargs):
''' Show a slippy map preview of the image. Requires IPython.
Args:
image (image): image object to display
zoom (int): zoom level to initialize the map, default is 16
center (list): center coordinates to initialize the map, defaults to center of image
bands (list): bands of image to display, defaults to the image's default RGB bands
'''
try:
from IPython.display import Javascript, HTML, display
from gbdxtools.rda.interface import RDA
from gbdxtools import Interface
gbdx = Interface()
except:
print("IPython is required to produce maps.")
return
zoom = kwargs.get("zoom", 16)
bands = kwargs.get("bands")
if bands is None:
bands = image._rgb_bands
wgs84_bounds = kwargs.get("bounds", list(loads(image.metadata["image"]["imageBoundsWGS84"]).bounds))
center = kwargs.get("center", list(shape(image).centroid.bounds[0:2]))
if image.proj != 'EPSG:4326':
code = image.proj.split(':')[1]
conn = gbdx.gbdx_connection
proj_info = conn.get('https://ughlicoordinates.geobigdata.io/ughli/v1/projinfo/{}'.format(code)).json()
tfm = partial(pyproj.transform, pyproj.Proj(init='EPSG:4326'), pyproj.Proj(init=image.proj))
bounds = list(ops.transform(tfm, box(*wgs84_bounds)).bounds)
else:
proj_info = {}
bounds = wgs84_bounds
# Applying DRA to a DRA'ed image looks bad, skip if already in graph
if not image.options.get('dra'):
rda = RDA()
# Need some simple DRA to get the image in range for display.
dra = rda.HistogramDRA(image)
image = dra.aoi(bbox=image.bounds)
graph_id = image.rda_id
node_id = image.rda.graph()['nodes'][0]['id']
map_id = "map_{}".format(str(int(time.time())))
scales = ','.join(['1'] * len(bands))
offsets = ','.join(['0'] * len(bands))
display(HTML(Template('''
<div id="$map_id"/>
<link href='https://openlayers.org/en/v4.6.4/css/ol.css' rel='stylesheet' />
<script src="https://cdn.polyfill.io/v2/polyfill.min.js?features=requestAnimationFrame,Element.prototype.classList,URL"></script>
<style>body{margin:0;padding:0;}#$map_id{position:relative;top:0;bottom:0;width:100%;height:400px;}</style>
<style></style>
''').substitute({"map_id": map_id})))
js = Template("""
require.config({
paths: {
oljs: 'https://cdnjs.cloudflare.com/ajax/libs/openlayers/4.6.4/ol',
proj4: 'https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.4.4/proj4'
}
});
require(['oljs', 'proj4'], function(oljs, proj4) {
oljs.proj.setProj4(proj4)
var md = $md;
var georef = $georef;
var graphId = '$graphId';
var nodeId = '$nodeId';
var extents = $bounds;
var x1 = md.minTileX * md.tileXSize;
var y1 = ((md.minTileY + md.numYTiles) * md.tileYSize + md.tileYSize);
var x2 = ((md.minTileX + md.numXTiles) * md.tileXSize + md.tileXSize);
var y2 = md.minTileY * md.tileYSize;
var tileLayerResolutions = [georef.scaleX];
var url = '$url' + '/tile/';
url += graphId + '/' + nodeId;
url += "/{x}/{y}.png?token=$token&display_bands=$bands&display_scales=$scales&display_offsets=$offsets";
var proj = '$proj';
var projInfo = $projInfo;
if ( proj !== 'EPSG:4326' ) {
var proj4def = projInfo["proj4"];
proj4.defs(proj, proj4def);
var area = projInfo["area_of_use"];
var bbox = [area["area_west_bound_lon"], area["area_south_bound_lat"],
area["area_east_bound_lon"], area["area_north_bound_lat"]]
var projection = oljs.proj.get(proj);
var fromLonLat = oljs.proj.getTransform('EPSG:4326', projection);
var extent = oljs.extent.applyTransform(
[bbox[0], bbox[1], bbox[2], bbox[3]], fromLonLat);
projection.setExtent(extent);
} else {
var projection = oljs.proj.get(proj);
}
var rda = new oljs.layer.Tile({
title: 'RDA',
opacity: 1,
extent: extents,
source: new oljs.source.TileImage({
crossOrigin: null,
projection: projection,
extent: extents,
tileGrid: new oljs.tilegrid.TileGrid({
extent: extents,
origin: [extents[0], extents[3]],
resolutions: tileLayerResolutions,
tileSize: [md.tileXSize, md.tileYSize],
}),
tileUrlFunction: function (coordinate) {
if (coordinate === null) return undefined;
const x = coordinate[1] + md.minTileX;
const y = -(coordinate[2] + 1 - md.minTileY);
if (x < md.minTileX || x > md.maxTileX) return undefined;
if (y < md.minTileY || y > md.maxTileY) return undefined;
return url.replace('{x}', x).replace('{y}', y);
}
})
});
var map = new oljs.Map({
layers: [ rda ],
target: '$map_id',
view: new oljs.View({
projection: projection,
center: $center,
zoom: $zoom
})
});
});
""").substitute({
"map_id": map_id,
"proj": image.proj,
"projInfo": json.dumps(proj_info),
"graphId": graph_id,
"bounds": bounds,
"bands": ",".join(map(str, bands)),
"nodeId": node_id,
"md": json.dumps(image.metadata["image"]),
"georef": json.dumps(image.metadata["georef"]),
"center": center,
"zoom": zoom,
"token": gbdx.gbdx_connection.access_token,
"scales": scales,
"offsets": offsets,
"url": VIRTUAL_RDA_URL
})
display(Javascript(js))
|
[
"Show",
"a",
"slippy",
"map",
"preview",
"of",
"the",
"image",
".",
"Requires",
"iPython",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/rda/util.py#L69-L221
|
[
"def",
"preview",
"(",
"image",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"from",
"IPython",
".",
"display",
"import",
"Javascript",
",",
"HTML",
",",
"display",
"from",
"gbdxtools",
".",
"rda",
".",
"interface",
"import",
"RDA",
"from",
"gbdxtools",
"import",
"Interface",
"gbdx",
"=",
"Interface",
"(",
")",
"except",
":",
"print",
"(",
"\"IPython is required to produce maps.\"",
")",
"return",
"zoom",
"=",
"kwargs",
".",
"get",
"(",
"\"zoom\"",
",",
"16",
")",
"bands",
"=",
"kwargs",
".",
"get",
"(",
"\"bands\"",
")",
"if",
"bands",
"is",
"None",
":",
"bands",
"=",
"image",
".",
"_rgb_bands",
"wgs84_bounds",
"=",
"kwargs",
".",
"get",
"(",
"\"bounds\"",
",",
"list",
"(",
"loads",
"(",
"image",
".",
"metadata",
"[",
"\"image\"",
"]",
"[",
"\"imageBoundsWGS84\"",
"]",
")",
".",
"bounds",
")",
")",
"center",
"=",
"kwargs",
".",
"get",
"(",
"\"center\"",
",",
"list",
"(",
"shape",
"(",
"image",
")",
".",
"centroid",
".",
"bounds",
"[",
"0",
":",
"2",
"]",
")",
")",
"if",
"image",
".",
"proj",
"!=",
"'EPSG:4326'",
":",
"code",
"=",
"image",
".",
"proj",
".",
"split",
"(",
"':'",
")",
"[",
"1",
"]",
"conn",
"=",
"gbdx",
".",
"gbdx_connection",
"proj_info",
"=",
"conn",
".",
"get",
"(",
"'https://ughlicoordinates.geobigdata.io/ughli/v1/projinfo/{}'",
".",
"format",
"(",
"code",
")",
")",
".",
"json",
"(",
")",
"tfm",
"=",
"partial",
"(",
"pyproj",
".",
"transform",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"'EPSG:4326'",
")",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"image",
".",
"proj",
")",
")",
"bounds",
"=",
"list",
"(",
"ops",
".",
"transform",
"(",
"tfm",
",",
"box",
"(",
"*",
"wgs84_bounds",
")",
")",
".",
"bounds",
")",
"else",
":",
"proj_info",
"=",
"{",
"}",
"bounds",
"=",
"wgs84_bounds",
"# Applying DRA to a DRA'ed image looks bad, skip if already in graph",
"if",
"not",
"image",
".",
"options",
".",
"get",
"(",
"'dra'",
")",
":",
"rda",
"=",
"RDA",
"(",
")",
"# Need some simple DRA to get the image in range for display.",
"dra",
"=",
"rda",
".",
"HistogramDRA",
"(",
"image",
")",
"image",
"=",
"dra",
".",
"aoi",
"(",
"bbox",
"=",
"image",
".",
"bounds",
")",
"graph_id",
"=",
"image",
".",
"rda_id",
"node_id",
"=",
"image",
".",
"rda",
".",
"graph",
"(",
")",
"[",
"'nodes'",
"]",
"[",
"0",
"]",
"[",
"'id'",
"]",
"map_id",
"=",
"\"map_{}\"",
".",
"format",
"(",
"str",
"(",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
")",
"scales",
"=",
"','",
".",
"join",
"(",
"[",
"'1'",
"]",
"*",
"len",
"(",
"bands",
")",
")",
"offsets",
"=",
"','",
".",
"join",
"(",
"[",
"'0'",
"]",
"*",
"len",
"(",
"bands",
")",
")",
"display",
"(",
"HTML",
"(",
"Template",
"(",
"'''\n <div id=\"$map_id\"/>\n <link href='https://openlayers.org/en/v4.6.4/css/ol.css' rel='stylesheet' />\n <script src=\"https://cdn.polyfill.io/v2/polyfill.min.js?features=requestAnimationFrame,Element.prototype.classList,URL\"></script>\n <style>body{margin:0;padding:0;}#$map_id{position:relative;top:0;bottom:0;width:100%;height:400px;}</style>\n <style></style>\n '''",
")",
".",
"substitute",
"(",
"{",
"\"map_id\"",
":",
"map_id",
"}",
")",
")",
")",
"js",
"=",
"Template",
"(",
"\"\"\"\n require.config({\n paths: {\n oljs: 'https://cdnjs.cloudflare.com/ajax/libs/openlayers/4.6.4/ol',\n proj4: 'https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.4.4/proj4'\n }\n });\n\n require(['oljs', 'proj4'], function(oljs, proj4) {\n oljs.proj.setProj4(proj4)\n var md = $md;\n var georef = $georef;\n var graphId = '$graphId';\n var nodeId = '$nodeId';\n var extents = $bounds;\n\n var x1 = md.minTileX * md.tileXSize;\n var y1 = ((md.minTileY + md.numYTiles) * md.tileYSize + md.tileYSize);\n var x2 = ((md.minTileX + md.numXTiles) * md.tileXSize + md.tileXSize);\n var y2 = md.minTileY * md.tileYSize;\n var tileLayerResolutions = [georef.scaleX];\n\n var url = '$url' + '/tile/';\n url += graphId + '/' + nodeId;\n url += \"/{x}/{y}.png?token=$token&display_bands=$bands&display_scales=$scales&display_offsets=$offsets\";\n\n var proj = '$proj';\n var projInfo = $projInfo;\n\n if ( proj !== 'EPSG:4326' ) {\n var proj4def = projInfo[\"proj4\"];\n proj4.defs(proj, proj4def);\n var area = projInfo[\"area_of_use\"];\n var bbox = [area[\"area_west_bound_lon\"], area[\"area_south_bound_lat\"],\n area[\"area_east_bound_lon\"], area[\"area_north_bound_lat\"]]\n var projection = oljs.proj.get(proj);\n var fromLonLat = oljs.proj.getTransform('EPSG:4326', projection);\n var extent = oljs.extent.applyTransform(\n [bbox[0], bbox[1], bbox[2], bbox[3]], fromLonLat);\n projection.setExtent(extent);\n } else {\n var projection = oljs.proj.get(proj);\n }\n\n var rda = new oljs.layer.Tile({\n title: 'RDA',\n opacity: 1,\n extent: extents,\n source: new oljs.source.TileImage({\n crossOrigin: null,\n projection: projection,\n extent: extents,\n\n tileGrid: new oljs.tilegrid.TileGrid({\n extent: extents,\n origin: [extents[0], extents[3]],\n resolutions: tileLayerResolutions,\n tileSize: [md.tileXSize, md.tileYSize],\n }),\n tileUrlFunction: function (coordinate) {\n if (coordinate === null) return undefined;\n const x = coordinate[1] + md.minTileX;\n const y = -(coordinate[2] + 1 - md.minTileY);\n if (x < md.minTileX || x > md.maxTileX) return undefined;\n if (y < md.minTileY || y > md.maxTileY) return undefined;\n return url.replace('{x}', x).replace('{y}', y);\n }\n })\n });\n\n var map = new oljs.Map({\n layers: [ rda ],\n target: '$map_id',\n view: new oljs.View({\n projection: projection,\n center: $center,\n zoom: $zoom\n })\n });\n });\n \"\"\"",
")",
".",
"substitute",
"(",
"{",
"\"map_id\"",
":",
"map_id",
",",
"\"proj\"",
":",
"image",
".",
"proj",
",",
"\"projInfo\"",
":",
"json",
".",
"dumps",
"(",
"proj_info",
")",
",",
"\"graphId\"",
":",
"graph_id",
",",
"\"bounds\"",
":",
"bounds",
",",
"\"bands\"",
":",
"\",\"",
".",
"join",
"(",
"map",
"(",
"str",
",",
"bands",
")",
")",
",",
"\"nodeId\"",
":",
"node_id",
",",
"\"md\"",
":",
"json",
".",
"dumps",
"(",
"image",
".",
"metadata",
"[",
"\"image\"",
"]",
")",
",",
"\"georef\"",
":",
"json",
".",
"dumps",
"(",
"image",
".",
"metadata",
"[",
"\"georef\"",
"]",
")",
",",
"\"center\"",
":",
"center",
",",
"\"zoom\"",
":",
"zoom",
",",
"\"token\"",
":",
"gbdx",
".",
"gbdx_connection",
".",
"access_token",
",",
"\"scales\"",
":",
"scales",
",",
"\"offsets\"",
":",
"offsets",
",",
"\"url\"",
":",
"VIRTUAL_RDA_URL",
"}",
")",
"display",
"(",
"Javascript",
"(",
"js",
")",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
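A notebook sketch; `CatalogImage` is the usual gbdxtools entry point for RDA-backed images, and the catalog ID is a placeholder:

```python
# Sketch: run inside Jupyter; needs IPython, gbdxtools and GBDX credentials.
from gbdxtools import CatalogImage
from gbdxtools.rda.util import preview  # import path per the record above

img = CatalogImage("YOUR_CATALOG_ID")
preview(img, zoom=13)  # bands/center default to the image's RGB bands and centroid
```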
valid
|
calc_toa_gain_offset
|
Compute (gain, offset) tuples for each band of the specified image metadata
|
gbdxtools/rda/util.py
|
def calc_toa_gain_offset(meta):
"""
Compute (gain, offset) tuples for each band of the specified image metadata
"""
# Set satellite index to look up cal factors
sat_index = meta['satid'].upper() + "_" + meta['bandid'].upper()
# Set scale for at sensor radiance
# Eq is:
# L = GAIN * DN * (ACF/EBW) + Offset
# ACF abscal factor from meta data
# EBW effectiveBandwidth from meta data
# Gain provided by abscal from const
# Offset provided by abscal from const
acf = np.asarray(meta['abscalfactor']) # Should be nbands length
ebw = np.asarray(meta['effbandwidth']) # Should be nbands length
gain = np.asarray(constants.DG_ABSCAL_GAIN[sat_index])
scale = (acf / ebw) * gain
offset = np.asarray(constants.DG_ABSCAL_OFFSET[sat_index])
e_sun_index = meta['satid'].upper() + "_" + meta['bandid'].upper()
e_sun = np.asarray(constants.DG_ESUN[e_sun_index])
sun = ephem.Sun()
img_obs = ephem.Observer()
img_obs.lon = meta['latlonhae'][1]
img_obs.lat = meta['latlonhae'][0]
img_obs.elevation = meta['latlonhae'][2]
img_obs.date = datetime.datetime.fromtimestamp(meta['img_datetime_obj_utc']['$date'] / 1000.0).strftime(
'%Y-%m-%d %H:%M:%S.%f')
sun.compute(img_obs)
d_es = sun.earth_distance
# Pull sun elevation from the image metadata
# theta_s can be zenith or elevation - the calc below will use either
# a cos or a sin, respectively
# theta_s = float(self.meta_dg.IMD.IMAGE.MEANSUNEL)
theta_s = 90 - float(meta['mean_sun_el'])
scale2 = (d_es ** 2 * np.pi) / (e_sun * np.cos(np.deg2rad(theta_s)))
# Return scaled data
# Radiance = Scale * Image + offset, Reflectance = Radiance * Scale2
return zip(scale, scale2, offset)
|
[
"Compute",
"(",
"gain",
"offset",
")",
"tuples",
"for",
"each",
"band",
"of",
"the",
"specified",
"image",
"metadata"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/rda/util.py#L246-L287
|
[
"def",
"calc_toa_gain_offset",
"(",
"meta",
")",
":",
"# Set satellite index to look up cal factors",
"sat_index",
"=",
"meta",
"[",
"'satid'",
"]",
".",
"upper",
"(",
")",
"+",
"\"_\"",
"+",
"meta",
"[",
"'bandid'",
"]",
".",
"upper",
"(",
")",
"# Set scale for at sensor radiance",
"# Eq is:",
"# L = GAIN * DN * (ACF/EBW) + Offset",
"# ACF abscal factor from meta data",
"# EBW effectiveBandwidth from meta data",
"# Gain provided by abscal from const",
"# Offset provided by abscal from const",
"acf",
"=",
"np",
".",
"asarray",
"(",
"meta",
"[",
"'abscalfactor'",
"]",
")",
"# Should be nbands length",
"ebw",
"=",
"np",
".",
"asarray",
"(",
"meta",
"[",
"'effbandwidth'",
"]",
")",
"# Should be nbands length",
"gain",
"=",
"np",
".",
"asarray",
"(",
"constants",
".",
"DG_ABSCAL_GAIN",
"[",
"sat_index",
"]",
")",
"scale",
"=",
"(",
"acf",
"/",
"ebw",
")",
"*",
"gain",
"offset",
"=",
"np",
".",
"asarray",
"(",
"constants",
".",
"DG_ABSCAL_OFFSET",
"[",
"sat_index",
"]",
")",
"e_sun_index",
"=",
"meta",
"[",
"'satid'",
"]",
".",
"upper",
"(",
")",
"+",
"\"_\"",
"+",
"meta",
"[",
"'bandid'",
"]",
".",
"upper",
"(",
")",
"e_sun",
"=",
"np",
".",
"asarray",
"(",
"constants",
".",
"DG_ESUN",
"[",
"e_sun_index",
"]",
")",
"sun",
"=",
"ephem",
".",
"Sun",
"(",
")",
"img_obs",
"=",
"ephem",
".",
"Observer",
"(",
")",
"img_obs",
".",
"lon",
"=",
"meta",
"[",
"'latlonhae'",
"]",
"[",
"1",
"]",
"img_obs",
".",
"lat",
"=",
"meta",
"[",
"'latlonhae'",
"]",
"[",
"0",
"]",
"img_obs",
".",
"elevation",
"=",
"meta",
"[",
"'latlonhae'",
"]",
"[",
"2",
"]",
"img_obs",
".",
"date",
"=",
"datetime",
".",
"datetime",
".",
"fromtimestamp",
"(",
"meta",
"[",
"'img_datetime_obj_utc'",
"]",
"[",
"'$date'",
"]",
"/",
"1000.0",
")",
".",
"strftime",
"(",
"'%Y-%m-%d %H:%M:%S.%f'",
")",
"sun",
".",
"compute",
"(",
"img_obs",
")",
"d_es",
"=",
"sun",
".",
"earth_distance",
"# Pull sun elevation from the image metadata",
"# theta_s can be zenith or elevation - the calc below will us either",
"# a cos or s in respectively",
"# theta_s = float(self.meta_dg.IMD.IMAGE.MEANSUNEL)",
"theta_s",
"=",
"90",
"-",
"float",
"(",
"meta",
"[",
"'mean_sun_el'",
"]",
")",
"scale2",
"=",
"(",
"d_es",
"**",
"2",
"*",
"np",
".",
"pi",
")",
"/",
"(",
"e_sun",
"*",
"np",
".",
"cos",
"(",
"np",
".",
"deg2rad",
"(",
"theta_s",
")",
")",
")",
"# Return scaled data",
"# Radiance = Scale * Image + offset, Reflectance = Radiance * Scale2",
"return",
"zip",
"(",
"scale",
",",
"scale2",
",",
"offset",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
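The returned triples plug into the equations quoted in the comments: radiance `L = DN * scale + offset`, then TOA reflectance `rho = L * scale2`. A per-band application sketch, where `meta` is assumed to be the DigitalGlobe metadata dict described above and `dn` a `(bands, rows, cols)` digital-number array:

```python
# Sketch: apply the per-band (scale, scale2, offset) triples from above.
# `meta` and `dn` are assumed inputs, not defined here.
import numpy as np
from gbdxtools.rda.util import calc_toa_gain_offset

toa_bands = []
for band, (scale, scale2, offset) in enumerate(calc_toa_gain_offset(meta)):
    radiance = dn[band] * scale + offset  # at-sensor radiance
    toa_bands.append(radiance * scale2)   # top-of-atmosphere reflectance
toa = np.stack(toa_bands)
```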
valid
|
RDAImage.materialize
|
Materializes images into gbdx user buckets in s3.
Note: This method is only available to RDA based image classes.
Args:
node (str): the node in the graph to materialize
bounds (list): optional bbox for cropping what gets materialized in s3
out_format (str): VECTOR_TILE, VECTOR, TIF, TILE_STREAM
callback (str): a callback url like an `sns://`
Returns:
job_id (str): the job_id of the materialization
|
gbdxtools/images/rda_image.py
|
def materialize(self, node=None, bounds=None, callback=None, out_format='TILE_STREAM', **kwargs):
"""
Materializes images into gbdx user buckets in s3.
Note: This method is only available to RDA based image classes.
Args:
node (str): the node in the graph to materialize
bounds (list): optional bbox for cropping what gets materialized in s3
out_format (str): VECTOR_TILE, VECTOR, TIF, TILE_STREAM
callback (str): a callback url like an `sns://`
Returns:
job_id (str): the job_id of the materialization
"""
kwargs.update({
"node": node,
"bounds": bounds,
"callback": callback,
"out_format": out_format
})
return self.rda._materialize(**kwargs)
|
[
"Materializes",
"images",
"into",
"gbdx",
"user",
"buckets",
"in",
"s3",
".",
"Note",
":",
"This",
"method",
"is",
"only",
"available",
"to",
"RDA",
"based",
"image",
"classes",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/rda_image.py#L177-L196
|
[
"def",
"materialize",
"(",
"self",
",",
"node",
"=",
"None",
",",
"bounds",
"=",
"None",
",",
"callback",
"=",
"None",
",",
"out_format",
"=",
"'TILE_STREAM'",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"update",
"(",
"{",
"\"node\"",
":",
"node",
",",
"\"bounds\"",
":",
"bounds",
",",
"\"callback\"",
":",
"callback",
",",
"\"out_format\"",
":",
"out_format",
"}",
")",
"return",
"self",
".",
"rda",
".",
"_materialize",
"(",
"*",
"*",
"kwargs",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
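A hedged usage sketch for the record above; it assumes `CatalogImage` is one of the RDA-based image classes the note refers to, and the catalog id and SNS topic are placeholders.

from gbdxtools import CatalogImage

img = CatalogImage('10400100384B1B00')               # hypothetical catalog id
job_id = img.materialize(bounds=list(img.bounds),    # crop to the full image bbox
                         out_format='TIF',
                         callback='sns://my-topic')  # placeholder callback url
print(job_id)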
valid
|
TaskRegistry.list
|
Lists available and visible GBDX tasks.
Returns:
List of tasks
|
gbdxtools/task_registry.py
|
def list(self):
"""Lists available and visible GBDX tasks.
Returns:
List of tasks
"""
r = self.gbdx_connection.get(self._base_url)
raise_for_status(r)
return r.json()['tasks']
|
def list(self):
"""Lists available and visible GBDX tasks.
Returns:
List of tasks
"""
r = self.gbdx_connection.get(self._base_url)
raise_for_status(r)
return r.json()['tasks']
|
[
"Lists",
"available",
"and",
"visible",
"GBDX",
"tasks",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/task_registry.py#L31-L40
|
[
"def",
"list",
"(",
"self",
")",
":",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"self",
".",
"_base_url",
")",
"raise_for_status",
"(",
"r",
")",
"return",
"r",
".",
"json",
"(",
")",
"[",
"'tasks'",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
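A short usage sketch via the top-level `Interface`, which exposes a `TaskRegistry` instance as `task_registry`:

from gbdxtools import Interface

gbdx = Interface()
tasks = gbdx.task_registry.list()
print(len(tasks), tasks[:3])   # count of visible tasks plus a small sample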
valid
|
TaskRegistry.register
|
Registers a new GBDX task.
Args:
task_json (dict): Dictionary representing task definition.
json_filename (str): A full path of a file with json representing the task definition.
Only one out of task_json and json_filename should be provided.
Returns:
Response (str).
|
gbdxtools/task_registry.py
|
def register(self, task_json=None, json_filename=None):
"""Registers a new GBDX task.
Args:
task_json (dict): Dictionary representing task definition.
json_filename (str): A full path of a file with json representing the task definition.
Only one out of task_json and json_filename should be provided.
Returns:
Response (str).
"""
if not task_json and not json_filename:
raise Exception("Both task json and filename can't be none.")
if task_json and json_filename:
raise Exception("Both task json and filename can't be provided.")
if json_filename:
task_json = json.load(open(json_filename, 'r'))
r = self.gbdx_connection.post(self._base_url, json=task_json)
raise_for_status(r)
return r.text
|
def register(self, task_json=None, json_filename=None):
"""Registers a new GBDX task.
Args:
task_json (dict): Dictionary representing task definition.
json_filename (str): A full path of a file with json representing the task definition.
Only one out of task_json and json_filename should be provided.
Returns:
Response (str).
"""
if not task_json and not json_filename:
raise Exception("Both task json and filename can't be none.")
if task_json and json_filename:
raise Exception("Both task json and filename can't be provided.")
if json_filename:
task_json = json.load(open(json_filename, 'r'))
r = self.gbdx_connection.post(self._base_url, json=task_json)
raise_for_status(r)
return r.text
|
[
"Registers",
"a",
"new",
"GBDX",
"task",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/task_registry.py#L42-L64
|
[
"def",
"register",
"(",
"self",
",",
"task_json",
"=",
"None",
",",
"json_filename",
"=",
"None",
")",
":",
"if",
"not",
"task_json",
"and",
"not",
"json_filename",
":",
"raise",
"Exception",
"(",
"\"Both task json and filename can't be none.\"",
")",
"if",
"task_json",
"and",
"json_filename",
":",
"raise",
"Exception",
"(",
"\"Both task json and filename can't be provided.\"",
")",
"if",
"json_filename",
":",
"task_json",
"=",
"json",
".",
"load",
"(",
"open",
"(",
"json_filename",
",",
"'r'",
")",
")",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"self",
".",
"_base_url",
",",
"json",
"=",
"task_json",
")",
"raise_for_status",
"(",
"r",
")",
"return",
"r",
".",
"text"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
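A hedged sketch of both input modes; the task definition below is a placeholder skeleton, not a complete or validated GBDX task schema.

from gbdxtools import Interface

gbdx = Interface()
task_def = {
    "name": "my-demo-task",              # hypothetical task name
    "description": "demo task",
    "properties": {"isPublic": False},
    "inputPortDescriptors": [],
    "outputPortDescriptors": [],
    "containerDescriptors": [],
}
print(gbdx.task_registry.register(task_json=task_def))
# equivalently, from a definition file on disk:
# print(gbdx.task_registry.register(json_filename='my_task.json'))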
valid
|
TaskRegistry.get_definition
|
Gets definition of a registered GBDX task.
Args:
task_name (str): Task name.
Returns:
Dictionary representing the task definition.
|
gbdxtools/task_registry.py
|
def get_definition(self, task_name):
"""Gets definition of a registered GBDX task.
Args:
task_name (str): Task name.
Returns:
Dictionary representing the task definition.
"""
r = self.gbdx_connection.get(self._base_url + '/' + task_name)
raise_for_status(r)
return r.json()
|
def get_definition(self, task_name):
"""Gets definition of a registered GBDX task.
Args:
task_name (str): Task name.
Returns:
Dictionary representing the task definition.
"""
r = self.gbdx_connection.get(self._base_url + '/' + task_name)
raise_for_status(r)
return r.json()
|
[
"Gets",
"definition",
"of",
"a",
"registered",
"GBDX",
"task",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/task_registry.py#L66-L78
|
[
"def",
"get_definition",
"(",
"self",
",",
"task_name",
")",
":",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"self",
".",
"_base_url",
"+",
"'/'",
"+",
"task_name",
")",
"raise_for_status",
"(",
"r",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
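Usage sketch; `AOP_Strip_Processor` is a well-known public GBDX task name, and any registered name works:

from gbdxtools import Interface

gbdx = Interface()
defn = gbdx.task_registry.get_definition('AOP_Strip_Processor')
print(defn.get('description'))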
valid
|
TaskRegistry.delete
|
Deletes a GBDX task.
Args:
task_name (str): Task name.
Returns:
Response (str).
|
gbdxtools/task_registry.py
|
def delete(self, task_name):
"""Deletes a GBDX task.
Args:
task_name (str): Task name.
Returns:
Response (str).
"""
r = self.gbdx_connection.delete(self._base_url + '/' + task_name)
raise_for_status(r)
return r.text
|
def delete(self, task_name):
"""Deletes a GBDX task.
Args:
task_name (str): Task name.
Returns:
Response (str).
"""
r = self.gbdx_connection.delete(self._base_url + '/' + task_name)
raise_for_status(r)
return r.text
|
[
"Deletes",
"a",
"GBDX",
"task",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/task_registry.py#L80-L92
|
[
"def",
"delete",
"(",
"self",
",",
"task_name",
")",
":",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"delete",
"(",
"self",
".",
"_base_url",
"+",
"'/'",
"+",
"task_name",
")",
"raise_for_status",
"(",
"r",
")",
"return",
"r",
".",
"text"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
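One-line usage sketch; the task name is a placeholder for a task you own:

from gbdxtools import Interface

gbdx = Interface()
print(gbdx.task_registry.delete('my-demo-task'))   # hypothetical task name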
valid
|
TaskRegistry.update
|
Updates a GBDX task.
Args:
task_name (str): Task name.
task_json (dict): Dictionary representing updated task definition.
Returns:
Dictionary representing the updated task definition.
|
gbdxtools/task_registry.py
|
def update(self, task_name, task_json):
"""Updates a GBDX task.
Args:
task_name (str): Task name.
task_json (dict): Dictionary representing updated task definition.
Returns:
Dictionary representing the updated task definition.
"""
r = self.gbdx_connection.put(self._base_url + '/' + task_name, json=task_json)
raise_for_status(r)
return r.json()
|
def update(self, task_name, task_json):
"""Updates a GBDX task.
Args:
task_name (str): Task name.
task_json (dict): Dictionary representing updated task definition.
Returns:
Dictionary representing the updated task definition.
"""
r = self.gbdx_connection.put(self._base_url + '/' + task_name, json=task_json)
raise_for_status(r)
return r.json()
|
[
"Updates",
"a",
"GBDX",
"task",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/task_registry.py#L94-L107
|
[
"def",
"update",
"(",
"self",
",",
"task_name",
",",
"task_json",
")",
":",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"put",
"(",
"self",
".",
"_base_url",
"+",
"'/'",
"+",
"task_name",
",",
"json",
"=",
"task_json",
")",
"raise_for_status",
"(",
"r",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
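A hedged get-modify-put round trip built from the two methods above; the task name is a placeholder.

from gbdxtools import Interface

gbdx = Interface()
defn = gbdx.task_registry.get_definition('my-demo-task')   # hypothetical name
defn['description'] = 'updated description'
updated = gbdx.task_registry.update('my-demo-task', defn)
print(updated.get('description'))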
valid
|
to_geotiff
|
Write out a geotiff file of the image
Args:
path (str): path to write the geotiff file to, default is ./output.tif
proj (str): EPSG string of projection to reproject to
spec (str): if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): list of bands to export. If spec='rgb' will default to RGB bands
Returns:
str: path the geotiff was written to
|
gbdxtools/rda/io.py
|
def to_geotiff(arr, path='./output.tif', proj=None, spec=None, bands=None, **kwargs):
''' Write out a geotiff file of the image
Args:
path (str): path to write the geotiff file to, default is ./output.tif
proj (str): EPSG string of projection to reproject to
spec (str): if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): list of bands to export. If spec='rgb' will default to RGB bands
Returns:
str: path the geotiff was written to'''
assert has_rasterio, "To create geotiff images please install rasterio"
try:
img_md = arr.rda.metadata["image"]
x_size = img_md["tileXSize"]
y_size = img_md["tileYSize"]
except (AttributeError, KeyError):
x_size = kwargs.get("chunk_size", 256)
y_size = kwargs.get("chunk_size", 256)
try:
tfm = kwargs['transform'] if 'transform' in kwargs else arr.affine
except:
tfm = None
dtype = arr.dtype.name if arr.dtype.name != 'int8' else 'uint8'
if spec is not None and spec.lower() == 'rgb':
if bands is None:
bands = arr._rgb_bands
# skip if already DRA'ed
if not arr.options.get('dra'):
# add the RDA HistogramDRA op to get a RGB 8-bit image
from gbdxtools.rda.interface import RDA
rda = RDA()
dra = rda.HistogramDRA(arr)
# Reset the bounds and select the bands on the new Dask
arr = dra.aoi(bbox=arr.bounds)
arr = arr[bands,...].astype(np.uint8)
dtype = 'uint8'
else:
if bands is not None:
arr = arr[bands,...]
meta = {
'width': arr.shape[2],
'height': arr.shape[1],
'count': arr.shape[0],
'dtype': dtype,
'driver': 'GTiff',
'transform': tfm
}
if proj is not None:
meta["crs"] = {'init': proj}
if "tiled" in kwargs and kwargs["tiled"]:
meta.update(blockxsize=x_size, blockysize=y_size, tiled="yes")
with rasterio.open(path, "w", **meta) as dst:
writer = rio_writer(dst)
result = store(arr, writer, compute=False)
result.compute(scheduler=threaded_get)
return path
|
def to_geotiff(arr, path='./output.tif', proj=None, spec=None, bands=None, **kwargs):
''' Write out a geotiff file of the image
Args:
path (str): path to write the geotiff file to, default is ./output.tif
proj (str): EPSG string of projection to reproject to
spec (str): if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): list of bands to export. If spec='rgb' will default to RGB bands
Returns:
str: path the geotiff was written to'''
assert has_rasterio, "To create geotiff images please install rasterio"
try:
img_md = arr.rda.metadata["image"]
x_size = img_md["tileXSize"]
y_size = img_md["tileYSize"]
except (AttributeError, KeyError):
x_size = kwargs.get("chunk_size", 256)
y_size = kwargs.get("chunk_size", 256)
try:
tfm = kwargs['transform'] if 'transform' in kwargs else arr.affine
except:
tfm = None
dtype = arr.dtype.name if arr.dtype.name != 'int8' else 'uint8'
if spec is not None and spec.lower() == 'rgb':
if bands is None:
bands = arr._rgb_bands
# skip if already DRA'ed
if not arr.options.get('dra'):
# add the RDA HistogramDRA op to get a RGB 8-bit image
from gbdxtools.rda.interface import RDA
rda = RDA()
dra = rda.HistogramDRA(arr)
# Reset the bounds and select the bands on the new Dask
arr = dra.aoi(bbox=arr.bounds)
arr = arr[bands,...].astype(np.uint8)
dtype = 'uint8'
else:
if bands is not None:
arr = arr[bands,...]
meta = {
'width': arr.shape[2],
'height': arr.shape[1],
'count': arr.shape[0],
'dtype': dtype,
'driver': 'GTiff',
'transform': tfm
}
if proj is not None:
meta["crs"] = {'init': proj}
if "tiled" in kwargs and kwargs["tiled"]:
meta.update(blockxsize=x_size, blockysize=y_size, tiled="yes")
with rasterio.open(path, "w", **meta) as dst:
writer = rio_writer(dst)
result = store(arr, writer, compute=False)
result.compute(scheduler=threaded_get)
return path
|
[
"Write",
"out",
"a",
"geotiff",
"file",
"of",
"the",
"image"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/rda/io.py#L27-L91
|
[
"def",
"to_geotiff",
"(",
"arr",
",",
"path",
"=",
"'./output.tif'",
",",
"proj",
"=",
"None",
",",
"spec",
"=",
"None",
",",
"bands",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"assert",
"has_rasterio",
",",
"\"To create geotiff images please install rasterio\"",
"try",
":",
"img_md",
"=",
"arr",
".",
"rda",
".",
"metadata",
"[",
"\"image\"",
"]",
"x_size",
"=",
"img_md",
"[",
"\"tileXSize\"",
"]",
"y_size",
"=",
"img_md",
"[",
"\"tileYSize\"",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"x_size",
"=",
"kwargs",
".",
"get",
"(",
"\"chunk_size\"",
",",
"256",
")",
"y_size",
"=",
"kwargs",
".",
"get",
"(",
"\"chunk_size\"",
",",
"256",
")",
"try",
":",
"tfm",
"=",
"kwargs",
"[",
"'transform'",
"]",
"if",
"'transform'",
"in",
"kwargs",
"else",
"arr",
".",
"affine",
"except",
":",
"tfm",
"=",
"None",
"dtype",
"=",
"arr",
".",
"dtype",
".",
"name",
"if",
"arr",
".",
"dtype",
".",
"name",
"!=",
"'int8'",
"else",
"'uint8'",
"if",
"spec",
"is",
"not",
"None",
"and",
"spec",
".",
"lower",
"(",
")",
"==",
"'rgb'",
":",
"if",
"bands",
"is",
"None",
":",
"bands",
"=",
"arr",
".",
"_rgb_bands",
"# skip if already DRA'ed",
"if",
"not",
"arr",
".",
"options",
".",
"get",
"(",
"'dra'",
")",
":",
"# add the RDA HistogramDRA op to get a RGB 8-bit image",
"from",
"gbdxtools",
".",
"rda",
".",
"interface",
"import",
"RDA",
"rda",
"=",
"RDA",
"(",
")",
"dra",
"=",
"rda",
".",
"HistogramDRA",
"(",
"arr",
")",
"# Reset the bounds and select the bands on the new Dask",
"arr",
"=",
"dra",
".",
"aoi",
"(",
"bbox",
"=",
"arr",
".",
"bounds",
")",
"arr",
"=",
"arr",
"[",
"bands",
",",
"...",
"]",
".",
"astype",
"(",
"np",
".",
"uint8",
")",
"dtype",
"=",
"'uint8'",
"else",
":",
"if",
"bands",
"is",
"not",
"None",
":",
"arr",
"=",
"arr",
"[",
"bands",
",",
"...",
"]",
"meta",
"=",
"{",
"'width'",
":",
"arr",
".",
"shape",
"[",
"2",
"]",
",",
"'height'",
":",
"arr",
".",
"shape",
"[",
"1",
"]",
",",
"'count'",
":",
"arr",
".",
"shape",
"[",
"0",
"]",
",",
"'dtype'",
":",
"dtype",
",",
"'driver'",
":",
"'GTiff'",
",",
"'transform'",
":",
"tfm",
"}",
"if",
"proj",
"is",
"not",
"None",
":",
"meta",
"[",
"\"crs\"",
"]",
"=",
"{",
"'init'",
":",
"proj",
"}",
"if",
"\"tiled\"",
"in",
"kwargs",
"and",
"kwargs",
"[",
"\"tiled\"",
"]",
":",
"meta",
".",
"update",
"(",
"blockxsize",
"=",
"x_size",
",",
"blockysize",
"=",
"y_size",
",",
"tiled",
"=",
"\"yes\"",
")",
"with",
"rasterio",
".",
"open",
"(",
"path",
",",
"\"w\"",
",",
"*",
"*",
"meta",
")",
"as",
"dst",
":",
"writer",
"=",
"rio_writer",
"(",
"dst",
")",
"result",
"=",
"store",
"(",
"arr",
",",
"writer",
",",
"compute",
"=",
"False",
")",
"result",
".",
"compute",
"(",
"scheduler",
"=",
"threaded_get",
")",
"return",
"path"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
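A hedged export sketch calling the function above directly (image classes also reach it through their `geotiff()` helper); the catalog id and output path are placeholders.

from gbdxtools import CatalogImage
from gbdxtools.rda.io import to_geotiff

img = CatalogImage('10400100384B1B00')   # hypothetical catalog id
out = to_geotiff(img, path='./aoi_rgb.tif', proj='EPSG:4326', spec='rgb')
print(out)                               # './aoi_rgb.tif'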
valid
|
Recipe.ingest_vectors
|
append two required tasks to the given output to ingest to VS
|
gbdxtools/simple_answerfactory.py
|
def ingest_vectors(self, output_port_value):
''' append two required tasks to the given output to ingest to VS
'''
# append two tasks to self['definition']['tasks']
ingest_task = Task('IngestItemJsonToVectorServices')
ingest_task.inputs.items = output_port_value
ingest_task.impersonation_allowed = True
stage_task = Task('StageDataToS3')
stage_task.inputs.destination = 's3://{vector_ingest_bucket}/{recipe_id}/{run_id}/{task_name}'
stage_task.inputs.data = ingest_task.outputs.result.value
self.definition['tasks'].append(ingest_task.generate_task_workflow_json())
self.definition['tasks'].append(stage_task.generate_task_workflow_json())
|
def ingest_vectors(self, output_port_value):
''' append two required tasks to the given output to ingest to VS
'''
# append two tasks to self['definition']['tasks']
ingest_task = Task('IngestItemJsonToVectorServices')
ingest_task.inputs.items = output_port_value
ingest_task.impersonation_allowed = True
stage_task = Task('StageDataToS3')
stage_task.inputs.destination = 's3://{vector_ingest_bucket}/{recipe_id}/{run_id}/{task_name}'
stage_task.inputs.data = ingest_task.outputs.result.value
self.definition['tasks'].append(ingest_task.generate_task_workflow_json())
self.definition['tasks'].append(stage_task.generate_task_workflow_json())
|
[
"append",
"two",
"required",
"tasks",
"to",
"the",
"given",
"output",
"to",
"ingest",
"to",
"VS"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/simple_answerfactory.py#L550-L563
|
[
"def",
"ingest_vectors",
"(",
"self",
",",
"output_port_value",
")",
":",
"# append two tasks to self['definition']['tasks']",
"ingest_task",
"=",
"Task",
"(",
"'IngestItemJsonToVectorServices'",
")",
"ingest_task",
".",
"inputs",
".",
"items",
"=",
"output_port_value",
"ingest_task",
".",
"impersonation_allowed",
"=",
"True",
"stage_task",
"=",
"Task",
"(",
"'StageDataToS3'",
")",
"stage_task",
".",
"inputs",
".",
"destination",
"=",
"'s3://{vector_ingest_bucket}/{recipe_id}/{run_id}/{task_name}'",
"stage_task",
".",
"inputs",
".",
"data",
"=",
"ingest_task",
".",
"outputs",
".",
"result",
".",
"value",
"self",
".",
"definition",
"[",
"'tasks'",
"]",
".",
"append",
"(",
"ingest_task",
".",
"generate_task_workflow_json",
"(",
")",
")",
"self",
".",
"definition",
"[",
"'tasks'",
"]",
".",
"append",
"(",
"stage_task",
".",
"generate_task_workflow_json",
"(",
")",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
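A hedged sketch of the call above; `recipe` is assumed to be an already-built `simple_answerfactory.Recipe` whose definition contains a `'tasks'` list, and the task name and output port below are placeholders.

from gbdxtools.simpleworkflows import Task

detect = Task('my-detection-task')                  # hypothetical task
recipe.ingest_vectors(detect.outputs.result.value)  # appends the two ingest tasks
# recipe.definition['tasks'] now ends with IngestItemJsonToVectorServices
# followed by StageDataToS3.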
valid
|
Recipe.get
|
Retrieves an AnswerFactory Recipe by id
Args:
recipe_id: The id of the recipe
Returns:
A JSON representation of the recipe
|
gbdxtools/answerfactory.py
|
def get(self, recipe_id):
'''
Retrieves an AnswerFactory Recipe by id
Args:
recipe_id: The id of the recipe
Returns:
A JSON representation of the recipe
'''
self.logger.debug('Retrieving recipe by id: ' + recipe_id)
url = '%(base_url)s/recipe/%(recipe_id)s' % {
'base_url': self.base_url, 'recipe_id': recipe_id
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.json()
|
def get(self, recipe_id):
'''
Retrieves an AnswerFactory Recipe by id
Args:
recipe_id: The id of the recipe
Returns:
A JSON representation of the recipe
'''
self.logger.debug('Retrieving recipe by id: ' + recipe_id)
url = '%(base_url)s/recipe/%(recipe_id)s' % {
'base_url': self.base_url, 'recipe_id': recipe_id
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.json()
|
[
"Retrieves",
"an",
"AnswerFactory",
"Recipe",
"by",
"id"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/answerfactory.py#L32-L48
|
[
"def",
"get",
"(",
"self",
",",
"recipe_id",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Retrieving recipe by id: '",
"+",
"recipe_id",
")",
"url",
"=",
"'%(base_url)s/recipe/%(recipe_id)s'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'recipe_id'",
":",
"recipe_id",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
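A short usage sketch; it assumes the answerfactory `Recipe` client constructs with no arguments (picking up GBDX credentials itself), and the id is a placeholder.

from gbdxtools.answerfactory import Recipe

recipe_api = Recipe()                          # assumed no-arg construction
recipe_json = recipe_api.get('my-recipe-id')   # hypothetical recipe id
print(recipe_json.get('name'))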
valid
|
Recipe.save
|
Saves an AnswerFactory Recipe
Args:
recipe (dict): Dictionary specifying a recipe
Returns:
AnswerFactory Recipe id
|
gbdxtools/answerfactory.py
|
def save(self, recipe):
'''
Saves an AnswerFactory Recipe
Args:
recipe (dict): Dictionary specifying a recipe
Returns:
AnswerFactory Recipe id
'''
# test if this is a create vs. an update
if 'id' in recipe and recipe['id'] is not None:
# update -> use put op
self.logger.debug("Updating existing recipe: " + json.dumps(recipe))
url = '%(base_url)s/recipe/json/%(recipe_id)s' % {
'base_url': self.base_url, 'recipe_id': recipe['id']
}
r = self.gbdx_connection.put(url, json=recipe)
try:
r.raise_for_status()
except:
print(r.text)
raise
return recipe['id']
else:
# create -> use post op
self.logger.debug("Creating new recipe: " + json.dumps(recipe))
url = '%(base_url)s/recipe/json' % {
'base_url': self.base_url
}
r = self.gbdx_connection.post(url, json=recipe)
try:
r.raise_for_status()
except:
print(r.text)
raise
recipe_json = r.json()
return recipe_json['id']
|
def save(self, recipe):
'''
Saves an AnswerFactory Recipe
Args:
recipe (dict): Dictionary specifying a recipe
Returns:
AnswerFactory Recipe id
'''
# test if this is a create vs. an update
if 'id' in recipe and recipe['id'] is not None:
# update -> use put op
self.logger.debug("Updating existing recipe: " + json.dumps(recipe))
url = '%(base_url)s/recipe/json/%(recipe_id)s' % {
'base_url': self.base_url, 'recipe_id': recipe['id']
}
r = self.gbdx_connection.put(url, json=recipe)
try:
r.raise_for_status()
except:
print(r.text)
raise
return recipe['id']
else:
# create -> use post op
self.logger.debug("Creating new recipe: " + json.dumps(recipe))
url = '%(base_url)s/recipe/json' % {
'base_url': self.base_url
}
r = self.gbdx_connection.post(url, json=recipe)
try:
r.raise_for_status()
except:
print(r.text)
raise
recipe_json = r.json()
return recipe_json['id']
|
[
"Saves",
"an",
"AnswerFactory",
"Recipe"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/answerfactory.py#L68-L105
|
[
"def",
"save",
"(",
"self",
",",
"recipe",
")",
":",
"# test if this is a create vs. an update",
"if",
"'id'",
"in",
"recipe",
"and",
"recipe",
"[",
"'id'",
"]",
"is",
"not",
"None",
":",
"# update -> use put op",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Updating existing recipe: \"",
"+",
"json",
".",
"dumps",
"(",
"recipe",
")",
")",
"url",
"=",
"'%(base_url)s/recipe/json/%(recipe_id)s'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'recipe_id'",
":",
"recipe",
"[",
"'id'",
"]",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"put",
"(",
"url",
",",
"json",
"=",
"recipe",
")",
"try",
":",
"r",
".",
"raise_for_status",
"(",
")",
"except",
":",
"print",
"(",
"r",
".",
"text",
")",
"raise",
"return",
"recipe",
"[",
"'id'",
"]",
"else",
":",
"# create -> use post op",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Creating new recipe: \"",
"+",
"json",
".",
"dumps",
"(",
"recipe",
")",
")",
"url",
"=",
"'%(base_url)s/recipe/json'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url",
",",
"json",
"=",
"recipe",
")",
"try",
":",
"r",
".",
"raise_for_status",
"(",
")",
"except",
":",
"print",
"(",
"r",
".",
"text",
")",
"raise",
"recipe_json",
"=",
"r",
".",
"json",
"(",
")",
"return",
"recipe_json",
"[",
"'id'",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
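A hedged create-path sketch: with no `'id'` in the payload the client POSTs and returns the new id. The body is a placeholder skeleton, not the full AnswerFactory recipe schema.

from gbdxtools.answerfactory import Recipe

recipe_api = Recipe()              # assumed no-arg construction
new_id = recipe_api.save({
    'name': 'demo recipe',         # hypothetical fields
    'recipeType': 'workflow',
})
print(new_id)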
valid
|
Project.save
|
Saves an AnswerFactory Project
Args:
project (dict): Dictionary specifying an AnswerFactory Project.
Returns:
AnswerFactory Project id
|
gbdxtools/answerfactory.py
|
def save(self, project):
'''
Saves an AnswerFactory Project
Args:
project (dict): Dictionary specifying an AnswerFactory Project.
Returns:
AnswerFactory Project id
'''
# test if this is a create vs. an update
if 'id' in project and project['id'] is not None:
# update -> use put op
self.logger.debug('Updating existing project: ' + json.dumps(project))
url = '%(base_url)s/%(project_id)s' % {
'base_url': self.base_url, 'project_id': project['id']
}
r = self.gbdx_connection.put(url, json=project)
try:
r.raise_for_status()
except:
print(r.text)
raise
# updates only get the Accepted response -> return the original project id
return project['id']
else:
self.logger.debug('Creating new project: ' + json.dumps(project))
# create -> use post op
url = self.base_url
r = self.gbdx_connection.post(url, json=project)
try:
r.raise_for_status()
except:
print(r.text)
raise
project_json = r.json()
# create returns the saved project -> return the project id that's saved
return project_json['id']
|
def save(self, project):
'''
Saves an AnswerFactory Project
Args:
project (dict): Dictionary specifying an AnswerFactory Project.
Returns:
AnswerFactory Project id
'''
# test if this is a create vs. an update
if 'id' in project and project['id'] is not None:
# update -> use put op
self.logger.debug('Updating existing project: ' + json.dumps(project))
url = '%(base_url)s/%(project_id)s' % {
'base_url': self.base_url, 'project_id': project['id']
}
r = self.gbdx_connection.put(url, json=project)
try:
r.raise_for_status()
except:
print(r.text)
raise
# updates only get the Accepted response -> return the original project id
return project['id']
else:
self.logger.debug('Creating new project: ' + json.dumps(project))
# create -> use post op
url = self.base_url
r = self.gbdx_connection.post(url, json=project)
try:
r.raise_for_status()
except:
print(r.text)
raise
project_json = r.json()
# create returns the saved project -> return the project id that's saved
return project_json['id']
|
[
"Saves",
"an",
"AnswerFactory",
"Project"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/answerfactory.py#L160-L198
|
[
"def",
"save",
"(",
"self",
",",
"project",
")",
":",
"# test if this is a create vs. an update",
"if",
"'id'",
"in",
"project",
"and",
"project",
"[",
"'id'",
"]",
"is",
"not",
"None",
":",
"# update -> use put op",
"self",
".",
"logger",
".",
"debug",
"(",
"'Updating existing project: '",
"+",
"json",
".",
"dumps",
"(",
"project",
")",
")",
"url",
"=",
"'%(base_url)s/%(project_id)s'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'project_id'",
":",
"project",
"[",
"'id'",
"]",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"put",
"(",
"url",
",",
"json",
"=",
"project",
")",
"try",
":",
"r",
".",
"raise_for_status",
"(",
")",
"except",
":",
"print",
"(",
"r",
".",
"text",
")",
"raise",
"# updates only get the Accepted response -> return the original project id",
"return",
"project",
"[",
"'id'",
"]",
"else",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Creating new project: '",
"+",
"json",
".",
"dumps",
"(",
"project",
")",
")",
"# create -> use post op",
"url",
"=",
"self",
".",
"base_url",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url",
",",
"json",
"=",
"project",
")",
"try",
":",
"r",
".",
"raise_for_status",
"(",
")",
"except",
":",
"print",
"(",
"r",
".",
"text",
")",
"raise",
"project_json",
"=",
"r",
".",
"json",
"(",
")",
"# create returns the saved project -> return the project id that's saved",
"return",
"project_json",
"[",
"'id'",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
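The update path, for contrast with the recipe create sketch above: because `'id'` is present the client PUTs and echoes back the same id. Fields are placeholders.

from gbdxtools.answerfactory import Project

project_api = Project()               # assumed no-arg construction
same_id = project_api.save({
    'id': 'existing-project-id',      # presence of 'id' selects the PUT path
    'name': 'renamed project',
})
print(same_id)                        # 'existing-project-id'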
valid
|
Project.delete
|
Deletes a project by id
Args:
project_id: The project id to delete
Returns:
Nothing
|
gbdxtools/answerfactory.py
|
def delete(self, project_id):
'''
Deletes a project by id
Args:
project_id: The project id to delete
Returns:
Nothing
'''
self.logger.debug('Deleting project by id: ' + project_id)
url = '%(base_url)s/%(project_id)s' % {
'base_url': self.base_url, 'project_id': project_id
}
r = self.gbdx_connection.delete(url)
r.raise_for_status()
|
def delete(self, project_id):
'''
Deletes a project by id
Args:
project_id: The project id to delete
Returns:
Nothing
'''
self.logger.debug('Deleting project by id: ' + project_id)
url = '%(base_url)s/%(project_id)s' % {
'base_url': self.base_url, 'project_id': project_id
}
r = self.gbdx_connection.delete(url)
r.raise_for_status()
|
[
"Deletes",
"a",
"project",
"by",
"id"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/answerfactory.py#L200-L215
|
[
"def",
"delete",
"(",
"self",
",",
"project_id",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Deleting project by id: '",
"+",
"project_id",
")",
"url",
"=",
"'%(base_url)s/%(project_id)s'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'project_id'",
":",
"project_id",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"delete",
"(",
"url",
")",
"r",
".",
"raise_for_status",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
LineStyle.paint
|
Renders a javascript snippet suitable for use as a mapbox-gl line paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
|
gbdxtools/vector_styles.py
|
def paint(self):
"""
Renders a javascript snippet suitable for use as a mapbox-gl line paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
"""
# TODO Figure out why I can't use some of these props
snippet = {
'line-opacity': VectorStyle.get_style_value(self.opacity),
'line-color': VectorStyle.get_style_value(self.color),
#'line-cap': self.cap,
#'line-join': self.join,
'line-width': VectorStyle.get_style_value(self.width),
#'line-gap-width': self.gap_width,
#'line-blur': self.blur,
}
if self.translate:
snippet['line-translate'] = self.translate
if self.dasharray:
snippet['line-dasharray'] = VectorStyle.get_style_value(self.dasharray)
return snippet
|
def paint(self):
"""
Renders a javascript snippet suitable for use as a mapbox-gl line paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
"""
# TODO Figure out why I can't use some of these props
snippet = {
'line-opacity': VectorStyle.get_style_value(self.opacity),
'line-color': VectorStyle.get_style_value(self.color),
#'line-cap': self.cap,
#'line-join': self.join,
'line-width': VectorStyle.get_style_value(self.width),
#'line-gap-width': self.gap_width,
#'line-blur': self.blur,
}
if self.translate:
snippet['line-translate'] = self.translate
if self.dasharray:
snippet['line-dasharray'] = VectorStyle.get_style_value(self.dasharray)
return snippet
|
[
"Renders",
"a",
"javascript",
"snippet",
"suitable",
"for",
"use",
"as",
"a",
"mapbox",
"-",
"gl",
"line",
"paint",
"entry"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vector_styles.py#L120-L143
|
[
"def",
"paint",
"(",
"self",
")",
":",
"# TODO Figure out why i cant use some of these props",
"snippet",
"=",
"{",
"'line-opacity'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"opacity",
")",
",",
"'line-color'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"color",
")",
",",
"#'line-cap': self.cap,",
"#'line-join': self.join,",
"'line-width'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"width",
")",
",",
"#'line-gap-width': self.gap_width,",
"#'line-blur': self.blur,",
"}",
"if",
"self",
".",
"translate",
":",
"snippet",
"[",
"'line-translate'",
"]",
"=",
"self",
".",
"translate",
"if",
"self",
".",
"dasharray",
":",
"snippet",
"[",
"'line-dasharray'",
"]",
"=",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"dasharray",
")",
"return",
"snippet"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
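A hedged sketch; it assumes `LineStyle` accepts its paint attributes as keyword arguments (the `paint()` body above reads them off `self`).

from gbdxtools.vector_styles import LineStyle

style = LineStyle(color='#ff0000', width=2.0, opacity=0.75, dasharray=[2, 1])
print(style.paint())
# roughly: {'line-opacity': 0.75, 'line-color': '#ff0000',
#           'line-width': 2.0, 'line-dasharray': [2, 1]}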
valid
|
FillStyle.paint
|
Renders a javascript snippet suitable for use as a mapbox-gl fill paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
|
gbdxtools/vector_styles.py
|
def paint(self):
"""
Renders a javascript snippet suitable for use as a mapbox-gl fill paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
"""
snippet = {
'fill-opacity': VectorStyle.get_style_value(self.opacity),
'fill-color': VectorStyle.get_style_value(self.color),
'fill-outline-color': VectorStyle.get_style_value(self.outline_color)
}
if self.translate:
snippet['fill-translate'] = self.translate
return snippet
|
def paint(self):
"""
Renders a javascript snippet suitable for use as a mapbox-gl fill paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
"""
snippet = {
'fill-opacity': VectorStyle.get_style_value(self.opacity),
'fill-color': VectorStyle.get_style_value(self.color),
'fill-outline-color': VectorStyle.get_style_value(self.outline_color)
}
if self.translate:
snippet['fill-translate'] = self.translate
return snippet
|
[
"Renders",
"a",
"javascript",
"snippet",
"suitable",
"for",
"use",
"as",
"a",
"mapbox",
"-",
"gl",
"fill",
"paint",
"entry"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vector_styles.py#L174-L189
|
[
"def",
"paint",
"(",
"self",
")",
":",
"snippet",
"=",
"{",
"'fill-opacity'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"opacity",
")",
",",
"'fill-color'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"color",
")",
",",
"'fill-outline-color'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"outline_color",
")",
"}",
"if",
"self",
".",
"translate",
":",
"snippet",
"[",
"'fill-translate'",
"]",
"=",
"self",
".",
"translate",
"return",
"snippet"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
FillExtrusionStyle.paint
|
Renders a javascript snippet suitable for use as a mapbox-gl fill-extrusion paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
|
gbdxtools/vector_styles.py
|
def paint(self):
"""
Renders a javascript snippet suitable for use as a mapbox-gl fill-extrusion paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
"""
snippet = {
'fill-extrusion-opacity': VectorStyle.get_style_value(self.opacity),
'fill-extrusion-color': VectorStyle.get_style_value(self.color),
'fill-extrusion-base': VectorStyle.get_style_value(self.base),
'fill-extrusion-height': VectorStyle.get_style_value(self.height)
}
if self.translate:
snippet['fill-extrusion-translate'] = self.translate
return snippet
|
def paint(self):
"""
Renders a javascript snippet suitable for use as a mapbox-gl fill-extrusion paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
"""
snippet = {
'fill-extrusion-opacity': VectorStyle.get_style_value(self.opacity),
'fill-extrusion-color': VectorStyle.get_style_value(self.color),
'fill-extrusion-base': VectorStyle.get_style_value(self.base),
'fill-extrusion-height': VectorStyle.get_style_value(self.height)
}
if self.translate:
snippet['fill-extrusion-translate'] = self.translate
return snippet
|
[
"Renders",
"a",
"javascript",
"snippet",
"suitable",
"for",
"use",
"as",
"a",
"mapbox",
"-",
"gl",
"fill",
"-",
"extrusion",
"paint",
"entry"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vector_styles.py#L216-L232
|
[
"def",
"paint",
"(",
"self",
")",
":",
"snippet",
"=",
"{",
"'fill-extrusion-opacity'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"opacity",
")",
",",
"'fill-extrusion-color'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"color",
")",
",",
"'fill-extrusion-base'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"base",
")",
",",
"'fill-extrusion-height'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"height",
")",
"}",
"if",
"self",
".",
"translate",
":",
"snippet",
"[",
"'fill-extrusion-translate'",
"]",
"=",
"self",
".",
"translate",
"return",
"snippet"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
HeatmapStyle.paint
|
Renders a javascript snippet suitable for use as a mapbox-gl heatmap paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
|
gbdxtools/vector_styles.py
|
def paint(self):
"""
Renders a javascript snippet suitable for use as a mapbox-gl heatmap paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
"""
snippet = {
'heatmap-radius': VectorStyle.get_style_value(self.radius),
'heatmap-opacity': VectorStyle.get_style_value(self.opacity),
'heatmap-color': VectorStyle.get_style_value(self.color),
'heatmap-intensity': VectorStyle.get_style_value(self.intensity),
'heatmap-weight': VectorStyle.get_style_value(self.weight)
}
return snippet
|
def paint(self):
"""
Renders a javascript snippet suitable for use as a mapbox-gl heatmap paint entry
Returns:
A dict that can be converted to a mapbox-gl javascript paint snippet
"""
snippet = {
'heatmap-radius': VectorStyle.get_style_value(self.radius),
'heatmap-opacity': VectorStyle.get_style_value(self.opacity),
'heatmap-color': VectorStyle.get_style_value(self.color),
'heatmap-intensity': VectorStyle.get_style_value(self.intensity),
'heatmap-weight': VectorStyle.get_style_value(self.weight)
}
return snippet
|
[
"Renders",
"a",
"javascript",
"snippet",
"suitable",
"for",
"use",
"as",
"a",
"mapbox",
"-",
"gl",
"heatmap",
"paint",
"entry"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vector_styles.py#L278-L293
|
[
"def",
"paint",
"(",
"self",
")",
":",
"snippet",
"=",
"{",
"'heatmap-radius'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"radius",
")",
",",
"'heatmap-opacity'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"opacity",
")",
",",
"'heatmap-color'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"color",
")",
",",
"'heatmap-intensity'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"intensity",
")",
",",
"'heatmap-weight'",
":",
"VectorStyle",
".",
"get_style_value",
"(",
"self",
".",
"weight",
")",
"}",
"return",
"snippet"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
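The same pattern for heatmaps, under the same keyword-argument assumption as the `LineStyle` sketch above:

from gbdxtools.vector_styles import HeatmapStyle

heat = HeatmapStyle(radius=20, opacity=0.9, intensity=1.5, weight=1)
print(heat.paint())   # all five heatmap-* keys, run through get_style_value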
valid
|
Vectors.create
|
Create vectors in the vector service.
Args:
vectors: A single geojson vector or a list of geojson vectors. Item_type and ingest_source are required.
Returns:
(list): IDs of the vectors created
Example:
>>> vectors.create(
... {
... "type": "Feature",
... "geometry": {
... "type": "Point",
... "coordinates": [1.0,1.0]
... },
... "properties": {
... "text" : "item text",
... "name" : "item name",
... "item_type" : "type",
... "ingest_source" : "source",
... "attributes" : {
... "latitude" : 1,
... "institute_founded" : "2015-07-17",
... "mascot" : "moth"
... }
... }
... }
... )
|
gbdxtools/vectors.py
|
def create(self,vectors):
""" Create a vectors in the vector service.
Args:
vectors: A single geojson vector or a list of geojson vectors. Item_type and ingest_source are required.
Returns:
(list): IDs of the vectors created
Example:
>>> vectors.create(
... {
... "type": "Feature",
... "geometry": {
... "type": "Point",
... "coordinates": [1.0,1.0]
... },
... "properties": {
... "text" : "item text",
... "name" : "item name",
... "item_type" : "type",
... "ingest_source" : "source",
... "attributes" : {
... "latitude" : 1,
... "institute_founded" : "2015-07-17",
... "mascot" : "moth"
... }
... }
... }
... )
"""
if type(vectors) is dict:
vectors = [vectors]
# validate they all have item_type and ingest_source in properties
for vector in vectors:
if not 'properties' in list(vector.keys()):
raise Exception('Vector does not contain "properties" field.')
if not 'item_type' in list(vector['properties'].keys()):
raise Exception('Vector does not contain "item_type".')
if not 'ingest_source' in list(vector['properties'].keys()):
raise Exception('Vector does not contain "ingest_source".')
r = self.gbdx_connection.post(self.create_url, data=json.dumps(vectors))
r.raise_for_status()
return r.json()
|
def create(self,vectors):
""" Create a vectors in the vector service.
Args:
vectors: A single geojson vector or a list of geojson vectors. Item_type and ingest_source are required.
Returns:
(list): IDs of the vectors created
Example:
>>> vectors.create(
... {
... "type": "Feature",
... "geometry": {
... "type": "Point",
... "coordinates": [1.0,1.0]
... },
... "properties": {
... "text" : "item text",
... "name" : "item name",
... "item_type" : "type",
... "ingest_source" : "source",
... "attributes" : {
... "latitude" : 1,
... "institute_founded" : "2015-07-17",
... "mascot" : "moth"
... }
... }
... }
... )
"""
if type(vectors) is dict:
vectors = [vectors]
# validate they all have item_type and ingest_source in properties
for vector in vectors:
if not 'properties' in list(vector.keys()):
raise Exception('Vector does not contain "properties" field.')
if not 'item_type' in list(vector['properties'].keys()):
raise Exception('Vector does not contain "item_type".')
if not 'ingest_source' in list(vector['properties'].keys()):
raise Exception('Vector does not contain "ingest_source".')
r = self.gbdx_connection.post(self.create_url, data=json.dumps(vectors))
r.raise_for_status()
return r.json()
|
[
"Create",
"a",
"vectors",
"in",
"the",
"vector",
"service",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vectors.py#L53-L101
|
[
"def",
"create",
"(",
"self",
",",
"vectors",
")",
":",
"if",
"type",
"(",
"vectors",
")",
"is",
"dict",
":",
"vectors",
"=",
"[",
"vectors",
"]",
"# validate they all have item_type and ingest_source in properties",
"for",
"vector",
"in",
"vectors",
":",
"if",
"not",
"'properties'",
"in",
"list",
"(",
"vector",
".",
"keys",
"(",
")",
")",
":",
"raise",
"Exception",
"(",
"'Vector does not contain \"properties\" field.'",
")",
"if",
"not",
"'item_type'",
"in",
"list",
"(",
"vector",
"[",
"'properties'",
"]",
".",
"keys",
"(",
")",
")",
":",
"raise",
"Exception",
"(",
"'Vector does not contain \"item_type\".'",
")",
"if",
"not",
"'ingest_source'",
"in",
"list",
"(",
"vector",
"[",
"'properties'",
"]",
".",
"keys",
"(",
")",
")",
":",
"raise",
"Exception",
"(",
"'Vector does not contain \"ingest_source\".'",
")",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"self",
".",
"create_url",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"vectors",
")",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Vectors.create_from_wkt
|
Create a single vector in the vector service
Args:
wkt (str): wkt representation of the geometry
item_type (str): item_type of the vector
ingest_source (str): source of the vector
attributes: a set of key-value pairs of attributes
Returns:
id (str): string identifier of the vector created
|
gbdxtools/vectors.py
|
def create_from_wkt(self, wkt, item_type, ingest_source, **attributes):
'''
Create a single vector in the vector service
Args:
wkt (str): wkt representation of the geometry
item_type (str): item_type of the vector
ingest_source (str): source of the vector
attributes: a set of key-value pairs of attributes
Returns:
id (str): string identifier of the vector created
'''
# verify the "depth" of the attributes is single layer
geojson = load_wkt(wkt).__geo_interface__
vector = {
'type': "Feature",
'geometry': geojson,
'properties': {
'item_type': item_type,
'ingest_source': ingest_source,
'attributes': attributes
}
}
return self.create(vector)[0]
|
def create_from_wkt(self, wkt, item_type, ingest_source, **attributes):
'''
Create a single vector in the vector service
Args:
wkt (str): wkt representation of the geometry
item_type (str): item_type of the vector
ingest_source (str): source of the vector
attributes: a set of key-value pairs of attributes
Returns:
id (str): string identifier of the vector created
'''
# verify the "depth" of the attributes is single layer
geojson = load_wkt(wkt).__geo_interface__
vector = {
'type': "Feature",
'geometry': geojson,
'properties': {
'item_type': item_type,
'ingest_source': ingest_source,
'attributes': attributes
}
}
return self.create(vector)[0]
|
[
"Create",
"a",
"single",
"vector",
"in",
"the",
"vector",
"service"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vectors.py#L103-L129
|
[
"def",
"create_from_wkt",
"(",
"self",
",",
"wkt",
",",
"item_type",
",",
"ingest_source",
",",
"*",
"*",
"attributes",
")",
":",
"# verify the \"depth\" of the attributes is single layer",
"geojson",
"=",
"load_wkt",
"(",
"wkt",
")",
".",
"__geo_interface__",
"vector",
"=",
"{",
"'type'",
":",
"\"Feature\"",
",",
"'geometry'",
":",
"geojson",
",",
"'properties'",
":",
"{",
"'item_type'",
":",
"item_type",
",",
"'ingest_source'",
":",
"ingest_source",
",",
"'attributes'",
":",
"attributes",
"}",
"}",
"return",
"self",
".",
"create",
"(",
"vector",
")",
"[",
"0",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
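A hedged sketch mirroring the signature above; extra keyword arguments become the vector's `attributes` map, and all values are placeholders.

from gbdxtools import Interface

gbdx = Interface()
vid = gbdx.vectors.create_from_wkt(
    'POINT (1.0 1.0)',
    item_type='demo_type',
    ingest_source='demo_source',
    mascot='moth')               # arbitrary attribute key-value pair
print(vid)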
valid
|
Vectors.get
|
Retrieves a vector. Not usually necessary because searching is the best way to find & get stuff.
Args:
ID (str): ID of the vector object
index (str): Optional. Index the object lives in. defaults to 'vector-web-s'
Returns:
record (dict): A dict object identical to the json representation of the catalog record
|
gbdxtools/vectors.py
|
def get(self, ID, index='vector-web-s'):
'''Retrieves a vector. Not usually necessary because searching is the best way to find & get stuff.
Args:
ID (str): ID of the vector object
index (str): Optional. Index the object lives in. defaults to 'vector-web-s'
Returns:
record (dict): A dict object identical to the json representation of the catalog record
'''
url = self.get_url % index
r = self.gbdx_connection.get(url + ID)
r.raise_for_status()
return r.json()
|
def get(self, ID, index='vector-web-s'):
'''Retrieves a vector. Not usually necessary because searching is the best way to find & get stuff.
Args:
ID (str): ID of the vector object
index (str): Optional. Index the object lives in. defaults to 'vector-web-s'
Returns:
record (dict): A dict object identical to the json representation of the catalog record
'''
url = self.get_url % index
r = self.gbdx_connection.get(url + ID)
r.raise_for_status()
return r.json()
|
[
"Retrieves",
"a",
"vector",
".",
"Not",
"usually",
"necessary",
"because",
"searching",
"is",
"the",
"best",
"way",
"to",
"find",
"&",
"get",
"stuff",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vectors.py#L132-L146
|
[
"def",
"get",
"(",
"self",
",",
"ID",
",",
"index",
"=",
"'vector-web-s'",
")",
":",
"url",
"=",
"self",
".",
"get_url",
"%",
"index",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
"+",
"ID",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
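Continuing the `create_from_wkt` sketch above (`gbdx` and `vid` carried over), this fetches the record back by id using the default index:

rec = gbdx.vectors.get(vid)                 # index defaults to 'vector-web-s'
print(rec['properties']['attributes'])      # e.g. {'mascot': 'moth'}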
valid
|
Vectors.query
|
Perform a vector services query using the QUERY API
(https://gbdxdocs.digitalglobe.com/docs/vs-query-list-vector-items-returns-default-fields)
Args:
searchAreaWkt: WKT Polygon of area to search
query: Elasticsearch query
count: Maximum number of results to return
ttl: Amount of time for each temporary vector page to exist
Returns:
List of vector results
|
gbdxtools/vectors.py
|
def query(self, searchAreaWkt, query, count=100, ttl='5m', index=default_index):
'''
Perform a vector services query using the QUERY API
(https://gbdxdocs.digitalglobe.com/docs/vs-query-list-vector-items-returns-default-fields)
Args:
searchAreaWkt: WKT Polygon of area to search
query: Elasticsearch query
count: Maximum number of results to return
ttl: Amount of time for each temporary vector page to exist
Returns:
List of vector results
'''
if count < 1000:
# issue a single page query
search_area_polygon = from_wkt(searchAreaWkt)
left, lower, right, upper = search_area_polygon.bounds
params = {
"q": query,
"count": min(count,1000),
"left": left,
"right": right,
"lower": lower,
"upper": upper
}
url = self.query_index_url % index if index else self.query_url
r = self.gbdx_connection.get(url, params=params)
r.raise_for_status()
return r.json()
else:
return list(self.query_iteratively(searchAreaWkt, query, count, ttl, index))
|
def query(self, searchAreaWkt, query, count=100, ttl='5m', index=default_index):
'''
Perform a vector services query using the QUERY API
(https://gbdxdocs.digitalglobe.com/docs/vs-query-list-vector-items-returns-default-fields)
Args:
searchAreaWkt: WKT Polygon of area to search
query: Elasticsearch query
count: Maximum number of results to return
ttl: Amount of time for each temporary vector page to exist
Returns:
List of vector results
'''
if count < 1000:
# issue a single page query
search_area_polygon = from_wkt(searchAreaWkt)
left, lower, right, upper = search_area_polygon.bounds
params = {
"q": query,
"count": min(count,1000),
"left": left,
"right": right,
"lower": lower,
"upper": upper
}
url = self.query_index_url % index if index else self.query_url
r = self.gbdx_connection.get(url, params=params)
r.raise_for_status()
return r.json()
else:
return list(self.query_iteratively(searchAreaWkt, query, count, ttl, index))
|
[
"Perform",
"a",
"vector",
"services",
"query",
"using",
"the",
"QUERY",
"API",
"(",
"https",
":",
"//",
"gbdxdocs",
".",
"digitalglobe",
".",
"com",
"/",
"docs",
"/",
"vs",
"-",
"query",
"-",
"list",
"-",
"vector",
"-",
"items",
"-",
"returns",
"-",
"default",
"-",
"fields",
")"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vectors.py#L149-L183
|
[
"def",
"query",
"(",
"self",
",",
"searchAreaWkt",
",",
"query",
",",
"count",
"=",
"100",
",",
"ttl",
"=",
"'5m'",
",",
"index",
"=",
"default_index",
")",
":",
"if",
"count",
"<",
"1000",
":",
"# issue a single page query",
"search_area_polygon",
"=",
"from_wkt",
"(",
"searchAreaWkt",
")",
"left",
",",
"lower",
",",
"right",
",",
"upper",
"=",
"search_area_polygon",
".",
"bounds",
"params",
"=",
"{",
"\"q\"",
":",
"query",
",",
"\"count\"",
":",
"min",
"(",
"count",
",",
"1000",
")",
",",
"\"left\"",
":",
"left",
",",
"\"right\"",
":",
"right",
",",
"\"lower\"",
":",
"lower",
",",
"\"upper\"",
":",
"upper",
"}",
"url",
"=",
"self",
".",
"query_index_url",
"%",
"index",
"if",
"index",
"else",
"self",
".",
"query_url",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
",",
"params",
"=",
"params",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"json",
"(",
")",
"else",
":",
"return",
"list",
"(",
"self",
".",
"query_iteratively",
"(",
"searchAreaWkt",
",",
"query",
",",
"count",
",",
"ttl",
",",
"index",
")",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
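A hedged single-page sketch (a count below 1000 takes the direct branch above); the WKT and query string are placeholders.

from gbdxtools import Interface

gbdx = Interface()
aoi = ('POLYGON ((-105.1 39.9, -105.1 40.0, -105.0 40.0, '
       '-105.0 39.9, -105.1 39.9))')
results = gbdx.vectors.query(aoi, query='item_type:WV03_VNIR', count=50)
print(len(results))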
valid
|
Vectors.query_iteratively
|
Perform a vector services query using the QUERY API
(https://gbdxdocs.digitalglobe.com/docs/vs-query-list-vector-items-returns-default-fields)
Args:
searchAreaWkt: WKT Polygon of area to search
query: Elasticsearch query
count: Maximum number of results to return
ttl: Amount of time for each temporary vector page to exist
Returns:
generator of vector results
|
gbdxtools/vectors.py
|
def query_iteratively(self, searchAreaWkt, query, count=100, ttl='5m', index=default_index):
'''
Perform a vector services query using the QUERY API
(https://gbdxdocs.digitalglobe.com/docs/vs-query-list-vector-items-returns-default-fields)
Args:
searchAreaWkt: WKT Polygon of area to search
query: Elasticsearch query
count: Maximum number of results to return
ttl: Amount of time for each temporary vector page to exist
Returns:
generator of vector results
'''
search_area_polygon = from_wkt(searchAreaWkt)
left, lower, right, upper = search_area_polygon.bounds
params = {
"q": query,
"count": min(count,1000),
"ttl": ttl,
"left": left,
"right": right,
"lower": lower,
"upper": upper
}
# initialize paging request
url = self.query_index_page_url % index if index else self.query_page_url
r = self.gbdx_connection.get(url, params=params)
r.raise_for_status()
page = r.json()
paging_id = page['next_paging_id']
item_count = int(page['item_count'])
data = page['data']
num_results = 0
for vector in data:
num_results += 1
if num_results > count: break
yield vector
if num_results == count:
return
# get vectors from each page
while paging_id and item_count > 0 and num_results < count:
headers = {'Content-Type':'application/x-www-form-urlencoded'}
data = {
"pagingId": paging_id,
"ttl": ttl
}
r = self.gbdx_connection.post(self.page_url, headers=headers, data=data)
r.raise_for_status()
page = r.json()
paging_id = page['next_paging_id']
item_count = int(page['item_count'])
data = page['data']
for vector in data:
num_results += 1
if num_results > count: break
yield vector
|
def query_iteratively(self, searchAreaWkt, query, count=100, ttl='5m', index=default_index):
'''
Perform a vector services query using the QUERY API
(https://gbdxdocs.digitalglobe.com/docs/vs-query-list-vector-items-returns-default-fields)
Args:
searchAreaWkt: WKT Polygon of area to search
query: Elasticsearch query
count: Maximum number of results to return
ttl: Amount of time for each temporary vector page to exist
Returns:
generator of vector results
'''
search_area_polygon = from_wkt(searchAreaWkt)
left, lower, right, upper = search_area_polygon.bounds
params = {
"q": query,
"count": min(count,1000),
"ttl": ttl,
"left": left,
"right": right,
"lower": lower,
"upper": upper
}
# initialize paging request
url = self.query_index_page_url % index if index else self.query_page_url
r = self.gbdx_connection.get(url, params=params)
r.raise_for_status()
page = r.json()
paging_id = page['next_paging_id']
item_count = int(page['item_count'])
data = page['data']
num_results = 0
for vector in data:
num_results += 1
if num_results > count: break
yield vector
if num_results == count:
return
# get vectors from each page
while paging_id and item_count > 0 and num_results < count:
headers = {'Content-Type':'application/x-www-form-urlencoded'}
data = {
"pagingId": paging_id,
"ttl": ttl
}
r = self.gbdx_connection.post(self.page_url, headers=headers, data=data)
r.raise_for_status()
page = r.json()
paging_id = page['next_paging_id']
item_count = int(page['item_count'])
data = page['data']
for vector in data:
num_results += 1
if num_results > count: break
yield vector
|
[
"Perform",
"a",
"vector",
"services",
"query",
"using",
"the",
"QUERY",
"API",
"(",
"https",
":",
"//",
"gbdxdocs",
".",
"digitalglobe",
".",
"com",
"/",
"docs",
"/",
"vs",
"-",
"query",
"-",
"list",
"-",
"vector",
"-",
"items",
"-",
"returns",
"-",
"default",
"-",
"fields",
")"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vectors.py#L186-L254
|
[
"def",
"query_iteratively",
"(",
"self",
",",
"searchAreaWkt",
",",
"query",
",",
"count",
"=",
"100",
",",
"ttl",
"=",
"'5m'",
",",
"index",
"=",
"default_index",
")",
":",
"search_area_polygon",
"=",
"from_wkt",
"(",
"searchAreaWkt",
")",
"left",
",",
"lower",
",",
"right",
",",
"upper",
"=",
"search_area_polygon",
".",
"bounds",
"params",
"=",
"{",
"\"q\"",
":",
"query",
",",
"\"count\"",
":",
"min",
"(",
"count",
",",
"1000",
")",
",",
"\"ttl\"",
":",
"ttl",
",",
"\"left\"",
":",
"left",
",",
"\"right\"",
":",
"right",
",",
"\"lower\"",
":",
"lower",
",",
"\"upper\"",
":",
"upper",
"}",
"# initialize paging request",
"url",
"=",
"self",
".",
"query_index_page_url",
"%",
"index",
"if",
"index",
"else",
"self",
".",
"query_page_url",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
",",
"params",
"=",
"params",
")",
"r",
".",
"raise_for_status",
"(",
")",
"page",
"=",
"r",
".",
"json",
"(",
")",
"paging_id",
"=",
"page",
"[",
"'next_paging_id'",
"]",
"item_count",
"=",
"int",
"(",
"page",
"[",
"'item_count'",
"]",
")",
"data",
"=",
"page",
"[",
"'data'",
"]",
"num_results",
"=",
"0",
"for",
"vector",
"in",
"data",
":",
"num_results",
"+=",
"1",
"if",
"num_results",
">",
"count",
":",
"break",
"yield",
"vector",
"if",
"num_results",
"==",
"count",
":",
"return",
"# get vectors from each page",
"while",
"paging_id",
"and",
"item_count",
">",
"0",
"and",
"num_results",
"<",
"count",
":",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/x-www-form-urlencoded'",
"}",
"data",
"=",
"{",
"\"pagingId\"",
":",
"paging_id",
",",
"\"ttl\"",
":",
"ttl",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"self",
".",
"page_url",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"data",
")",
"r",
".",
"raise_for_status",
"(",
")",
"page",
"=",
"r",
".",
"json",
"(",
")",
"paging_id",
"=",
"page",
"[",
"'next_paging_id'",
"]",
"item_count",
"=",
"int",
"(",
"page",
"[",
"'item_count'",
"]",
")",
"data",
"=",
"page",
"[",
"'data'",
"]",
"for",
"vector",
"in",
"data",
":",
"num_results",
"+=",
"1",
"if",
"num_results",
">",
"count",
":",
"break",
"yield",
"vector"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
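`query()` above already falls through to this generator when `count` is 1000 or more; it can also be consumed directly to stream results lazily. This reuses `gbdx` and `aoi` from the previous sketch.

for i, feature in enumerate(
        gbdx.vectors.query_iteratively(aoi, 'item_type:WV03_VNIR', count=2500)):
    if i == 3:
        break                                      # peek at the first few only
    print(feature['properties'].get('item_type'))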
valid
|
Vectors.aggregate_query
|
Aggregates results of a query into buckets defined by the 'agg_def' parameter. The aggregations are
represented by dicts containing a 'name' key and a 'terms' key holding a list of the aggregation buckets.
Each bucket element is a dict containing a 'term' key containing the term used for this bucket, a 'count' key
containing the count of items that match this bucket, and an 'aggregations' key containing any child
aggregations.
Args:
searchAreaWkt (str): wkt representation of the geometry
agg_def (str or AggregationDef): the aggregation definitions
query (str): a valid Elasticsearch query string to constrain the items going into the aggregation
start_date (str): either an ISO-8601 date string or a 'now' expression (e.g. "now-6d" or just "now")
end_date (str): either an ISO-8601 date string or a 'now' expression (e.g. "now-6d" or just "now")
count (int): the number of buckets to include in the aggregations (the top N will be returned)
index (str): the index (or alias or wildcard index expression) to run aggregations against, set to None for the entire set of vector indexes
Returns:
results (list): A (usually single-element) list of dict objects containing the aggregation results.
|
gbdxtools/vectors.py
|
def aggregate_query(self, searchAreaWkt, agg_def, query=None, start_date=None, end_date=None, count=10, index=default_index):
"""Aggregates results of a query into buckets defined by the 'agg_def' parameter. The aggregations are
represented by dicts containing a 'name' key and a 'terms' key holding a list of the aggregation buckets.
Each bucket element is a dict containing a 'term' key containing the term used for this bucket, a 'count' key
containing the count of items that match this bucket, and an 'aggregations' key containing any child
aggregations.
Args:
searchAreaWkt (str): wkt representation of the geometry
agg_def (str or AggregationDef): the aggregation definitions
query (str): a valid Elasticsearch query string to constrain the items going into the aggregation
start_date (str): either an ISO-8601 date string or a 'now' expression (e.g. "now-6d" or just "now")
end_date (str): either an ISO-8601 date string or a 'now' expression (e.g. "now-6d" or just "now")
count (int): the number of buckets to include in the aggregations (the top N will be returned)
index (str): the index (or alias or wildcard index expression) to run aggregations against, set to None for the entire set of vector indexes
Returns:
results (list): A (usually single-element) list of dict objects containing the aggregation results.
"""
geojson = load_wkt(searchAreaWkt).__geo_interface__
aggs_str = str(agg_def) # could be string or AggregationDef
params = {
"count": count,
"aggs": aggs_str
}
if query:
params['query'] = query
if start_date:
params['start_date'] = start_date
if end_date:
params['end_date'] = end_date
url = self.aggregations_by_index_url % index if index else self.aggregations_url
r = self.gbdx_connection.post(url, params=params, json=geojson)
r.raise_for_status()
return r.json(object_pairs_hook=OrderedDict)['aggregations']
|
def aggregate_query(self, searchAreaWkt, agg_def, query=None, start_date=None, end_date=None, count=10, index=default_index):
"""Aggregates results of a query into buckets defined by the 'agg_def' parameter. The aggregations are
represented by dicts containing a 'name' key and a 'terms' key holding a list of the aggregation buckets.
Each bucket element is a dict containing a 'term' key containing the term used for this bucket, a 'count' key
containing the count of items that match this bucket, and an 'aggregations' key containing any child
aggregations.
Args:
searchAreaWkt (str): wkt representation of the geometry
agg_def (str or AggregationDef): the aggregation definitions
query (str): a valid Elasticsearch query string to constrain the items going into the aggregation
start_date (str): either an ISO-8601 date string or a 'now' expression (e.g. "now-6d" or just "now")
end_date (str): either an ISO-8601 date string or a 'now' expression (e.g. "now-6d" or just "now")
count (int): the number of buckets to include in the aggregations (the top N will be returned)
index (str): the index (or alias or wildcard index expression) to run aggregations against, set to None for the entire set of vector indexes
Returns:
results (list): A (usually single-element) list of dict objects containing the aggregation results.
"""
geojson = load_wkt(searchAreaWkt).__geo_interface__
aggs_str = str(agg_def) # could be string or AggregationDef
params = {
"count": count,
"aggs": aggs_str
}
if query:
params['query'] = query
if start_date:
params['start_date'] = start_date
if end_date:
params['end_date'] = end_date
url = self.aggregations_by_index_url % index if index else self.aggregations_url
r = self.gbdx_connection.post(url, params=params, json=geojson)
r.raise_for_status()
return r.json(object_pairs_hook=OrderedDict)['aggregations']
|
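A minimal usage sketch for aggregate_query, assuming a configured gbdxtools session; the AOI and the "terms:item_type" aggregation definition string are illustrative:

from gbdxtools import Vectors

v = Vectors()
aoi = "POLYGON ((-105.35 39.50, -105.35 40.07, -104.65 40.07, -104.65 39.50, -105.35 39.50))"
# Bucket everything in the AOI by item type and print the top 5 buckets,
# using the 'terms'/'term'/'count' result structure described above.
for agg in v.aggregate_query(aoi, "terms:item_type", count=5):
    for bucket in agg["terms"]:
        print(bucket["term"], bucket["count"])
|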
[
"Aggregates",
"results",
"of",
"a",
"query",
"into",
"buckets",
"defined",
"by",
"the",
"agg_def",
"parameter",
".",
"The",
"aggregations",
"are",
"represented",
"by",
"dicts",
"containing",
"a",
"name",
"key",
"and",
"a",
"terms",
"key",
"holding",
"a",
"list",
"of",
"the",
"aggregation",
"buckets",
".",
"Each",
"bucket",
"element",
"is",
"a",
"dict",
"containing",
"a",
"term",
"key",
"containing",
"the",
"term",
"used",
"for",
"this",
"bucket",
"a",
"count",
"key",
"containing",
"the",
"count",
"of",
"items",
"that",
"match",
"this",
"bucket",
"and",
"an",
"aggregations",
"key",
"containing",
"any",
"child",
"aggregations",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vectors.py#L256-L296
|
[
"def",
"aggregate_query",
"(",
"self",
",",
"searchAreaWkt",
",",
"agg_def",
",",
"query",
"=",
"None",
",",
"start_date",
"=",
"None",
",",
"end_date",
"=",
"None",
",",
"count",
"=",
"10",
",",
"index",
"=",
"default_index",
")",
":",
"geojson",
"=",
"load_wkt",
"(",
"searchAreaWkt",
")",
".",
"__geo_interface__",
"aggs_str",
"=",
"str",
"(",
"agg_def",
")",
"# could be string or AggregationDef",
"params",
"=",
"{",
"\"count\"",
":",
"count",
",",
"\"aggs\"",
":",
"aggs_str",
"}",
"if",
"query",
":",
"params",
"[",
"'query'",
"]",
"=",
"query",
"if",
"start_date",
":",
"params",
"[",
"'start_date'",
"]",
"=",
"start_date",
"if",
"end_date",
":",
"params",
"[",
"'end_date'",
"]",
"=",
"end_date",
"url",
"=",
"self",
".",
"aggregations_by_index_url",
"%",
"index",
"if",
"index",
"else",
"self",
".",
"aggregations_url",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url",
",",
"params",
"=",
"params",
",",
"json",
"=",
"geojson",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"json",
"(",
"object_pairs_hook",
"=",
"OrderedDict",
")",
"[",
"'aggregations'",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Vectors.tilemap
|
Renders a mapbox gl map from a vector service query
|
gbdxtools/vectors.py
|
def tilemap(self, query, styles={}, bbox=[-180,-90,180,90], zoom=16,
api_key=os.environ.get('MAPBOX_API_KEY', None),
image=None, image_bounds=None,
index="vector-user-provided", name="GBDX_Task_Output", **kwargs):
"""
Renders a mapbox gl map from a vector service query
"""
try:
from IPython.display import display
except:
print("IPython is required to produce maps.")
return
assert api_key is not None, "No Mapbox API Key found. You can either pass in a token or set the MAPBOX_API_KEY environment variable."
wkt = box(*bbox).wkt
features = self.query(wkt, query, index=index)
union = cascaded_union([shape(f['geometry']) for f in features])
lon, lat = union.centroid.coords[0]
        url = 'https://vector.geobigdata.io/insight-vector/api/mvt/{z}/{x}/{y}?'
        url += 'q={}&index={}'.format(query, index)
if styles is not None and not isinstance(styles, list):
styles = [styles]
map_id = "map_{}".format(str(int(time.time())))
map_data = VectorTileLayer(url, source_name=name, styles=styles, **kwargs)
image_layer = self._build_image_layer(image, image_bounds)
template = BaseTemplate(map_id, **{
"lat": lat,
"lon": lon,
"zoom": zoom,
"datasource": json.dumps(map_data.datasource),
"layers": json.dumps(map_data.layers),
"image_layer": image_layer,
"mbkey": api_key,
"token": self.gbdx_connection.access_token
})
template.inject()
|
def tilemap(self, query, styles={}, bbox=[-180,-90,180,90], zoom=16,
api_key=os.environ.get('MAPBOX_API_KEY', None),
image=None, image_bounds=None,
index="vector-user-provided", name="GBDX_Task_Output", **kwargs):
"""
Renders a mapbox gl map from a vector service query
"""
try:
from IPython.display import display
except:
print("IPython is required to produce maps.")
return
assert api_key is not None, "No Mapbox API Key found. You can either pass in a token or set the MAPBOX_API_KEY environment variable."
wkt = box(*bbox).wkt
features = self.query(wkt, query, index=index)
union = cascaded_union([shape(f['geometry']) for f in features])
lon, lat = union.centroid.coords[0]
        url = 'https://vector.geobigdata.io/insight-vector/api/mvt/{z}/{x}/{y}?'
        url += 'q={}&index={}'.format(query, index)
if styles is not None and not isinstance(styles, list):
styles = [styles]
map_id = "map_{}".format(str(int(time.time())))
map_data = VectorTileLayer(url, source_name=name, styles=styles, **kwargs)
image_layer = self._build_image_layer(image, image_bounds)
template = BaseTemplate(map_id, **{
"lat": lat,
"lon": lon,
"zoom": zoom,
"datasource": json.dumps(map_data.datasource),
"layers": json.dumps(map_data.layers),
"image_layer": image_layer,
"mbkey": api_key,
"token": self.gbdx_connection.access_token
})
template.inject()
|
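A minimal usage sketch for tilemap, assuming a Jupyter notebook, a configured gbdxtools session, and a Mapbox key available via the MAPBOX_API_KEY environment variable; the query, bbox, and zoom are illustrative:

from gbdxtools import Vectors

v = Vectors()
# Renders an interactive vector-tile map inline in the notebook.
v.tilemap("item_type:WV02", bbox=[-105.10, 39.70, -105.00, 39.80], zoom=12)
|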
[
"Renders",
"a",
"mapbox",
"gl",
"map",
"from",
"a",
"vector",
"service",
"query"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vectors.py#L298-L339
|
[
"def",
"tilemap",
"(",
"self",
",",
"query",
",",
"styles",
"=",
"{",
"}",
",",
"bbox",
"=",
"[",
"-",
"180",
",",
"-",
"90",
",",
"180",
",",
"90",
"]",
",",
"zoom",
"=",
"16",
",",
"api_key",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'MAPBOX_API_KEY'",
",",
"None",
")",
",",
"image",
"=",
"None",
",",
"image_bounds",
"=",
"None",
",",
"index",
"=",
"\"vector-user-provided\"",
",",
"name",
"=",
"\"GBDX_Task_Output\"",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"from",
"IPython",
".",
"display",
"import",
"display",
"except",
":",
"print",
"(",
"\"IPython is required to produce maps.\"",
")",
"return",
"assert",
"api_key",
"is",
"not",
"None",
",",
"\"No Mapbox API Key found. You can either pass in a token or set the MAPBOX_API_KEY environment variable.\"",
"wkt",
"=",
"box",
"(",
"*",
"bbox",
")",
".",
"wkt",
"features",
"=",
"self",
".",
"query",
"(",
"wkt",
",",
"query",
",",
"index",
"=",
"index",
")",
"union",
"=",
"cascaded_union",
"(",
"[",
"shape",
"(",
"f",
"[",
"'geometry'",
"]",
")",
"for",
"f",
"in",
"features",
"]",
")",
"lon",
",",
"lat",
"=",
"union",
".",
"centroid",
".",
"coords",
"[",
"0",
"]",
"url",
"=",
"'https://vector.geobigdata.io/insight-vector/api/mvt/{z}/{x}/{y}?'",
"url",
"+=",
"'q={}&index={}'",
".",
"format",
"(",
"query",
",",
"index",
")",
"if",
"styles",
"is",
"not",
"None",
"and",
"not",
"isinstance",
"(",
"styles",
",",
"list",
")",
":",
"styles",
"=",
"[",
"styles",
"]",
"map_id",
"=",
"\"map_{}\"",
".",
"format",
"(",
"str",
"(",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
")",
"map_data",
"=",
"VectorTileLayer",
"(",
"url",
",",
"source_name",
"=",
"name",
",",
"styles",
"=",
"styles",
",",
"*",
"*",
"kwargs",
")",
"image_layer",
"=",
"self",
".",
"_build_image_layer",
"(",
"image",
",",
"image_bounds",
")",
"template",
"=",
"BaseTemplate",
"(",
"map_id",
",",
"*",
"*",
"{",
"\"lat\"",
":",
"lat",
",",
"\"lon\"",
":",
"lon",
",",
"\"zoom\"",
":",
"zoom",
",",
"\"datasource\"",
":",
"json",
".",
"dumps",
"(",
"map_data",
".",
"datasource",
")",
",",
"\"layers\"",
":",
"json",
".",
"dumps",
"(",
"map_data",
".",
"layers",
")",
",",
"\"image_layer\"",
":",
"image_layer",
",",
"\"mbkey\"",
":",
"api_key",
",",
"\"token\"",
":",
"self",
".",
"gbdx_connection",
".",
"access_token",
"}",
")",
"template",
".",
"inject",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Vectors.map
|
Renders a mapbox gl map from a vector service query or a list of geojson features
Args:
features (list): a list of geojson features
query (str): a VectorServices query
styles (list): a list of VectorStyles to apply to the features
bbox (list): a bounding box to query for features ([minx, miny, maxx, maxy])
zoom (int): the initial zoom level of the map
center (list): a list of [lat, lon] used to center the map
api_key (str): a valid Mapbox API key
image (dict): a CatalogImage or a ndarray
        image_bounds (list): a list of bounds for image positioning
        cmap (str): Matplotlib colormap to use for rendering single-band images (default: viridis)
        Using this outside of GBDX Notebooks requires a Mapbox API key; sign up for free at https://www.mapbox.com/pricing/
        Pass the key using the `api_key` keyword or set an environment variable called `MAPBOX_API_KEY`
|
gbdxtools/vectors.py
|
def map(self, features=None, query=None, styles=None,
bbox=[-180,-90,180,90], zoom=10, center=None,
image=None, image_bounds=None, cmap='viridis',
api_key=os.environ.get('MAPBOX_API_KEY', None), **kwargs):
"""
Renders a mapbox gl map from a vector service query or a list of geojson features
Args:
features (list): a list of geojson features
query (str): a VectorServices query
styles (list): a list of VectorStyles to apply to the features
bbox (list): a bounding box to query for features ([minx, miny, maxx, maxy])
zoom (int): the initial zoom level of the map
center (list): a list of [lat, lon] used to center the map
api_key (str): a valid Mapbox API key
image (dict): a CatalogImage or a ndarray
            image_bounds (list): a list of bounds for image positioning
            cmap (str): Matplotlib colormap to use for rendering single-band images (default: viridis)
            Using this outside of GBDX Notebooks requires a Mapbox API key; sign up for free at https://www.mapbox.com/pricing/
            Pass the key using the `api_key` keyword or set an environment variable called `MAPBOX_API_KEY`
"""
try:
from IPython.display import display
except:
print("IPython is required to produce maps.")
return
assert api_key is not None, "No Mapbox API Key found. You can either pass in a key or set the MAPBOX_API_KEY environment variable. Use outside of GBDX Notebooks requires a MapBox API key, sign up for free at https://www.mapbox.com/pricing/"
if features is None and query is not None:
wkt = box(*bbox).wkt
features = self.query(wkt, query, index=None)
elif features is None and query is None and image is None:
print('Must provide either a list of features or a query or an image')
return
if styles is not None and not isinstance(styles, list):
styles = [styles]
geojson = {"type":"FeatureCollection", "features": features}
if center is None and features is not None:
union = cascaded_union([shape(f['geometry']) for f in features])
lon, lat = union.centroid.coords[0]
elif center is None and image is not None:
try:
lon, lat = shape(image).centroid.coords[0]
except:
lon, lat = box(*image_bounds).centroid.coords[0]
else:
lat, lon = center
map_id = "map_{}".format(str(int(time.time())))
map_data = VectorGeojsonLayer(geojson, styles=styles, **kwargs)
image_layer = self._build_image_layer(image, image_bounds, cmap)
template = BaseTemplate(map_id, **{
"lat": lat,
"lon": lon,
"zoom": zoom,
"datasource": json.dumps(map_data.datasource),
"layers": json.dumps(map_data.layers),
"image_layer": image_layer,
"mbkey": api_key,
"token": 'dummy'
})
template.inject()
|
def map(self, features=None, query=None, styles=None,
bbox=[-180,-90,180,90], zoom=10, center=None,
image=None, image_bounds=None, cmap='viridis',
api_key=os.environ.get('MAPBOX_API_KEY', None), **kwargs):
"""
Renders a mapbox gl map from a vector service query or a list of geojson features
Args:
features (list): a list of geojson features
query (str): a VectorServices query
styles (list): a list of VectorStyles to apply to the features
bbox (list): a bounding box to query for features ([minx, miny, maxx, maxy])
zoom (int): the initial zoom level of the map
center (list): a list of [lat, lon] used to center the map
api_key (str): a valid Mapbox API key
image (dict): a CatalogImage or a ndarray
            image_bounds (list): a list of bounds for image positioning
            cmap (str): Matplotlib colormap to use for rendering single-band images (default: viridis)
            Using this outside of GBDX Notebooks requires a Mapbox API key; sign up for free at https://www.mapbox.com/pricing/
            Pass the key using the `api_key` keyword or set an environment variable called `MAPBOX_API_KEY`
"""
try:
from IPython.display import display
except:
print("IPython is required to produce maps.")
return
assert api_key is not None, "No Mapbox API Key found. You can either pass in a key or set the MAPBOX_API_KEY environment variable. Use outside of GBDX Notebooks requires a MapBox API key, sign up for free at https://www.mapbox.com/pricing/"
if features is None and query is not None:
wkt = box(*bbox).wkt
features = self.query(wkt, query, index=None)
elif features is None and query is None and image is None:
print('Must provide either a list of features or a query or an image')
return
if styles is not None and not isinstance(styles, list):
styles = [styles]
geojson = {"type":"FeatureCollection", "features": features}
if center is None and features is not None:
union = cascaded_union([shape(f['geometry']) for f in features])
lon, lat = union.centroid.coords[0]
elif center is None and image is not None:
try:
lon, lat = shape(image).centroid.coords[0]
except:
lon, lat = box(*image_bounds).centroid.coords[0]
else:
lat, lon = center
map_id = "map_{}".format(str(int(time.time())))
map_data = VectorGeojsonLayer(geojson, styles=styles, **kwargs)
image_layer = self._build_image_layer(image, image_bounds, cmap)
template = BaseTemplate(map_id, **{
"lat": lat,
"lon": lon,
"zoom": zoom,
"datasource": json.dumps(map_data.datasource),
"layers": json.dumps(map_data.layers),
"image_layer": image_layer,
"mbkey": api_key,
"token": 'dummy'
})
template.inject()
|
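A minimal usage sketch for map, under the same notebook and Mapbox-key assumptions as tilemap; the AOI and query are illustrative:

from gbdxtools import Vectors

v = Vectors()
aoi = "POLYGON ((-105.10 39.70, -105.10 39.80, -105.00 39.80, -105.00 39.70, -105.10 39.70))"
features = v.query(aoi, query="item_type:WV02")
# Render the returned GeoJSON features, centered on their combined centroid.
v.map(features=features, zoom=9)
|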
[
"Renders",
"a",
"mapbox",
"gl",
"map",
"from",
"a",
"vector",
"service",
"query",
"or",
"a",
"list",
"of",
"geojson",
"features"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/vectors.py#L342-L407
|
[
"def",
"map",
"(",
"self",
",",
"features",
"=",
"None",
",",
"query",
"=",
"None",
",",
"styles",
"=",
"None",
",",
"bbox",
"=",
"[",
"-",
"180",
",",
"-",
"90",
",",
"180",
",",
"90",
"]",
",",
"zoom",
"=",
"10",
",",
"center",
"=",
"None",
",",
"image",
"=",
"None",
",",
"image_bounds",
"=",
"None",
",",
"cmap",
"=",
"'viridis'",
",",
"api_key",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'MAPBOX_API_KEY'",
",",
"None",
")",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"from",
"IPython",
".",
"display",
"import",
"display",
"except",
":",
"print",
"(",
"\"IPython is required to produce maps.\"",
")",
"return",
"assert",
"api_key",
"is",
"not",
"None",
",",
"\"No Mapbox API Key found. You can either pass in a key or set the MAPBOX_API_KEY environment variable. Use outside of GBDX Notebooks requires a MapBox API key, sign up for free at https://www.mapbox.com/pricing/\"",
"if",
"features",
"is",
"None",
"and",
"query",
"is",
"not",
"None",
":",
"wkt",
"=",
"box",
"(",
"*",
"bbox",
")",
".",
"wkt",
"features",
"=",
"self",
".",
"query",
"(",
"wkt",
",",
"query",
",",
"index",
"=",
"None",
")",
"elif",
"features",
"is",
"None",
"and",
"query",
"is",
"None",
"and",
"image",
"is",
"None",
":",
"print",
"(",
"'Must provide either a list of features or a query or an image'",
")",
"return",
"if",
"styles",
"is",
"not",
"None",
"and",
"not",
"isinstance",
"(",
"styles",
",",
"list",
")",
":",
"styles",
"=",
"[",
"styles",
"]",
"geojson",
"=",
"{",
"\"type\"",
":",
"\"FeatureCollection\"",
",",
"\"features\"",
":",
"features",
"}",
"if",
"center",
"is",
"None",
"and",
"features",
"is",
"not",
"None",
":",
"union",
"=",
"cascaded_union",
"(",
"[",
"shape",
"(",
"f",
"[",
"'geometry'",
"]",
")",
"for",
"f",
"in",
"features",
"]",
")",
"lon",
",",
"lat",
"=",
"union",
".",
"centroid",
".",
"coords",
"[",
"0",
"]",
"elif",
"center",
"is",
"None",
"and",
"image",
"is",
"not",
"None",
":",
"try",
":",
"lon",
",",
"lat",
"=",
"shape",
"(",
"image",
")",
".",
"centroid",
".",
"coords",
"[",
"0",
"]",
"except",
":",
"lon",
",",
"lat",
"=",
"box",
"(",
"*",
"image_bounds",
")",
".",
"centroid",
".",
"coords",
"[",
"0",
"]",
"else",
":",
"lat",
",",
"lon",
"=",
"center",
"map_id",
"=",
"\"map_{}\"",
".",
"format",
"(",
"str",
"(",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
")",
"map_data",
"=",
"VectorGeojsonLayer",
"(",
"geojson",
",",
"styles",
"=",
"styles",
",",
"*",
"*",
"kwargs",
")",
"image_layer",
"=",
"self",
".",
"_build_image_layer",
"(",
"image",
",",
"image_bounds",
",",
"cmap",
")",
"template",
"=",
"BaseTemplate",
"(",
"map_id",
",",
"*",
"*",
"{",
"\"lat\"",
":",
"lat",
",",
"\"lon\"",
":",
"lon",
",",
"\"zoom\"",
":",
"zoom",
",",
"\"datasource\"",
":",
"json",
".",
"dumps",
"(",
"map_data",
".",
"datasource",
")",
",",
"\"layers\"",
":",
"json",
".",
"dumps",
"(",
"map_data",
".",
"layers",
")",
",",
"\"image_layer\"",
":",
"image_layer",
",",
"\"mbkey\"",
":",
"api_key",
",",
"\"token\"",
":",
"'dummy'",
"}",
")",
"template",
".",
"inject",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
DaskImage.read
|
Reads data from a dask array and returns the computed ndarray matching the given bands
Args:
bands (list): band indices to read from the image. Returns bands in the order specified in the list of bands.
Returns:
ndarray: a numpy array of image data
|
gbdxtools/images/meta.py
|
def read(self, bands=None, **kwargs):
"""Reads data from a dask array and returns the computed ndarray matching the given bands
Args:
bands (list): band indices to read from the image. Returns bands in the order specified in the list of bands.
Returns:
ndarray: a numpy array of image data
"""
arr = self
if bands is not None:
arr = self[bands, ...]
return arr.compute(scheduler=threaded_get)
|
def read(self, bands=None, **kwargs):
"""Reads data from a dask array and returns the computed ndarray matching the given bands
Args:
bands (list): band indices to read from the image. Returns bands in the order specified in the list of bands.
Returns:
ndarray: a numpy array of image data
"""
arr = self
if bands is not None:
arr = self[bands, ...]
return arr.compute(scheduler=threaded_get)
|
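A minimal usage sketch for read, assuming a configured gbdxtools session; the catalog ID, bbox, and band indices (an RGB ordering for a WorldView-3 image) are illustrative:

from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00', bbox=[2.28, 48.87, 2.30, 48.89])  # illustrative catalog ID
rgb = img.read(bands=[4, 2, 1])  # fetches and computes only the requested bands
print(rgb.shape)                 # (3, height, width)
|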
[
"Reads",
"data",
"from",
"a",
"dask",
"array",
"and",
"returns",
"the",
"computed",
"ndarray",
"matching",
"the",
"given",
"bands"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L77-L89
|
[
"def",
"read",
"(",
"self",
",",
"bands",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"arr",
"=",
"self",
"if",
"bands",
"is",
"not",
"None",
":",
"arr",
"=",
"self",
"[",
"bands",
",",
"...",
"]",
"return",
"arr",
".",
"compute",
"(",
"scheduler",
"=",
"threaded_get",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
DaskImage.randwindow
|
Get a random window of a given shape from within an image
Args:
window_shape (tuple): The desired shape of the returned image as (height, width) in pixels.
Returns:
image: a new image object of the specified shape and same type
|
gbdxtools/images/meta.py
|
def randwindow(self, window_shape):
"""Get a random window of a given shape from within an image
Args:
window_shape (tuple): The desired shape of the returned image as (height, width) in pixels.
Returns:
image: a new image object of the specified shape and same type
"""
row = random.randrange(window_shape[0], self.shape[1])
col = random.randrange(window_shape[1], self.shape[2])
return self[:, row-window_shape[0]:row, col-window_shape[1]:col]
|
def randwindow(self, window_shape):
"""Get a random window of a given shape from within an image
Args:
window_shape (tuple): The desired shape of the returned image as (height, width) in pixels.
Returns:
image: a new image object of the specified shape and same type
"""
row = random.randrange(window_shape[0], self.shape[1])
col = random.randrange(window_shape[1], self.shape[2])
return self[:, row-window_shape[0]:row, col-window_shape[1]:col]
|
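A minimal usage sketch for randwindow; the catalog ID is illustrative:

from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00')  # illustrative catalog ID
chip = img.randwindow((256, 256))  # a random 256x256 window, still lazy
data = chip.read()                 # shape: (bands, 256, 256)
|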
[
"Get",
"a",
"random",
"window",
"of",
"a",
"given",
"shape",
"from",
"within",
"an",
"image"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L91-L102
|
[
"def",
"randwindow",
"(",
"self",
",",
"window_shape",
")",
":",
"row",
"=",
"random",
".",
"randrange",
"(",
"window_shape",
"[",
"0",
"]",
",",
"self",
".",
"shape",
"[",
"1",
"]",
")",
"col",
"=",
"random",
".",
"randrange",
"(",
"window_shape",
"[",
"1",
"]",
",",
"self",
".",
"shape",
"[",
"2",
"]",
")",
"return",
"self",
"[",
":",
",",
"row",
"-",
"window_shape",
"[",
"0",
"]",
":",
"row",
",",
"col",
"-",
"window_shape",
"[",
"1",
"]",
":",
"col",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
DaskImage.iterwindows
|
Iterate over random windows of an image
Args:
        count (int): the number of windows to generate. Defaults to 64; if `None`, continues to yield random windows until stopped.
window_shape (tuple): The desired shape of each image as (height, width) in pixels.
Yields:
image: an image of the given shape and same type.
|
gbdxtools/images/meta.py
|
def iterwindows(self, count=64, window_shape=(256, 256)):
""" Iterate over random windows of an image
Args:
            count (int): the number of windows to generate. Defaults to 64; if `None`, continues to yield random windows until stopped.
window_shape (tuple): The desired shape of each image as (height, width) in pixels.
Yields:
image: an image of the given shape and same type.
"""
if count is None:
while True:
yield self.randwindow(window_shape)
else:
for i in xrange(count):
yield self.randwindow(window_shape)
|
def iterwindows(self, count=64, window_shape=(256, 256)):
""" Iterate over random windows of an image
Args:
            count (int): the number of windows to generate. Defaults to 64; if `None`, continues to yield random windows until stopped.
window_shape (tuple): The desired shape of each image as (height, width) in pixels.
Yields:
image: an image of the given shape and same type.
"""
if count is None:
while True:
yield self.randwindow(window_shape)
else:
for i in xrange(count):
yield self.randwindow(window_shape)
|
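A minimal usage sketch for iterwindows; the catalog ID is illustrative:

from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00')  # illustrative catalog ID
# Draw 8 random 256x256 chips; pass count=None for an endless stream.
for chip in img.iterwindows(count=8, window_shape=(256, 256)):
    data = chip.read()
|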
[
"Iterate",
"over",
"random",
"windows",
"of",
"an",
"image"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L104-L119
|
[
"def",
"iterwindows",
"(",
"self",
",",
"count",
"=",
"64",
",",
"window_shape",
"=",
"(",
"256",
",",
"256",
")",
")",
":",
"if",
"count",
"is",
"None",
":",
"while",
"True",
":",
"yield",
"self",
".",
"randwindow",
"(",
"window_shape",
")",
"else",
":",
"for",
"i",
"in",
"xrange",
"(",
"count",
")",
":",
"yield",
"self",
".",
"randwindow",
"(",
"window_shape",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
DaskImage.window_at
|
Return a subsetted window of a given size, centered on a geometry object
Useful for generating training sets from vector training data
Will throw a ValueError if the window is not within the image bounds
Args:
        geom (shapely.geometry): Geometry to center the image on
window_shape (tuple): The desired shape of the image as (height, width) in pixels.
Returns:
image: image object of same type
|
gbdxtools/images/meta.py
|
def window_at(self, geom, window_shape):
"""Return a subsetted window of a given size, centered on a geometry object
Useful for generating training sets from vector training data
Will throw a ValueError if the window is not within the image bounds
Args:
            geom (shapely.geometry): Geometry to center the image on
window_shape (tuple): The desired shape of the image as (height, width) in pixels.
Returns:
image: image object of same type
"""
# Centroids of the input geometry may not be centered on the object.
# For a covering image we use the bounds instead.
# This is also a workaround for issue 387.
y_size, x_size = window_shape[0], window_shape[1]
bounds = box(*geom.bounds)
px = ops.transform(self.__geo_transform__.rev, bounds).centroid
miny, maxy = int(px.y - y_size/2), int(px.y + y_size/2)
minx, maxx = int(px.x - x_size/2), int(px.x + x_size/2)
_, y_max, x_max = self.shape
if minx < 0 or miny < 0 or maxx > x_max or maxy > y_max:
raise ValueError("Input geometry resulted in a window outside of the image")
return self[:, miny:maxy, minx:maxx]
|
def window_at(self, geom, window_shape):
"""Return a subsetted window of a given size, centered on a geometry object
Useful for generating training sets from vector training data
Will throw a ValueError if the window is not within the image bounds
Args:
            geom (shapely.geometry): Geometry to center the image on
window_shape (tuple): The desired shape of the image as (height, width) in pixels.
Returns:
image: image object of same type
"""
# Centroids of the input geometry may not be centered on the object.
# For a covering image we use the bounds instead.
# This is also a workaround for issue 387.
y_size, x_size = window_shape[0], window_shape[1]
bounds = box(*geom.bounds)
px = ops.transform(self.__geo_transform__.rev, bounds).centroid
miny, maxy = int(px.y - y_size/2), int(px.y + y_size/2)
minx, maxx = int(px.x - x_size/2), int(px.x + x_size/2)
_, y_max, x_max = self.shape
if minx < 0 or miny < 0 or maxx > x_max or maxy > y_max:
raise ValueError("Input geometry resulted in a window outside of the image")
return self[:, miny:maxy, minx:maxx]
|
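A minimal usage sketch for window_at, assuming the image is in its default EPSG:4326 projection so the point can be given in lon/lat; the catalog ID and coordinates are illustrative:

from shapely.geometry import Point
from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00')  # illustrative catalog ID
# A 256x256 chip centered on a point of interest; raises ValueError if the
# window would fall outside the image bounds.
chip = img.window_at(Point(2.2945, 48.8584), (256, 256))
|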
[
"Return",
"a",
"subsetted",
"window",
"of",
"a",
"given",
"size",
"centered",
"on",
"a",
"geometry",
"object"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L121-L145
|
[
"def",
"window_at",
"(",
"self",
",",
"geom",
",",
"window_shape",
")",
":",
"# Centroids of the input geometry may not be centered on the object.",
"# For a covering image we use the bounds instead.",
"# This is also a workaround for issue 387.",
"y_size",
",",
"x_size",
"=",
"window_shape",
"[",
"0",
"]",
",",
"window_shape",
"[",
"1",
"]",
"bounds",
"=",
"box",
"(",
"*",
"geom",
".",
"bounds",
")",
"px",
"=",
"ops",
".",
"transform",
"(",
"self",
".",
"__geo_transform__",
".",
"rev",
",",
"bounds",
")",
".",
"centroid",
"miny",
",",
"maxy",
"=",
"int",
"(",
"px",
".",
"y",
"-",
"y_size",
"/",
"2",
")",
",",
"int",
"(",
"px",
".",
"y",
"+",
"y_size",
"/",
"2",
")",
"minx",
",",
"maxx",
"=",
"int",
"(",
"px",
".",
"x",
"-",
"x_size",
"/",
"2",
")",
",",
"int",
"(",
"px",
".",
"x",
"+",
"x_size",
"/",
"2",
")",
"_",
",",
"y_max",
",",
"x_max",
"=",
"self",
".",
"shape",
"if",
"minx",
"<",
"0",
"or",
"miny",
"<",
"0",
"or",
"maxx",
">",
"x_max",
"or",
"maxy",
">",
"y_max",
":",
"raise",
"ValueError",
"(",
"\"Input geometry resulted in a window outside of the image\"",
")",
"return",
"self",
"[",
":",
",",
"miny",
":",
"maxy",
",",
"minx",
":",
"maxx",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
DaskImage.window_cover
|
Iterate over a grid of windows of a specified shape covering an image.
The image is divided into a grid of tiles of size window_shape. Each iteration returns
the next window.
Args:
window_shape (tuple): The desired shape of each image as (height,
width) in pixels.
        pad (bool): Whether or not to pad edge cells. If False, cells that do not
have the desired shape will not be returned. Defaults to True.
Yields:
image: image object of same type.
|
gbdxtools/images/meta.py
|
def window_cover(self, window_shape, pad=True):
""" Iterate over a grid of windows of a specified shape covering an image.
The image is divided into a grid of tiles of size window_shape. Each iteration returns
the next window.
Args:
window_shape (tuple): The desired shape of each image as (height,
width) in pixels.
            pad (bool): Whether or not to pad edge cells. If False, cells that do not
have the desired shape will not be returned. Defaults to True.
Yields:
image: image object of same type.
"""
size_y, size_x = window_shape[0], window_shape[1]
_ndepth, _nheight, _nwidth = self.shape
nheight, _m = divmod(_nheight, size_y)
nwidth, _n = divmod(_nwidth, size_x)
img = self
if pad is True:
new_height, new_width = _nheight, _nwidth
if _m != 0:
new_height = (nheight + 1) * size_y
if _n != 0:
new_width = (nwidth + 1) * size_x
if (new_height, new_width) != (_nheight, _nwidth):
bounds = box(0, 0, new_width, new_height)
geom = ops.transform(self.__geo_transform__.fwd, bounds)
img = self[geom]
row_lims = range(0, img.shape[1], size_y)
col_lims = range(0, img.shape[2], size_x)
for maxy, maxx in product(row_lims, col_lims):
reg = img[:, maxy:(maxy + size_y), maxx:(maxx + size_x)]
if pad is False:
if reg.shape[1:] == window_shape:
yield reg
else:
yield reg
|
def window_cover(self, window_shape, pad=True):
""" Iterate over a grid of windows of a specified shape covering an image.
The image is divided into a grid of tiles of size window_shape. Each iteration returns
the next window.
Args:
window_shape (tuple): The desired shape of each image as (height,
width) in pixels.
            pad (bool): Whether or not to pad edge cells. If False, cells that do not
have the desired shape will not be returned. Defaults to True.
Yields:
image: image object of same type.
"""
size_y, size_x = window_shape[0], window_shape[1]
_ndepth, _nheight, _nwidth = self.shape
nheight, _m = divmod(_nheight, size_y)
nwidth, _n = divmod(_nwidth, size_x)
img = self
if pad is True:
new_height, new_width = _nheight, _nwidth
if _m != 0:
new_height = (nheight + 1) * size_y
if _n != 0:
new_width = (nwidth + 1) * size_x
if (new_height, new_width) != (_nheight, _nwidth):
bounds = box(0, 0, new_width, new_height)
geom = ops.transform(self.__geo_transform__.fwd, bounds)
img = self[geom]
row_lims = range(0, img.shape[1], size_y)
col_lims = range(0, img.shape[2], size_x)
for maxy, maxx in product(row_lims, col_lims):
reg = img[:, maxy:(maxy + size_y), maxx:(maxx + size_x)]
if pad is False:
if reg.shape[1:] == window_shape:
yield reg
else:
yield reg
|
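A minimal usage sketch for window_cover; the catalog ID is illustrative:

from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00')  # illustrative catalog ID
# Walk a 512x512 grid over the image; with pad=False the ragged
# edge tiles are skipped rather than padded.
for tile in img.window_cover((512, 512), pad=False):
    arr = tile.read()
|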
[
"Iterate",
"over",
"a",
"grid",
"of",
"windows",
"of",
"a",
"specified",
"shape",
"covering",
"an",
"image",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L147-L188
|
[
"def",
"window_cover",
"(",
"self",
",",
"window_shape",
",",
"pad",
"=",
"True",
")",
":",
"size_y",
",",
"size_x",
"=",
"window_shape",
"[",
"0",
"]",
",",
"window_shape",
"[",
"1",
"]",
"_ndepth",
",",
"_nheight",
",",
"_nwidth",
"=",
"self",
".",
"shape",
"nheight",
",",
"_m",
"=",
"divmod",
"(",
"_nheight",
",",
"size_y",
")",
"nwidth",
",",
"_n",
"=",
"divmod",
"(",
"_nwidth",
",",
"size_x",
")",
"img",
"=",
"self",
"if",
"pad",
"is",
"True",
":",
"new_height",
",",
"new_width",
"=",
"_nheight",
",",
"_nwidth",
"if",
"_m",
"!=",
"0",
":",
"new_height",
"=",
"(",
"nheight",
"+",
"1",
")",
"*",
"size_y",
"if",
"_n",
"!=",
"0",
":",
"new_width",
"=",
"(",
"nwidth",
"+",
"1",
")",
"*",
"size_x",
"if",
"(",
"new_height",
",",
"new_width",
")",
"!=",
"(",
"_nheight",
",",
"_nwidth",
")",
":",
"bounds",
"=",
"box",
"(",
"0",
",",
"0",
",",
"new_width",
",",
"new_height",
")",
"geom",
"=",
"ops",
".",
"transform",
"(",
"self",
".",
"__geo_transform__",
".",
"fwd",
",",
"bounds",
")",
"img",
"=",
"self",
"[",
"geom",
"]",
"row_lims",
"=",
"range",
"(",
"0",
",",
"img",
".",
"shape",
"[",
"1",
"]",
",",
"size_y",
")",
"col_lims",
"=",
"range",
"(",
"0",
",",
"img",
".",
"shape",
"[",
"2",
"]",
",",
"size_x",
")",
"for",
"maxy",
",",
"maxx",
"in",
"product",
"(",
"row_lims",
",",
"col_lims",
")",
":",
"reg",
"=",
"img",
"[",
":",
",",
"maxy",
":",
"(",
"maxy",
"+",
"size_y",
")",
",",
"maxx",
":",
"(",
"maxx",
"+",
"size_x",
")",
"]",
"if",
"pad",
"is",
"False",
":",
"if",
"reg",
".",
"shape",
"[",
"1",
":",
"]",
"==",
"window_shape",
":",
"yield",
"reg",
"else",
":",
"yield",
"reg"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
GeoDaskImage.aoi
|
Subsets the Image by the given bounds
Args:
bbox (list): optional. A bounding box array [minx, miny, maxx, maxy]
wkt (str): optional. A WKT geometry string
geojson (str): optional. A GeoJSON geometry dictionary
Returns:
image: an image instance of the same type
|
gbdxtools/images/meta.py
|
def aoi(self, **kwargs):
""" Subsets the Image by the given bounds
Args:
bbox (list): optional. A bounding box array [minx, miny, maxx, maxy]
wkt (str): optional. A WKT geometry string
geojson (str): optional. A GeoJSON geometry dictionary
Returns:
image: an image instance of the same type
"""
g = self._parse_geoms(**kwargs)
if g is None:
return self
else:
return self[g]
|
def aoi(self, **kwargs):
""" Subsets the Image by the given bounds
Args:
bbox (list): optional. A bounding box array [minx, miny, maxx, maxy]
wkt (str): optional. A WKT geometry string
geojson (str): optional. A GeoJSON geometry dictionary
Returns:
image: an image instance of the same type
"""
g = self._parse_geoms(**kwargs)
if g is None:
return self
else:
return self[g]
|
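A minimal usage sketch for aoi, showing two of the three interchangeable keyword forms; the catalog ID and bounds are illustrative:

from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00')  # illustrative catalog ID
clip = img.aoi(bbox=[2.28, 48.87, 2.30, 48.89])
same = img.aoi(wkt='POLYGON ((2.28 48.87, 2.28 48.89, 2.30 48.89, 2.30 48.87, 2.28 48.87))')
|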
[
"Subsets",
"the",
"Image",
"by",
"the",
"given",
"bounds"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L243-L258
|
[
"def",
"aoi",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"g",
"=",
"self",
".",
"_parse_geoms",
"(",
"*",
"*",
"kwargs",
")",
"if",
"g",
"is",
"None",
":",
"return",
"self",
"else",
":",
"return",
"self",
"[",
"g",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
GeoDaskImage.pxbounds
|
Returns the bounds of a geometry object in pixel coordinates
Args:
geom: Shapely geometry object or GeoJSON as Python dictionary or WKT string
clip (bool): Clip the bounds to the min/max extent of the image
Returns:
        tuple: bounds in pixels (min x, min y, max x, max y), clipped to the image extent when clip=True
|
gbdxtools/images/meta.py
|
def pxbounds(self, geom, clip=False):
""" Returns the bounds of a geometry object in pixel coordinates
Args:
geom: Shapely geometry object or GeoJSON as Python dictionary or WKT string
clip (bool): Clip the bounds to the min/max extent of the image
Returns:
            tuple: bounds in pixels (min x, min y, max x, max y), clipped to the image extent when clip=True
"""
try:
if isinstance(geom, dict):
if 'geometry' in geom:
geom = shape(geom['geometry'])
else:
geom = shape(geom)
elif isinstance(geom, BaseGeometry):
geom = shape(geom)
else:
geom = wkt.loads(geom)
except:
raise TypeError ("Invalid geometry object")
# if geometry doesn't overlap the image, return an error
if geom.disjoint(shape(self)):
raise ValueError("Geometry outside of image bounds")
# clip to pixels within the image
(xmin, ymin, xmax, ymax) = ops.transform(self.__geo_transform__.rev, geom).bounds
_nbands, ysize, xsize = self.shape
if clip:
xmin = max(xmin, 0)
ymin = max(ymin, 0)
xmax = min(xmax, xsize)
ymax = min(ymax, ysize)
return (xmin, ymin, xmax, ymax)
|
def pxbounds(self, geom, clip=False):
""" Returns the bounds of a geometry object in pixel coordinates
Args:
geom: Shapely geometry object or GeoJSON as Python dictionary or WKT string
clip (bool): Clip the bounds to the min/max extent of the image
Returns:
            tuple: bounds in pixels (min x, min y, max x, max y), clipped to the image extent when clip=True
"""
try:
if isinstance(geom, dict):
if 'geometry' in geom:
geom = shape(geom['geometry'])
else:
geom = shape(geom)
elif isinstance(geom, BaseGeometry):
geom = shape(geom)
else:
geom = wkt.loads(geom)
except:
raise TypeError ("Invalid geometry object")
# if geometry doesn't overlap the image, return an error
if geom.disjoint(shape(self)):
raise ValueError("Geometry outside of image bounds")
# clip to pixels within the image
(xmin, ymin, xmax, ymax) = ops.transform(self.__geo_transform__.rev, geom).bounds
_nbands, ysize, xsize = self.shape
if clip:
xmin = max(xmin, 0)
ymin = max(ymin, 0)
xmax = min(xmax, xsize)
ymax = min(ymax, ysize)
return (xmin, ymin, xmax, ymax)
|
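A minimal usage sketch for pxbounds; the catalog ID and geometry are illustrative:

from shapely.geometry import box
from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00', bbox=[2.28, 48.87, 2.30, 48.89])  # illustrative
geom = box(2.285, 48.875, 2.295, 48.885)
# clip=True snaps the result to the image extent.
xmin, ymin, xmax, ymax = img.pxbounds(geom, clip=True)
|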
[
"Returns",
"the",
"bounds",
"of",
"a",
"geometry",
"object",
"in",
"pixel",
"coordinates"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L260-L296
|
[
"def",
"pxbounds",
"(",
"self",
",",
"geom",
",",
"clip",
"=",
"False",
")",
":",
"try",
":",
"if",
"isinstance",
"(",
"geom",
",",
"dict",
")",
":",
"if",
"'geometry'",
"in",
"geom",
":",
"geom",
"=",
"shape",
"(",
"geom",
"[",
"'geometry'",
"]",
")",
"else",
":",
"geom",
"=",
"shape",
"(",
"geom",
")",
"elif",
"isinstance",
"(",
"geom",
",",
"BaseGeometry",
")",
":",
"geom",
"=",
"shape",
"(",
"geom",
")",
"else",
":",
"geom",
"=",
"wkt",
".",
"loads",
"(",
"geom",
")",
"except",
":",
"raise",
"TypeError",
"(",
"\"Invalid geometry object\"",
")",
"# if geometry doesn't overlap the image, return an error",
"if",
"geom",
".",
"disjoint",
"(",
"shape",
"(",
"self",
")",
")",
":",
"raise",
"ValueError",
"(",
"\"Geometry outside of image bounds\"",
")",
"# clip to pixels within the image",
"(",
"xmin",
",",
"ymin",
",",
"xmax",
",",
"ymax",
")",
"=",
"ops",
".",
"transform",
"(",
"self",
".",
"__geo_transform__",
".",
"rev",
",",
"geom",
")",
".",
"bounds",
"_nbands",
",",
"ysize",
",",
"xsize",
"=",
"self",
".",
"shape",
"if",
"clip",
":",
"xmin",
"=",
"max",
"(",
"xmin",
",",
"0",
")",
"ymin",
"=",
"max",
"(",
"ymin",
",",
"0",
")",
"xmax",
"=",
"min",
"(",
"xmax",
",",
"xsize",
")",
"ymax",
"=",
"min",
"(",
"ymax",
",",
"ysize",
")",
"return",
"(",
"xmin",
",",
"ymin",
",",
"xmax",
",",
"ymax",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
GeoDaskImage.geotiff
|
Creates a geotiff on the filesystem
Args:
path (str): optional, path to write the geotiff file to, default is ./output.tif
proj (str): optional, EPSG string of projection to reproject to
spec (str): optional, if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): optional, list of bands to export. If spec='rgb' will default to RGB bands,
otherwise will export all bands
Returns:
str: path the geotiff was written to
|
gbdxtools/images/meta.py
|
def geotiff(self, **kwargs):
""" Creates a geotiff on the filesystem
Args:
path (str): optional, path to write the geotiff file to, default is ./output.tif
proj (str): optional, EPSG string of projection to reproject to
spec (str): optional, if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): optional, list of bands to export. If spec='rgb' will default to RGB bands,
otherwise will export all bands
Returns:
str: path the geotiff was written to """
if 'proj' not in kwargs:
kwargs['proj'] = self.proj
return to_geotiff(self, **kwargs)
|
def geotiff(self, **kwargs):
""" Creates a geotiff on the filesystem
Args:
path (str): optional, path to write the geotiff file to, default is ./output.tif
proj (str): optional, EPSG string of projection to reproject to
spec (str): optional, if set to 'rgb', write out color-balanced 8-bit RGB tif
bands (list): optional, list of bands to export. If spec='rgb' will default to RGB bands,
otherwise will export all bands
Returns:
str: path the geotiff was written to """
if 'proj' not in kwargs:
kwargs['proj'] = self.proj
return to_geotiff(self, **kwargs)
|
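A minimal usage sketch for geotiff; the catalog ID, bounds, and output path are illustrative:

from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00', bbox=[2.28, 48.87, 2.30, 48.89])  # illustrative
# Write a color-balanced 8-bit RGB tif; omit spec to export all bands.
path = img.geotiff(path='./output_rgb.tif', spec='rgb')
|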
[
"Creates",
"a",
"geotiff",
"on",
"the",
"filesystem"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L298-L313
|
[
"def",
"geotiff",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'proj'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'proj'",
"]",
"=",
"self",
".",
"proj",
"return",
"to_geotiff",
"(",
"self",
",",
"*",
"*",
"kwargs",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
GeoDaskImage.warp
|
Delayed warp across an entire AOI or Image
Creates a new dask image by deferring calls to the warp_geometry on chunks
Args:
dem (ndarray): optional. A DEM for warping to specific elevation planes
proj (str): optional. An EPSG proj string to project the image data into ("EPSG:32612")
Returns:
daskarray: a warped image as deferred image array
|
gbdxtools/images/meta.py
|
def warp(self, dem=None, proj="EPSG:4326", **kwargs):
"""Delayed warp across an entire AOI or Image
Creates a new dask image by deferring calls to the warp_geometry on chunks
Args:
dem (ndarray): optional. A DEM for warping to specific elevation planes
proj (str): optional. An EPSG proj string to project the image data into ("EPSG:32612")
Returns:
daskarray: a warped image as deferred image array
"""
try:
img_md = self.rda.metadata["image"]
x_size = img_md["tileXSize"]
y_size = img_md["tileYSize"]
except (AttributeError, KeyError):
x_size = kwargs.get("chunk_size", 256)
y_size = kwargs.get("chunk_size", 256)
# Create an affine transform to convert between real-world and pixels
if self.proj is None:
from_proj = "EPSG:4326"
else:
from_proj = self.proj
try:
# NOTE: this only works on images that have rda rpcs metadata
center = wkt.loads(self.rda.metadata["image"]["imageBoundsWGS84"]).centroid
g = box(*(center.buffer(self.rda.metadata["rpcs"]["gsd"] / 2).bounds))
tfm = partial(pyproj.transform, pyproj.Proj(init="EPSG:4326"), pyproj.Proj(init=proj))
gsd = kwargs.get("gsd", ops.transform(tfm, g).area ** 0.5)
current_bounds = wkt.loads(self.rda.metadata["image"]["imageBoundsWGS84"]).bounds
except (AttributeError, KeyError, TypeError):
tfm = partial(pyproj.transform, pyproj.Proj(init=self.proj), pyproj.Proj(init=proj))
gsd = kwargs.get("gsd", (ops.transform(tfm, shape(self)).area / (self.shape[1] * self.shape[2])) ** 0.5 )
current_bounds = self.bounds
tfm = partial(pyproj.transform, pyproj.Proj(init=from_proj), pyproj.Proj(init=proj))
itfm = partial(pyproj.transform, pyproj.Proj(init=proj), pyproj.Proj(init=from_proj))
output_bounds = ops.transform(tfm, box(*current_bounds)).bounds
gtf = Affine.from_gdal(output_bounds[0], gsd, 0.0, output_bounds[3], 0.0, -1 * gsd)
ll = ~gtf * (output_bounds[:2])
ur = ~gtf * (output_bounds[2:])
x_chunks = int((ur[0] - ll[0]) / x_size) + 1
y_chunks = int((ll[1] - ur[1]) / y_size) + 1
num_bands = self.shape[0]
try:
dtype = RDA_TO_DTYPE[img_md["dataType"]]
except:
dtype = 'uint8'
daskmeta = {
"dask": {},
"chunks": (num_bands, y_size, x_size),
"dtype": dtype,
"name": "warp-{}".format(self.name),
"shape": (num_bands, y_chunks * y_size, x_chunks * x_size)
}
def px_to_geom(xmin, ymin):
xmax = int(xmin + x_size)
ymax = int(ymin + y_size)
bounds = list((gtf * (xmin, ymax)) + (gtf * (xmax, ymin)))
return box(*bounds)
full_bounds = box(*output_bounds)
dasks = []
if isinstance(dem, GeoDaskImage):
if dem.proj != proj:
dem = dem.warp(proj=proj, dem=dem)
dasks.append(dem.dask)
for y in xrange(y_chunks):
for x in xrange(x_chunks):
xmin = x * x_size
ymin = y * y_size
geometry = px_to_geom(xmin, ymin)
daskmeta["dask"][(daskmeta["name"], 0, y, x)] = (self._warp, geometry, gsd, dem, proj, dtype, 5)
daskmeta["dask"], _ = optimization.cull(HighLevelGraph.merge(daskmeta["dask"], *dasks), list(daskmeta["dask"].keys()))
gi = mapping(full_bounds)
gt = AffineTransform(gtf, proj)
image = GeoDaskImage(daskmeta, __geo_interface__ = gi, __geo_transform__ = gt)
return image[box(*output_bounds)]
|
def warp(self, dem=None, proj="EPSG:4326", **kwargs):
"""Delayed warp across an entire AOI or Image
Creates a new dask image by deferring calls to the warp_geometry on chunks
Args:
dem (ndarray): optional. A DEM for warping to specific elevation planes
proj (str): optional. An EPSG proj string to project the image data into ("EPSG:32612")
Returns:
daskarray: a warped image as deferred image array
"""
try:
img_md = self.rda.metadata["image"]
x_size = img_md["tileXSize"]
y_size = img_md["tileYSize"]
except (AttributeError, KeyError):
x_size = kwargs.get("chunk_size", 256)
y_size = kwargs.get("chunk_size", 256)
# Create an affine transform to convert between real-world and pixels
if self.proj is None:
from_proj = "EPSG:4326"
else:
from_proj = self.proj
try:
# NOTE: this only works on images that have rda rpcs metadata
center = wkt.loads(self.rda.metadata["image"]["imageBoundsWGS84"]).centroid
g = box(*(center.buffer(self.rda.metadata["rpcs"]["gsd"] / 2).bounds))
tfm = partial(pyproj.transform, pyproj.Proj(init="EPSG:4326"), pyproj.Proj(init=proj))
gsd = kwargs.get("gsd", ops.transform(tfm, g).area ** 0.5)
current_bounds = wkt.loads(self.rda.metadata["image"]["imageBoundsWGS84"]).bounds
except (AttributeError, KeyError, TypeError):
tfm = partial(pyproj.transform, pyproj.Proj(init=self.proj), pyproj.Proj(init=proj))
gsd = kwargs.get("gsd", (ops.transform(tfm, shape(self)).area / (self.shape[1] * self.shape[2])) ** 0.5 )
current_bounds = self.bounds
tfm = partial(pyproj.transform, pyproj.Proj(init=from_proj), pyproj.Proj(init=proj))
itfm = partial(pyproj.transform, pyproj.Proj(init=proj), pyproj.Proj(init=from_proj))
output_bounds = ops.transform(tfm, box(*current_bounds)).bounds
gtf = Affine.from_gdal(output_bounds[0], gsd, 0.0, output_bounds[3], 0.0, -1 * gsd)
ll = ~gtf * (output_bounds[:2])
ur = ~gtf * (output_bounds[2:])
x_chunks = int((ur[0] - ll[0]) / x_size) + 1
y_chunks = int((ll[1] - ur[1]) / y_size) + 1
num_bands = self.shape[0]
try:
dtype = RDA_TO_DTYPE[img_md["dataType"]]
except:
dtype = 'uint8'
daskmeta = {
"dask": {},
"chunks": (num_bands, y_size, x_size),
"dtype": dtype,
"name": "warp-{}".format(self.name),
"shape": (num_bands, y_chunks * y_size, x_chunks * x_size)
}
def px_to_geom(xmin, ymin):
xmax = int(xmin + x_size)
ymax = int(ymin + y_size)
bounds = list((gtf * (xmin, ymax)) + (gtf * (xmax, ymin)))
return box(*bounds)
full_bounds = box(*output_bounds)
dasks = []
if isinstance(dem, GeoDaskImage):
if dem.proj != proj:
dem = dem.warp(proj=proj, dem=dem)
dasks.append(dem.dask)
for y in xrange(y_chunks):
for x in xrange(x_chunks):
xmin = x * x_size
ymin = y * y_size
geometry = px_to_geom(xmin, ymin)
daskmeta["dask"][(daskmeta["name"], 0, y, x)] = (self._warp, geometry, gsd, dem, proj, dtype, 5)
daskmeta["dask"], _ = optimization.cull(HighLevelGraph.merge(daskmeta["dask"], *dasks), list(daskmeta["dask"].keys()))
gi = mapping(full_bounds)
gt = AffineTransform(gtf, proj)
image = GeoDaskImage(daskmeta, __geo_interface__ = gi, __geo_transform__ = gt)
return image[box(*output_bounds)]
|
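A minimal usage sketch for warp; the catalog ID and target projection (UTM zone 31N) are illustrative:

from gbdxtools import CatalogImage

img = CatalogImage('104001002838EC00', bbox=[2.28, 48.87, 2.30, 48.89])  # illustrative
# Reproject lazily; nothing is fetched until read()/compute() is called.
utm = img.warp(proj="EPSG:32631")
|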
[
"Delayed",
"warp",
"across",
"an",
"entire",
"AOI",
"or",
"Image"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L318-L406
|
[
"def",
"warp",
"(",
"self",
",",
"dem",
"=",
"None",
",",
"proj",
"=",
"\"EPSG:4326\"",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"img_md",
"=",
"self",
".",
"rda",
".",
"metadata",
"[",
"\"image\"",
"]",
"x_size",
"=",
"img_md",
"[",
"\"tileXSize\"",
"]",
"y_size",
"=",
"img_md",
"[",
"\"tileYSize\"",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"x_size",
"=",
"kwargs",
".",
"get",
"(",
"\"chunk_size\"",
",",
"256",
")",
"y_size",
"=",
"kwargs",
".",
"get",
"(",
"\"chunk_size\"",
",",
"256",
")",
"# Create an affine transform to convert between real-world and pixels",
"if",
"self",
".",
"proj",
"is",
"None",
":",
"from_proj",
"=",
"\"EPSG:4326\"",
"else",
":",
"from_proj",
"=",
"self",
".",
"proj",
"try",
":",
"# NOTE: this only works on images that have rda rpcs metadata",
"center",
"=",
"wkt",
".",
"loads",
"(",
"self",
".",
"rda",
".",
"metadata",
"[",
"\"image\"",
"]",
"[",
"\"imageBoundsWGS84\"",
"]",
")",
".",
"centroid",
"g",
"=",
"box",
"(",
"*",
"(",
"center",
".",
"buffer",
"(",
"self",
".",
"rda",
".",
"metadata",
"[",
"\"rpcs\"",
"]",
"[",
"\"gsd\"",
"]",
"/",
"2",
")",
".",
"bounds",
")",
")",
"tfm",
"=",
"partial",
"(",
"pyproj",
".",
"transform",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"\"EPSG:4326\"",
")",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"proj",
")",
")",
"gsd",
"=",
"kwargs",
".",
"get",
"(",
"\"gsd\"",
",",
"ops",
".",
"transform",
"(",
"tfm",
",",
"g",
")",
".",
"area",
"**",
"0.5",
")",
"current_bounds",
"=",
"wkt",
".",
"loads",
"(",
"self",
".",
"rda",
".",
"metadata",
"[",
"\"image\"",
"]",
"[",
"\"imageBoundsWGS84\"",
"]",
")",
".",
"bounds",
"except",
"(",
"AttributeError",
",",
"KeyError",
",",
"TypeError",
")",
":",
"tfm",
"=",
"partial",
"(",
"pyproj",
".",
"transform",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"self",
".",
"proj",
")",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"proj",
")",
")",
"gsd",
"=",
"kwargs",
".",
"get",
"(",
"\"gsd\"",
",",
"(",
"ops",
".",
"transform",
"(",
"tfm",
",",
"shape",
"(",
"self",
")",
")",
".",
"area",
"/",
"(",
"self",
".",
"shape",
"[",
"1",
"]",
"*",
"self",
".",
"shape",
"[",
"2",
"]",
")",
")",
"**",
"0.5",
")",
"current_bounds",
"=",
"self",
".",
"bounds",
"tfm",
"=",
"partial",
"(",
"pyproj",
".",
"transform",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"from_proj",
")",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"proj",
")",
")",
"itfm",
"=",
"partial",
"(",
"pyproj",
".",
"transform",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"proj",
")",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"from_proj",
")",
")",
"output_bounds",
"=",
"ops",
".",
"transform",
"(",
"tfm",
",",
"box",
"(",
"*",
"current_bounds",
")",
")",
".",
"bounds",
"gtf",
"=",
"Affine",
".",
"from_gdal",
"(",
"output_bounds",
"[",
"0",
"]",
",",
"gsd",
",",
"0.0",
",",
"output_bounds",
"[",
"3",
"]",
",",
"0.0",
",",
"-",
"1",
"*",
"gsd",
")",
"ll",
"=",
"~",
"gtf",
"*",
"(",
"output_bounds",
"[",
":",
"2",
"]",
")",
"ur",
"=",
"~",
"gtf",
"*",
"(",
"output_bounds",
"[",
"2",
":",
"]",
")",
"x_chunks",
"=",
"int",
"(",
"(",
"ur",
"[",
"0",
"]",
"-",
"ll",
"[",
"0",
"]",
")",
"/",
"x_size",
")",
"+",
"1",
"y_chunks",
"=",
"int",
"(",
"(",
"ll",
"[",
"1",
"]",
"-",
"ur",
"[",
"1",
"]",
")",
"/",
"y_size",
")",
"+",
"1",
"num_bands",
"=",
"self",
".",
"shape",
"[",
"0",
"]",
"try",
":",
"dtype",
"=",
"RDA_TO_DTYPE",
"[",
"img_md",
"[",
"\"dataType\"",
"]",
"]",
"except",
":",
"dtype",
"=",
"'uint8'",
"daskmeta",
"=",
"{",
"\"dask\"",
":",
"{",
"}",
",",
"\"chunks\"",
":",
"(",
"num_bands",
",",
"y_size",
",",
"x_size",
")",
",",
"\"dtype\"",
":",
"dtype",
",",
"\"name\"",
":",
"\"warp-{}\"",
".",
"format",
"(",
"self",
".",
"name",
")",
",",
"\"shape\"",
":",
"(",
"num_bands",
",",
"y_chunks",
"*",
"y_size",
",",
"x_chunks",
"*",
"x_size",
")",
"}",
"def",
"px_to_geom",
"(",
"xmin",
",",
"ymin",
")",
":",
"xmax",
"=",
"int",
"(",
"xmin",
"+",
"x_size",
")",
"ymax",
"=",
"int",
"(",
"ymin",
"+",
"y_size",
")",
"bounds",
"=",
"list",
"(",
"(",
"gtf",
"*",
"(",
"xmin",
",",
"ymax",
")",
")",
"+",
"(",
"gtf",
"*",
"(",
"xmax",
",",
"ymin",
")",
")",
")",
"return",
"box",
"(",
"*",
"bounds",
")",
"full_bounds",
"=",
"box",
"(",
"*",
"output_bounds",
")",
"dasks",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"dem",
",",
"GeoDaskImage",
")",
":",
"if",
"dem",
".",
"proj",
"!=",
"proj",
":",
"dem",
"=",
"dem",
".",
"warp",
"(",
"proj",
"=",
"proj",
",",
"dem",
"=",
"dem",
")",
"dasks",
".",
"append",
"(",
"dem",
".",
"dask",
")",
"for",
"y",
"in",
"xrange",
"(",
"y_chunks",
")",
":",
"for",
"x",
"in",
"xrange",
"(",
"x_chunks",
")",
":",
"xmin",
"=",
"x",
"*",
"x_size",
"ymin",
"=",
"y",
"*",
"y_size",
"geometry",
"=",
"px_to_geom",
"(",
"xmin",
",",
"ymin",
")",
"daskmeta",
"[",
"\"dask\"",
"]",
"[",
"(",
"daskmeta",
"[",
"\"name\"",
"]",
",",
"0",
",",
"y",
",",
"x",
")",
"]",
"=",
"(",
"self",
".",
"_warp",
",",
"geometry",
",",
"gsd",
",",
"dem",
",",
"proj",
",",
"dtype",
",",
"5",
")",
"daskmeta",
"[",
"\"dask\"",
"]",
",",
"_",
"=",
"optimization",
".",
"cull",
"(",
"HighLevelGraph",
".",
"merge",
"(",
"daskmeta",
"[",
"\"dask\"",
"]",
",",
"*",
"dasks",
")",
",",
"list",
"(",
"daskmeta",
"[",
"\"dask\"",
"]",
".",
"keys",
"(",
")",
")",
")",
"gi",
"=",
"mapping",
"(",
"full_bounds",
")",
"gt",
"=",
"AffineTransform",
"(",
"gtf",
",",
"proj",
")",
"image",
"=",
"GeoDaskImage",
"(",
"daskmeta",
",",
"__geo_interface__",
"=",
"gi",
",",
"__geo_transform__",
"=",
"gt",
")",
"return",
"image",
"[",
"box",
"(",
"*",
"output_bounds",
")",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
GeoDaskImage._parse_geoms
|
Finds supported geometry types, parses them and returns the geometry
|
gbdxtools/images/meta.py
|
def _parse_geoms(self, **kwargs):
""" Finds supported geometry types, parses them and returns the bbox """
bbox = kwargs.get('bbox', None)
wkt_geom = kwargs.get('wkt', None)
geojson = kwargs.get('geojson', None)
if bbox is not None:
g = box(*bbox)
elif wkt_geom is not None:
g = wkt.loads(wkt_geom)
elif geojson is not None:
g = shape(geojson)
else:
return None
if self.proj is None:
return g
else:
return self._reproject(g, from_proj=kwargs.get('from_proj', 'EPSG:4326'))
|
def _parse_geoms(self, **kwargs):
""" Finds supported geometry types, parses them and returns the bbox """
bbox = kwargs.get('bbox', None)
wkt_geom = kwargs.get('wkt', None)
geojson = kwargs.get('geojson', None)
if bbox is not None:
g = box(*bbox)
elif wkt_geom is not None:
g = wkt.loads(wkt_geom)
elif geojson is not None:
g = shape(geojson)
else:
return None
if self.proj is None:
return g
else:
return self._reproject(g, from_proj=kwargs.get('from_proj', 'EPSG:4326'))
|
[
"Finds",
"supported",
"geometry",
"types",
"parses",
"them",
"and",
"returns",
"the",
"bbox"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/meta.py#L451-L467
|
[
"def",
"_parse_geoms",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"bbox",
"=",
"kwargs",
".",
"get",
"(",
"'bbox'",
",",
"None",
")",
"wkt_geom",
"=",
"kwargs",
".",
"get",
"(",
"'wkt'",
",",
"None",
")",
"geojson",
"=",
"kwargs",
".",
"get",
"(",
"'geojson'",
",",
"None",
")",
"if",
"bbox",
"is",
"not",
"None",
":",
"g",
"=",
"box",
"(",
"*",
"bbox",
")",
"elif",
"wkt_geom",
"is",
"not",
"None",
":",
"g",
"=",
"wkt",
".",
"loads",
"(",
"wkt_geom",
")",
"elif",
"geojson",
"is",
"not",
"None",
":",
"g",
"=",
"shape",
"(",
"geojson",
")",
"else",
":",
"return",
"None",
"if",
"self",
".",
"proj",
"is",
"None",
":",
"return",
"g",
"else",
":",
"return",
"self",
".",
"_reproject",
"(",
"g",
",",
"from_proj",
"=",
"kwargs",
".",
"get",
"(",
"'from_proj'",
",",
"'EPSG:4326'",
")",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
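A short usage sketch of the keyword contract _parse_geoms implements; the aoi calls in the comment are illustrative only, with img standing in for any hypothetical gbdxtools image object.

from shapely.geometry import box, mapping

bbox = [-104.99, 39.73, -104.98, 39.74]   # minx, miny, maxx, maxy in EPSG:4326
wkt_geom = box(*bbox).wkt                 # the same area as WKT
geojson = mapping(box(*bbox))             # the same area as a GeoJSON mapping

# any one of the three kwargs selects the geometry, e.g.:
# img.aoi(bbox=bbox), img.aoi(wkt=wkt_geom), or img.aoi(geojson=geojson)
print(wkt_geom)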
|
valid
|
load_url
|
Loads a geotiff url inside a thread and returns as an ndarray
|
gbdxtools/images/tms_image.py
|
def load_url(url, shape=(8, 256, 256)):
""" Loads a geotiff url inside a thread and returns as an ndarray """
thread_id = threading.current_thread().ident
_curl = _curl_pool[thread_id]
_curl.setopt(_curl.URL, url)
_curl.setopt(pycurl.NOSIGNAL, 1)
_, ext = os.path.splitext(urlparse(url).path)
    with NamedTemporaryFile(prefix="gbdxtools", suffix=ext, delete=False) as temp: # TODO: apply correct file extension
_curl.setopt(_curl.WRITEDATA, temp.file)
_curl.perform()
code = _curl.getinfo(pycurl.HTTP_CODE)
try:
if(code != 200):
raise TypeError("Request for {} returned unexpected error code: {}".format(url, code))
arr = np.rollaxis(imread(temp), 2, 0)
except Exception as e:
print(e)
temp.seek(0)
print(temp.read())
arr = np.zeros(shape, dtype=np.uint8)
_curl.close()
del _curl_pool[thread_id]
finally:
temp.file.flush()
temp.close()
os.remove(temp.name)
return arr
|
def load_url(url, shape=(8, 256, 256)):
""" Loads a geotiff url inside a thread and returns as an ndarray """
thread_id = threading.current_thread().ident
_curl = _curl_pool[thread_id]
_curl.setopt(_curl.URL, url)
_curl.setopt(pycurl.NOSIGNAL, 1)
_, ext = os.path.splitext(urlparse(url).path)
    with NamedTemporaryFile(prefix="gbdxtools", suffix=ext, delete=False) as temp: # TODO: apply correct file extension
_curl.setopt(_curl.WRITEDATA, temp.file)
_curl.perform()
code = _curl.getinfo(pycurl.HTTP_CODE)
try:
if(code != 200):
raise TypeError("Request for {} returned unexpected error code: {}".format(url, code))
arr = np.rollaxis(imread(temp), 2, 0)
except Exception as e:
print(e)
temp.seek(0)
print(temp.read())
arr = np.zeros(shape, dtype=np.uint8)
_curl.close()
del _curl_pool[thread_id]
finally:
temp.file.flush()
temp.close()
os.remove(temp.name)
return arr
|
[
"Loads",
"a",
"geotiff",
"url",
"inside",
"a",
"thread",
"and",
"returns",
"as",
"an",
"ndarray"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/tms_image.py#L42-L68
|
[
"def",
"load_url",
"(",
"url",
",",
"shape",
"=",
"(",
"8",
",",
"256",
",",
"256",
")",
")",
":",
"thread_id",
"=",
"threading",
".",
"current_thread",
"(",
")",
".",
"ident",
"_curl",
"=",
"_curl_pool",
"[",
"thread_id",
"]",
"_curl",
".",
"setopt",
"(",
"_curl",
".",
"URL",
",",
"url",
")",
"_curl",
".",
"setopt",
"(",
"pycurl",
".",
"NOSIGNAL",
",",
"1",
")",
"_",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"urlparse",
"(",
"url",
")",
".",
"path",
")",
"with",
"NamedTemporaryFile",
"(",
"prefix",
"=",
"\"gbdxtools\"",
",",
"suffix",
"=",
"\".\"",
"+",
"ext",
",",
"delete",
"=",
"False",
")",
"as",
"temp",
":",
"# TODO: apply correct file extension",
"_curl",
".",
"setopt",
"(",
"_curl",
".",
"WRITEDATA",
",",
"temp",
".",
"file",
")",
"_curl",
".",
"perform",
"(",
")",
"code",
"=",
"_curl",
".",
"getinfo",
"(",
"pycurl",
".",
"HTTP_CODE",
")",
"try",
":",
"if",
"(",
"code",
"!=",
"200",
")",
":",
"raise",
"TypeError",
"(",
"\"Request for {} returned unexpected error code: {}\"",
".",
"format",
"(",
"url",
",",
"code",
")",
")",
"arr",
"=",
"np",
".",
"rollaxis",
"(",
"imread",
"(",
"temp",
")",
",",
"2",
",",
"0",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"e",
")",
"temp",
".",
"seek",
"(",
"0",
")",
"print",
"(",
"temp",
".",
"read",
"(",
")",
")",
"arr",
"=",
"np",
".",
"zeros",
"(",
"shape",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
"_curl",
".",
"close",
"(",
")",
"del",
"_curl_pool",
"[",
"thread_id",
"]",
"finally",
":",
"temp",
".",
"file",
".",
"flush",
"(",
")",
"temp",
".",
"close",
"(",
")",
"os",
".",
"remove",
"(",
"temp",
".",
"name",
")",
"return",
"arr"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
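load_url above leans on a module-level pool of per-thread pycurl handles; here is a self-contained reconstruction of that pattern. The pool and the fetch helper are local stand-ins, not the library's own objects.

import threading
from collections import defaultdict
import pycurl

# one reusable curl handle per thread id, mirroring the _curl_pool pattern
_curl_pool = defaultdict(pycurl.Curl)

def fetch(url, sink):
    # sink is any writable binary file object
    handle = _curl_pool[threading.current_thread().ident]
    handle.setopt(handle.URL, url)
    handle.setopt(pycurl.NOSIGNAL, 1)   # avoid signal handling issues in threads
    handle.setopt(handle.WRITEDATA, sink)
    handle.perform()
    return handle.getinfo(pycurl.HTTP_CODE)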
|
valid
|
TmsMeta._tile_coords
|
convert mercator bbox to tile index limits
|
gbdxtools/images/tms_image.py
|
def _tile_coords(self, bounds):
""" convert mercator bbox to tile index limits """
tfm = partial(pyproj.transform,
pyproj.Proj(init="epsg:3857"),
pyproj.Proj(init="epsg:4326"))
bounds = ops.transform(tfm, box(*bounds)).bounds
# because tiles have a common corner, the tiles that cover a
# given tile includes the adjacent neighbors.
# https://github.com/mapbox/mercantile/issues/84#issuecomment-413113791
west, south, east, north = bounds
epsilon = 1.0e-10
if east != west and north != south:
# 2D bbox
# shrink the bounds a small amount so that
# shapes/tiles round trip.
west += epsilon
south += epsilon
east -= epsilon
north -= epsilon
params = [west, south, east, north, [self.zoom_level]]
tile_coords = [(tile.x, tile.y) for tile in mercantile.tiles(*params)]
xtiles, ytiles = zip(*tile_coords)
minx = min(xtiles)
miny = min(ytiles)
maxx = max(xtiles)
maxy = max(ytiles)
return minx, miny, maxx, maxy
|
def _tile_coords(self, bounds):
""" convert mercator bbox to tile index limits """
tfm = partial(pyproj.transform,
pyproj.Proj(init="epsg:3857"),
pyproj.Proj(init="epsg:4326"))
bounds = ops.transform(tfm, box(*bounds)).bounds
# because tiles have a common corner, the tiles that cover a
# given tile includes the adjacent neighbors.
# https://github.com/mapbox/mercantile/issues/84#issuecomment-413113791
west, south, east, north = bounds
epsilon = 1.0e-10
if east != west and north != south:
# 2D bbox
# shrink the bounds a small amount so that
# shapes/tiles round trip.
west += epsilon
south += epsilon
east -= epsilon
north -= epsilon
params = [west, south, east, north, [self.zoom_level]]
tile_coords = [(tile.x, tile.y) for tile in mercantile.tiles(*params)]
xtiles, ytiles = zip(*tile_coords)
minx = min(xtiles)
miny = min(ytiles)
maxx = max(xtiles)
maxy = max(ytiles)
return minx, miny, maxx, maxy
|
[
"convert",
"mercator",
"bbox",
"to",
"tile",
"index",
"limits"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/images/tms_image.py#L166-L195
|
[
"def",
"_tile_coords",
"(",
"self",
",",
"bounds",
")",
":",
"tfm",
"=",
"partial",
"(",
"pyproj",
".",
"transform",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"\"epsg:3857\"",
")",
",",
"pyproj",
".",
"Proj",
"(",
"init",
"=",
"\"epsg:4326\"",
")",
")",
"bounds",
"=",
"ops",
".",
"transform",
"(",
"tfm",
",",
"box",
"(",
"*",
"bounds",
")",
")",
".",
"bounds",
"# because tiles have a common corner, the tiles that cover a",
"# given tile includes the adjacent neighbors.",
"# https://github.com/mapbox/mercantile/issues/84#issuecomment-413113791",
"west",
",",
"south",
",",
"east",
",",
"north",
"=",
"bounds",
"epsilon",
"=",
"1.0e-10",
"if",
"east",
"!=",
"west",
"and",
"north",
"!=",
"south",
":",
"# 2D bbox",
"# shrink the bounds a small amount so that",
"# shapes/tiles round trip.",
"west",
"+=",
"epsilon",
"south",
"+=",
"epsilon",
"east",
"-=",
"epsilon",
"north",
"-=",
"epsilon",
"params",
"=",
"[",
"west",
",",
"south",
",",
"east",
",",
"north",
",",
"[",
"self",
".",
"zoom_level",
"]",
"]",
"tile_coords",
"=",
"[",
"(",
"tile",
".",
"x",
",",
"tile",
".",
"y",
")",
"for",
"tile",
"in",
"mercantile",
".",
"tiles",
"(",
"*",
"params",
")",
"]",
"xtiles",
",",
"ytiles",
"=",
"zip",
"(",
"*",
"tile_coords",
")",
"minx",
"=",
"min",
"(",
"xtiles",
")",
"miny",
"=",
"min",
"(",
"ytiles",
")",
"maxx",
"=",
"max",
"(",
"xtiles",
")",
"maxy",
"=",
"max",
"(",
"ytiles",
")",
"return",
"minx",
",",
"miny",
",",
"maxx",
",",
"maxy"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
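A standalone sketch of the tile-index computation with lon/lat bounds fed straight to mercantile; _tile_coords arrives at the same call after reprojecting web-mercator bounds to EPSG:4326 and shrinking them by epsilon. The bounds and zoom here are made up.

import mercantile

west, south, east, north = -105.0, 39.7, -104.9, 39.8   # hypothetical lon/lat bounds
zoom = 12
tile_coords = [(t.x, t.y) for t in mercantile.tiles(west, south, east, north, [zoom])]
xtiles, ytiles = zip(*tile_coords)
print(min(xtiles), min(ytiles), max(xtiles), max(ytiles))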
|
valid
|
InputPorts.get
|
>>> inputs = InputPorts({"one": 1})
>>> "one" in inputs._ports
True
>>> "one" in inputs._vals
True
>>> inputs.get("one", 2) == 1
True
>>> inputs.get("two", 2) == 2
True
>>> "two" in inputs._ports
True
>>> "two" in inputs._vals
False
|
gbdxtools/task.py
|
def get(self, key, default=None):
"""
>>> inputs = InputPorts({"one": 1})
>>> "one" in inputs._ports
True
>>> "one" in inputs._vals
True
>>> inputs.get("one", 2) == 1
True
>>> inputs.get("two", 2) == 2
True
>>> "two" in inputs._ports
True
>>> "two" in inputs._vals
False
"""
if key not in self._ports:
self._ports[key] = self._port_template(key)
return self._vals.get(key, default)
|
def get(self, key, default=None):
"""
>>> inputs = InputPorts({"one": 1})
>>> "one" in inputs._ports
True
>>> "one" in inputs._vals
True
>>> inputs.get("one", 2) == 1
True
>>> inputs.get("two", 2) == 2
True
>>> "two" in inputs._ports
True
>>> "two" in inputs._vals
False
"""
if key not in self._ports:
self._ports[key] = self._port_template(key)
return self._vals.get(key, default)
|
[
">>>",
"inputs",
"=",
"InputPorts",
"(",
"{",
"one",
":",
"1",
"}",
")",
">>>",
"one",
"in",
"inputs",
".",
"_ports",
"True",
">>>",
"one",
"in",
"inputs",
".",
"_vals",
"True",
">>>",
"inputs",
".",
"get",
"(",
"one",
"2",
")",
"==",
"1",
"True",
">>>",
"inputs",
".",
"get",
"(",
"two",
"2",
")",
"==",
"2",
"True",
">>>",
"two",
"in",
"inputs",
".",
"_ports",
"True",
">>>",
"two",
"in",
"inputs",
".",
"_vals",
"False"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/task.py#L45-L63
|
[
"def",
"get",
"(",
"self",
",",
"key",
",",
"default",
"=",
"None",
")",
":",
"if",
"key",
"not",
"in",
"self",
".",
"_ports",
":",
"self",
".",
"_ports",
"[",
"key",
"]",
"=",
"self",
".",
"_port_template",
"(",
"key",
")",
"return",
"self",
".",
"_vals",
".",
"get",
"(",
"key",
",",
"default",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
load_url
|
Loads a geotiff url inside a thread and returns as an ndarray
|
gbdxtools/rda/fetch/threaded/libcurl/easy.py
|
def load_url(url, token, shape=(8, 256, 256)):
""" Loads a geotiff url inside a thread and returns as an ndarray """
_, ext = os.path.splitext(urlparse(url).path)
success = False
for i in xrange(MAX_RETRIES):
thread_id = threading.current_thread().ident
_curl = _curl_pool[thread_id]
_curl.setopt(_curl.URL, url)
_curl.setopt(pycurl.NOSIGNAL, 1)
_curl.setopt(pycurl.HTTPHEADER, ['Authorization: Bearer {}'.format(token)])
with NamedTemporaryFile(prefix="gbdxtools", suffix=ext, delete=False) as temp: # TODO: apply correct file extension
_curl.setopt(_curl.WRITEDATA, temp.file)
_curl.perform()
code = _curl.getinfo(pycurl.HTTP_CODE)
try:
if(code != 200):
raise TypeError("Request for {} returned unexpected error code: {}".format(url, code))
temp.file.flush()
temp.close()
arr = imread(temp.name)
if len(arr.shape) == 3:
arr = np.rollaxis(arr, 2, 0)
else:
arr = np.expand_dims(arr, axis=0)
success = True
return arr
except Exception as e:
_curl.close()
del _curl_pool[thread_id]
finally:
temp.close()
os.remove(temp.name)
if success is False:
raise TypeError("Request for {} returned unexpected error code: {}".format(url, code))
return arr
|
def load_url(url, token, shape=(8, 256, 256)):
""" Loads a geotiff url inside a thread and returns as an ndarray """
_, ext = os.path.splitext(urlparse(url).path)
success = False
for i in xrange(MAX_RETRIES):
thread_id = threading.current_thread().ident
_curl = _curl_pool[thread_id]
_curl.setopt(_curl.URL, url)
_curl.setopt(pycurl.NOSIGNAL, 1)
_curl.setopt(pycurl.HTTPHEADER, ['Authorization: Bearer {}'.format(token)])
with NamedTemporaryFile(prefix="gbdxtools", suffix=ext, delete=False) as temp: # TODO: apply correct file extension
_curl.setopt(_curl.WRITEDATA, temp.file)
_curl.perform()
code = _curl.getinfo(pycurl.HTTP_CODE)
try:
if(code != 200):
raise TypeError("Request for {} returned unexpected error code: {}".format(url, code))
temp.file.flush()
temp.close()
arr = imread(temp.name)
if len(arr.shape) == 3:
arr = np.rollaxis(arr, 2, 0)
else:
arr = np.expand_dims(arr, axis=0)
success = True
return arr
except Exception as e:
_curl.close()
del _curl_pool[thread_id]
finally:
temp.close()
os.remove(temp.name)
if success is False:
raise TypeError("Request for {} returned unexpected error code: {}".format(url, code))
return arr
|
[
"Loads",
"a",
"geotiff",
"url",
"inside",
"a",
"thread",
"and",
"returns",
"as",
"an",
"ndarray"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/rda/fetch/threaded/libcurl/easy.py#L36-L71
|
[
"def",
"load_url",
"(",
"url",
",",
"token",
",",
"shape",
"=",
"(",
"8",
",",
"256",
",",
"256",
")",
")",
":",
"_",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"urlparse",
"(",
"url",
")",
".",
"path",
")",
"success",
"=",
"False",
"for",
"i",
"in",
"xrange",
"(",
"MAX_RETRIES",
")",
":",
"thread_id",
"=",
"threading",
".",
"current_thread",
"(",
")",
".",
"ident",
"_curl",
"=",
"_curl_pool",
"[",
"thread_id",
"]",
"_curl",
".",
"setopt",
"(",
"_curl",
".",
"URL",
",",
"url",
")",
"_curl",
".",
"setopt",
"(",
"pycurl",
".",
"NOSIGNAL",
",",
"1",
")",
"_curl",
".",
"setopt",
"(",
"pycurl",
".",
"HTTPHEADER",
",",
"[",
"'Authorization: Bearer {}'",
".",
"format",
"(",
"token",
")",
"]",
")",
"with",
"NamedTemporaryFile",
"(",
"prefix",
"=",
"\"gbdxtools\"",
",",
"suffix",
"=",
"ext",
",",
"delete",
"=",
"False",
")",
"as",
"temp",
":",
"# TODO: apply correct file extension",
"_curl",
".",
"setopt",
"(",
"_curl",
".",
"WRITEDATA",
",",
"temp",
".",
"file",
")",
"_curl",
".",
"perform",
"(",
")",
"code",
"=",
"_curl",
".",
"getinfo",
"(",
"pycurl",
".",
"HTTP_CODE",
")",
"try",
":",
"if",
"(",
"code",
"!=",
"200",
")",
":",
"raise",
"TypeError",
"(",
"\"Request for {} returned unexpected error code: {}\"",
".",
"format",
"(",
"url",
",",
"code",
")",
")",
"temp",
".",
"file",
".",
"flush",
"(",
")",
"temp",
".",
"close",
"(",
")",
"arr",
"=",
"imread",
"(",
"temp",
".",
"name",
")",
"if",
"len",
"(",
"arr",
".",
"shape",
")",
"==",
"3",
":",
"arr",
"=",
"np",
".",
"rollaxis",
"(",
"arr",
",",
"2",
",",
"0",
")",
"else",
":",
"arr",
"=",
"np",
".",
"expand_dims",
"(",
"arr",
",",
"axis",
"=",
"0",
")",
"success",
"=",
"True",
"return",
"arr",
"except",
"Exception",
"as",
"e",
":",
"_curl",
".",
"close",
"(",
")",
"del",
"_curl_pool",
"[",
"thread_id",
"]",
"finally",
":",
"temp",
".",
"close",
"(",
")",
"os",
".",
"remove",
"(",
"temp",
".",
"name",
")",
"if",
"success",
"is",
"False",
":",
"raise",
"TypeError",
"(",
"\"Request for {} returned unexpected error code: {}\"",
".",
"format",
"(",
"url",
",",
"code",
")",
")",
"return",
"arr"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Workflow.launch
|
Launches GBDX workflow.
Args:
workflow (dict): Dictionary specifying workflow tasks.
Returns:
Workflow id (str).
|
gbdxtools/workflow.py
|
def launch(self, workflow):
"""Launches GBDX workflow.
Args:
workflow (dict): Dictionary specifying workflow tasks.
Returns:
Workflow id (str).
"""
# hit workflow api
try:
r = self.gbdx_connection.post(self.workflows_url, json=workflow)
try:
r.raise_for_status()
except:
print("GBDX API Status Code: %s" % r.status_code)
print("GBDX API Response: %s" % r.text)
r.raise_for_status()
workflow_id = r.json()['id']
return workflow_id
except TypeError:
self.logger.debug('Workflow not launched!')
|
def launch(self, workflow):
"""Launches GBDX workflow.
Args:
workflow (dict): Dictionary specifying workflow tasks.
Returns:
Workflow id (str).
"""
# hit workflow api
try:
r = self.gbdx_connection.post(self.workflows_url, json=workflow)
try:
r.raise_for_status()
except:
print("GBDX API Status Code: %s" % r.status_code)
print("GBDX API Response: %s" % r.text)
r.raise_for_status()
workflow_id = r.json()['id']
return workflow_id
except TypeError:
self.logger.debug('Workflow not launched!')
|
[
"Launches",
"GBDX",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/workflow.py#L35-L57
|
[
"def",
"launch",
"(",
"self",
",",
"workflow",
")",
":",
"# hit workflow api",
"try",
":",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"self",
".",
"workflows_url",
",",
"json",
"=",
"workflow",
")",
"try",
":",
"r",
".",
"raise_for_status",
"(",
")",
"except",
":",
"print",
"(",
"\"GBDX API Status Code: %s\"",
"%",
"r",
".",
"status_code",
")",
"print",
"(",
"\"GBDX API Response: %s\"",
"%",
"r",
".",
"text",
")",
"r",
".",
"raise_for_status",
"(",
")",
"workflow_id",
"=",
"r",
".",
"json",
"(",
")",
"[",
"'id'",
"]",
"return",
"workflow_id",
"except",
"TypeError",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Workflow not launched!'",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
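A hedged usage sketch for launch: it assumes a configured gbdxtools Interface that exposes this class as gbdx.workflow, and the task definition below is a placeholder rather than a real task type.

from gbdxtools import Interface

gbdx = Interface()

workflow = {
    "name": "example-workflow",
    "tasks": [{
        "name": "task-1",
        "taskType": "HelloGBDX",   # hypothetical task type
        "inputs": [],
        "outputs": []
    }]
}
workflow_id = gbdx.workflow.launch(workflow)
print(workflow_id)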
|
valid
|
Workflow.status
|
Checks workflow status.
Args:
workflow_id (str): Workflow id.
Returns:
Workflow status (str).
|
gbdxtools/workflow.py
|
def status(self, workflow_id):
"""Checks workflow status.
Args:
workflow_id (str): Workflow id.
Returns:
Workflow status (str).
"""
self.logger.debug('Get status of workflow: ' + workflow_id)
url = '%(wf_url)s/%(wf_id)s' % {
'wf_url': self.workflows_url, 'wf_id': workflow_id
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.json()['state']
|
def status(self, workflow_id):
"""Checks workflow status.
Args:
workflow_id (str): Workflow id.
Returns:
Workflow status (str).
"""
self.logger.debug('Get status of workflow: ' + workflow_id)
url = '%(wf_url)s/%(wf_id)s' % {
'wf_url': self.workflows_url, 'wf_id': workflow_id
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.json()['state']
|
[
"Checks",
"workflow",
"status",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/workflow.py#L59-L74
|
[
"def",
"status",
"(",
"self",
",",
"workflow_id",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Get status of workflow: '",
"+",
"workflow_id",
")",
"url",
"=",
"'%(wf_url)s/%(wf_id)s'",
"%",
"{",
"'wf_url'",
":",
"self",
".",
"workflows_url",
",",
"'wf_id'",
":",
"workflow_id",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"json",
"(",
")",
"[",
"'state'",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
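A polling sketch built on status. The record does not pin down the exact shape of the returned state, so the terminal-state check below compares against its string form and should be treated as an assumption.

import time

def wait_for(wf, workflow_id, interval=30, timeout=3600):
    # wf is a Workflow instance; workflow_id comes from a prior launch
    terminal = ('succeeded', 'failed', 'canceled', 'timedout', 'complete')
    deadline = time.time() + timeout
    while time.time() < deadline:
        state = wf.status(workflow_id)
        if any(word in str(state).lower() for word in terminal):
            return state
        time.sleep(interval)
    raise RuntimeError('workflow {} still running after {}s'.format(workflow_id, timeout))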
|
valid
|
Workflow.get_stdout
|
Get stdout for a particular task.
Args:
workflow_id (str): Workflow id.
task_id (str): Task id.
Returns:
Stdout of the task (string).
|
gbdxtools/workflow.py
|
def get_stdout(self, workflow_id, task_id):
"""Get stdout for a particular task.
Args:
workflow_id (str): Workflow id.
task_id (str): Task id.
Returns:
Stdout of the task (string).
"""
url = '%(wf_url)s/%(wf_id)s/tasks/%(task_id)s/stdout' % {
'wf_url': self.workflows_url, 'wf_id': workflow_id, 'task_id': task_id
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.text
|
def get_stdout(self, workflow_id, task_id):
"""Get stdout for a particular task.
Args:
workflow_id (str): Workflow id.
task_id (str): Task id.
Returns:
Stdout of the task (string).
"""
url = '%(wf_url)s/%(wf_id)s/tasks/%(task_id)s/stdout' % {
'wf_url': self.workflows_url, 'wf_id': workflow_id, 'task_id': task_id
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.text
|
[
"Get",
"stdout",
"for",
"a",
"particular",
"task",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/workflow.py#L94-L110
|
[
"def",
"get_stdout",
"(",
"self",
",",
"workflow_id",
",",
"task_id",
")",
":",
"url",
"=",
"'%(wf_url)s/%(wf_id)s/tasks/%(task_id)s/stdout'",
"%",
"{",
"'wf_url'",
":",
"self",
".",
"workflows_url",
",",
"'wf_id'",
":",
"workflow_id",
",",
"'task_id'",
":",
"task_id",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"text"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Workflow.cancel
|
Cancels a running workflow.
Args:
workflow_id (str): Workflow id.
Returns:
Nothing
|
gbdxtools/workflow.py
|
def cancel(self, workflow_id):
"""Cancels a running workflow.
Args:
workflow_id (str): Workflow id.
Returns:
Nothing
"""
self.logger.debug('Canceling workflow: ' + workflow_id)
url = '%(wf_url)s/%(wf_id)s/cancel' % {
'wf_url': self.workflows_url, 'wf_id': workflow_id
}
r = self.gbdx_connection.post(url, data='')
r.raise_for_status()
|
def cancel(self, workflow_id):
"""Cancels a running workflow.
Args:
workflow_id (str): Workflow id.
Returns:
Nothing
"""
self.logger.debug('Canceling workflow: ' + workflow_id)
url = '%(wf_url)s/%(wf_id)s/cancel' % {
'wf_url': self.workflows_url, 'wf_id': workflow_id
}
r = self.gbdx_connection.post(url, data='')
r.raise_for_status()
|
[
"Cancels",
"a",
"running",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/workflow.py#L147-L161
|
[
"def",
"cancel",
"(",
"self",
",",
"workflow_id",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Canceling workflow: '",
"+",
"workflow_id",
")",
"url",
"=",
"'%(wf_url)s/%(wf_id)s/cancel'",
"%",
"{",
"'wf_url'",
":",
"self",
".",
"workflows_url",
",",
"'wf_id'",
":",
"workflow_id",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url",
",",
"data",
"=",
"''",
")",
"r",
".",
"raise_for_status",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Workflow.launch_batch_workflow
|
Launches GBDX batch workflow.
Args:
batch_workflow (dict): Dictionary specifying batch workflow tasks.
Returns:
Batch Workflow id (str).
|
gbdxtools/workflow.py
|
def launch_batch_workflow(self, batch_workflow):
"""Launches GBDX batch workflow.
Args:
batch_workflow (dict): Dictionary specifying batch workflow tasks.
Returns:
Batch Workflow id (str).
"""
# hit workflow api
url = '%(base_url)s/batch_workflows' % {
'base_url': self.base_url
}
try:
r = self.gbdx_connection.post(url, json=batch_workflow)
batch_workflow_id = r.json()['batch_workflow_id']
return batch_workflow_id
except TypeError as e:
self.logger.debug('Batch Workflow not launched, reason: {0}'.format(e))
|
def launch_batch_workflow(self, batch_workflow):
"""Launches GBDX batch workflow.
Args:
batch_workflow (dict): Dictionary specifying batch workflow tasks.
Returns:
Batch Workflow id (str).
"""
# hit workflow api
url = '%(base_url)s/batch_workflows' % {
'base_url': self.base_url
}
try:
r = self.gbdx_connection.post(url, json=batch_workflow)
batch_workflow_id = r.json()['batch_workflow_id']
return batch_workflow_id
except TypeError as e:
self.logger.debug('Batch Workflow not launched, reason: {0}'.format(e))
|
[
"Launches",
"GBDX",
"batch",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/workflow.py#L163-L182
|
[
"def",
"launch_batch_workflow",
"(",
"self",
",",
"batch_workflow",
")",
":",
"# hit workflow api",
"url",
"=",
"'%(base_url)s/batch_workflows'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
"}",
"try",
":",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url",
",",
"json",
"=",
"batch_workflow",
")",
"batch_workflow_id",
"=",
"r",
".",
"json",
"(",
")",
"[",
"'batch_workflow_id'",
"]",
"return",
"batch_workflow_id",
"except",
"TypeError",
"as",
"e",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Batch Workflow not launched, reason: {0}'",
".",
"format",
"(",
"e",
")",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Workflow.batch_workflow_status
|
Checks GBDX batch workflow status.
Args:
batch workflow_id (str): Batch workflow id.
Returns:
Batch Workflow status (str).
|
gbdxtools/workflow.py
|
def batch_workflow_status(self, batch_workflow_id):
"""Checks GBDX batch workflow status.
Args:
batch workflow_id (str): Batch workflow id.
Returns:
Batch Workflow status (str).
"""
self.logger.debug('Get status of batch workflow: ' + batch_workflow_id)
url = '%(base_url)s/batch_workflows/%(batch_id)s' % {
'base_url': self.base_url, 'batch_id': batch_workflow_id
}
r = self.gbdx_connection.get(url)
return r.json()
|
def batch_workflow_status(self, batch_workflow_id):
"""Checks GBDX batch workflow status.
Args:
batch workflow_id (str): Batch workflow id.
Returns:
Batch Workflow status (str).
"""
self.logger.debug('Get status of batch workflow: ' + batch_workflow_id)
url = '%(base_url)s/batch_workflows/%(batch_id)s' % {
'base_url': self.base_url, 'batch_id': batch_workflow_id
}
r = self.gbdx_connection.get(url)
return r.json()
|
[
"Checks",
"GBDX",
"batch",
"workflow",
"status",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/workflow.py#L184-L199
|
[
"def",
"batch_workflow_status",
"(",
"self",
",",
"batch_workflow_id",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Get status of batch workflow: '",
"+",
"batch_workflow_id",
")",
"url",
"=",
"'%(base_url)s/batch_workflows/%(batch_id)s'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'batch_id'",
":",
"batch_workflow_id",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Workflow.batch_workflow_cancel
|
Cancels GBDX batch workflow.
Args:
batch workflow_id (str): Batch workflow id.
Returns:
Batch Workflow status (str).
|
gbdxtools/workflow.py
|
def batch_workflow_cancel(self, batch_workflow_id):
"""Cancels GBDX batch workflow.
Args:
batch workflow_id (str): Batch workflow id.
Returns:
Batch Workflow status (str).
"""
self.logger.debug('Cancel batch workflow: ' + batch_workflow_id)
url = '%(base_url)s/batch_workflows/%(batch_id)s/cancel' % {
'base_url': self.base_url, 'batch_id': batch_workflow_id
}
r = self.gbdx_connection.post(url)
return r.json()
|
def batch_workflow_cancel(self, batch_workflow_id):
"""Cancels GBDX batch workflow.
Args:
batch workflow_id (str): Batch workflow id.
Returns:
Batch Workflow status (str).
"""
self.logger.debug('Cancel batch workflow: ' + batch_workflow_id)
url = '%(base_url)s/batch_workflows/%(batch_id)s/cancel' % {
'base_url': self.base_url, 'batch_id': batch_workflow_id
}
r = self.gbdx_connection.post(url)
return r.json()
|
[
"Cancels",
"GBDX",
"batch",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/workflow.py#L201-L216
|
[
"def",
"batch_workflow_cancel",
"(",
"self",
",",
"batch_workflow_id",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Cancel batch workflow: '",
"+",
"batch_workflow_id",
")",
"url",
"=",
"'%(base_url)s/batch_workflows/%(batch_id)s/cancel'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'batch_id'",
":",
"batch_workflow_id",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Workflow.search
|
Searches GBDX workflows.
Args:
lookback_h (int): Look back time in hours.
owner (str): Workflow owner to search by
state (str): State to filter by, eg:
"submitted",
"scheduled",
"started",
"canceled",
"cancelling",
"failed",
"succeeded",
"timedout",
"pending",
"running",
"complete",
"waiting",
"all"
Returns:
Workflow search results (dict).
|
gbdxtools/workflow.py
|
def search(self, lookback_h=12, owner=None, state="all"):
"""Cancels GBDX batch workflow.
Params:
lookback_h (int): Look back time in hours.
owner (str): Workflow owner to search by
state (str): State to filter by, eg:
"submitted",
"scheduled",
"started",
"canceled",
"cancelling",
"failed",
"succeeded",
"timedout",
"pending",
"running",
"complete",
"waiting",
"all"
Returns:
            Workflow search results (dict).
"""
postdata = {
"lookback_h": lookback_h,
"state": state
}
if owner is not None:
postdata['owner'] = owner
url = "{}/workflows/search".format(self.base_url)
headers = {'Content-Type':'application/json'}
r = self.gbdx_connection.post(url, headers=headers, data=json.dumps(postdata))
return r.json()
|
def search(self, lookback_h=12, owner=None, state="all"):
"""Cancels GBDX batch workflow.
Params:
lookback_h (int): Look back time in hours.
owner (str): Workflow owner to search by
state (str): State to filter by, eg:
"submitted",
"scheduled",
"started",
"canceled",
"cancelling",
"failed",
"succeeded",
"timedout",
"pending",
"running",
"complete",
"waiting",
"all"
Returns:
            Workflow search results (dict).
"""
postdata = {
"lookback_h": lookback_h,
"state": state
}
if owner is not None:
postdata['owner'] = owner
url = "{}/workflows/search".format(self.base_url)
headers = {'Content-Type':'application/json'}
r = self.gbdx_connection.post(url, headers=headers, data=json.dumps(postdata))
return r.json()
|
[
"Cancels",
"GBDX",
"batch",
"workflow",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/workflow.py#L218-L253
|
[
"def",
"search",
"(",
"self",
",",
"lookback_h",
"=",
"12",
",",
"owner",
"=",
"None",
",",
"state",
"=",
"\"all\"",
")",
":",
"postdata",
"=",
"{",
"\"lookback_h\"",
":",
"lookback_h",
",",
"\"state\"",
":",
"state",
"}",
"if",
"owner",
"is",
"not",
"None",
":",
"postdata",
"[",
"'owner'",
"]",
"=",
"owner",
"url",
"=",
"\"{}/workflows/search\"",
".",
"format",
"(",
"self",
".",
"base_url",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"postdata",
")",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
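A short usage sketch for search (gbdx is an assumed, already-configured Interface; the return value is the raw search JSON).

results = gbdx.workflow.search(lookback_h=24, state='running')   # running workflows, last 24h
print(results)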
|
valid
|
Ordering.order
|
Orders images from GBDX.
Args:
image_catalog_ids (str or list): A single catalog id or a list of
catalog ids.
batch_size (int): The image_catalog_ids will be split into
batches of batch_size. The ordering API max
                              batch size is 100; if batch_size is greater
than 100 it will be truncated.
callback (str): A url to call when ordering is completed.
Returns:
order_ids (str or list): If one batch, returns a string. If more
than one batch, returns a list of order ids,
one for each batch.
|
gbdxtools/ordering.py
|
def order(self, image_catalog_ids, batch_size=100, callback=None):
'''Orders images from GBDX.
Args:
image_catalog_ids (str or list): A single catalog id or a list of
catalog ids.
batch_size (int): The image_catalog_ids will be split into
batches of batch_size. The ordering API max
                              batch size is 100; if batch_size is greater
than 100 it will be truncated.
callback (str): A url to call when ordering is completed.
Returns:
order_ids (str or list): If one batch, returns a string. If more
than one batch, returns a list of order ids,
one for each batch.
'''
def _order_single_batch(url_, ids, results_list):
data = json.dumps(ids) if callback is None else json.dumps({"acquisitionIds": ids, "callback": callback})
r = self.gbdx_connection.post(url_, data=data)
r.raise_for_status()
order_id = r.json().get("order_id")
if order_id:
results_list.append(order_id)
self.logger.debug('Place order')
url = ('%s/order' if callback is None else '%s/ordercb') % self.base_url
batch_size = min(100, batch_size)
if not isinstance(image_catalog_ids, list):
image_catalog_ids = [image_catalog_ids]
sanitized_ids = list(set((id for id in (_id.strip() for _id in image_catalog_ids) if id)))
res = []
    # Use itertools batch recipe
acq_ids_by_batch = zip(*([iter(sanitized_ids)] * batch_size))
for ids_batch in acq_ids_by_batch:
_order_single_batch(url, ids_batch, res)
    # Order remainder
remain_count = len(sanitized_ids) % batch_size
if remain_count > 0:
_order_single_batch(url, sanitized_ids[-remain_count:], res)
if len(res) == 1:
return res[0]
elif len(res)>1:
return res
|
def order(self, image_catalog_ids, batch_size=100, callback=None):
'''Orders images from GBDX.
Args:
image_catalog_ids (str or list): A single catalog id or a list of
catalog ids.
batch_size (int): The image_catalog_ids will be split into
batches of batch_size. The ordering API max
                              batch size is 100; if batch_size is greater
than 100 it will be truncated.
callback (str): A url to call when ordering is completed.
Returns:
order_ids (str or list): If one batch, returns a string. If more
than one batch, returns a list of order ids,
one for each batch.
'''
def _order_single_batch(url_, ids, results_list):
data = json.dumps(ids) if callback is None else json.dumps({"acquisitionIds": ids, "callback": callback})
r = self.gbdx_connection.post(url_, data=data)
r.raise_for_status()
order_id = r.json().get("order_id")
if order_id:
results_list.append(order_id)
self.logger.debug('Place order')
url = ('%s/order' if callback is None else '%s/ordercb') % self.base_url
batch_size = min(100, batch_size)
if not isinstance(image_catalog_ids, list):
image_catalog_ids = [image_catalog_ids]
sanitized_ids = list(set((id for id in (_id.strip() for _id in image_catalog_ids) if id)))
res = []
    # Use itertools batch recipe
acq_ids_by_batch = zip(*([iter(sanitized_ids)] * batch_size))
for ids_batch in acq_ids_by_batch:
_order_single_batch(url, ids_batch, res)
    # Order remainder
remain_count = len(sanitized_ids) % batch_size
if remain_count > 0:
_order_single_batch(url, sanitized_ids[-remain_count:], res)
if len(res) == 1:
return res[0]
elif len(res)>1:
return res
|
[
"Orders",
"images",
"from",
"GBDX",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/ordering.py#L27-L76
|
[
"def",
"order",
"(",
"self",
",",
"image_catalog_ids",
",",
"batch_size",
"=",
"100",
",",
"callback",
"=",
"None",
")",
":",
"def",
"_order_single_batch",
"(",
"url_",
",",
"ids",
",",
"results_list",
")",
":",
"data",
"=",
"json",
".",
"dumps",
"(",
"ids",
")",
"if",
"callback",
"is",
"None",
"else",
"json",
".",
"dumps",
"(",
"{",
"\"acquisitionIds\"",
":",
"ids",
",",
"\"callback\"",
":",
"callback",
"}",
")",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url_",
",",
"data",
"=",
"data",
")",
"r",
".",
"raise_for_status",
"(",
")",
"order_id",
"=",
"r",
".",
"json",
"(",
")",
".",
"get",
"(",
"\"order_id\"",
")",
"if",
"order_id",
":",
"results_list",
".",
"append",
"(",
"order_id",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'Place order'",
")",
"url",
"=",
"(",
"'%s/order'",
"if",
"callback",
"is",
"None",
"else",
"'%s/ordercb'",
")",
"%",
"self",
".",
"base_url",
"batch_size",
"=",
"min",
"(",
"100",
",",
"batch_size",
")",
"if",
"not",
"isinstance",
"(",
"image_catalog_ids",
",",
"list",
")",
":",
"image_catalog_ids",
"=",
"[",
"image_catalog_ids",
"]",
"sanitized_ids",
"=",
"list",
"(",
"set",
"(",
"(",
"id",
"for",
"id",
"in",
"(",
"_id",
".",
"strip",
"(",
")",
"for",
"_id",
"in",
"image_catalog_ids",
")",
"if",
"id",
")",
")",
")",
"res",
"=",
"[",
"]",
"# Use itertool batch recipe",
"acq_ids_by_batch",
"=",
"zip",
"(",
"*",
"(",
"[",
"iter",
"(",
"sanitized_ids",
")",
"]",
"*",
"batch_size",
")",
")",
"for",
"ids_batch",
"in",
"acq_ids_by_batch",
":",
"_order_single_batch",
"(",
"url",
",",
"ids_batch",
",",
"res",
")",
"# Order reminder",
"remain_count",
"=",
"len",
"(",
"sanitized_ids",
")",
"%",
"batch_size",
"if",
"remain_count",
">",
"0",
":",
"_order_single_batch",
"(",
"url",
",",
"sanitized_ids",
"[",
"-",
"remain_count",
":",
"]",
",",
"res",
")",
"if",
"len",
"(",
"res",
")",
"==",
"1",
":",
"return",
"res",
"[",
"0",
"]",
"elif",
"len",
"(",
"res",
")",
">",
"1",
":",
"return",
"res"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
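The batching inside order uses the classic zip-on-a-shared-iterator grouper; here is a standalone demonstration with made-up ids, including the remainder handling that the trailing branch covers.

ids = ['cat-{}'.format(i) for i in range(7)]     # hypothetical catalog ids
batch_size = 3

batches = list(zip(*([iter(ids)] * batch_size)))
# -> [('cat-0', 'cat-1', 'cat-2'), ('cat-3', 'cat-4', 'cat-5')]
remainder = len(ids) % batch_size
if remainder:
    batches.append(tuple(ids[-remainder:]))      # ('cat-6',)
print(batches)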
|
valid
|
Ordering.status
|
Checks imagery order status. There can be more than one image per
order and this function returns the status of all images
within the order.
Args:
order_id (str): The id of the order placed.
Returns:
List of dictionaries, one per image. Each dictionary consists
of the keys 'acquisition_id', 'location' and 'state'.
|
gbdxtools/ordering.py
|
def status(self, order_id):
'''Checks imagery order status. There can be more than one image per
order and this function returns the status of all images
within the order.
Args:
order_id (str): The id of the order placed.
Returns:
List of dictionaries, one per image. Each dictionary consists
of the keys 'acquisition_id', 'location' and 'state'.
'''
self.logger.debug('Get status of order ' + order_id)
url = '%(base_url)s/order/%(order_id)s' % {
'base_url': self.base_url, 'order_id': order_id
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.json().get("acquisitions", {})
|
def status(self, order_id):
'''Checks imagery order status. There can be more than one image per
order and this function returns the status of all images
within the order.
Args:
order_id (str): The id of the order placed.
Returns:
List of dictionaries, one per image. Each dictionary consists
of the keys 'acquisition_id', 'location' and 'state'.
'''
self.logger.debug('Get status of order ' + order_id)
url = '%(base_url)s/order/%(order_id)s' % {
'base_url': self.base_url, 'order_id': order_id
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.json().get("acquisitions", {})
|
[
"Checks",
"imagery",
"order",
"status",
".",
"There",
"can",
"be",
"more",
"than",
"one",
"image",
"per",
"order",
"and",
"this",
"function",
"returns",
"the",
"status",
"of",
"all",
"images",
"within",
"the",
"order",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/ordering.py#L78-L97
|
[
"def",
"status",
"(",
"self",
",",
"order_id",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Get status of order '",
"+",
"order_id",
")",
"url",
"=",
"'%(base_url)s/order/%(order_id)s'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'order_id'",
":",
"order_id",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"json",
"(",
")",
".",
"get",
"(",
"\"acquisitions\"",
",",
"{",
"}",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
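A usage sketch combining order placement with status; the catalog id is hypothetical, and gbdx.ordering assumes a configured Interface.

order_id = gbdx.ordering.order('10400100120FEA00')   # hypothetical catalog id
for acq in gbdx.ordering.status(order_id):
    print(acq['acquisition_id'], acq['state'], acq['location'])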
|
valid
|
Ordering.heartbeat
|
Check the heartbeat of the ordering API
Args: None
Returns: True or False
|
gbdxtools/ordering.py
|
def heartbeat(self):
'''
Check the heartbeat of the ordering API
Args: None
Returns: True or False
'''
url = '%s/heartbeat' % self.base_url
# Auth is not required to hit the heartbeat
r = requests.get(url)
try:
return r.json() == "ok"
except:
return False
|
def heartbeat(self):
'''
Check the heartbeat of the ordering API
Args: None
Returns: True or False
'''
url = '%s/heartbeat' % self.base_url
# Auth is not required to hit the heartbeat
r = requests.get(url)
try:
return r.json() == "ok"
except:
return False
|
[
"Check",
"the",
"heartbeat",
"of",
"the",
"ordering",
"API"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/ordering.py#L99-L114
|
[
"def",
"heartbeat",
"(",
"self",
")",
":",
"url",
"=",
"'%s/heartbeat'",
"%",
"self",
".",
"base_url",
"# Auth is not required to hit the heartbeat",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"try",
":",
"return",
"r",
".",
"json",
"(",
")",
"==",
"\"ok\"",
"except",
":",
"return",
"False"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Catalog.get
|
Retrieves the full catalog record given a cat ID.
Args:
catID (str): The source catalog ID from the platform catalog.
includeRelationships (bool): whether to include graph links to related objects. Default False.
Returns:
record (dict): A dict object identical to the json representation of the catalog record
|
gbdxtools/catalog.py
|
def get(self, catID, includeRelationships=False):
        '''Retrieves the full catalog record given a cat ID.
Args:
catID (str): The source catalog ID from the platform catalog.
includeRelationships (bool): whether to include graph links to related objects. Default False.
Returns:
record (dict): A dict object identical to the json representation of the catalog record
'''
url = '%(base_url)s/record/%(catID)s' % {
'base_url': self.base_url, 'catID': catID
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.json()
|
def get(self, catID, includeRelationships=False):
        '''Retrieves the full catalog record given a cat ID.
Args:
catID (str): The source catalog ID from the platform catalog.
includeRelationships (bool): whether to include graph links to related objects. Default False.
Returns:
record (dict): A dict object identical to the json representation of the catalog record
'''
url = '%(base_url)s/record/%(catID)s' % {
'base_url': self.base_url, 'catID': catID
}
r = self.gbdx_connection.get(url)
r.raise_for_status()
return r.json()
|
[
"Retrieves",
"the",
"strip",
"footprint",
"WKT",
"string",
"given",
"a",
"cat",
"ID",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/catalog.py#L55-L70
|
[
"def",
"get",
"(",
"self",
",",
"catID",
",",
"includeRelationships",
"=",
"False",
")",
":",
"url",
"=",
"'%(base_url)s/record/%(catID)s'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'catID'",
":",
"catID",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
")",
"r",
".",
"raise_for_status",
"(",
")",
"return",
"r",
".",
"json",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Catalog.get_strip_metadata
|
Retrieves the strip catalog metadata given a cat ID.
Args:
catID (str): The source catalog ID from the platform catalog.
Returns:
metadata (dict): A metadata dictionary.
TODO: have this return a class object with interesting information exposed.
|
gbdxtools/catalog.py
|
def get_strip_metadata(self, catID):
'''Retrieves the strip catalog metadata given a cat ID.
Args:
catID (str): The source catalog ID from the platform catalog.
Returns:
            metadata (dict): A metadata dictionary.
TODO: have this return a class object with interesting information exposed.
'''
self.logger.debug('Retrieving strip catalog metadata')
url = '%(base_url)s/record/%(catID)s?includeRelationships=false' % {
'base_url': self.base_url, 'catID': catID
}
r = self.gbdx_connection.get(url)
if r.status_code == 200:
return r.json()['properties']
elif r.status_code == 404:
self.logger.debug('Strip not found: %s' % catID)
r.raise_for_status()
else:
self.logger.debug('There was a problem retrieving catid: %s' % catID)
r.raise_for_status()
|
def get_strip_metadata(self, catID):
'''Retrieves the strip catalog metadata given a cat ID.
Args:
catID (str): The source catalog ID from the platform catalog.
Returns:
            metadata (dict): A metadata dictionary.
TODO: have this return a class object with interesting information exposed.
'''
self.logger.debug('Retrieving strip catalog metadata')
url = '%(base_url)s/record/%(catID)s?includeRelationships=false' % {
'base_url': self.base_url, 'catID': catID
}
r = self.gbdx_connection.get(url)
if r.status_code == 200:
return r.json()['properties']
elif r.status_code == 404:
self.logger.debug('Strip not found: %s' % catID)
r.raise_for_status()
else:
self.logger.debug('There was a problem retrieving catid: %s' % catID)
r.raise_for_status()
|
[
"Retrieves",
"the",
"strip",
"catalog",
"metadata",
"given",
"a",
"cat",
"ID",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/catalog.py#L73-L97
|
[
"def",
"get_strip_metadata",
"(",
"self",
",",
"catID",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Retrieving strip catalog metadata'",
")",
"url",
"=",
"'%(base_url)s/record/%(catID)s?includeRelationships=false'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
",",
"'catID'",
":",
"catID",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"get",
"(",
"url",
")",
"if",
"r",
".",
"status_code",
"==",
"200",
":",
"return",
"r",
".",
"json",
"(",
")",
"[",
"'properties'",
"]",
"elif",
"r",
".",
"status_code",
"==",
"404",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Strip not found: %s'",
"%",
"catID",
")",
"r",
".",
"raise_for_status",
"(",
")",
"else",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'There was a problem retrieving catid: %s'",
"%",
"catID",
")",
"r",
".",
"raise_for_status",
"(",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Catalog.get_address_coords
|
Use the google geocoder to get latitude and longitude for an address string
Args:
address: any address string
Returns:
A tuple of (lat,lng)
|
gbdxtools/catalog.py
|
def get_address_coords(self, address):
''' Use the google geocoder to get latitude and longitude for an address string
Args:
address: any address string
Returns:
A tuple of (lat,lng)
'''
url = "https://maps.googleapis.com/maps/api/geocode/json?&address=" + address
r = requests.get(url)
r.raise_for_status()
results = r.json()['results']
lat = results[0]['geometry']['location']['lat']
lng = results[0]['geometry']['location']['lng']
return lat, lng
|
def get_address_coords(self, address):
''' Use the google geocoder to get latitude and longitude for an address string
Args:
address: any address string
Returns:
A tuple of (lat,lng)
'''
url = "https://maps.googleapis.com/maps/api/geocode/json?&address=" + address
r = requests.get(url)
r.raise_for_status()
results = r.json()['results']
lat = results[0]['geometry']['location']['lat']
lng = results[0]['geometry']['location']['lng']
return lat, lng
|
[
"Use",
"the",
"google",
"geocoder",
"to",
"get",
"latitude",
"and",
"longitude",
"for",
"an",
"address",
"string"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/catalog.py#L100-L115
|
[
"def",
"get_address_coords",
"(",
"self",
",",
"address",
")",
":",
"url",
"=",
"\"https://maps.googleapis.com/maps/api/geocode/json?&address=\"",
"+",
"address",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"r",
".",
"raise_for_status",
"(",
")",
"results",
"=",
"r",
".",
"json",
"(",
")",
"[",
"'results'",
"]",
"lat",
"=",
"results",
"[",
"0",
"]",
"[",
"'geometry'",
"]",
"[",
"'location'",
"]",
"[",
"'lat'",
"]",
"lng",
"=",
"results",
"[",
"0",
"]",
"[",
"'geometry'",
"]",
"[",
"'location'",
"]",
"[",
"'lng'",
"]",
"return",
"lat",
",",
"lng"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Catalog.search_address
|
Perform a catalog search over an address string
Args:
address: any address string
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
|
gbdxtools/catalog.py
|
def search_address(self, address, filters=None, startDate=None, endDate=None, types=None):
''' Perform a catalog search over an address string
Args:
address: any address string
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
'''
lat, lng = self.get_address_coords(address)
return self.search_point(lat,lng, filters=filters, startDate=startDate, endDate=endDate, types=types)
|
def search_address(self, address, filters=None, startDate=None, endDate=None, types=None):
''' Perform a catalog search over an address string
Args:
address: any address string
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
'''
lat, lng = self.get_address_coords(address)
return self.search_point(lat,lng, filters=filters, startDate=startDate, endDate=endDate, types=types)
|
[
"Perform",
"a",
"catalog",
"search",
"over",
"an",
"address",
"string"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/catalog.py#L117-L136
|
[
"def",
"search_address",
"(",
"self",
",",
"address",
",",
"filters",
"=",
"None",
",",
"startDate",
"=",
"None",
",",
"endDate",
"=",
"None",
",",
"types",
"=",
"None",
")",
":",
"lat",
",",
"lng",
"=",
"self",
".",
"get_address_coords",
"(",
"address",
")",
"return",
"self",
".",
"search_point",
"(",
"lat",
",",
"lng",
",",
"filters",
"=",
"filters",
",",
"startDate",
"=",
"startDate",
",",
"endDate",
"=",
"endDate",
",",
"types",
"=",
"types",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Catalog.search_point
|
Perform a catalog search over a specific point, specified by lat,lng
Args:
lat: latitude
lng: longitude
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
|
gbdxtools/catalog.py
|
def search_point(self, lat, lng, filters=None, startDate=None, endDate=None, types=None, type=None):
''' Perform a catalog search over a specific point, specified by lat,lng
Args:
lat: latitude
lng: longitude
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
'''
searchAreaWkt = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lng, lat,lng,lat,lng,lat,lng,lat,lng,lat)
return self.search(searchAreaWkt=searchAreaWkt, filters=filters, startDate=startDate, endDate=endDate, types=types)
|
def search_point(self, lat, lng, filters=None, startDate=None, endDate=None, types=None, type=None):
''' Perform a catalog search over a specific point, specified by lat,lng
Args:
lat: latitude
lng: longitude
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
'''
searchAreaWkt = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lng, lat,lng,lat,lng,lat,lng,lat,lng,lat)
return self.search(searchAreaWkt=searchAreaWkt, filters=filters, startDate=startDate, endDate=endDate, types=types)
|
[
"Perform",
"a",
"catalog",
"search",
"over",
"a",
"specific",
"point",
"specified",
"by",
"lat",
"lng"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/catalog.py#L139-L159
|
[
"def",
"search_point",
"(",
"self",
",",
"lat",
",",
"lng",
",",
"filters",
"=",
"None",
",",
"startDate",
"=",
"None",
",",
"endDate",
"=",
"None",
",",
"types",
"=",
"None",
",",
"type",
"=",
"None",
")",
":",
"searchAreaWkt",
"=",
"\"POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))\"",
"%",
"(",
"lng",
",",
"lat",
",",
"lng",
",",
"lat",
",",
"lng",
",",
"lat",
",",
"lng",
",",
"lat",
",",
"lng",
",",
"lat",
")",
"return",
"self",
".",
"search",
"(",
"searchAreaWkt",
"=",
"searchAreaWkt",
",",
"filters",
"=",
"filters",
",",
"startDate",
"=",
"startDate",
",",
"endDate",
"=",
"endDate",
",",
"types",
"=",
"types",
")"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
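A usage sketch for search_point with filter strings taken from the docstring's own examples; the coordinates and dates are made up, and gbdx.catalog assumes a configured Interface.

filters = [
    "(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
    "cloudCover < 10",
]
results = gbdx.catalog.search_point(
    39.7392, -104.9903,                      # lat, lng
    filters=filters,
    startDate='2017-01-01T00:00:00.000Z',
    endDate='2017-06-01T00:00:00.000Z',
)
print(len(results))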
|
valid
|
Catalog.get_data_location
|
Find and return the S3 data location given a catalog_id.
Args:
catalog_id: The catalog ID
Returns:
A string containing the s3 location of the data associated with a catalog ID. Returns
None if the catalog ID is not found, or if there is no data yet associated with it.
|
gbdxtools/catalog.py
|
def get_data_location(self, catalog_id):
"""
Find and return the S3 data location given a catalog_id.
Args:
catalog_id: The catalog ID
Returns:
A string containing the s3 location of the data associated with a catalog ID. Returns
None if the catalog ID is not found, or if there is no data yet associated with it.
"""
try:
record = self.get(catalog_id)
except:
return None
# Handle Landsat8
if 'Landsat8' in record['type'] and 'LandsatAcquisition' in record['type']:
bucket = record['properties']['bucketName']
prefix = record['properties']['bucketPrefix']
return 's3://' + bucket + '/' + prefix
# Handle DG Acquisition
if 'DigitalGlobeAcquisition' in record['type']:
o = Ordering()
res = o.location([catalog_id])
return res['acquisitions'][0]['location']
return None
|
def get_data_location(self, catalog_id):
"""
Find and return the S3 data location given a catalog_id.
Args:
catalog_id: The catalog ID
Returns:
A string containing the s3 location of the data associated with a catalog ID. Returns
None if the catalog ID is not found, or if there is no data yet associated with it.
"""
try:
record = self.get(catalog_id)
except:
return None
# Handle Landsat8
if 'Landsat8' in record['type'] and 'LandsatAcquisition' in record['type']:
bucket = record['properties']['bucketName']
prefix = record['properties']['bucketPrefix']
return 's3://' + bucket + '/' + prefix
# Handle DG Acquisition
if 'DigitalGlobeAcquisition' in record['type']:
o = Ordering()
res = o.location([catalog_id])
return res['acquisitions'][0]['location']
return None
|
[
"Find",
"and",
"return",
"the",
"S3",
"data",
"location",
"given",
"a",
"catalog_id",
"."
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/catalog.py#L161-L190
|
[
"def",
"get_data_location",
"(",
"self",
",",
"catalog_id",
")",
":",
"try",
":",
"record",
"=",
"self",
".",
"get",
"(",
"catalog_id",
")",
"except",
":",
"return",
"None",
"# Handle Landsat8",
"if",
"'Landsat8'",
"in",
"record",
"[",
"'type'",
"]",
"and",
"'LandsatAcquisition'",
"in",
"record",
"[",
"'type'",
"]",
":",
"bucket",
"=",
"record",
"[",
"'properties'",
"]",
"[",
"'bucketName'",
"]",
"prefix",
"=",
"record",
"[",
"'properties'",
"]",
"[",
"'bucketPrefix'",
"]",
"return",
"'s3://'",
"+",
"bucket",
"+",
"'/'",
"+",
"prefix",
"# Handle DG Acquisition",
"if",
"'DigitalGlobeAcquisition'",
"in",
"record",
"[",
"'type'",
"]",
":",
"o",
"=",
"Ordering",
"(",
")",
"res",
"=",
"o",
".",
"location",
"(",
"[",
"catalog_id",
"]",
")",
"return",
"res",
"[",
"'acquisitions'",
"]",
"[",
"0",
"]",
"[",
"'location'",
"]",
"return",
"None"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Catalog.search
|
Perform a catalog search
Args:
searchAreaWkt: WKT Polygon of area to search. Optional.
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
|
gbdxtools/catalog.py
|
def search(self, searchAreaWkt=None, filters=None, startDate=None, endDate=None, types=None):
''' Perform a catalog search
Args:
searchAreaWkt: WKT Polygon of area to search. Optional.
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
'''
# Default to search for Acquisition type objects.
if not types:
types = ['Acquisition']
# validation: we must have either a WKT or a one-week time window
if startDate:
startDateTime = datetime.datetime.strptime(startDate, '%Y-%m-%dT%H:%M:%S.%fZ')
if endDate:
endDateTime = datetime.datetime.strptime(endDate, '%Y-%m-%dT%H:%M:%S.%fZ')
if startDate and endDate:
diff = endDateTime - startDateTime
if diff.days < 0:
raise Exception("startDate must come before endDate.")
postdata = {
"searchAreaWkt": searchAreaWkt,
"types": types,
"startDate": startDate,
"endDate": endDate,
}
if filters:
postdata['filters'] = filters
if searchAreaWkt:
postdata['searchAreaWkt'] = searchAreaWkt
url = '%(base_url)s/search' % {
'base_url': self.base_url
}
headers = {'Content-Type':'application/json'}
r = self.gbdx_connection.post(url, headers=headers, data=json.dumps(postdata))
r.raise_for_status()
results = r.json()['results']
return results
|
def search(self, searchAreaWkt=None, filters=None, startDate=None, endDate=None, types=None):
''' Perform a catalog search
Args:
searchAreaWkt: WKT Polygon of area to search. Optional.
filters: Array of filters. Optional. Example:
[
"(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')",
"cloudCover < 10",
"offNadirAngle < 10"
]
startDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
endDate: string. Optional. Example: "2004-01-01T00:00:00.000Z"
types: Array of types to search for. Optional. Example (and default): ["Acquisition"]
Returns:
catalog search resultset
'''
# Default to search for Acquisition type objects.
if not types:
types = ['Acquisition']
# validation: we must have either a WKT or a one-week time window
if startDate:
startDateTime = datetime.datetime.strptime(startDate, '%Y-%m-%dT%H:%M:%S.%fZ')
if endDate:
endDateTime = datetime.datetime.strptime(endDate, '%Y-%m-%dT%H:%M:%S.%fZ')
if startDate and endDate:
diff = endDateTime - startDateTime
if diff.days < 0:
raise Exception("startDate must come before endDate.")
postdata = {
"searchAreaWkt": searchAreaWkt,
"types": types,
"startDate": startDate,
"endDate": endDate,
}
if filters:
postdata['filters'] = filters
if searchAreaWkt:
postdata['searchAreaWkt'] = searchAreaWkt
url = '%(base_url)s/search' % {
'base_url': self.base_url
}
headers = {'Content-Type':'application/json'}
r = self.gbdx_connection.post(url, headers=headers, data=json.dumps(postdata))
r.raise_for_status()
results = r.json()['results']
return results
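An illustrative search over a small AOI (the WKT, dates, and filters are placeholders; reuses the authenticated Interface from the sketch above):

aoi = "POLYGON ((-105.35 39.95, -105.20 39.95, -105.20 40.05, -105.35 40.05, -105.35 39.95))"
results = gbdx.catalog.search(
    searchAreaWkt=aoi,
    filters=["cloudCover < 10", "offNadirAngle < 15"],
    startDate="2017-01-01T00:00:00.000Z",
    endDate="2017-06-01T00:00:00.000Z",
)  # types defaults to ['Acquisition']
print(len(results), 'acquisitions found')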
|
[
"Perform",
"a",
"catalog",
"search"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/catalog.py#L192-L247
|
[
"def",
"search",
"(",
"self",
",",
"searchAreaWkt",
"=",
"None",
",",
"filters",
"=",
"None",
",",
"startDate",
"=",
"None",
",",
"endDate",
"=",
"None",
",",
"types",
"=",
"None",
")",
":",
"# Default to search for Acquisition type objects.",
"if",
"not",
"types",
":",
"types",
"=",
"[",
"'Acquisition'",
"]",
"# validation: we must have either a WKT or one-week of time window",
"if",
"startDate",
":",
"startDateTime",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"startDate",
",",
"'%Y-%m-%dT%H:%M:%S.%fZ'",
")",
"if",
"endDate",
":",
"endDateTime",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"endDate",
",",
"'%Y-%m-%dT%H:%M:%S.%fZ'",
")",
"if",
"startDate",
"and",
"endDate",
":",
"diff",
"=",
"endDateTime",
"-",
"startDateTime",
"if",
"diff",
".",
"days",
"<",
"0",
":",
"raise",
"Exception",
"(",
"\"startDate must come before endDate.\"",
")",
"postdata",
"=",
"{",
"\"searchAreaWkt\"",
":",
"searchAreaWkt",
",",
"\"types\"",
":",
"types",
",",
"\"startDate\"",
":",
"startDate",
",",
"\"endDate\"",
":",
"endDate",
",",
"}",
"if",
"filters",
":",
"postdata",
"[",
"'filters'",
"]",
"=",
"filters",
"if",
"searchAreaWkt",
":",
"postdata",
"[",
"'searchAreaWkt'",
"]",
"=",
"searchAreaWkt",
"url",
"=",
"'%(base_url)s/search'",
"%",
"{",
"'base_url'",
":",
"self",
".",
"base_url",
"}",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"r",
"=",
"self",
".",
"gbdx_connection",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"postdata",
")",
")",
"r",
".",
"raise_for_status",
"(",
")",
"results",
"=",
"r",
".",
"json",
"(",
")",
"[",
"'results'",
"]",
"return",
"results"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
Catalog.get_most_recent_images
|
Return the most recent images
Args:
results: a catalog resultset, as returned from a search
types: array of types you want. optional.
sensors: array of sensor names. optional.
N: number of recent images to return. defaults to 1.
Returns:
a list of the N most recent catalog items, or None if results is empty
|
gbdxtools/catalog.py
|
def get_most_recent_images(self, results, types=[], sensors=[], N=1):
''' Return the most recent images
Args:
results: a catalog resultset, as returned from a search
types: array of types you want. optional.
sensors: array of sensor names. optional.
N: number of recent images to return. defaults to 1.
Returns:
a list of the N most recent catalog items, or None if results is empty
'''
if not len(results):
return None
# filter on type
if types:
results = [r for r in results if r['type'] in types]
# filter on sensor
if sensors:
results = [r for r in results if r['properties'].get('sensorPlatformName') in sensors]
# sort by date:
#sorted(results, key=results.__getitem__('properties').get('timestamp'))
newlist = sorted(results, key=lambda k: k['properties'].get('timestamp'), reverse=True)
return newlist[:N]
|
def get_most_recent_images(self, results, types=[], sensors=[], N=1):
''' Return the most recent images
Args:
results: a catalog resultset, as returned from a search
types: array of types you want. optional.
sensors: array of sensor names. optional.
N: number of recent images to return. defaults to 1.
Returns:
a list of the N most recent catalog items, or None if results is empty
'''
if not len(results):
return None
# filter on type
if types:
results = [r for r in results if r['type'] in types]
# filter on sensor
if sensors:
results = [r for r in results if r['properties'].get('sensorPlatformName') in sensors]
# sort by date:
#sorted(results, key=results.__getitem__('properties').get('timestamp'))
newlist = sorted(results, key=lambda k: k['properties'].get('timestamp'), reverse=True)
return newlist[:N]
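Chained with the search sketch above (sensor names and property fields are illustrative):

recent = gbdx.catalog.get_most_recent_images(results, sensors=['WORLDVIEW03_VNIR'], N=3)
for item in recent or []:  # None comes back when results is empty
    props = item['properties']
    print(props.get('timestamp'), props.get('sensorPlatformName'))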
|
[
"Return",
"the",
"most",
"recent",
"image"
] |
DigitalGlobe/gbdxtools
|
python
|
https://github.com/DigitalGlobe/gbdxtools/blob/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb/gbdxtools/catalog.py#L249-L277
|
[
"def",
"get_most_recent_images",
"(",
"self",
",",
"results",
",",
"types",
"=",
"[",
"]",
",",
"sensors",
"=",
"[",
"]",
",",
"N",
"=",
"1",
")",
":",
"if",
"not",
"len",
"(",
"results",
")",
":",
"return",
"None",
"# filter on type",
"if",
"types",
":",
"results",
"=",
"[",
"r",
"for",
"r",
"in",
"results",
"if",
"r",
"[",
"'type'",
"]",
"in",
"types",
"]",
"# filter on sensor",
"if",
"sensors",
":",
"results",
"=",
"[",
"r",
"for",
"r",
"in",
"results",
"if",
"r",
"[",
"'properties'",
"]",
".",
"get",
"(",
"'sensorPlatformName'",
")",
"in",
"sensors",
"]",
"# sort by date:",
"#sorted(results, key=results.__getitem__('properties').get('timestamp'))",
"newlist",
"=",
"sorted",
"(",
"results",
",",
"key",
"=",
"lambda",
"k",
":",
"k",
"[",
"'properties'",
"]",
".",
"get",
"(",
"'timestamp'",
")",
",",
"reverse",
"=",
"True",
")",
"return",
"newlist",
"[",
":",
"N",
"]"
] |
def62f8f2d77b168aa2bd115290aaa0f9a08a4bb
|
valid
|
get_bytes_from_blob
|
Different databases return blob data differently: some give memoryview, others bytes
|
slim/utils/__init__.py
|
def get_bytes_from_blob(val) -> bytes:
""" 不同数据库从blob拿出的数据有所差别,有的是memoryview有的是bytes """
if isinstance(val, bytes):
return val
elif isinstance(val, memoryview):
return val.tobytes()
else:
raise TypeError('invalid type for get bytes')
|
def get_bytes_from_blob(val) -> bytes:
""" 不同数据库从blob拿出的数据有所差别,有的是memoryview有的是bytes """
if isinstance(val, bytes):
return val
elif isinstance(val, memoryview):
return val.tobytes()
else:
raise TypeError('invalid type for get bytes')
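A runnable behavior check:

from slim.utils import get_bytes_from_blob

raw = b'\x00\x01\x02'
assert get_bytes_from_blob(raw) == raw               # bytes pass through untouched
assert get_bytes_from_blob(memoryview(raw)) == raw   # memoryview is copied out
try:
    get_bytes_from_blob('text')                      # any other type is rejected
except TypeError as e:
    print(e)  # invalid type for get bytes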
|
[
"不同数据库从blob拿出的数据有所差别,有的是memoryview有的是bytes"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/utils/__init__.py#L44-L51
|
[
"def",
"get_bytes_from_blob",
"(",
"val",
")",
"->",
"bytes",
":",
"if",
"isinstance",
"(",
"val",
",",
"bytes",
")",
":",
"return",
"val",
"elif",
"isinstance",
"(",
"val",
",",
"memoryview",
")",
":",
"return",
"val",
".",
"tobytes",
"(",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'invalid type for get bytes'",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
pagination_calc
|
:param nearby:
:param items_count: count of all items
:param page_size: size of one page
:param cur_page: current page number; accepts a digit string
:return: a dict of pagination info (current/prev/next/first/last page, page numbers, counts)
|
slim/utils/pagination.py
|
def pagination_calc(items_count, page_size, cur_page=1, nearby=2):
"""
:param nearby:
:param items_count: count of all items
:param page_size: size of one page
:param cur_page: current page number; accepts a digit string
:return: a dict of pagination info (current/prev/next/first/last page, page numbers, counts)
"""
if type(cur_page) == str:
# noinspection PyUnresolvedReferences
cur_page = int(cur_page) if cur_page.isdigit() else 1
elif type(cur_page) == int:
if cur_page <= 0:
cur_page = 1
else:
cur_page = 1
page_count = 1 if page_size == -1 else int(math.ceil(items_count / page_size))
items_length = nearby * 2 + 1
# if first page in page items, first_page is None,
# it means the "go to first page" button should not be available.
first_page = None
last_page = None
prev_page = cur_page - 1 if cur_page != 1 else None
next_page = cur_page + 1 if cur_page != page_count else None
if page_count <= items_length:
items = range(1, page_count + 1)
elif cur_page <= nearby:
# start of items
items = range(1, items_length + 1)
last_page = True
elif cur_page >= page_count - nearby:
# end of items
items = range(page_count - items_length + 1, page_count + 1)
first_page = True
else:
items = range(cur_page - nearby, cur_page + nearby + 1)
first_page, last_page = True, True
if first_page:
first_page = 1
if last_page:
last_page = page_count
return {
'cur_page': cur_page,
'prev_page': prev_page,
'next_page': next_page,
'first_page': first_page,
'last_page': last_page,
'page_numbers': list(items),
'info': {
'page_size': page_size,
'page_count': page_count,
'items_count': items_count,
}
}
|
def pagination_calc(items_count, page_size, cur_page=1, nearby=2):
"""
:param nearby:
:param items_count: count of all items
:param page_size: size of one page
:param cur_page: current page number; accepts a digit string
:return: a dict of pagination info (current/prev/next/first/last page, page numbers, counts)
"""
if type(cur_page) == str:
# noinspection PyUnresolvedReferences
cur_page = int(cur_page) if cur_page.isdigit() else 1
elif type(cur_page) == int:
if cur_page <= 0:
cur_page = 1
else:
cur_page = 1
page_count = 1 if page_size == -1 else int(math.ceil(items_count / page_size))
items_length = nearby * 2 + 1
# if first page in page items, first_page is None,
# it means the "go to first page" button should not be available.
first_page = None
last_page = None
prev_page = cur_page - 1 if cur_page != 1 else None
next_page = cur_page + 1 if cur_page != page_count else None
if page_count <= items_length:
items = range(1, page_count + 1)
elif cur_page <= nearby:
# start of items
items = range(1, items_length + 1)
last_page = True
elif cur_page >= page_count - nearby:
# end of items
items = range(page_count - items_length + 1, page_count + 1)
first_page = True
else:
items = range(cur_page - nearby, cur_page + nearby + 1)
first_page, last_page = True, True
if first_page:
first_page = 1
if last_page:
last_page = page_count
return {
'cur_page': cur_page,
'prev_page': prev_page,
'next_page': next_page,
'first_page': first_page,
'last_page': last_page,
'page_numbers': list(items),
'info': {
'page_size': page_size,
'page_count': page_count,
'items_count': items_count,
}
}
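A worked example: 95 items, 10 per page, viewing page 5 with nearby=2:

from slim.utils.pagination import pagination_calc

page = pagination_calc(95, 10, cur_page=5, nearby=2)
print(page['page_numbers'])                   # [3, 4, 5, 6, 7]
print(page['prev_page'], page['next_page'])   # 4 6
print(page['first_page'], page['last_page'])  # 1 10
print(page['info']['page_count'])             # ceil(95 / 10) == 10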
|
[
":",
"param",
"nearby",
":",
":",
"param",
"items_count",
":",
"count",
"of",
"all",
"items",
":",
"param",
"page_size",
":",
"size",
"of",
"one",
"page",
":",
"param",
"cur_page",
":",
"current",
"page",
"number",
"accept",
"string",
"digit",
":",
"return",
":",
"num",
"of",
"pages",
"an",
"iterator"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/utils/pagination.py#L4-L65
|
[
"def",
"pagination_calc",
"(",
"items_count",
",",
"page_size",
",",
"cur_page",
"=",
"1",
",",
"nearby",
"=",
"2",
")",
":",
"if",
"type",
"(",
"cur_page",
")",
"==",
"str",
":",
"# noinspection PyUnresolvedReferences",
"cur_page",
"=",
"int",
"(",
"cur_page",
")",
"if",
"cur_page",
".",
"isdigit",
"(",
")",
"else",
"1",
"elif",
"type",
"(",
"cur_page",
")",
"==",
"int",
":",
"if",
"cur_page",
"<=",
"0",
":",
"cur_page",
"=",
"1",
"else",
":",
"cur_page",
"=",
"1",
"page_count",
"=",
"1",
"if",
"page_size",
"==",
"-",
"1",
"else",
"int",
"(",
"math",
".",
"ceil",
"(",
"items_count",
"/",
"page_size",
")",
")",
"items_length",
"=",
"nearby",
"*",
"2",
"+",
"1",
"# if first page in page items, first_page is None,",
"# it means the \"go to first page\" button should not be available.",
"first_page",
"=",
"None",
"last_page",
"=",
"None",
"prev_page",
"=",
"cur_page",
"-",
"1",
"if",
"cur_page",
"!=",
"1",
"else",
"None",
"next_page",
"=",
"cur_page",
"+",
"1",
"if",
"cur_page",
"!=",
"page_count",
"else",
"None",
"if",
"page_count",
"<=",
"items_length",
":",
"items",
"=",
"range",
"(",
"1",
",",
"page_count",
"+",
"1",
")",
"elif",
"cur_page",
"<=",
"nearby",
":",
"# start of items",
"items",
"=",
"range",
"(",
"1",
",",
"items_length",
"+",
"1",
")",
"last_page",
"=",
"True",
"elif",
"cur_page",
">=",
"page_count",
"-",
"nearby",
":",
"# end of items",
"items",
"=",
"range",
"(",
"page_count",
"-",
"items_length",
"+",
"1",
",",
"page_count",
"+",
"1",
")",
"first_page",
"=",
"True",
"else",
":",
"items",
"=",
"range",
"(",
"cur_page",
"-",
"nearby",
",",
"cur_page",
"+",
"nearby",
"+",
"1",
")",
"first_page",
",",
"last_page",
"=",
"True",
",",
"True",
"if",
"first_page",
":",
"first_page",
"=",
"1",
"if",
"last_page",
":",
"last_page",
"=",
"page_count",
"return",
"{",
"'cur_page'",
":",
"cur_page",
",",
"'prev_page'",
":",
"prev_page",
",",
"'next_page'",
":",
"next_page",
",",
"'first_page'",
":",
"first_page",
",",
"'last_page'",
":",
"last_page",
",",
"'page_numbers'",
":",
"list",
"(",
"items",
")",
",",
"'info'",
":",
"{",
"'page_size'",
":",
"page_size",
",",
"'page_count'",
":",
"page_count",
",",
"'items_count'",
":",
"items_count",
",",
"}",
"}"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
Ability.add_common_check
|
emitted before query
:param actions:
:param table:
:param func:
:return:
|
slim/base/permission.py
|
def add_common_check(self, actions, table, func):
"""
emitted before query
:param actions:
:param table:
:param func:
:return:
"""
self.common_checks.append([table, actions, func])
"""def func(ability, user, action, available_columns: list):
pass
"""
|
def add_common_check(self, actions, table, func):
"""
emitted before query
:param actions:
:param table:
:param func:
:return:
"""
self.common_checks.append([table, actions, func])
"""def func(ability, user, action, available_columns: list):
pass
"""
|
[
"emitted",
"before",
"query",
":",
"param",
"actions",
":",
":",
"param",
"table",
":",
":",
"param",
"func",
":",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/permission.py#L130-L142
|
[
"def",
"add_common_check",
"(",
"self",
",",
"actions",
",",
"table",
",",
"func",
")",
":",
"self",
".",
"common_checks",
".",
"append",
"(",
"[",
"table",
",",
"actions",
",",
"func",
"]",
")",
"\"\"\"def func(ability, user, action, available_columns: list):\n pass\n \"\"\""
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
Ability.add_record_check
|
def func(ability, user, action, record: DataRecord, available_columns: list):
pass
|
slim/base/permission.py
|
def add_record_check(self, actions, table, func):
# emitted after query
# table: 'table_name'
# column: ('table_name', 'column_name')
assert isinstance(table, str), '`table` must be table name'
for i in actions:
assert i not in (A.QUERY, A.CREATE), "meaningless action check with record: [%s]" % i
self.record_checks.append([table, actions, func])
"""def func(ability, user, action, record: DataRecord, available_columns: list):
pass
"""
|
def add_record_check(self, actions, table, func):
# emitted after query
# table: 'table_name'
# column: ('table_name', 'column_name')
assert isinstance(table, str), '`table` must be table name'
for i in actions:
assert i not in (A.QUERY, A.CREATE), "meaningless action check with record: [%s]" % i
self.record_checks.append([table, actions, func])
"""def func(ability, user, action, record: DataRecord, available_columns: list):
pass
"""
|
[
"def",
"func",
"(",
"ability",
"user",
"action",
"record",
":",
"DataRecord",
"available_columns",
":",
"list",
")",
":",
"pass"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/permission.py#L144-L156
|
[
"def",
"add_record_check",
"(",
"self",
",",
"actions",
",",
"table",
",",
"func",
")",
":",
"# emitted after query",
"# table: 'table_name'",
"# column: ('table_name', 'column_name')",
"assert",
"isinstance",
"(",
"table",
",",
"str",
")",
",",
"'`table` must be table name'",
"for",
"i",
"in",
"actions",
":",
"assert",
"i",
"not",
"in",
"(",
"A",
".",
"QUERY",
",",
"A",
".",
"CREATE",
")",
",",
"\"meaningless action check with record: [%s]\"",
"%",
"i",
"self",
".",
"record_checks",
".",
"append",
"(",
"[",
"table",
",",
"actions",
",",
"func",
"]",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
Ability._parse_permission
|
Extract permissions from obj
:param obj:
:return: [A.QUERY, A.WRITE, ...]
|
slim/base/permission.py
|
def _parse_permission(self, obj):
"""
Extract permissions from obj
:param obj:
:return: [A.QUERY, A.WRITE, ...]
"""
if isinstance(obj, str):
if obj == '*':
return A.ALL
elif obj in A.ALL:
return obj,
else:
logger.warning('Invalid permission action: %s', obj)
elif isinstance(obj, (list, tuple)):
for i in obj:
if i not in A.ALL:
logger.warning('Invalid permission action: %s', i)
return obj
elif isinstance(obj, dict):
return self._parse_permission(obj.get('*'))
|
def _parse_permission(self, obj):
"""
Extract permissions from obj
:param obj:
:return: [A.QUERY, A.WRITE, ...]
"""
if isinstance(obj, str):
if obj == '*':
return A.ALL
elif obj in A.ALL:
return obj,
else:
logger.warning('Invalid permission action: %s', obj)
elif isinstance(obj, (list, tuple)):
for i in obj:
if i not in A.ALL:
logger.warning('Invalid permission action: %s', i)
return obj
elif isinstance(obj, dict):
return self._parse_permission(obj.get('*'))
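The three accepted shapes, traced (assuming the A.* members are the library's action strings):

ability._parse_permission('*')                # -> A.ALL, i.e. every action
ability._parse_permission(A.WRITE)            # -> (A.WRITE,), a one-element tuple
ability._parse_permission([A.QUERY, A.READ])  # -> the list itself, warning on unknown actions
ability._parse_permission({'*': [A.QUERY]})   # -> [A.QUERY]; a dict falls back to its '*' entry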
|
[
"从",
"obj",
"中取出权限",
":",
"param",
"obj",
":",
":",
"return",
":",
"[",
"A",
".",
"QUERY",
"A",
".",
"WRITE",
"...",
"]"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/permission.py#L158-L177
|
[
"def",
"_parse_permission",
"(",
"self",
",",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"str",
")",
":",
"if",
"obj",
"==",
"'*'",
":",
"return",
"A",
".",
"ALL",
"elif",
"obj",
"in",
"A",
".",
"ALL",
":",
"return",
"obj",
",",
"else",
":",
"logger",
".",
"warning",
"(",
"'Invalid permission action: %s'",
",",
"obj",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"i",
"in",
"obj",
":",
"if",
"i",
"not",
"in",
"A",
".",
"ALL",
":",
"logger",
".",
"warning",
"(",
"'Invalid permission action: %s'",
",",
"i",
")",
"return",
"obj",
"elif",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"return",
"self",
".",
"_parse_permission",
"(",
"obj",
".",
"get",
"(",
"'*'",
")",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
Ability.can_with_columns
|
Filter columns by permission.
Note: as long as at least one condition passes the permission check, some conditions remain after filtering, so no error is raised.
If none of the conditions pass the check, a permission error is raised.
:param user:
:param action: the action
:param table: table name
:param columns: list of column names
:return: list of available columns
|
slim/base/permission.py
|
def can_with_columns(self, user, action, table, columns):
"""
Filter columns by permission.
Note: as long as at least one condition passes the permission check, some conditions remain after filtering, so no error is raised.
If none of the conditions pass the check, a permission error is raised.
:param user:
:param action: the action
:param table: table name
:param columns: list of column names
:return: list of available columns
"""
# TODO: this step could be cached
# global rules
global_data = self.rules.get('*')
global_actions = self._parse_permission(global_data)
if global_actions and action in global_actions:
available = list(columns)
else:
available = []
# table
table_data = self.rules.get(table)
table_actions = self._parse_permission(table_data)
if table_actions and action in table_actions:
available = list(columns)
# column
if type(table_data) == dict:
# a dict means per-column permission settings; otherwise the type is list
for column in columns:
column_actions = self._parse_permission(table_data.get(column))
if column_actions is not None:
if action in column_actions:
# permitted: try to add it to the list
if column not in available:
available.append(column)
else:
# not permitted: remove it from the list
if column in available:
available.remove(column)
for check in self.common_checks:
if check[0] == table and action in check[1]:
ret = check[-1](self, user, action, available)
if isinstance(ret, (tuple, set, list)):
# a returned list overrides the available columns
available = list(ret)
elif ret == '*':
# '*' restores all available columns
available = list(columns)
elif ret is False:
# False clears the list
available = []
if not available: break
return available
|
def can_with_columns(self, user, action, table, columns):
"""
Filter columns by permission.
Note: as long as at least one condition passes the permission check, some conditions remain after filtering, so no error is raised.
If none of the conditions pass the check, a permission error is raised.
:param user:
:param action: the action
:param table: table name
:param columns: list of column names
:return: list of available columns
"""
# TODO: this step could be cached
# global rules
global_data = self.rules.get('*')
global_actions = self._parse_permission(global_data)
if global_actions and action in global_actions:
available = list(columns)
else:
available = []
# table
table_data = self.rules.get(table)
table_actions = self._parse_permission(table_data)
if table_actions and action in table_actions:
available = list(columns)
# column
if type(table_data) == dict:
# a dict means per-column permission settings; otherwise the type is list
for column in columns:
column_actions = self._parse_permission(table_data.get(column))
if column_actions is not None:
if action in column_actions:
# permitted: try to add it to the list
if column not in available:
available.append(column)
else:
# not permitted: remove it from the list
if column in available:
available.remove(column)
for check in self.common_checks:
if check[0] == table and action in check[1]:
ret = check[-1](self, user, action, available)
if isinstance(ret, (tuple, set, list)):
# a returned list overrides the available columns
available = list(ret)
elif ret == '*':
# '*' restores all available columns
available = list(columns)
elif ret is False:
# False clears the list
available = []
if not available: break
return available
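How the layers compose, as a sketch (the rules dict is hypothetical and assigned directly for illustration): the table-level '*' entry opens every column, then per-column entries add or remove individual ones:

ability.rules = {
    'topics': {
        '*': [A.QUERY, A.READ],  # table-level grant covering all columns
        'draft_body': [],        # column-level: no actions permitted here
    },
}
cols = ability.can_with_columns(None, A.READ, 'topics', ['id', 'title', 'draft_body'])
# -> ['id', 'title']: draft_body is removed by its empty action list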
|
[
"根据权限进行列过滤",
"注意一点,只要有一个条件能够通过权限检测,那么过滤后还会有剩余条件,最终就不会报错。",
"如果全部条件都不能过检测,就会爆出权限错误了。"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/permission.py#L179-L237
|
[
"def",
"can_with_columns",
"(",
"self",
",",
"user",
",",
"action",
",",
"table",
",",
"columns",
")",
":",
"# TODO: 此过程可以加缓存",
"# 全局",
"global_data",
"=",
"self",
".",
"rules",
".",
"get",
"(",
"'*'",
")",
"global_actions",
"=",
"self",
".",
"_parse_permission",
"(",
"global_data",
")",
"if",
"global_actions",
"and",
"action",
"in",
"global_actions",
":",
"available",
"=",
"list",
"(",
"columns",
")",
"else",
":",
"available",
"=",
"[",
"]",
"# table",
"table_data",
"=",
"self",
".",
"rules",
".",
"get",
"(",
"table",
")",
"table_actions",
"=",
"self",
".",
"_parse_permission",
"(",
"table_data",
")",
"if",
"table_actions",
"and",
"action",
"in",
"table_actions",
":",
"available",
"=",
"list",
"(",
"columns",
")",
"# column",
"if",
"type",
"(",
"table_data",
")",
"==",
"dict",
":",
"# 这意味着有详细的列权限设定,不然类型是 list",
"for",
"column",
"in",
"columns",
":",
"column_actions",
"=",
"self",
".",
"_parse_permission",
"(",
"table_data",
".",
"get",
"(",
"column",
")",
")",
"if",
"column_actions",
"is",
"not",
"None",
":",
"if",
"action",
"in",
"column_actions",
":",
"# 有权限,试图加入列表",
"if",
"column",
"not",
"in",
"available",
":",
"available",
".",
"append",
"(",
"column",
")",
"else",
":",
"# 无权限,从列表剔除",
"if",
"column",
"in",
"available",
":",
"available",
".",
"remove",
"(",
"column",
")",
"for",
"check",
"in",
"self",
".",
"common_checks",
":",
"if",
"check",
"[",
"0",
"]",
"==",
"table",
"and",
"action",
"in",
"check",
"[",
"1",
"]",
":",
"ret",
"=",
"check",
"[",
"-",
"1",
"]",
"(",
"self",
",",
"user",
",",
"action",
",",
"available",
")",
"if",
"isinstance",
"(",
"ret",
",",
"(",
"tuple",
",",
"set",
",",
"list",
")",
")",
":",
"# 返回列表则进行值覆盖",
"available",
"=",
"list",
"(",
"ret",
")",
"elif",
"ret",
"==",
"'*'",
":",
"# 返回 * 加上所有可用列",
"available",
"=",
"list",
"(",
"columns",
")",
"elif",
"ret",
"is",
"False",
":",
"# 返回 false 清空",
"available",
"=",
"[",
"]",
"if",
"not",
"available",
":",
"break",
"return",
"available"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
Ability.can_with_record
|
Perform record-based permission checks and return the available columns.
:param user:
:param action:
:param record:
:param available: limits the scope of the check
:return: available columns
|
slim/base/permission.py
|
def can_with_record(self, user, action, record: DataRecord, *, available=None):
"""
Perform record-based permission checks and return the available columns.
:param user:
:param action:
:param record:
:param available: limits the scope of the check
:return: available columns
"""
assert action not in (A.QUERY, A.CREATE), "meaningless action check with record: [%s]" % action
# first, collect the rules that apply
rules = []
for rule in self.record_checks:
if record.table == rule[0] and action in rule[1]:
rules.append(rule)
# run the checks one by one
if available is None: available = self.can_with_columns(user, action, record.table, record.keys())
else: available = list(available)
bak = available.copy()
for rule in rules:
ret = rule[-1](self, user, action, record, available)
if isinstance(ret, (tuple, set, list)):
available = list(ret)
elif ret == '*':
available = list(bak)
elif not ret:
available = []
return available
|
def can_with_record(self, user, action, record: DataRecord, *, available=None):
"""
Perform record-based permission checks and return the available columns.
:param user:
:param action:
:param record:
:param available: limits the scope of the check
:return: available columns
"""
assert action not in (A.QUERY, A.CREATE), "meaningless action check with record: [%s]" % action
# first, collect the rules that apply
rules = []
for rule in self.record_checks:
if record.table == rule[0] and action in rule[1]:
rules.append(rule)
# run the checks one by one
if available is None: available = self.can_with_columns(user, action, record.table, record.keys())
else: available = list(available)
bak = available.copy()
for rule in rules:
ret = rule[-1](self, user, action, record, available)
if isinstance(ret, (tuple, set, list)):
available = list(ret)
elif ret == '*':
available = list(bak)
elif not ret:
available = []
return available
|
[
"进行基于",
"Record",
"的权限判定,返回可用列。",
":",
"param",
"user",
":",
":",
"param",
"action",
":",
":",
"param",
"record",
":",
":",
"param",
"available",
":",
"限定检查范围",
":",
"return",
":",
"可用列"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/permission.py#L239-L270
|
[
"def",
"can_with_record",
"(",
"self",
",",
"user",
",",
"action",
",",
"record",
":",
"DataRecord",
",",
"*",
",",
"available",
"=",
"None",
")",
":",
"assert",
"action",
"not",
"in",
"(",
"A",
".",
"QUERY",
",",
"A",
".",
"CREATE",
")",
",",
"\"meaningless action check with record: [%s]\"",
"%",
"action",
"# 先行匹配规则适用范围",
"rules",
"=",
"[",
"]",
"for",
"rule",
"in",
"self",
".",
"record_checks",
":",
"if",
"record",
".",
"table",
"==",
"rule",
"[",
"0",
"]",
"and",
"action",
"in",
"rule",
"[",
"1",
"]",
":",
"rules",
".",
"append",
"(",
"rule",
")",
"# 逐个过检查",
"if",
"available",
"is",
"None",
":",
"available",
"=",
"self",
".",
"can_with_columns",
"(",
"user",
",",
"action",
",",
"record",
".",
"table",
",",
"record",
".",
"keys",
"(",
")",
")",
"else",
":",
"available",
"=",
"list",
"(",
"available",
")",
"bak",
"=",
"available",
".",
"copy",
"(",
")",
"for",
"rule",
"in",
"rules",
":",
"ret",
"=",
"rule",
"[",
"-",
"1",
"]",
"(",
"self",
",",
"user",
",",
"action",
",",
"record",
",",
"available",
")",
"if",
"isinstance",
"(",
"ret",
",",
"(",
"tuple",
",",
"set",
",",
"list",
")",
")",
":",
"available",
"=",
"list",
"(",
"ret",
")",
"elif",
"ret",
"==",
"'*'",
":",
"available",
"=",
"list",
"(",
"bak",
")",
"elif",
"not",
"ret",
":",
"available",
"=",
"[",
"]",
"return",
"available"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
BaseView.use
|
interface helper function
|
slim/base/view.py
|
def use(cls, name, method: [str, Set, List], url=None):
""" interface helper function"""
if not isinstance(method, (str, list, set, tuple)):
raise BaseException('Invalid type of method: %s' % type(method).__name__)
if isinstance(method, str):
method = {method}
# TODO: check methods available
cls._interface[name] = [{'method': method, 'url': url}]
|
def use(cls, name, method: [str, Set, List], url=None):
""" interface helper function"""
if not isinstance(method, (str, list, set, tuple)):
raise BaseException('Invalid type of method: %s' % type(method).__name__)
if isinstance(method, str):
method = {method}
# TODO: check methods available
cls._interface[name] = [{'method': method, 'url': url}]
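Registering interfaces on a hypothetical view subclass (use is assumed to be exposed as a classmethod, since its first parameter is cls):

class TopicView(BaseView):
    pass

TopicView.use('ping', 'GET')                             # a single method string
TopicView.use('submit', {'POST', 'PUT'}, url='/submit')  # several methods, explicit URL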
|
[
"interface",
"helper",
"function"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L40-L49
|
[
"def",
"use",
"(",
"cls",
",",
"name",
",",
"method",
":",
"[",
"str",
",",
"Set",
",",
"List",
"]",
",",
"url",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"method",
",",
"(",
"str",
",",
"list",
",",
"set",
",",
"tuple",
")",
")",
":",
"raise",
"BaseException",
"(",
"'Invalid type of method: %s'",
"%",
"type",
"(",
"method",
")",
".",
"__name__",
")",
"if",
"isinstance",
"(",
"method",
",",
"str",
")",
":",
"method",
"=",
"{",
"method",
"}",
"# TODO: check methods available",
"cls",
".",
"_interface",
"[",
"name",
"]",
"=",
"[",
"{",
"'method'",
":",
"method",
",",
"'url'",
":",
"url",
"}",
"]"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
BaseView.get_ip
|
get ip address of client
:return:
|
slim/base/view.py
|
async def get_ip(self) -> Union[IPv4Address, IPv6Address]:
"""
get ip address of client
:return:
"""
xff = await self.get_x_forwarded_for()
if xff: return xff[0]
ip_addr = self._request.transport.get_extra_info('peername')[0]
return ip_address(ip_addr)
|
async def get_ip(self) -> Union[IPv4Address, IPv6Address]:
"""
get ip address of client
:return:
"""
xff = await self.get_x_forwarded_for()
if xff: return xff[0]
ip_addr = self._request.transport.get_extra_info('peername')[0]
return ip_address(ip_addr)
|
[
"get",
"ip",
"address",
"of",
"client",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L128-L136
|
[
"async",
"def",
"get_ip",
"(",
"self",
")",
"->",
"Union",
"[",
"IPv4Address",
",",
"IPv6Address",
"]",
":",
"xff",
"=",
"await",
"self",
".",
"get_x_forwarded_for",
"(",
")",
"if",
"xff",
":",
"return",
"xff",
"[",
"0",
"]",
"ip_addr",
"=",
"self",
".",
"_request",
".",
"transport",
".",
"get_extra_info",
"(",
"'peername'",
")",
"[",
"0",
"]",
"return",
"ip_address",
"(",
"ip_addr",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
BaseView.finish
|
Set response as {'code': xxx, 'data': xxx}
:param code:
:param data:
:return:
|
slim/base/view.py
|
def finish(self, code, data=NotImplemented):
"""
Set response as {'code': xxx, 'data': xxx}
:param code:
:param data:
:return:
"""
if data is NotImplemented:
data = RETCODE.txt_cn.get(code, None)
self.ret_val = {'code': code, 'data': data} # for access in inherited methods
self.response = web.json_response(self.ret_val, dumps=json_ex_dumps)
logger.debug('finish: %s' % self.ret_val)
self._finish_end()
|
def finish(self, code, data=NotImplemented):
"""
Set response as {'code': xxx, 'data': xxx}
:param code:
:param data:
:return:
"""
if data is NotImplemented:
data = RETCODE.txt_cn.get(code, None)
self.ret_val = {'code': code, 'data': data} # for access in inherited methods
self.response = web.json_response(self.ret_val, dumps=json_ex_dumps)
logger.debug('finish: %s' % self.ret_val)
self._finish_end()
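A typical call inside a request handler; the RETCODE import path and member are assumptions:

from slim.retcode import RETCODE  # assumed location of the return-code constants

async def get(self):
    self.finish(RETCODE.SUCCESS, {'hello': 'world'})
    # serialized response body: {"code": 0, "data": {"hello": "world"}}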
|
[
"Set",
"response",
"as",
"{",
"code",
":",
"xxx",
"data",
":",
"xxx",
"}",
":",
"param",
"code",
":",
":",
"param",
"data",
":",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L179-L191
|
[
"def",
"finish",
"(",
"self",
",",
"code",
",",
"data",
"=",
"NotImplemented",
")",
":",
"if",
"data",
"is",
"NotImplemented",
":",
"data",
"=",
"RETCODE",
".",
"txt_cn",
".",
"get",
"(",
"code",
",",
"None",
")",
"self",
".",
"ret_val",
"=",
"{",
"'code'",
":",
"code",
",",
"'data'",
":",
"data",
"}",
"# for access in inhreads method",
"self",
".",
"response",
"=",
"web",
".",
"json_response",
"(",
"self",
".",
"ret_val",
",",
"dumps",
"=",
"json_ex_dumps",
")",
"logger",
".",
"debug",
"(",
"'finish: %s'",
"%",
"self",
".",
"ret_val",
")",
"self",
".",
"_finish_end",
"(",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
BaseView.finish_raw
|
Set raw response
:param body:
:param status:
:param content_type:
:return:
|
slim/base/view.py
|
def finish_raw(self, body: bytes, status: int = 200, content_type: Optional[str] = None):
"""
Set raw response
:param body:
:param status:
:param content_type:
:return:
"""
self.ret_val = body
self.response = web.Response(body=body, status=status, content_type=content_type)
logger.debug('finish: raw body(%d bytes)' % len(body))
self._finish_end()
|
def finish_raw(self, body: bytes, status: int = 200, content_type: Optional[str] = None):
"""
Set raw response
:param body:
:param status:
:param content_type:
:return:
"""
self.ret_val = body
self.response = web.Response(body=body, status=status, content_type=content_type)
logger.debug('finish: raw body(%d bytes)' % len(body))
self._finish_end()
|
[
"Set",
"raw",
"response",
":",
"param",
"body",
":",
":",
"param",
"status",
":",
":",
"param",
"content_type",
":",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L193-L204
|
[
"def",
"finish_raw",
"(",
"self",
",",
"body",
":",
"bytes",
",",
"status",
":",
"int",
"=",
"200",
",",
"content_type",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
":",
"self",
".",
"ret_val",
"=",
"body",
"self",
".",
"response",
"=",
"web",
".",
"Response",
"(",
"body",
"=",
"body",
",",
"status",
"=",
"status",
",",
"content_type",
"=",
"content_type",
")",
"logger",
".",
"debug",
"(",
"'finish: raw body(%d bytes)'",
"%",
"len",
"(",
"body",
")",
")",
"self",
".",
"_finish_end",
"(",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
AbstractSQLView.add_soft_foreign_key
|
the column stores the foreign table's primary key but isn't a real foreign key (to avoid a constraint)
warning: if the table does not exist, queries with loadfk will crash
:param column: table's column
:param table_name: foreign table name
:param alias: alias for the table name. Defaults to the table name itself.
:return: True, None
|
slim/base/view.py
|
def add_soft_foreign_key(cls, column, table_name, alias=None):
"""
the column stores the foreign table's primary key but isn't a real foreign key (to avoid a constraint)
warning: if the table does not exist, queries with loadfk will crash
:param column: table's column
:param table_name: foreign table name
:param alias: alias for the table name. Defaults to the table name itself.
:return: True, None
"""
if column in cls.fields:
table = SQLForeignKey(table_name, column, cls.fields[column], True)
if alias:
if alias in cls.foreign_keys_table_alias:
logger.warning("This alias of table is already exists, overwriting: %s.%s to %s" %
(cls.__name__, column, table_name))
cls.foreign_keys_table_alias[alias] = table
if column not in cls.foreign_keys:
cls.foreign_keys[column] = [table]
else:
if not alias:
logger.warning("The soft foreign key will not work, an alias required: %s.%s to %r" %
(cls.__name__, column, table_name))
cls.foreign_keys[column].append(table)
return True
|
def add_soft_foreign_key(cls, column, table_name, alias=None):
"""
the column stores the foreign table's primary key but isn't a real foreign key (to avoid a constraint)
warning: if the table does not exist, queries with loadfk will crash
:param column: table's column
:param table_name: foreign table name
:param alias: alias for the table name. Defaults to the table name itself.
:return: True, None
"""
if column in cls.fields:
table = SQLForeignKey(table_name, column, cls.fields[column], True)
if alias:
if alias in cls.foreign_keys_table_alias:
logger.warning("This alias of table is already exists, overwriting: %s.%s to %s" %
(cls.__name__, column, table_name))
cls.foreign_keys_table_alias[alias] = table
if column not in cls.foreign_keys:
cls.foreign_keys[column] = [table]
else:
if not alias:
logger.warning("The soft foreign key will not work, an alias required: %s.%s to %r" %
(cls.__name__, column, table_name))
cls.foreign_keys[column].append(table)
return True
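Declaring soft foreign keys on a hypothetical view class:

# topics.user_id stores users.id, but without a database-level constraint
TopicView.add_soft_foreign_key('user_id', 'users')
# binding a second table to the same column requires an alias
TopicView.add_soft_foreign_key('user_id', 'user_archive', alias='archive')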
|
[
"the",
"column",
"stores",
"foreign",
"table",
"s",
"primary",
"key",
"but",
"isn",
"t",
"a",
"foreign",
"key",
"(",
"to",
"avoid",
"constraint",
")",
"warning",
":",
"if",
"the",
"table",
"not",
"exists",
"will",
"crash",
"when",
"query",
"with",
"loadfk",
":",
"param",
"column",
":",
"table",
"s",
"column",
":",
"param",
"table_name",
":",
"foreign",
"table",
"name",
":",
"param",
"alias",
":",
"table",
"name",
"s",
"alias",
".",
"Default",
"is",
"as",
"same",
"as",
"table",
"name",
".",
":",
"return",
":",
"True",
"None"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L428-L453
|
[
"def",
"add_soft_foreign_key",
"(",
"cls",
",",
"column",
",",
"table_name",
",",
"alias",
"=",
"None",
")",
":",
"if",
"column",
"in",
"cls",
".",
"fields",
":",
"table",
"=",
"SQLForeignKey",
"(",
"table_name",
",",
"column",
",",
"cls",
".",
"fields",
"[",
"column",
"]",
",",
"True",
")",
"if",
"alias",
":",
"if",
"alias",
"in",
"cls",
".",
"foreign_keys_table_alias",
":",
"logger",
".",
"warning",
"(",
"\"This alias of table is already exists, overwriting: %s.%s to %s\"",
"%",
"(",
"cls",
".",
"__name__",
",",
"column",
",",
"table_name",
")",
")",
"cls",
".",
"foreign_keys_table_alias",
"[",
"alias",
"]",
"=",
"table",
"if",
"column",
"not",
"in",
"cls",
".",
"foreign_keys",
":",
"cls",
".",
"foreign_keys",
"[",
"column",
"]",
"=",
"[",
"table",
"]",
"else",
":",
"if",
"not",
"alias",
":",
"logger",
".",
"warning",
"(",
"\"The soft foreign key will not work, an alias required: %s.%s to %r\"",
"%",
"(",
"cls",
".",
"__name__",
",",
"column",
",",
"table_name",
")",
")",
"cls",
".",
"foreign_keys",
"[",
"column",
"]",
".",
"append",
"(",
"table",
")",
"return",
"True"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
AbstractSQLView.current_request_role
|
Current role requested by client.
:return:
|
slim/base/view.py
|
def current_request_role(self) -> [int, str]:
"""
Current role requested by client.
:return:
"""
role_val = self.headers.get('Role')
return int(role_val) if role_val and role_val.isdigit() else role_val
|
def current_request_role(self) -> [int, str]:
"""
Current role requested by client.
:return:
"""
role_val = self.headers.get('Role')
return int(role_val) if role_val and role_val.isdigit() else role_val
|
[
"Current",
"role",
"requested",
"by",
"client",
".",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L496-L502
|
[
"def",
"current_request_role",
"(",
"self",
")",
"->",
"[",
"int",
",",
"str",
"]",
":",
"role_val",
"=",
"self",
".",
"headers",
".",
"get",
"(",
"'Role'",
")",
"return",
"int",
"(",
"role_val",
")",
"if",
"role_val",
"and",
"role_val",
".",
"isdigit",
"(",
")",
"else",
"role_val"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
AbstractSQLView.load_fk
|
:param info:
:param records: the data retrieved from the database and filtered by permissions
:return:
|
slim/base/view.py
|
async def load_fk(self, info: SQLQueryInfo, records: Iterable[DataRecord]) -> Union[List, Iterable]:
"""
:param info:
:param records: the data retrieved from the database and filtered by permissions
:return:
"""
# if not items, items is probably [], so return itself.
# if not items: return items
# 1. get tables' instances
# table_map = {}
# for column in info['loadfk'].keys():
# tbl_name = self.foreign_keys[column][0]
# table_map[column] = self.app.tables[tbl_name]
# 2. get query parameters
async def check(data, records):
for column, fkvalues_lst in data.items():
for fkvalues in fkvalues_lst:
pks = []
all_ni = True
vcls = self.app.tables[fkvalues['table']]
for i in records:
val = i.get(column, NotImplemented)
if val != NotImplemented:
all_ni = False
pks.append(val)
if all_ni:
logger.debug("load foreign key failed, do you have read permission to the column %r?" % column)
continue
# 3. query foreign keys
v = vcls(self.app, self._request) # fake view
await v._prepare()
info2 = SQLQueryInfo()
info2.set_select(ALL_COLUMNS)
info2.add_condition(PRIMARY_KEY, SQL_OP.IN, pks)
info2.bind(v)
# ability = vcls.permission.request_role(self.current_user, fkvalues['role'])
# info2.check_query_permission_full(self.current_user, fktable, ability)
try:
fk_records, count = await v._sql.select_page(info2, size=-1)
except RecordNotFound:
# no foreign key values found; perhaps they are all null, which is common
continue
# if not fk_records: continue
await v.check_records_permission(info2, fk_records)
fk_dict = {}
for i in fk_records:
# primary key -> record
fk_dict[i[vcls.primary_key]] = i
column_to_set = fkvalues.get('as', column) or column
for _, record in enumerate(records):
k = record.get(column, NotImplemented)
if k in fk_dict:
record[column_to_set] = fk_dict[k]
if fkvalues['loadfk']:
await check(fkvalues['loadfk'], fk_records)
await check(info.loadfk, records)
return records
|
async def load_fk(self, info: SQLQueryInfo, records: Iterable[DataRecord]) -> Union[List, Iterable]:
"""
:param info:
:param records: the data retrieved from the database and filtered by permissions
:return:
"""
# if not items, items is probably [], so return itself.
# if not items: return items
# 1. get tables' instances
# table_map = {}
# for column in info['loadfk'].keys():
# tbl_name = self.foreign_keys[column][0]
# table_map[column] = self.app.tables[tbl_name]
# 2. get query parameters
async def check(data, records):
for column, fkvalues_lst in data.items():
for fkvalues in fkvalues_lst:
pks = []
all_ni = True
vcls = self.app.tables[fkvalues['table']]
for i in records:
val = i.get(column, NotImplemented)
if val != NotImplemented:
all_ni = False
pks.append(val)
if all_ni:
logger.debug("load foreign key failed, do you have read permission to the column %r?" % column)
continue
# 3. query foreign keys
v = vcls(self.app, self._request) # fake view
await v._prepare()
info2 = SQLQueryInfo()
info2.set_select(ALL_COLUMNS)
info2.add_condition(PRIMARY_KEY, SQL_OP.IN, pks)
info2.bind(v)
# ability = vcls.permission.request_role(self.current_user, fkvalues['role'])
# info2.check_query_permission_full(self.current_user, fktable, ability)
try:
fk_records, count = await v._sql.select_page(info2, size=-1)
except RecordNotFound:
# no foreign key values found; perhaps they are all null, which is common
continue
# if not fk_records: continue
await v.check_records_permission(info2, fk_records)
fk_dict = {}
for i in fk_records:
# primary key -> record
fk_dict[i[vcls.primary_key]] = i
column_to_set = fkvalues.get('as', column) or column
for _, record in enumerate(records):
k = record.get(column, NotImplemented)
if k in fk_dict:
record[column_to_set] = fk_dict[k]
if fkvalues['loadfk']:
await check(fkvalues['loadfk'], fk_records)
await check(info.loadfk, records)
return records
|
[
":",
"param",
"info",
":",
":",
"param",
"records",
":",
"the",
"data",
"got",
"from",
"database",
"and",
"filtered",
"from",
"permission",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L515-L584
|
[
"async",
"def",
"load_fk",
"(",
"self",
",",
"info",
":",
"SQLQueryInfo",
",",
"records",
":",
"Iterable",
"[",
"DataRecord",
"]",
")",
"->",
"Union",
"[",
"List",
",",
"Iterable",
"]",
":",
"# if not items, items is probably [], so return itself.",
"# if not items: return items",
"# 1. get tables' instances",
"# table_map = {}",
"# for column in info['loadfk'].keys():",
"# tbl_name = self.foreign_keys[column][0]",
"# table_map[column] = self.app.tables[tbl_name]",
"# 2. get query parameters",
"async",
"def",
"check",
"(",
"data",
",",
"records",
")",
":",
"for",
"column",
",",
"fkvalues_lst",
"in",
"data",
".",
"items",
"(",
")",
":",
"for",
"fkvalues",
"in",
"fkvalues_lst",
":",
"pks",
"=",
"[",
"]",
"all_ni",
"=",
"True",
"vcls",
"=",
"self",
".",
"app",
".",
"tables",
"[",
"fkvalues",
"[",
"'table'",
"]",
"]",
"for",
"i",
"in",
"records",
":",
"val",
"=",
"i",
".",
"get",
"(",
"column",
",",
"NotImplemented",
")",
"if",
"val",
"!=",
"NotImplemented",
":",
"all_ni",
"=",
"False",
"pks",
".",
"append",
"(",
"val",
")",
"if",
"all_ni",
":",
"logger",
".",
"debug",
"(",
"\"load foreign key failed, do you have read permission to the column %r?\"",
"%",
"column",
")",
"continue",
"# 3. query foreign keys",
"v",
"=",
"vcls",
"(",
"self",
".",
"app",
",",
"self",
".",
"_request",
")",
"# fake view",
"await",
"v",
".",
"_prepare",
"(",
")",
"info2",
"=",
"SQLQueryInfo",
"(",
")",
"info2",
".",
"set_select",
"(",
"ALL_COLUMNS",
")",
"info2",
".",
"add_condition",
"(",
"PRIMARY_KEY",
",",
"SQL_OP",
".",
"IN",
",",
"pks",
")",
"info2",
".",
"bind",
"(",
"v",
")",
"# ability = vcls.permission.request_role(self.current_user, fkvalues['role'])",
"# info2.check_query_permission_full(self.current_user, fktable, ability)",
"try",
":",
"fk_records",
",",
"count",
"=",
"await",
"v",
".",
"_sql",
".",
"select_page",
"(",
"info2",
",",
"size",
"=",
"-",
"1",
")",
"except",
"RecordNotFound",
":",
"# 外键没有找到值,也许全部都是null,这很常见",
"continue",
"# if not fk_records: continue",
"await",
"v",
".",
"check_records_permission",
"(",
"info2",
",",
"fk_records",
")",
"fk_dict",
"=",
"{",
"}",
"for",
"i",
"in",
"fk_records",
":",
"# 主键: 数据",
"fk_dict",
"[",
"i",
"[",
"vcls",
".",
"primary_key",
"]",
"]",
"=",
"i",
"column_to_set",
"=",
"fkvalues",
".",
"get",
"(",
"'as'",
",",
"column",
")",
"or",
"column",
"for",
"_",
",",
"record",
"in",
"enumerate",
"(",
"records",
")",
":",
"k",
"=",
"record",
".",
"get",
"(",
"column",
",",
"NotImplemented",
")",
"if",
"k",
"in",
"fk_dict",
":",
"record",
"[",
"column_to_set",
"]",
"=",
"fk_dict",
"[",
"k",
"]",
"if",
"fkvalues",
"[",
"'loadfk'",
"]",
":",
"await",
"check",
"(",
"fkvalues",
"[",
"'loadfk'",
"]",
",",
"fk_records",
")",
"await",
"check",
"(",
"info",
".",
"loadfk",
",",
"records",
")",
"return",
"records"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
AbstractSQLView._call_handle
|
call and check result of handle_query/read/insert/update
|
slim/base/view.py
|
async def _call_handle(self, func, *args):
""" call and check result of handle_query/read/insert/update """
await async_call(func, *args)
if self.is_finished:
raise FinishQuitException()
|
async def _call_handle(self, func, *args):
""" call and check result of handle_query/read/insert/update """
await async_call(func, *args)
if self.is_finished:
raise FinishQuitException()
|
[
"call",
"and",
"check",
"result",
"of",
"handle_query",
"/",
"read",
"/",
"insert",
"/",
"update"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L586-L591
|
[
"async",
"def",
"_call_handle",
"(",
"self",
",",
"func",
",",
"*",
"args",
")",
":",
"await",
"async_call",
"(",
"func",
",",
"*",
"args",
")",
"if",
"self",
".",
"is_finished",
":",
"raise",
"FinishQuitException",
"(",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
AbstractSQLView.after_update
|
:param old_records:
:param raw_post:
:param values:
:param records:
:return:
|
slim/base/view.py
|
async def after_update(self, raw_post: Dict, values: SQLValuesToWrite,
old_records: List[DataRecord], records: List[DataRecord]):
"""
:param old_records:
:param raw_post:
:param values:
:param records:
:return:
"""
|
async def after_update(self, raw_post: Dict, values: SQLValuesToWrite,
old_records: List[DataRecord], records: List[DataRecord]):
"""
:param old_records:
:param raw_post:
:param values:
:param records:
:return:
"""
|
[
":",
"param",
"old_records",
":",
":",
"param",
"raw_post",
":",
":",
"param",
"values",
":",
":",
"param",
"records",
":",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/view.py#L786-L794
|
[
"async",
"def",
"after_update",
"(",
"self",
",",
"raw_post",
":",
"Dict",
",",
"values",
":",
"SQLValuesToWrite",
",",
"old_records",
":",
"List",
"[",
"DataRecord",
"]",
",",
"records",
":",
"List",
"[",
"DataRecord",
"]",
")",
":"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
User.roles
|
Implementation of BaseUser.roles; returns the roles available to the user
:return:
|
slim_cli/template/model/user.py
|
def roles(self):
"""
Implementation of BaseUser.roles; returns the roles available to the user
:return:
"""
ret = {None}
if self.state == POST_STATE.DEL:
return ret
ret.add('user')
return ret
|
def roles(self):
"""
Implementation of BaseUser.roles; returns the roles available to the user
:return:
"""
ret = {None}
if self.state == POST_STATE.DEL:
return ret
ret.add('user')
return ret
|
[
"BaseUser",
".",
"roles",
"的实现,返回用户可用角色",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim_cli/template/model/user.py#L38-L47
|
[
"def",
"roles",
"(",
"self",
")",
":",
"ret",
"=",
"{",
"None",
"}",
"if",
"self",
".",
"state",
"==",
"POST_STATE",
".",
"DEL",
":",
"return",
"ret",
"ret",
".",
"add",
"(",
"'user'",
")",
"return",
"ret"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
User.gen_password_and_salt
|
Generate the hashed password and salt
|
slim_cli/template/model/user.py
|
def gen_password_and_salt(cls, password_text):
""" 生成加密后的密码和盐 """
salt = os.urandom(32)
dk = hashlib.pbkdf2_hmac(
config.PASSWORD_HASH_FUNC_NAME,
password_text.encode('utf-8'),
salt,
config.PASSWORD_HASH_ITERATIONS,
)
return {'password': dk, 'salt': salt}
|
def gen_password_and_salt(cls, password_text):
""" 生成加密后的密码和盐 """
salt = os.urandom(32)
dk = hashlib.pbkdf2_hmac(
config.PASSWORD_HASH_FUNC_NAME,
password_text.encode('utf-8'),
salt,
config.PASSWORD_HASH_ITERATIONS,
)
return {'password': dk, 'salt': salt}
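The derivation round-trip, runnable with the standard library alone (the hash name and iteration count stand in for the config values):

import hashlib
import os

salt = os.urandom(32)
dk1 = hashlib.pbkdf2_hmac('sha256', 'hunter2'.encode('utf-8'), salt, 100000)
dk2 = hashlib.pbkdf2_hmac('sha256', 'hunter2'.encode('utf-8'), salt, 100000)
assert dk1 == dk2      # same password, salt, and iterations give the same key
assert len(dk1) == 32  # sha256 derives a 32-byte key by default

This is the same recomputation _auth_base (below) performs against the stored salt.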
|
[
"生成加密后的密码和盐"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim_cli/template/model/user.py#L50-L59
|
[
"def",
"gen_password_and_salt",
"(",
"cls",
",",
"password_text",
")",
":",
"salt",
"=",
"os",
".",
"urandom",
"(",
"32",
")",
"dk",
"=",
"hashlib",
".",
"pbkdf2_hmac",
"(",
"config",
".",
"PASSWORD_HASH_FUNC_NAME",
",",
"password_text",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"salt",
",",
"config",
".",
"PASSWORD_HASH_ITERATIONS",
",",
")",
"return",
"{",
"'password'",
":",
"dk",
",",
"'salt'",
":",
"salt",
"}"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
User.gen_token
|
Generate an access_token
|
slim_cli/template/model/user.py
|
def gen_token(cls):
""" 生成 access_token """
token = os.urandom(16)
token_time = int(time.time())
return {'token': token, 'token_time': token_time}
|
def gen_token(cls):
""" 生成 access_token """
token = os.urandom(16)
token_time = int(time.time())
return {'token': token, 'token_time': token_time}
|
[
"生成",
"access_token"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim_cli/template/model/user.py#L62-L66
|
[
"def",
"gen_token",
"(",
"cls",
")",
":",
"token",
"=",
"os",
".",
"urandom",
"(",
"16",
")",
"token_time",
"=",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
"return",
"{",
"'token'",
":",
"token",
",",
"'token_time'",
":",
"token_time",
"}"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
User.set_password
|
Set the password
|
slim_cli/template/model/user.py
|
def set_password(self, new_password):
""" 设置密码 """
info = self.gen_password_and_salt(new_password)
self.password = info['password']
self.salt = info['salt']
self.save()
|
def set_password(self, new_password):
""" 设置密码 """
info = self.gen_password_and_salt(new_password)
self.password = info['password']
self.salt = info['salt']
self.save()
|
[
"设置密码"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim_cli/template/model/user.py#L91-L96
|
[
"def",
"set_password",
"(",
"self",
",",
"new_password",
")",
":",
"info",
"=",
"self",
".",
"gen_password_and_salt",
"(",
"new_password",
")",
"self",
".",
"password",
"=",
"info",
"[",
"'password'",
"]",
"self",
".",
"salt",
"=",
"info",
"[",
"'salt'",
"]",
"self",
".",
"save",
"(",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
User._auth_base
|
The user object has already been fetched; verify the password
:param password_text:
:return:
|
slim_cli/template/model/user.py
|
def _auth_base(self, password_text):
"""
The user object has already been fetched; verify the password
:param password_text:
:return:
"""
dk = hashlib.pbkdf2_hmac(
config.PASSWORD_HASH_FUNC_NAME,
password_text.encode('utf-8'),
get_bytes_from_blob(self.salt),
config.PASSWORD_HASH_ITERATIONS
)
if self.password == dk:
return self
|
def _auth_base(self, password_text):
"""
The user object has already been fetched; verify the password
:param password_text:
:return:
"""
dk = hashlib.pbkdf2_hmac(
config.PASSWORD_HASH_FUNC_NAME,
password_text.encode('utf-8'),
get_bytes_from_blob(self.salt),
config.PASSWORD_HASH_ITERATIONS
)
if self.password == dk:
return self
|
[
"已获取了用户对象,进行密码校验",
":",
"param",
"password_text",
":",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim_cli/template/model/user.py#L98-L112
|
[
"def",
"_auth_base",
"(",
"self",
",",
"password_text",
")",
":",
"dk",
"=",
"hashlib",
".",
"pbkdf2_hmac",
"(",
"config",
".",
"PASSWORD_HASH_FUNC_NAME",
",",
"password_text",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"get_bytes_from_blob",
"(",
"self",
".",
"salt",
")",
",",
"config",
".",
"PASSWORD_HASH_ITERATIONS",
")",
"if",
"self",
".",
"password",
"==",
"dk",
":",
"return",
"self"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
BaseSession.get_session
|
Every request has a session instance
:param view:
:return:
|
slim/base/session.py
|
async def get_session(cls, view):
"""
Every request has a session instance
:param view:
:return:
"""
session = cls(view)
session.key = await session.get_key()
session._data = await session.load() or {}
return session
|
async def get_session(cls, view):
"""
Every request has a session instance
:param view:
:return:
"""
session = cls(view)
session.key = await session.get_key()
session._data = await session.load() or {}
return session
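A minimal concrete session, as a sketch (assumes subclasses only need to supply get_key and load; the in-memory store is for illustration, not production):

_STORE = {}  # process-local storage

class MemorySession(BaseSession):
    async def get_key(self):
        return 'demo-key'  # real code would derive this from a cookie or header

    async def load(self):
        return _STORE.get(self.key)

# inside a request handler:
session = await MemorySession.get_session(view)
print(session.key, session._data)  # 'demo-key' {}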
|
[
"Every",
"request",
"have",
"a",
"session",
"instance",
":",
"param",
"view",
":",
":",
"return",
":"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/session.py#L45-L54
|
[
"async",
"def",
"get_session",
"(",
"cls",
",",
"view",
")",
":",
"session",
"=",
"cls",
"(",
"view",
")",
"session",
".",
"key",
"=",
"await",
"session",
".",
"get_key",
"(",
")",
"session",
".",
"_data",
"=",
"await",
"session",
".",
"load",
"(",
")",
"or",
"{",
"}",
"return",
"session"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
valid
|
AbstractSQLFunctions.select_page
|
Select from database
:param info:
:param size: -1 means infinite
:param page:
:return: records, count
|
slim/base/sqlfuncs.py
|
async def select_page(self, info: SQLQueryInfo, size=1, page=1) -> Tuple[Tuple[DataRecord, ...], int]:
"""
Select from database
:param info:
:param size: -1 means infinite
:param page:
:return: records, count
"""
raise NotImplementedError()
|
async def select_page(self, info: SQLQueryInfo, size=1, page=1) -> Tuple[Tuple[DataRecord, ...], int]:
"""
Select from database
:param info:
:param size: -1 means infinite
:param page:
:return: records, count
"""
raise NotImplementedError()
|
[
"Select",
"from",
"database",
":",
"param",
"info",
":",
":",
"param",
"size",
":",
"-",
"1",
"means",
"infinite",
":",
"param",
"page",
":",
":",
"param",
"need_count",
":",
"if",
"True",
"get",
"count",
"as",
"second",
"return",
"value",
"otherwise",
"-",
"1",
":",
"return",
":",
"records",
".",
"count"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/sqlfuncs.py#L24-L33
|
[
"async",
"def",
"select_page",
"(",
"self",
",",
"info",
":",
"SQLQueryInfo",
",",
"size",
"=",
"1",
",",
"page",
"=",
"1",
")",
"->",
"Tuple",
"[",
"Tuple",
"[",
"DataRecord",
",",
"...",
"]",
",",
"int",
"]",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
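`select_page` above is abstract; concrete backends override it. Note that its docstring still mentions a `need_count` parameter that no longer appears in the signature, and the count is always returned as the second tuple element. A hedged sketch of an override backed by a plain Python list, skipping `info` filtering to stay short; the slicing arithmetic is an assumption about how `size` and `page` combine, with `size == -1` meaning no limit per the docstring:

import asyncio
from typing import Tuple

class ListSQLFunctions:
    """Hypothetical backend over a plain list; `info` filtering is skipped."""

    def __init__(self, rows):
        self.rows = rows

    async def select_page(self, info, size=1, page=1) -> Tuple[tuple, int]:
        if size == -1:                 # -1 means no limit, per the docstring
            picked = self.rows
        else:
            start = (page - 1) * size  # assumed 1-based page numbering
            picked = self.rows[start:start + size]
        return tuple(picked), len(self.rows)

funcs = ListSQLFunctions([{'id': i} for i in range(5)])
records, count = asyncio.run(funcs.select_page(None, size=2, page=2))
print(records, count)  # ({'id': 2}, {'id': 3}) 5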
valid
|
AbstractSQLFunctions.update
|
:param records:
:param values:
:param returning:
:return: return count if returning is False, otherwise records
|
slim/base/sqlfuncs.py
|
async def update(self, records: Iterable[DataRecord], values: SQLValuesToWrite, returning=False) -> Union[int, Iterable[DataRecord]]:
"""
:param records:
:param values:
:param returning:
:return: return count if returning is False, otherwise records
"""
raise NotImplementedError()
|
async def update(self, records: Iterable[DataRecord], values: SQLValuesToWrite, returning=False) -> Union[int, Iterable[DataRecord]]:
"""
:param records:
:param values:
:param returning:
:return: return count if returning is False, otherwise records
"""
raise NotImplementedError()
|
[
":",
"param",
"records",
":",
":",
"param",
"values",
":",
":",
"param",
"returning",
":",
":",
"return",
":",
"return",
"count",
"if",
"returning",
"is",
"False",
"otherwise",
"records"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/sqlfuncs.py#L36-L43
|
[
"async",
"def",
"update",
"(",
"self",
",",
"records",
":",
"Iterable",
"[",
"DataRecord",
"]",
",",
"values",
":",
"SQLValuesToWrite",
",",
"returning",
"=",
"False",
")",
"->",
"Union",
"[",
"int",
",",
"Iterable",
"[",
"DataRecord",
"]",
"]",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
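A matching sketch for `update`, honoring the `returning` contract from the docstring: the affected records when `returning` is true, otherwise their count. It is written module-level here to stay runnable on its own, and plain dicts stand in for slim's `DataRecord` objects:

import asyncio

async def update(records, values, returning=False):
    # Plain dicts stand in for slim's DataRecord objects.
    changed = []
    for rec in records:
        rec.update(values)
        changed.append(rec)
    return changed if returning else len(changed)

rows = [{'id': 1, 'name': 'a'}, {'id': 2, 'name': 'b'}]
print(asyncio.run(update(rows, {'name': 'x'})))        # 2
print(asyncio.run(update(rows, {'name': 'y'}, True)))  # the updated records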
valid
|
AbstractSQLFunctions.insert
|
:param values_lst:
:param returning:
:return: return count if returning is False, otherwise records
|
slim/base/sqlfuncs.py
|
async def insert(self, values_lst: Iterable[SQLValuesToWrite], returning=False) -> Union[int, List[DataRecord]]:
"""
:param values_lst:
:param returning:
:return: return count if returning is False, otherwise records
"""
raise NotImplementedError()
|
async def insert(self, values_lst: Iterable[SQLValuesToWrite], returning=False) -> Union[int, List[DataRecord]]:
"""
:param values_lst:
:param returning:
:return: return count if returning is False, otherwise records
"""
raise NotImplementedError()
|
[
":",
"param",
"values_lst",
":",
":",
"param",
"returning",
":",
":",
"return",
":",
"return",
"count",
"if",
"returning",
"is",
"False",
"otherwise",
"records"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/sqlfuncs.py#L46-L52
|
[
"async",
"def",
"insert",
"(",
"self",
",",
"values_lst",
":",
"Iterable",
"[",
"SQLValuesToWrite",
"]",
",",
"returning",
"=",
"False",
")",
"->",
"Union",
"[",
"int",
",",
"List",
"[",
"DataRecord",
"]",
"]",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
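And a sketch for `insert` with the same `returning` contract; again, a plain list and plain dicts stand in for a real table and `DataRecord` rows:

import asyncio

async def insert(table, values_lst, returning=False):
    # `table` is a plain list standing in for a real database table.
    created = []
    for values in values_lst:
        rec = dict(values)
        table.append(rec)
        created.append(rec)
    return created if returning else len(created)

table = []
print(asyncio.run(insert(table, [{'id': 1}, {'id': 2}])))       # 2
print(asyncio.run(insert(table, [{'id': 3}], returning=True)))  # [{'id': 3}]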
valid
|
SQLQueryInfo.parse_order
|
:param text: order=id.desc, xxx.asc
:return: [
[<column>, asc|desc|default],
[<column2>, asc|desc|default],
]
|
slim/base/sqlquery.py
|
def parse_order(text):
"""
:param text: order=id.desc, xxx.asc
:return: [
[<column>, asc|desc|default],
[<column2>, asc|desc|default],
]
"""
orders = []
for i in map(str.strip, text.split(',')):
items = i.split('.', 2)
if len(items) == 1: column, order = items[0], 'default'
elif len(items) == 2: column, order = items
else: raise InvalidParams("Invalid order syntax")
order = order.lower()
if order not in ('asc', 'desc', 'default'):
raise InvalidParams('Invalid order mode: %s' % order)
if order != 'default':
orders.append(SQLQueryOrder(column, order))
return orders
|
def parse_order(text):
"""
:param text: order=id.desc, xxx.asc
:return: [
[<column>, asc|desc|default],
[<column2>, asc|desc|default],
]
"""
orders = []
for i in map(str.strip, text.split(',')):
items = i.split('.', 2)
if len(items) == 1: column, order = items[0], 'default'
elif len(items) == 2: column, order = items
else: raise InvalidParams("Invalid order syntax")
order = order.lower()
if order not in ('asc', 'desc', 'default'):
raise InvalidParams('Invalid order mode: %s' % order)
if order != 'default':
orders.append(SQLQueryOrder(column, order))
return orders
|
[
":",
"param",
"text",
":",
"order",
"=",
"id",
".",
"desc",
"xxx",
".",
"asc",
":",
"return",
":",
"[",
"[",
"<column",
">",
"asc|desc|default",
"]",
"[",
"<column2",
">",
"asc|desc|default",
"]",
"]"
] |
fy0/slim
|
python
|
https://github.com/fy0/slim/blob/9951a910750888dbe7dd3e98acae9c40efae0689/slim/base/sqlquery.py#L186-L208
|
[
"def",
"parse_order",
"(",
"text",
")",
":",
"orders",
"=",
"[",
"]",
"for",
"i",
"in",
"map",
"(",
"str",
".",
"strip",
",",
"text",
".",
"split",
"(",
"','",
")",
")",
":",
"items",
"=",
"i",
".",
"split",
"(",
"'.'",
",",
"2",
")",
"if",
"len",
"(",
"items",
")",
"==",
"1",
":",
"column",
",",
"order",
"=",
"items",
"[",
"0",
"]",
",",
"'default'",
"elif",
"len",
"(",
"items",
")",
"==",
"2",
":",
"column",
",",
"order",
"=",
"items",
"else",
":",
"raise",
"InvalidParams",
"(",
"\"Invalid order syntax\"",
")",
"order",
"=",
"order",
".",
"lower",
"(",
")",
"if",
"order",
"not",
"in",
"(",
"'asc'",
",",
"'desc'",
",",
"'default'",
")",
":",
"raise",
"InvalidParams",
"(",
"'Invalid order mode: %s'",
"%",
"order",
")",
"if",
"order",
"!=",
"'default'",
":",
"orders",
".",
"append",
"(",
"SQLQueryOrder",
"(",
"column",
",",
"order",
")",
")",
"return",
"orders"
] |
9951a910750888dbe7dd3e98acae9c40efae0689
|
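`parse_order` above splits the comma-separated order string, allows at most one `.` per item, validates the mode, and drops `default` entries. A usage sketch; the import path is inferred from the URL above, so treat it as an assumption:

# Import path inferred from the row's URL; treat it as an assumption.
from slim.base.sqlquery import SQLQueryInfo

orders = SQLQueryInfo.parse_order('id.desc, name.asc, created_at')
# -> [SQLQueryOrder('id', 'desc'), SQLQueryOrder('name', 'asc')]
# 'created_at' carries no mode, resolves to 'default', and is dropped.

try:
    SQLQueryInfo.parse_order('id.backwards')
except Exception as e:  # InvalidParams
    print(e)            # Invalid order mode: backwards

try:
    SQLQueryInfo.parse_order('a.b.c')  # more than one '.' in one item
except Exception as e:  # InvalidParams
    print(e)            # Invalid order syntax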