| partition (string, 3 classes) | func_name (string, 1–134 chars) | docstring (string, 1–46.9k chars) | path (string, 4–223 chars) | original_string (string, 75–104k chars) | code (string, 75–104k chars) | docstring_tokens (list, 1–1.97k items) | repo (string, 7–55 chars) | language (string, 1 class) | url (string, 87–315 chars) | code_tokens (list, 19–28.4k items) | sha (string, 40 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|
test
|
SdScanningClient.query_image_metadata
|
**Description**
Find the image with the tag <image> and return its metadata.
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- metadata_type: The metadata type can be one of the types returned by running without a type specified
**Success Return Value**
A JSON object representing the image metadata.
|
sdcclient/_scanning.py
|
def query_image_metadata(self, image, metadata_type=""):
'''**Description**
Find the image with the tag <image> and return its metadata.
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- metadata_type: The metadata type can be one of the types returned by running without a type specified
**Success Return Value**
A JSON object representing the image metadata.
'''
return self._query_image(image, query_group='metadata', query_type=metadata_type)
|
def query_image_metadata(self, image, metadata_type=""):
'''**Description**
Find the image with the tag <image> and return its metadata.
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- metadata_type: The metadata type can be one of the types returned by running without a type specified
**Success Return Value**
A JSON object representing the image metadata.
'''
return self._query_image(image, query_group='metadata', query_type=metadata_type)
|
[
"**",
"Description",
"**",
"Find",
"the",
"image",
"with",
"the",
"tag",
"<image",
">",
"and",
"return",
"its",
"metadata",
"."
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L139-L150
|
[
"def",
"query_image_metadata",
"(",
"self",
",",
"image",
",",
"metadata_type",
"=",
"\"\"",
")",
":",
"return",
"self",
".",
"_query_image",
"(",
"image",
",",
"query_group",
"=",
"'metadata'",
",",
"query_type",
"=",
"metadata_type",
")"
] |
47f83415842048778939b90944f64386a3bcb205
|
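A minimal usage sketch for the query_image_metadata row above. The import and constructor arguments are illustrative (they are not part of this row); the [ok, payload] return convention comes from the code itself:

from sdcclient import SdScanningClient

# Illustrative credentials/endpoint; substitute real values.
client = SdScanningClient("<API_TOKEN>", "https://secure.sysdig.com")

ok, res = client.query_image_metadata("registry/repo:tag", metadata_type="")
if ok:
    print(res)            # JSON object with the image metadata
else:
    print("error:", res)  # on failure the second element is client.lasterr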
test
|
SdScanningClient.query_image_vuln
|
**Description**
Find the image with the tag <image> and return its vulnerabilities.
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- vuln_type: Vulnerability type can be one of the following types:
- os: CVE/distro vulnerabilities against operating system packages
**Success Return Value**
A JSON object representing the image vulnerabilities.
|
sdcclient/_scanning.py
|
def query_image_vuln(self, image, vuln_type="", vendor_only=True):
'''**Description**
Find the image with the tag <image> and return its vulnerabilities.
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- vuln_type: Vulnerability type can be one of the following types:
- os: CVE/distro vulnerabilities against operating system packages
**Success Return Value**
A JSON object representing the image vulnerabilities.
'''
return self._query_image(image, query_group='vuln', query_type=vuln_type, vendor_only=vendor_only)
|
def query_image_vuln(self, image, vuln_type="", vendor_only=True):
'''**Description**
Find the image with the tag <image> and return its vulnerabilities.
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- vuln_type: Vulnerability type can be one of the following types:
- os: CVE/distro vulnerabilities against operating system packages
**Success Return Value**
A JSON object representing the image vulnerabilities.
'''
return self._query_image(image, query_group='vuln', query_type=vuln_type, vendor_only=vendor_only)
|
[
"**",
"Description",
"**",
"Find",
"the",
"image",
"with",
"the",
"tag",
"<image",
">",
"and",
"return",
"its",
"vulnerabilities",
"."
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L152-L164
|
[
"def",
"query_image_vuln",
"(",
"self",
",",
"image",
",",
"vuln_type",
"=",
"\"\"",
",",
"vendor_only",
"=",
"True",
")",
":",
"return",
"self",
".",
"_query_image",
"(",
"image",
",",
"query_group",
"=",
"'vuln'",
",",
"query_type",
"=",
"vuln_type",
",",
"vendor_only",
"=",
"vendor_only",
")"
] |
47f83415842048778939b90944f64386a3bcb205
|
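query_image_vuln follows the same pattern; a short sketch restricted to OS package vulnerabilities, reusing the client instance from the first sketch:

# 'client' is the SdScanningClient instance from the first sketch above.
ok, res = client.query_image_vuln("registry/repo:tag", vuln_type="os", vendor_only=True)
if ok:
    print(res)            # JSON object with the image vulnerabilities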
test
|
SdScanningClient.delete_image
|
**Description**
Delete image from the scanner.
**Arguments**
- None
|
sdcclient/_scanning.py
|
def delete_image(self, image, force=False):
'''**Description**
Delete image from the scanner.
**Arguments**
- None
'''
_, _, image_digest = self._discover_inputimage(image)
if not image_digest:
return [False, "cannot use input image string: no discovered imageDigest"]
url = self.url + "/api/scanning/v1/anchore/images/" + image_digest
res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def delete_image(self, image, force=False):
'''**Description**
Delete image from the scanner.
**Arguments**
- None
'''
_, _, image_digest = self._discover_inputimage(image)
if not image_digest:
return [False, "cannot use input image string: no discovered imageDigest"]
url = self.url + "/api/scanning/v1/anchore/images/" + image_digest
res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Delete",
"image",
"from",
"the",
"scanner",
"."
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L187-L203
|
[
"def",
"delete_image",
"(",
"self",
",",
"image",
",",
"force",
"=",
"False",
")",
":",
"_",
",",
"_",
",",
"image_digest",
"=",
"self",
".",
"_discover_inputimage",
"(",
"image",
")",
"if",
"not",
"image_digest",
":",
"return",
"[",
"False",
",",
"\"cannot use input image string: no discovered imageDigest\"",
"]",
"url",
"=",
"self",
".",
"url",
"+",
"\"/api/scanning/v1/anchore/images/\"",
"+",
"image_digest",
"res",
"=",
"requests",
".",
"delete",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
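A sketch for delete_image (client as in the first sketch). The code first resolves the input string to an image digest, so a failure message about "no discovered imageDigest" means the image string could not be matched; note also that the force parameter is accepted but unused in the body shown:

ok, res = client.delete_image("registry/repo:tag")
if not ok:
    print("delete failed:", res)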
test
|
SdScanningClient.check_image_evaluation
|
**Description**
Check the latest policy evaluation for an image
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- show_history: Show all previous policy evaluations
- detail: Show detailed policy evaluation report
- tag: Specify which TAG is evaluated for a given image ID or Image Digest
- policy: Specify which POLICY to use for the evaluation (defaults to the currently active policy)
**Success Return Value**
A JSON object representing the evaluation status.
|
sdcclient/_scanning.py
|
def check_image_evaluation(self, image, show_history=False, detail=False, tag=None, policy=None):
'''**Description**
Check the latest policy evaluation for an image
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- show_history: Show all previous policy evaluations
- detail: Show detailed policy evaluation report
- tag: Specify which TAG is evaluated for a given image ID or Image Digest
- policy: Specify which POLICY to use for the evaluation (defaults to the currently active policy)
**Success Return Value**
A JSON object representing the evaluation status.
'''
itype, _, image_digest = self._discover_inputimage(image)
if not image_digest:
return [False, "could not get image record from anchore"]
if not tag and itype != 'tag':
return [False, "input image name is not a tag, and no --tag is specified"]
thetag = tag if tag else image
url = "{base_url}/api/scanning/v1/anchore/images/{image_digest}/check?history={history}&detail={detail}&tag={tag}{policy_id}"
url = url.format(
base_url=self.url,
image_digest=image_digest,
history=str(show_history).lower(),
detail=str(detail).lower(),
tag=thetag,
policy_id=("&policyId=%s" % policy) if policy else "")
res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def check_image_evaluation(self, image, show_history=False, detail=False, tag=None, policy=None):
'''**Description**
Check the latest policy evaluation for an image
**Arguments**
- image: Input image can be in the following formats: registry/repo:tag
- show_history: Show all previous policy evaluations
- detail: Show detailed policy evaluation report
- tag: Specify which TAG is evaluated for a given image ID or Image Digest
- policy: Specify which POLICY to use for the evaluation (defaults to the currently active policy)
**Success Return Value**
A JSON object representing the evaluation status.
'''
itype, _, image_digest = self._discover_inputimage(image)
if not image_digest:
return [False, "could not get image record from anchore"]
if not tag and itype != 'tag':
return [False, "input image name is not a tag, and no --tag is specified"]
thetag = tag if tag else image
url = "{base_url}/api/scanning/v1/anchore/images/{image_digest}/check?history={history}&detail={detail}&tag={tag}{policy_id}"
url = url.format(
base_url=self.url,
image_digest=image_digest,
history=str(show_history).lower(),
detail=str(detail).lower(),
tag=thetag,
policy_id=("&policyId=%s" % policy) if policy else "")
res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Check",
"the",
"latest",
"policy",
"evaluation",
"for",
"an",
"image"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L205-L240
|
[
"def",
"check_image_evaluation",
"(",
"self",
",",
"image",
",",
"show_history",
"=",
"False",
",",
"detail",
"=",
"False",
",",
"tag",
"=",
"None",
",",
"policy",
"=",
"None",
")",
":",
"itype",
",",
"_",
",",
"image_digest",
"=",
"self",
".",
"_discover_inputimage",
"(",
"image",
")",
"if",
"not",
"image_digest",
":",
"return",
"[",
"False",
",",
"\"could not get image record from anchore\"",
"]",
"if",
"not",
"tag",
"and",
"itype",
"!=",
"'tag'",
":",
"return",
"[",
"False",
",",
"\"input image name is not a tag, and no --tag is specified\"",
"]",
"thetag",
"=",
"tag",
"if",
"tag",
"else",
"image",
"url",
"=",
"\"{base_url}/api/scanning/v1/anchore/images/{image_digest}/check?history={history}&detail={detail}&tag={tag}{policy_id}\"",
"url",
"=",
"url",
".",
"format",
"(",
"base_url",
"=",
"self",
".",
"url",
",",
"image_digest",
"=",
"image_digest",
",",
"history",
"=",
"str",
"(",
"show_history",
")",
".",
"lower",
"(",
")",
",",
"detail",
"=",
"str",
"(",
"detail",
")",
".",
"lower",
"(",
")",
",",
"tag",
"=",
"thetag",
",",
"policy_id",
"=",
"(",
"\"&policyId=%s\"",
"%",
"policy",
")",
"if",
"policy",
"else",
"\"\"",
")",
"res",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
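A sketch for check_image_evaluation (client as in the first sketch). When the input is an image ID or digest rather than a tag, the guard in the code requires an explicit tag argument:

# Latest evaluation for a tag, with the detailed report.
ok, res = client.check_image_evaluation("registry/repo:tag", detail=True)

# Evaluation of a digest against a specific policy; the tag must be named explicitly.
ok, res = client.check_image_evaluation("sha256:<digest>",
                                        tag="registry/repo:tag",
                                        policy="<policy-id>")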
test
|
SdScanningClient.add_registry
|
**Description**
Add image registry
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
- registry_user: Username
- registry_pass: Password
- insecure: Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate)
- registry_type: Specify the registry type. 'docker_v2' and 'awsecr' are supported (default='docker_v2')
- validate: If set to 'False' will not attempt to validate registry/creds on registry add
**Success Return Value**
A JSON object representing the registry.
|
sdcclient/_scanning.py
|
def add_registry(self, registry, registry_user, registry_pass, insecure=False, registry_type="docker_v2", validate=True):
'''**Description**
Add image registry
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
- registry_user: Username
- registry_pass: Password
- insecure: Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate)
- registry_type: Specify the registry type. 'docker_v2' and 'awsecr' are supported (default='docker_v2')
- validate: If set to 'False' will not attempt to validate registry/creds on registry add
**Success Return Value**
A JSON object representing the registry.
'''
registry_types = ['docker_v2', 'awsecr']
if registry_type and registry_type not in registry_types:
return [False, "input registry type not supported (supported registry_types: " + str(registry_types)]
if self._registry_string_is_valid(registry):
return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"]
if not registry_type:
registry_type = self._get_registry_type(registry)
payload = {
'registry': registry,
'registry_user': registry_user,
'registry_pass': registry_pass,
'registry_type': registry_type,
'registry_verify': not insecure}
url = "{base_url}/api/scanning/v1/anchore/registries?validate={validate}".format(
base_url=self.url,
validate=validate)
res = requests.post(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def add_registry(self, registry, registry_user, registry_pass, insecure=False, registry_type="docker_v2", validate=True):
'''**Description**
Add image registry
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
- registry_user: Username
- registry_pass: Password
- insecure: Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate)
- registry_type: Specify the registry type. 'docker_v2' and 'awsecr' are supported (default='docker_v2')
- validate: If set to 'False' will not attempt to validate registry/creds on registry add
**Success Return Value**
A JSON object representing the registry.
'''
registry_types = ['docker_v2', 'awsecr']
if registry_type and registry_type not in registry_types:
return [False, "input registry type not supported (supported registry_types: " + str(registry_types)]
if self._registry_string_is_valid(registry):
return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"]
if not registry_type:
registry_type = self._get_registry_type(registry)
payload = {
'registry': registry,
'registry_user': registry_user,
'registry_pass': registry_pass,
'registry_type': registry_type,
'registry_verify': not insecure}
url = "{base_url}/api/scanning/v1/anchore/registries?validate={validate}".format(
base_url=self.url,
validate=validate)
res = requests.post(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Add",
"image",
"registry"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L242-L280
|
[
"def",
"add_registry",
"(",
"self",
",",
"registry",
",",
"registry_user",
",",
"registry_pass",
",",
"insecure",
"=",
"False",
",",
"registry_type",
"=",
"\"docker_v2\"",
",",
"validate",
"=",
"True",
")",
":",
"registry_types",
"=",
"[",
"'docker_v2'",
",",
"'awsecr'",
"]",
"if",
"registry_type",
"and",
"registry_type",
"not",
"in",
"registry_types",
":",
"return",
"[",
"False",
",",
"\"input registry type not supported (supported registry_types: \"",
"+",
"str",
"(",
"registry_types",
")",
"]",
"if",
"self",
".",
"_registry_string_is_valid",
"(",
"registry",
")",
":",
"return",
"[",
"False",
",",
"\"input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional\"",
"]",
"if",
"not",
"registry_type",
":",
"registry_type",
"=",
"self",
".",
"_get_registry_type",
"(",
"registry",
")",
"payload",
"=",
"{",
"'registry'",
":",
"registry",
",",
"'registry_user'",
":",
"registry_user",
",",
"'registry_pass'",
":",
"registry_pass",
",",
"'registry_type'",
":",
"registry_type",
",",
"'registry_verify'",
":",
"not",
"insecure",
"}",
"url",
"=",
"\"{base_url}/api/scanning/v1/anchore/registries?validate={validate}\"",
".",
"format",
"(",
"base_url",
"=",
"self",
".",
"url",
",",
"validate",
"=",
"validate",
")",
"res",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"payload",
")",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
test
|
SdScanningClient.update_registry
|
**Description**
Update an existing image registry.
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
- registry_user: Username
- registry_pass: Password
- insecure: Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate)
- registry_type: Specify the registry type. 'docker_v2' and 'awsecr' are supported (default='docker_v2')
- validate: If set to 'False' will not attempt to validate registry/creds on registry add
**Success Return Value**
A JSON object representing the registry.
|
sdcclient/_scanning.py
|
def update_registry(self, registry, registry_user, registry_pass, insecure=False, registry_type="docker_v2", validate=True):
'''**Description**
Update an existing image registry.
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
- registry_user: Username
- registry_pass: Password
- insecure: Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate)
- registry_type: Specify the registry type. 'docker_v2' and 'awsecr' are supported (default='docker_v2')
- validate: If set to 'False' will not attempt to validate registry/creds on registry add
**Success Return Value**
A JSON object representing the registry.
'''
if self._registry_string_is_valid(registry):
return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"]
payload = {
'registry': registry,
'registry_user': registry_user,
'registry_pass': registry_pass,
'registry_type': registry_type,
'registry_verify': not insecure}
url = "{base_url}/api/scanning/v1/anchore/registries/{registry}?validate={validate}".format(
base_url=self.url,
registry=registry,
validate=validate)
res = requests.put(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def update_registry(self, registry, registry_user, registry_pass, insecure=False, registry_type="docker_v2", validate=True):
'''**Description**
Update an existing image registry.
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
- registry_user: Username
- registry_pass: Password
- insecure: Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate)
- registry_type: Specify the registry type. 'docker_v2' and 'awsecr' are supported (default='docker_v2')
- validate: If set to 'False' will not attempt to validate registry/creds on registry add
**Success Return Value**
A JSON object representing the registry.
'''
if self._registry_string_is_valid(registry):
return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"]
payload = {
'registry': registry,
'registry_user': registry_user,
'registry_pass': registry_pass,
'registry_type': registry_type,
'registry_verify': not insecure}
url = "{base_url}/api/scanning/v1/anchore/registries/{registry}?validate={validate}".format(
base_url=self.url,
registry=registry,
validate=validate)
res = requests.put(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Update",
"an",
"existing",
"image",
"registry",
"."
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L282-L315
|
[
"def",
"update_registry",
"(",
"self",
",",
"registry",
",",
"registry_user",
",",
"registry_pass",
",",
"insecure",
"=",
"False",
",",
"registry_type",
"=",
"\"docker_v2\"",
",",
"validate",
"=",
"True",
")",
":",
"if",
"self",
".",
"_registry_string_is_valid",
"(",
"registry",
")",
":",
"return",
"[",
"False",
",",
"\"input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional\"",
"]",
"payload",
"=",
"{",
"'registry'",
":",
"registry",
",",
"'registry_user'",
":",
"registry_user",
",",
"'registry_pass'",
":",
"registry_pass",
",",
"'registry_type'",
":",
"registry_type",
",",
"'registry_verify'",
":",
"not",
"insecure",
"}",
"url",
"=",
"\"{base_url}/api/scanning/v1/anchore/registries/{registry}?validate={validate}\"",
".",
"format",
"(",
"base_url",
"=",
"self",
".",
"url",
",",
"registry",
"=",
"registry",
",",
"validate",
"=",
"validate",
")",
"res",
"=",
"requests",
".",
"put",
"(",
"url",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"payload",
")",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
test
|
SdScanningClient.delete_registry
|
**Description**
Delete an existing image registry
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
|
sdcclient/_scanning.py
|
def delete_registry(self, registry):
'''**Description**
Delete an existing image registry
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
'''
# do some input string checking
if re.match(".*\\/.*", registry):
return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"]
url = self.url + "/api/scanning/v1/anchore/registries/" + registry
res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def delete_registry(self, registry):
'''**Description**
Delete an existing image registry
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
'''
# do some input string checking
if re.match(".*\\/.*", registry):
return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"]
url = self.url + "/api/scanning/v1/anchore/registries/" + registry
res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Delete",
"an",
"existing",
"image",
"registry"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L317-L333
|
[
"def",
"delete_registry",
"(",
"self",
",",
"registry",
")",
":",
"# do some input string checking",
"if",
"re",
".",
"match",
"(",
"\".*\\\\/.*\"",
",",
"registry",
")",
":",
"return",
"[",
"False",
",",
"\"input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional\"",
"]",
"url",
"=",
"self",
".",
"url",
"+",
"\"/api/scanning/v1/anchore/registries/\"",
"+",
"registry",
"res",
"=",
"requests",
".",
"delete",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
test
|
SdScanningClient.get_registry
|
**Description**
Find the registry and return its json description
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
**Success Return Value**
A JSON object representing the registry.
|
sdcclient/_scanning.py
|
def get_registry(self, registry):
'''**Description**
Find the registry and return its json description
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
**Success Return Value**
A JSON object representing the registry.
'''
if self._registry_string_is_valid(registry):
return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"]
url = self.url + "/api/scanning/v1/anchore/registries/" + registry
res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def get_registry(self, registry):
'''**Description**
Find the registry and return its json description
**Arguments**
- registry: Full hostname/port of registry. Eg. myrepo.example.com:5000
**Success Return Value**
A JSON object representing the registry.
'''
if self._registry_string_is_valid(registry):
return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"]
url = self.url + "/api/scanning/v1/anchore/registries/" + registry
res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Find",
"the",
"registry",
"and",
"return",
"its",
"json",
"description"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L352-L370
|
[
"def",
"get_registry",
"(",
"self",
",",
"registry",
")",
":",
"if",
"self",
".",
"_registry_string_is_valid",
"(",
"registry",
")",
":",
"return",
"[",
"False",
",",
"\"input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional\"",
"]",
"url",
"=",
"self",
".",
"url",
"+",
"\"/api/scanning/v1/anchore/registries/\"",
"+",
"registry",
"res",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
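The four registry rows above (add, update, delete, get) share the same name validation and [ok, payload] convention; a combined sketch with placeholder credentials (client as in the first sketch):

registry = "myrepo.example.com:5000"
ok, reg = client.add_registry(registry, "registry-user", "registry-pass",
                              insecure=False, registry_type="docker_v2")
ok, reg = client.get_registry(registry)
ok, reg = client.update_registry(registry, "registry-user", "new-pass")
ok, res = client.delete_registry(registry)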
test
|
SdScanningClient.add_policy
|
**Description**
Create a new policy
**Arguments**
- name: The name of the policy.
- rules: A list of Anchore PolicyRule elements (while creating/updating a policy, new rule IDs will be created backend side)
- comment: A human-readable description.
- bundleid: Target bundle. If not specified, the currently active bundle will be used.
**Success Return Value**
A JSON object containing the policy description.
|
sdcclient/_scanning.py
|
def add_policy(self, name, rules, comment="", bundleid=None):
'''**Description**
Create a new policy
**Arguments**
- name: The name of the policy.
- rules: A list of Anchore PolicyRule elements (while creating/updating a policy, new rule IDs will be created backend side)
- comment: A human-readable description.
- bundleid: Target bundle. If not specified, the currently active bundle will be used.
**Success Return Value**
A JSON object containing the policy description.
'''
policy = {
'name': name,
'comment': comment,
'rules': rules,
'version': '1_0'
}
if bundleid:
policy['policyBundleId'] = bundleid
url = self.url + '/api/scanning/v1/policies'
data = json.dumps(policy)
res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def add_policy(self, name, rules, comment="", bundleid=None):
'''**Description**
Create a new policy
**Arguments**
- name: The name of the policy.
- rules: A list of Anchore PolicyRule elements (while creating/updating a policy, new rule IDs will be created backend side)
- comment: A human-readable description.
- bundleid: Target bundle. If not specified, the currently active bundle will be used.
**Success Return Value**
A JSON object containing the policy description.
'''
policy = {
'name': name,
'comment': comment,
'rules': rules,
'version': '1_0'
}
if bundleid:
policy['policyBundleId'] = bundleid
url = self.url + '/api/scanning/v1/policies'
data = json.dumps(policy)
res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Create",
"a",
"new",
"policy"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L380-L408
|
[
"def",
"add_policy",
"(",
"self",
",",
"name",
",",
"rules",
",",
"comment",
"=",
"\"\"",
",",
"bundleid",
"=",
"None",
")",
":",
"policy",
"=",
"{",
"'name'",
":",
"name",
",",
"'comment'",
":",
"comment",
",",
"'rules'",
":",
"rules",
",",
"'version'",
":",
"'1_0'",
"}",
"if",
"bundleid",
":",
"policy",
"[",
"'policyBundleId'",
"]",
"=",
"bundleid",
"url",
"=",
"self",
".",
"url",
"+",
"'/api/scanning/v1/policies'",
"data",
"=",
"json",
".",
"dumps",
"(",
"policy",
")",
"res",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"data",
"=",
"data",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
test
|
SdScanningClient.get_policy
|
**Description**
Retrieve the policy with the given id in the targeted policy bundle
**Arguments**
- policyid: Unique identifier associated with this policy.
- bundleid: Target bundle. If not specified, the currently active bundle will be used.
**Success Return Value**
A JSON object containing the policy description.
|
sdcclient/_scanning.py
|
def get_policy(self, policyid, bundleid=None):
'''**Description**
Retrieve the policy with the given id in the targeted policy bundle
**Arguments**
- policyid: Unique identifier associated with this policy.
- bundleid: Target bundle. If not specified, the currently active bundle will be used.
**Success Return Value**
A JSON object containing the policy description.
'''
url = self.url + '/api/scanning/v1/policies/' + policyid
if bundleid:
url += '?bundleId=' + bundleid
|
def get_policy(self, policyid, bundleid=None):
'''**Description**
Retrieve the policy with the given id in the targeted policy bundle
**Arguments**
- policyid: Unique identifier associated with this policy.
- bundleid: Target bundle. If not specified, the currently active bundle will be used.
**Success Return Value**
A JSON object containing the policy description.
'''
url = self.url + '/api/scanning/v1/policies/' + policyid
if bundleid:
url += '?bundleId=' + bundleid
|
[
"**",
"Description",
"**",
"Retrieve",
"the",
"policy",
"with",
"the",
"given",
"id",
"in",
"the",
"targeted",
"policy",
"bundle"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L440-L453
|
[
"def",
"get_policy",
"(",
"self",
",",
"policyid",
",",
"bundleid",
"=",
"None",
")",
":",
"url",
"=",
"self",
".",
"url",
"+",
"'/api/scanning/v1/policies/'",
"+",
"policyid",
"if",
"bundleid",
":",
"url",
"+=",
"'?bundleId='",
"+",
"bundleid"
] |
47f83415842048778939b90944f64386a3bcb205
|
test
|
SdScanningClient.update_policy
|
**Description**
Update the policy with the given id
**Arguments**
- policyid: Unique identifier associated with this policy.
- policy_description: A dictionary with the policy description.
**Success Return Value**
A JSON object containing the policy description.
|
sdcclient/_scanning.py
|
def update_policy(self, policyid, policy_description):
'''**Description**
Update the policy with the given id
**Arguments**
- policyid: Unique identifier associated with this policy.
- policy_description: A dictionary with the policy description.
**Success Return Value**
A JSON object containing the policy description.
'''
url = self.url + '/api/scanning/v1/policies/' + policyid
data = json.dumps(policy_description)
res = requests.put(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def update_policy(self, policyid, policy_description):
'''**Description**
Update the policy with the given id
**Arguments**
- policyid: Unique identifier associated with this policy.
- policy_description: A dictionary with the policy description.
**Success Return Value**
A JSON object containing the policy description.
'''
url = self.url + '/api/scanning/v1/policies/' + policyid
data = json.dumps(policy_description)
res = requests.put(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Update",
"the",
"policy",
"with",
"the",
"given",
"id"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L455-L472
|
[
"def",
"update_policy",
"(",
"self",
",",
"policyid",
",",
"policy_description",
")",
":",
"url",
"=",
"self",
".",
"url",
"+",
"'/api/scanning/v1/policies/'",
"+",
"policyid",
"data",
"=",
"json",
".",
"dumps",
"(",
"policy_description",
")",
"res",
"=",
"requests",
".",
"put",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"data",
"=",
"data",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
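A sketch for the policy rows (client as in the first sketch). The rules list is left empty here because the Anchore PolicyRule schema is not shown in this section, and the response key used to read the policy id back is an assumption:

rules = []  # fill with Anchore PolicyRule dicts; their schema is not documented in this section
ok, policy = client.add_policy("my-policy", rules, comment="created from the SDK")
if ok:
    policy_id = policy["id"]                    # assumed response key
    ok, policy = client.get_policy(policy_id)
    policy["comment"] = "updated comment"
    ok, policy = client.update_policy(policy_id, policy)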
test
|
SdScanningClient.add_alert
|
**Description**
Create a new alert
**Arguments**
- name: The name of the alert.
- description: The description of the alert.
- scope: An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"')
- triggers: A dict {str: bool} indicating which triggers should be enabled/disabled. (default: {'failed': True, 'unscanned': True})
- enabled: Whether this alert should actually be applied.
- notification_channels: A list of notification channel ids.
**Success Return Value**
A JSON object containing the alert description.
|
sdcclient/_scanning.py
|
def add_alert(self, name, description=None, scope="", triggers={'failed': True, 'unscanned': True},
enabled=False, notification_channels=[]):
'''**Description**
Create a new alert
**Arguments**
- name: The name of the alert.
- description: The description of the alert.
- scope: An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"')
- triggers: A dict {str: bool} indicating which triggers should be enabled/disabled. (default: {'failed': True, 'unscanned': True})
- enabled: Whether this alert should actually be applied.
- notification_channels: A list of notification channel ids.
**Success Return Value**
A JSON object containing the alert description.
'''
alert = {
'name': name,
'description': description,
'triggers': triggers,
'scope': scope,
'enabled': enabled,
'autoscan': True,
'notificationChannelIds': notification_channels,
}
url = self.url + '/api/scanning/v1/alerts'
data = json.dumps(alert)
res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def add_alert(self, name, description=None, scope="", triggers={'failed': True, 'unscanned': True},
enabled=False, notification_channels=[]):
'''**Description**
Create a new alert
**Arguments**
- name: The name of the alert.
- description: The description of the alert.
- scope: An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"')
- triggers: A dict {str: bool} indicating which triggers should be enabled/disabled. (default: {'failed': True, 'unscanned': True})
- enabled: Whether this alert should actually be applied.
- notification_channels: A list of notification channel ids.
**Success Return Value**
A JSON object containing the alert description.
'''
alert = {
'name': name,
'description': description,
'triggers': triggers,
'scope': scope,
'enabled': enabled,
'autoscan': True,
'notificationChannelIds': notification_channels,
}
url = self.url + '/api/scanning/v1/alerts'
data = json.dumps(alert)
res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Create",
"a",
"new",
"alert"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L492-L524
|
[
"def",
"add_alert",
"(",
"self",
",",
"name",
",",
"description",
"=",
"None",
",",
"scope",
"=",
"\"\"",
",",
"triggers",
"=",
"{",
"'failed'",
":",
"True",
",",
"'unscanned'",
":",
"True",
"}",
",",
"enabled",
"=",
"False",
",",
"notification_channels",
"=",
"[",
"]",
")",
":",
"alert",
"=",
"{",
"'name'",
":",
"name",
",",
"'description'",
":",
"description",
",",
"'triggers'",
":",
"triggers",
",",
"'scope'",
":",
"scope",
",",
"'enabled'",
":",
"enabled",
",",
"'autoscan'",
":",
"True",
",",
"'notificationChannelIds'",
":",
"notification_channels",
",",
"}",
"url",
"=",
"self",
".",
"url",
"+",
"'/api/scanning/v1/alerts'",
"data",
"=",
"json",
".",
"dumps",
"(",
"alert",
")",
"res",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"data",
"=",
"data",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
test
|
SdScanningClient.list_alerts
|
**Description**
List the current set of scanning alerts.
**Arguments**
- limit: Maximum number of alerts in the response.
- cursor: An opaque string representing the current position in the list of alerts. It's provided in the 'responseMetadata' of the list_alerts response.
**Success Return Value**
A JSON object containing the list of alerts.
|
sdcclient/_scanning.py
|
def list_alerts(self, limit=None, cursor=None):
'''**Description**
List the current set of scanning alerts.
**Arguments**
- limit: Maximum number of alerts in the response.
- cursor: An opaque string representing the current position in the list of alerts. It's provided in the 'responseMetadata' of the list_alerts response.
**Success Return Value**
A JSON object containing the list of alerts.
'''
url = self.url + '/api/scanning/v1/alerts'
if limit:
url += '?limit=' + str(limit)
if cursor:
url += '&cursor=' + cursor
res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def list_alerts(self, limit=None, cursor=None):
'''**Description**
List the current set of scanning alerts.
**Arguments**
- limit: Maximum number of alerts in the response.
- cursor: An opaque string representing the current position in the list of alerts. It's provided in the 'responseMetadata' of the list_alerts response.
**Success Return Value**
A JSON object containing the list of alerts.
'''
url = self.url + '/api/scanning/v1/alerts'
if limit:
url += '?limit=' + str(limit)
if cursor:
url += '&cursor=' + cursor
res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"List",
"the",
"current",
"set",
"of",
"scanning",
"alerts",
"."
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L526-L547
|
[
"def",
"list_alerts",
"(",
"self",
",",
"limit",
"=",
"None",
",",
"cursor",
"=",
"None",
")",
":",
"url",
"=",
"self",
".",
"url",
"+",
"'/api/scanning/v1/alerts'",
"if",
"limit",
":",
"url",
"+=",
"'?limit='",
"+",
"str",
"(",
"limit",
")",
"if",
"cursor",
":",
"url",
"+=",
"'&cursor='",
"+",
"cursor",
"res",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
test
|
SdScanningClient.update_alert
|
**Description**
Update the alert with the given id
**Arguments**
- alertid: Unique identifier associated with this alert.
- alert_description: A dictionary with the alert description.
**Success Return Value**
A JSON object containing the alert description.
|
sdcclient/_scanning.py
|
def update_alert(self, alertid, alert_description):
'''**Description**
Update the alert with the given id
**Arguments**
- alertid: Unique identifier associated with this alert.
- alert_description: A dictionary with the alert description.
**Success Return Value**
A JSON object containing the alert description.
'''
url = self.url + '/api/scanning/v1/alerts/' + alertid
data = json.dumps(alert_description)
res = requests.put(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def update_alert(self, alertid, alert_description):
'''**Description**
Update the alert with the given id
**Arguments**
- alertid: Unique identifier associated with this alert.
- alert_description: A dictionary with the alert description.
**Success Return Value**
A JSON object containing the alert description.
'''
url = self.url + '/api/scanning/v1/alerts/' + alertid
data = json.dumps(alert_description)
res = requests.put(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"Update",
"the",
"alert",
"with",
"the",
"given",
"id"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L566-L583
|
[
"def",
"update_alert",
"(",
"self",
",",
"alertid",
",",
"alert_description",
")",
":",
"url",
"=",
"self",
".",
"url",
"+",
"'/api/scanning/v1/alerts/'",
"+",
"alertid",
"data",
"=",
"json",
".",
"dumps",
"(",
"alert_description",
")",
"res",
"=",
"requests",
".",
"put",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"data",
"=",
"data",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
test
|
SdScanningClient.delete_alert
|
**Description**
Delete the alert with the given id
**Arguments**
- alertid: Unique identifier associated with this alert.
|
sdcclient/_scanning.py
|
def delete_alert(self, policyid):
'''**Description**
Delete the alert with the given id
**Arguments**
- alertid: Unique identifier associated with this alert.
'''
url = self.url + '/api/scanning/v1/alerts/' + policyid
res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.text]
|
def delete_alert(self, policyid):
'''**Description**
Delete the alert with the given id
**Arguments**
- alertid: Unique identifier associated with this alert.
'''
url = self.url + '/api/scanning/v1/alerts/' + policyid
res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.text]
|
[
"**",
"Description",
"**",
"Delete",
"the",
"alert",
"with",
"the",
"given",
"id"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L585-L597
|
[
"def",
"delete_alert",
"(",
"self",
",",
"policyid",
")",
":",
"url",
"=",
"self",
".",
"url",
"+",
"'/api/scanning/v1/alerts/'",
"+",
"policyid",
"res",
"=",
"requests",
".",
"delete",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"text",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
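A sketch covering the alert rows (client as in the first sketch). The alert id placeholder stands in for a value taken from the add_alert/list_alerts response, whose exact key is not shown here; note that delete_alert names its parameter policyid in the code even though it expects an alert id:

ok, alert = client.add_alert(
    name="scan-failures",
    description="notify on failed or unscanned images",
    scope='host.domain = "example.com"',
    triggers={'failed': True, 'unscanned': True},
    enabled=True,
    notification_channels=[])
ok, alerts = client.list_alerts(limit=50)

alert_id = "<alert-id>"   # taken from the responses above; exact key not documented in this section
ok, _ = client.update_alert(alert_id, alert)
ok, _ = client.delete_alert(alert_id)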
test
|
SdScanningClient.list_subscription
|
**Description**
List all subscriptions
**Arguments**
- None
**Success Return Value**
A JSON object representing the list of subscriptions.
|
sdcclient/_scanning.py
|
def list_subscription(self):
'''**Description**
List all subscriptions
**Arguments**
- None
**Success Return Value**
A JSON object representing the list of subscriptions.
'''
url = self.url + "/api/scanning/v1/anchore/subscriptions"
res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def list_subscription(self):
'''**Description**
List all subscriptions
**Arguments**
- None
**Success Return Value**
A JSON object representing the list of subscriptions.
'''
url = self.url + "/api/scanning/v1/anchore/subscriptions"
res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"List",
"all",
"subscriptions"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L637-L652
|
[
"def",
"list_subscription",
"(",
"self",
")",
":",
"url",
"=",
"self",
".",
"url",
"+",
"\"/api/scanning/v1/anchore/subscriptions\"",
"res",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
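list_subscription takes no arguments; a one-line sketch (client as in the first sketch):

ok, subscriptions = client.list_subscription()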
test
|
SdScanningClient.list_runtime
|
**Description**
List runtime containers
**Arguments**
- scope: An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"')
- skip_policy_evaluation: If true, no policy evaluations will be triggered for the images.
- start_time: Start of the time range (integer of unix time).
- end_time: End of the time range (integer of unix time).
**Success Return Value**
A JSON object representing the list of runtime containers.
|
sdcclient/_scanning.py
|
def list_runtime(self, scope="", skip_policy_evaluation=True, start_time=None, end_time=None):
'''**Description**
List runtime containers
**Arguments**
- scope: An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"')
- skip_policy_evaluation: If true, no policy evaluations will be triggered for the images.
- start_time: Start of the time range (integer of unix time).
- end_time: End of the time range (integer of unix time).
**Success Return Value**
A JSON object representing the list of runtime containers.
'''
containers = {
'scope': scope,
'skipPolicyEvaluation': skip_policy_evaluation
}
if start_time or end_time:
containers['time'] = {}
containers['time']['from'] = int(start_time * 100000) if start_time else 0
end_time = end_time if end_time else time.time()
containers['time']['to'] = int(end_time * 1000000)
url = self.url + '/api/scanning/v1/query/containers'
data = json.dumps(containers)
res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
def list_runtime(self, scope="", skip_policy_evaluation=True, start_time=None, end_time=None):
'''**Description**
List runtime containers
**Arguments**
- scope: An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"')
- skip_policy_evaluation: If true, no policy evaluations will be triggered for the images.
- start_time: Start of the time range (integer of unix time).
- end_time: End of the time range (integer of unix time).
**Success Return Value**
A JSON object representing the list of runtime containers.
'''
containers = {
'scope': scope,
'skipPolicyEvaluation': skip_policy_evaluation
}
if start_time or end_time:
containers['time'] = {}
containers['time']['from'] = int(start_time * 100000) if start_time else 0
end_time = end_time if end_time else time.time()
containers['time']['to'] = int(end_time * 1000000)
url = self.url + '/api/scanning/v1/query/containers'
data = json.dumps(containers)
res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify)
if not self._checkResponse(res):
return [False, self.lasterr]
return [True, res.json()]
|
[
"**",
"Description",
"**",
"List",
"runtime",
"containers"
] |
draios/python-sdc-client
|
python
|
https://github.com/draios/python-sdc-client/blob/47f83415842048778939b90944f64386a3bcb205/sdcclient/_scanning.py#L654-L683
|
[
"def",
"list_runtime",
"(",
"self",
",",
"scope",
"=",
"\"\"",
",",
"skip_policy_evaluation",
"=",
"True",
",",
"start_time",
"=",
"None",
",",
"end_time",
"=",
"None",
")",
":",
"containers",
"=",
"{",
"'scope'",
":",
"scope",
",",
"'skipPolicyEvaluation'",
":",
"skip_policy_evaluation",
"}",
"if",
"start_time",
"or",
"end_time",
":",
"containers",
"[",
"'time'",
"]",
"=",
"{",
"}",
"containers",
"[",
"'time'",
"]",
"[",
"'from'",
"]",
"=",
"int",
"(",
"start_time",
"*",
"100000",
")",
"if",
"start_time",
"else",
"0",
"end_time",
"=",
"end_time",
"if",
"end_time",
"else",
"time",
".",
"time",
"(",
")",
"containers",
"[",
"'time'",
"]",
"[",
"'to'",
"]",
"=",
"int",
"(",
"end_time",
"*",
"1000000",
")",
"url",
"=",
"self",
".",
"url",
"+",
"'/api/scanning/v1/query/containers'",
"data",
"=",
"json",
".",
"dumps",
"(",
"containers",
")",
"res",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"data",
"=",
"data",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"if",
"not",
"self",
".",
"_checkResponse",
"(",
"res",
")",
":",
"return",
"[",
"False",
",",
"self",
".",
"lasterr",
"]",
"return",
"[",
"True",
",",
"res",
".",
"json",
"(",
")",
"]"
] |
47f83415842048778939b90944f64386a3bcb205
|
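A sketch for list_runtime (client as in the first sketch). Time bounds are unix timestamps; note that the code above multiplies the 'from' bound by 100000 but the 'to' bound by 1000000, which looks like a unit inconsistency inherited from the source:

import time

end = time.time()
start = end - 3600    # the last hour
ok, containers = client.list_runtime(
    scope='container.image != "alpine:latest"',
    skip_policy_evaluation=True,
    start_time=start,
    end_time=end)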
test
|
NetworkingThread.addSourceAddr
|
None means 'system default'
|
wsdiscovery/daemon.py
|
def addSourceAddr(self, addr):
"""None means 'system default'"""
try:
self._multiInSocket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, self._makeMreq(addr))
except socket.error: # if 1 interface has more than 1 address, exception is raised for the second
pass
sock = self._createMulticastOutSocket(addr, self._observer.ttl)
self._multiOutUniInSockets[addr] = sock
self._poll.register(sock, select.POLLIN)
|
def addSourceAddr(self, addr):
"""None means 'system default'"""
try:
self._multiInSocket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, self._makeMreq(addr))
except socket.error: # if 1 interface has more than 1 address, exception is raised for the second
pass
sock = self._createMulticastOutSocket(addr, self._observer.ttl)
self._multiOutUniInSockets[addr] = sock
self._poll.register(sock, select.POLLIN)
|
[
"None",
"means",
"system",
"default"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/daemon.py#L126-L135
|
[
"def",
"addSourceAddr",
"(",
"self",
",",
"addr",
")",
":",
"try",
":",
"self",
".",
"_multiInSocket",
".",
"setsockopt",
"(",
"socket",
".",
"IPPROTO_IP",
",",
"socket",
".",
"IP_ADD_MEMBERSHIP",
",",
"self",
".",
"_makeMreq",
"(",
"addr",
")",
")",
"except",
"socket",
".",
"error",
":",
"# if 1 interface has more than 1 address, exception is raised for the second",
"pass",
"sock",
"=",
"self",
".",
"_createMulticastOutSocket",
"(",
"addr",
",",
"self",
".",
"_observer",
".",
"ttl",
")",
"self",
".",
"_multiOutUniInSockets",
"[",
"addr",
"]",
"=",
"sock",
"self",
".",
"_poll",
".",
"register",
"(",
"sock",
",",
"select",
".",
"POLLIN",
")"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
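addSourceAddr above joins the WS-Discovery multicast group on a given interface address via IP_ADD_MEMBERSHIP. A standalone sketch of the same socket pattern; the 239.255.255.250:3702 group and port are the standard WS-Discovery values and are not taken from this row:

import socket

WSD_GROUP, WSD_PORT = "239.255.255.250", 3702
iface_addr = "0.0.0.0"   # a local interface address; the method above treats None as 'system default'

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(("", WSD_PORT))

# multicast group + local interface, the same structure _makeMreq builds for setsockopt
mreq = socket.inet_aton(WSD_GROUP) + socket.inet_aton(iface_addr)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)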
test
|
NetworkingThread._sendPendingMessages
|
Method sleeps, if nothing to do
|
wsdiscovery/daemon.py
|
def _sendPendingMessages(self):
"""Method sleeps, if nothing to do"""
if len(self._queue) == 0:
time.sleep(0.1)
return
msg = self._queue.pop(0)
if msg.canSend():
self._sendMsg(msg)
msg.refresh()
if not (msg.isFinished()):
self._queue.append(msg)
else:
self._queue.append(msg)
time.sleep(0.01)
|
def _sendPendingMessages(self):
"""Method sleeps, if nothing to do"""
if len(self._queue) == 0:
time.sleep(0.1)
return
msg = self._queue.pop(0)
if msg.canSend():
self._sendMsg(msg)
msg.refresh()
if not (msg.isFinished()):
self._queue.append(msg)
else:
self._queue.append(msg)
time.sleep(0.01)
|
[
"Method",
"sleeps",
"if",
"nothing",
"to",
"do"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/daemon.py#L232-L245
|
[
"def",
"_sendPendingMessages",
"(",
"self",
")",
":",
"if",
"len",
"(",
"self",
".",
"_queue",
")",
"==",
"0",
":",
"time",
".",
"sleep",
"(",
"0.1",
")",
"return",
"msg",
"=",
"self",
".",
"_queue",
".",
"pop",
"(",
"0",
")",
"if",
"msg",
".",
"canSend",
"(",
")",
":",
"self",
".",
"_sendMsg",
"(",
"msg",
")",
"msg",
".",
"refresh",
"(",
")",
"if",
"not",
"(",
"msg",
".",
"isFinished",
"(",
")",
")",
":",
"self",
".",
"_queue",
".",
"append",
"(",
"msg",
")",
"else",
":",
"self",
".",
"_queue",
".",
"append",
"(",
"msg",
")",
"time",
".",
"sleep",
"(",
"0.01",
")"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
test
|
WSDiscovery.setRemoteServiceHelloCallback
|
Set callback, which will be called when new service appeared online
and sent Hi message
typesFilter and scopesFilter might be list of types and scopes.
If filter is set, callback is called only for Hello messages,
which match filter
Set None to disable callback
|
wsdiscovery/daemon.py
|
def setRemoteServiceHelloCallback(self, cb, types=None, scopes=None):
"""Set callback, which will be called when new service appeared online
and sent Hi message
typesFilter and scopesFilter might be list of types and scopes.
If filter is set, callback is called only for Hello messages,
which match filter
Set None to disable callback
"""
self._remoteServiceHelloCallback = cb
self._remoteServiceHelloCallbackTypesFilter = types
self._remoteServiceHelloCallbackScopesFilter = scopes
|
def setRemoteServiceHelloCallback(self, cb, types=None, scopes=None):
"""Set callback, which will be called when new service appeared online
and sent Hi message
typesFilter and scopesFilter might be list of types and scopes.
If filter is set, callback is called only for Hello messages,
which match filter
Set None to disable callback
"""
self._remoteServiceHelloCallback = cb
self._remoteServiceHelloCallbackTypesFilter = types
self._remoteServiceHelloCallbackScopesFilter = scopes
|
[
"Set",
"callback",
"which",
"will",
"be",
"called",
"when",
"new",
"service",
"appeared",
"online",
"and",
"sent",
"Hi",
"message"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/daemon.py#L291-L303
|
[
"def",
"setRemoteServiceHelloCallback",
"(",
"self",
",",
"cb",
",",
"types",
"=",
"None",
",",
"scopes",
"=",
"None",
")",
":",
"self",
".",
"_remoteServiceHelloCallback",
"=",
"cb",
"self",
".",
"_remoteServiceHelloCallbackTypesFilter",
"=",
"types",
"self",
".",
"_remoteServiceHelloCallbackScopesFilter",
"=",
"scopes"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
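The record above documents the Hello-callback hook. Below is a minimal usage sketch; the WSDiscovery class comes from this same daemon module, while start()/stop() and the printed accessor are assumptions based on the rest of the package, not guaranteed by this record.

from wsdiscovery.daemon import WSDiscovery

def on_hello(service):
    # called for every Hello that passes the optional types/scopes filters;
    # getXAddrs() is assumed to return the announced transport addresses
    print("service appeared:", service.getXAddrs())

wsd = WSDiscovery()
wsd.start()                                    # assumed counterpart of stop() below
wsd.setRemoteServiceHelloCallback(on_hello)    # pass types=/scopes= to filter Hellos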
test
|
WSDiscovery.stop
|
cleans up and stops the discovery server
|
wsdiscovery/daemon.py
|
def stop(self):
'cleans up and stops the discovery server'
self.clearRemoteServices()
self.clearLocalServices()
self._stopThreads()
self._serverStarted = False
|
def stop(self):
'cleans up and stops the discovery server'
self.clearRemoteServices()
self.clearLocalServices()
self._stopThreads()
self._serverStarted = False
|
[
"cleans",
"up",
"and",
"stops",
"the",
"discovery",
"server"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/daemon.py#L473-L480
|
[
"def",
"stop",
"(",
"self",
")",
":",
"self",
".",
"clearRemoteServices",
"(",
")",
"self",
".",
"clearLocalServices",
"(",
")",
"self",
".",
"_stopThreads",
"(",
")",
"self",
".",
"_serverStarted",
"=",
"False"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
test
|
WSDiscovery.clearLocalServices
|
send Bye messages for the services and remove them
|
wsdiscovery/daemon.py
|
def clearLocalServices(self):
'send Bye messages for the services and remove them'
for service in list(self._localServices.values()):
self._sendBye(service)
self._localServices.clear()
|
def clearLocalServices(self):
'send Bye messages for the services and remove them'
for service in list(self._localServices.values()):
self._sendBye(service)
self._localServices.clear()
|
[
"send",
"Bye",
"messages",
"for",
"the",
"services",
"and",
"remove",
"them"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/daemon.py#L547-L553
|
[
"def",
"clearLocalServices",
"(",
"self",
")",
":",
"for",
"service",
"in",
"list",
"(",
"self",
".",
"_localServices",
".",
"values",
"(",
")",
")",
":",
"self",
".",
"_sendBye",
"(",
"service",
")",
"self",
".",
"_localServices",
".",
"clear",
"(",
")"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
test
|
WSDiscovery.searchServices
|
search for services given the TYPES and SCOPES in a given TIMEOUT
|
wsdiscovery/daemon.py
|
def searchServices(self, types=None, scopes=None, timeout=3):
'search for services given the TYPES and SCOPES in a given TIMEOUT'
if not self._serverStarted:
raise Exception("Server not started")
self._sendProbe(types, scopes)
time.sleep(timeout)
return self._filterServices(list(self._remoteServices.values()), types, scopes)
|
def searchServices(self, types=None, scopes=None, timeout=3):
'search for services given the TYPES and SCOPES in a given TIMEOUT'
if not self._serverStarted:
raise Exception("Server not started")
self._sendProbe(types, scopes)
time.sleep(timeout)
return self._filterServices(list(self._remoteServices.values()), types, scopes)
|
[
"search",
"for",
"services",
"given",
"the",
"TYPES",
"and",
"SCOPES",
"in",
"a",
"given",
"TIMEOUT"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/daemon.py#L555-L565
|
[
"def",
"searchServices",
"(",
"self",
",",
"types",
"=",
"None",
",",
"scopes",
"=",
"None",
",",
"timeout",
"=",
"3",
")",
":",
"if",
"not",
"self",
".",
"_serverStarted",
":",
"raise",
"Exception",
"(",
"\"Server not started\"",
")",
"self",
".",
"_sendProbe",
"(",
"types",
",",
"scopes",
")",
"time",
".",
"sleep",
"(",
"timeout",
")",
"return",
"self",
".",
"_filterServices",
"(",
"list",
"(",
"self",
".",
"_remoteServices",
".",
"values",
"(",
")",
")",
",",
"types",
",",
"scopes",
")"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
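A minimal client-side sketch of the search path documented above; start() and the Service accessors (getEPR, getXAddrs) are assumptions based on the rest of this package.

from wsdiscovery.daemon import WSDiscovery

wsd = WSDiscovery()
wsd.start()                               # searchServices raises if the server is not started
found = wsd.searchServices(timeout=3)     # sends a Probe, sleeps, then filters the replies
for svc in found:
    print(svc.getEPR(), svc.getXAddrs())  # accessor names are assumptions
wsd.stop()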
test
|
WSDiscovery.publishService
|
Publish a service with the given TYPES, SCOPES and XAddrs (service addresses)
if xAddrs contains an item which includes the {ip} pattern, one item per IP address will be sent
|
wsdiscovery/daemon.py
|
def publishService(self, types, scopes, xAddrs):
"""Publish a service with the given TYPES, SCOPES and XAddrs (service addresses)
if xAddrs contains an item which includes the {ip} pattern, one item per IP address will be sent
"""
if not self._serverStarted:
raise Exception("Server not started")
instanceId = _generateInstanceId()
service = Service(types, scopes, xAddrs, self.uuid, instanceId)
self._localServices[self.uuid] = service
self._sendHello(service)
time.sleep(0.001)
|
def publishService(self, types, scopes, xAddrs):
"""Publish a service with the given TYPES, SCOPES and XAddrs (service addresses)
if xAddrs contains an item which includes the {ip} pattern, one item per IP address will be sent
"""
if not self._serverStarted:
raise Exception("Server not started")
instanceId = _generateInstanceId()
service = Service(types, scopes, xAddrs, self.uuid, instanceId)
self._localServices[self.uuid] = service
self._sendHello(service)
time.sleep(0.001)
|
[
"Publish",
"a",
"service",
"with",
"the",
"given",
"TYPES",
"SCOPES",
"and",
"XAddrs",
"(",
"service",
"addresses",
")"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/daemon.py#L567-L582
|
[
"def",
"publishService",
"(",
"self",
",",
"types",
",",
"scopes",
",",
"xAddrs",
")",
":",
"if",
"not",
"self",
".",
"_serverStarted",
":",
"raise",
"Exception",
"(",
"\"Server not started\"",
")",
"instanceId",
"=",
"_generateInstanceId",
"(",
")",
"service",
"=",
"Service",
"(",
"types",
",",
"scopes",
",",
"xAddrs",
",",
"self",
".",
"uuid",
",",
"instanceId",
")",
"self",
".",
"_localServices",
"[",
"self",
".",
"uuid",
"]",
"=",
"service",
"self",
".",
"_sendHello",
"(",
"service",
")",
"time",
".",
"sleep",
"(",
"0.001",
")"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
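Complementing the search sketch above, a hedged publisher-side sketch. QName and Scope are assumed to be helper classes exported by this package, and the type/scope values are placeholders; the {ip} substitution behaviour is taken from the docstring above.

from wsdiscovery.daemon import WSDiscovery
from wsdiscovery import QName, Scope      # assumed package exports

wsd = WSDiscovery()
wsd.start()
ttype = QName("http://www.onvif.org/ver10/device/wsdl", "Device")   # placeholder type
scope = Scope("onvif://www.onvif.org/name/Example")                 # placeholder scope
# "{ip}" is expanded to one xAddr per local IP address, as described above
wsd.publishService(types=[ttype], scopes=[scope], xAddrs=["http://{ip}:8080/onvif/device_service"])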
test
|
createSOAPMessage
|
construct a raw SOAP XML string, given a prepared SoapEnvelope object
|
wsdiscovery/message.py
|
def createSOAPMessage(env):
"construct a a raw SOAP XML string, given a prepared SoapEnvelope object"
if env.getAction() == ACTION_PROBE:
return createProbeMessage(env)
if env.getAction() == ACTION_PROBE_MATCH:
return createProbeMatchMessage(env)
if env.getAction() == ACTION_RESOLVE:
return createResolveMessage(env)
if env.getAction() == ACTION_RESOLVE_MATCH:
return createResolveMatchMessage(env)
if env.getAction() == ACTION_HELLO:
return createHelloMessage(env)
if env.getAction() == ACTION_BYE:
return createByeMessage(env)
|
def createSOAPMessage(env):
"construct a a raw SOAP XML string, given a prepared SoapEnvelope object"
if env.getAction() == ACTION_PROBE:
return createProbeMessage(env)
if env.getAction() == ACTION_PROBE_MATCH:
return createProbeMatchMessage(env)
if env.getAction() == ACTION_RESOLVE:
return createResolveMessage(env)
if env.getAction() == ACTION_RESOLVE_MATCH:
return createResolveMatchMessage(env)
if env.getAction() == ACTION_HELLO:
return createHelloMessage(env)
if env.getAction() == ACTION_BYE:
return createByeMessage(env)
|
[
"construct",
"a",
"a",
"raw",
"SOAP",
"XML",
"string",
"given",
"a",
"prepared",
"SoapEnvelope",
"object"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/message.py#L11-L24
|
[
"def",
"createSOAPMessage",
"(",
"env",
")",
":",
"if",
"env",
".",
"getAction",
"(",
")",
"==",
"ACTION_PROBE",
":",
"return",
"createProbeMessage",
"(",
"env",
")",
"if",
"env",
".",
"getAction",
"(",
")",
"==",
"ACTION_PROBE_MATCH",
":",
"return",
"createProbeMatchMessage",
"(",
"env",
")",
"if",
"env",
".",
"getAction",
"(",
")",
"==",
"ACTION_RESOLVE",
":",
"return",
"createResolveMessage",
"(",
"env",
")",
"if",
"env",
".",
"getAction",
"(",
")",
"==",
"ACTION_RESOLVE_MATCH",
":",
"return",
"createResolveMatchMessage",
"(",
"env",
")",
"if",
"env",
".",
"getAction",
"(",
")",
"==",
"ACTION_HELLO",
":",
"return",
"createHelloMessage",
"(",
"env",
")",
"if",
"env",
".",
"getAction",
"(",
")",
"==",
"ACTION_BYE",
":",
"return",
"createByeMessage",
"(",
"env",
")"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
test
|
parseSOAPMessage
|
parse raw XML data string, return a (minidom) xml document
|
wsdiscovery/message.py
|
def parseSOAPMessage(data, ipAddr):
"parse raw XML data string, return a (minidom) xml document"
try:
dom = minidom.parseString(data)
except Exception:
#print('Failed to parse message from %s\n"%s": %s' % (ipAddr, data, ex), file=sys.stderr)
return None
if dom.getElementsByTagNameNS(NS_S, "Fault"):
#print('Fault received from %s:' % (ipAddr, data), file=sys.stderr)
return None
soapAction = dom.getElementsByTagNameNS(NS_A, "Action")[0].firstChild.data.strip()
if soapAction == ACTION_PROBE:
return parseProbeMessage(dom)
elif soapAction == ACTION_PROBE_MATCH:
return parseProbeMatchMessage(dom)
elif soapAction == ACTION_RESOLVE:
return parseResolveMessage(dom)
elif soapAction == ACTION_RESOLVE_MATCH:
return parseResolveMatchMessage(dom)
elif soapAction == ACTION_BYE:
return parseByeMessage(dom)
elif soapAction == ACTION_HELLO:
return parseHelloMessage(dom)
|
def parseSOAPMessage(data, ipAddr):
"parse raw XML data string, return a (minidom) xml document"
try:
dom = minidom.parseString(data)
except Exception:
#print('Failed to parse message from %s\n"%s": %s' % (ipAddr, data, ex), file=sys.stderr)
return None
if dom.getElementsByTagNameNS(NS_S, "Fault"):
#print('Fault received from %s:' % (ipAddr, data), file=sys.stderr)
return None
soapAction = dom.getElementsByTagNameNS(NS_A, "Action")[0].firstChild.data.strip()
if soapAction == ACTION_PROBE:
return parseProbeMessage(dom)
elif soapAction == ACTION_PROBE_MATCH:
return parseProbeMatchMessage(dom)
elif soapAction == ACTION_RESOLVE:
return parseResolveMessage(dom)
elif soapAction == ACTION_RESOLVE_MATCH:
return parseResolveMatchMessage(dom)
elif soapAction == ACTION_BYE:
return parseByeMessage(dom)
elif soapAction == ACTION_HELLO:
return parseHelloMessage(dom)
|
[
"parse",
"raw",
"XML",
"data",
"string",
"return",
"a",
"(",
"minidom",
")",
"xml",
"document"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/message.py#L27-L52
|
[
"def",
"parseSOAPMessage",
"(",
"data",
",",
"ipAddr",
")",
":",
"try",
":",
"dom",
"=",
"minidom",
".",
"parseString",
"(",
"data",
")",
"except",
"Exception",
":",
"#print('Failed to parse message from %s\\n\"%s\": %s' % (ipAddr, data, ex), file=sys.stderr)",
"return",
"None",
"if",
"dom",
".",
"getElementsByTagNameNS",
"(",
"NS_S",
",",
"\"Fault\"",
")",
":",
"#print('Fault received from %s:' % (ipAddr, data), file=sys.stderr)",
"return",
"None",
"soapAction",
"=",
"dom",
".",
"getElementsByTagNameNS",
"(",
"NS_A",
",",
"\"Action\"",
")",
"[",
"0",
"]",
".",
"firstChild",
".",
"data",
".",
"strip",
"(",
")",
"if",
"soapAction",
"==",
"ACTION_PROBE",
":",
"return",
"parseProbeMessage",
"(",
"dom",
")",
"elif",
"soapAction",
"==",
"ACTION_PROBE_MATCH",
":",
"return",
"parseProbeMatchMessage",
"(",
"dom",
")",
"elif",
"soapAction",
"==",
"ACTION_RESOLVE",
":",
"return",
"parseResolveMessage",
"(",
"dom",
")",
"elif",
"soapAction",
"==",
"ACTION_RESOLVE_MATCH",
":",
"return",
"parseResolveMatchMessage",
"(",
"dom",
")",
"elif",
"soapAction",
"==",
"ACTION_BYE",
":",
"return",
"parseByeMessage",
"(",
"dom",
")",
"elif",
"soapAction",
"==",
"ACTION_HELLO",
":",
"return",
"parseHelloMessage",
"(",
"dom",
")"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
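A small round-trip sketch for the two message helpers above; 'env' stands for a SoapEnvelope prepared elsewhere in this package (e.g. by the daemon when building a Probe) and is not constructed here.

from wsdiscovery.message import createSOAPMessage, parseSOAPMessage

xml = createSOAPMessage(env)                  # env: a prepared SoapEnvelope (assumed)
parsed = parseSOAPMessage(xml, "127.0.0.1")   # returns None on XML errors or SOAP Faults
if parsed is not None:
    print(parsed.getAction())                 # accessor assumed from SoapEnvelope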
test
|
discover
|
Discover systems using WS-Discovery
|
wsdiscovery/cmdline.py
|
def discover(scope, loglevel, capture):
"Discover systems using WS-Discovery"
if loglevel:
level = getattr(logging, loglevel, None)
if not level:
print("Invalid log level '%s'" % loglevel)
return
logger.setLevel(level)
run(scope=scope, capture=capture)
|
def discover(scope, loglevel, capture):
"Discover systems using WS-Discovery"
if loglevel:
level = getattr(logging, loglevel, None)
if not level:
print("Invalid log level '%s'" % loglevel)
return
logger.setLevel(level)
run(scope=scope, capture=capture)
|
[
"Discover",
"systems",
"using",
"WS",
"-",
"Discovery"
] |
andreikop/python-ws-discovery
|
python
|
https://github.com/andreikop/python-ws-discovery/blob/a7b852cf43115c6f986e509b1870d6963e76687f/wsdiscovery/cmdline.py#L64-L74
|
[
"def",
"discover",
"(",
"scope",
",",
"loglevel",
",",
"capture",
")",
":",
"if",
"loglevel",
":",
"level",
"=",
"getattr",
"(",
"logging",
",",
"loglevel",
",",
"None",
")",
"if",
"not",
"level",
":",
"print",
"(",
"\"Invalid log level '%s'\"",
"%",
"loglevel",
")",
"return",
"logger",
".",
"setLevel",
"(",
"level",
")",
"run",
"(",
"scope",
"=",
"scope",
",",
"capture",
"=",
"capture",
")"
] |
a7b852cf43115c6f986e509b1870d6963e76687f
|
test
|
_ClusterTaggableManager.get_tagged_item_manager
|
Return the manager that handles the relation from this instance to the tagged_item class.
If content_object on the tagged_item class is defined as a ParentalKey, this will be a
DeferringRelatedManager which allows writing related objects without committing them
to the database.
|
modelcluster/contrib/taggit.py
|
def get_tagged_item_manager(self):
"""Return the manager that handles the relation from this instance to the tagged_item class.
If content_object on the tagged_item class is defined as a ParentalKey, this will be a
DeferringRelatedManager which allows writing related objects without committing them
to the database.
"""
rel_name = self.through._meta.get_field('content_object').remote_field.get_accessor_name()
return getattr(self.instance, rel_name)
|
def get_tagged_item_manager(self):
"""Return the manager that handles the relation from this instance to the tagged_item class.
If content_object on the tagged_item class is defined as a ParentalKey, this will be a
DeferringRelatedManager which allows writing related objects without committing them
to the database.
"""
rel_name = self.through._meta.get_field('content_object').remote_field.get_accessor_name()
return getattr(self.instance, rel_name)
|
[
"Return",
"the",
"manager",
"that",
"handles",
"the",
"relation",
"from",
"this",
"instance",
"to",
"the",
"tagged_item",
"class",
".",
"If",
"content_object",
"on",
"the",
"tagged_item",
"class",
"is",
"defined",
"as",
"a",
"ParentalKey",
"this",
"will",
"be",
"a",
"DeferringRelatedManager",
"which",
"allows",
"writing",
"related",
"objects",
"without",
"committing",
"them",
"to",
"the",
"database",
"."
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/contrib/taggit.py#L17-L24
|
[
"def",
"get_tagged_item_manager",
"(",
"self",
")",
":",
"rel_name",
"=",
"self",
".",
"through",
".",
"_meta",
".",
"get_field",
"(",
"'content_object'",
")",
".",
"remote_field",
".",
"get_accessor_name",
"(",
")",
"return",
"getattr",
"(",
"self",
".",
"instance",
",",
"rel_name",
")"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
|
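For context on where the manager above is used, here is a hedged model declaration in the usual modelcluster/taggit style; Article and ArticleTag are hypothetical names, not part of this dataset.

from django.db import models
from taggit.models import TaggedItemBase
from modelcluster.models import ClusterableModel
from modelcluster.fields import ParentalKey
from modelcluster.contrib.taggit import ClusterTaggableManager

class ArticleTag(TaggedItemBase):
    # ParentalKey makes the reverse relation a deferring one
    content_object = ParentalKey('Article', related_name='tagged_items', on_delete=models.CASCADE)

class Article(ClusterableModel):
    title = models.CharField(max_length=255)
    tags = ClusterTaggableManager(through=ArticleTag, blank=True)

# article.tags.add("draft") is then held in memory until article.save()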
test
|
get_serializable_data_for_fields
|
Return a serialised version of the model's fields which exist as local database
columns (i.e. excluding m2m and incoming foreign key relations)
|
modelcluster/models.py
|
def get_serializable_data_for_fields(model):
"""
Return a serialised version of the model's fields which exist as local database
columns (i.e. excluding m2m and incoming foreign key relations)
"""
pk_field = model._meta.pk
# If model is a child via multitable inheritance, use parent's pk
while pk_field.remote_field and pk_field.remote_field.parent_link:
pk_field = pk_field.remote_field.model._meta.pk
obj = {'pk': get_field_value(pk_field, model)}
for field in model._meta.fields:
if field.serialize:
obj[field.name] = get_field_value(field, model)
return obj
|
def get_serializable_data_for_fields(model):
"""
Return a serialised version of the model's fields which exist as local database
columns (i.e. excluding m2m and incoming foreign key relations)
"""
pk_field = model._meta.pk
# If model is a child via multitable inheritance, use parent's pk
while pk_field.remote_field and pk_field.remote_field.parent_link:
pk_field = pk_field.remote_field.model._meta.pk
obj = {'pk': get_field_value(pk_field, model)}
for field in model._meta.fields:
if field.serialize:
obj[field.name] = get_field_value(field, model)
return obj
|
[
"Return",
"a",
"serialised",
"version",
"of",
"the",
"model",
"s",
"fields",
"which",
"exist",
"as",
"local",
"database",
"columns",
"(",
"i",
".",
"e",
".",
"excluding",
"m2m",
"and",
"incoming",
"foreign",
"key",
"relations",
")"
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/models.py#L38-L54
|
[
"def",
"get_serializable_data_for_fields",
"(",
"model",
")",
":",
"pk_field",
"=",
"model",
".",
"_meta",
".",
"pk",
"# If model is a child via multitable inheritance, use parent's pk",
"while",
"pk_field",
".",
"remote_field",
"and",
"pk_field",
".",
"remote_field",
".",
"parent_link",
":",
"pk_field",
"=",
"pk_field",
".",
"remote_field",
".",
"model",
".",
"_meta",
".",
"pk",
"obj",
"=",
"{",
"'pk'",
":",
"get_field_value",
"(",
"pk_field",
",",
"model",
")",
"}",
"for",
"field",
"in",
"model",
".",
"_meta",
".",
"fields",
":",
"if",
"field",
".",
"serialize",
":",
"obj",
"[",
"field",
".",
"name",
"]",
"=",
"get_field_value",
"(",
"field",
",",
"model",
")",
"return",
"obj"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
|
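A small sketch of the output shape; 'instance' stands for any model object (hypothetical), and the exact keys depend on the model's local columns.

from modelcluster.models import get_serializable_data_for_fields

data = get_serializable_data_for_fields(instance)   # instance: any Django model object (assumed)
# data is a plain dict, e.g. {'pk': 1, 'title': 'Hello', ...} -- one entry per
# serializable local column, with 'pk' taken from the concrete parent when
# multi-table inheritance is involved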
test
|
get_all_child_relations
|
Return a list of RelatedObject records for child relations of the given model,
including ones attached to ancestors of the model
|
modelcluster/models.py
|
def get_all_child_relations(model):
"""
Return a list of RelatedObject records for child relations of the given model,
including ones attached to ancestors of the model
"""
return [
field for field in model._meta.get_fields()
if isinstance(field.remote_field, ParentalKey)
]
|
def get_all_child_relations(model):
"""
Return a list of RelatedObject records for child relations of the given model,
including ones attached to ancestors of the model
"""
return [
field for field in model._meta.get_fields()
if isinstance(field.remote_field, ParentalKey)
]
|
[
"Return",
"a",
"list",
"of",
"RelatedObject",
"records",
"for",
"child",
"relations",
"of",
"the",
"given",
"model",
"including",
"ones",
"attached",
"to",
"ancestors",
"of",
"the",
"model"
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/models.py#L124-L132
|
[
"def",
"get_all_child_relations",
"(",
"model",
")",
":",
"return",
"[",
"field",
"for",
"field",
"in",
"model",
".",
"_meta",
".",
"get_fields",
"(",
")",
"if",
"isinstance",
"(",
"field",
".",
"remote_field",
",",
"ParentalKey",
")",
"]"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
|
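A short illustration of what the helper above returns; Article is the same hypothetical ClusterableModel subclass used in the earlier sketch.

from modelcluster.models import get_all_child_relations

for rel in get_all_child_relations(Article):           # Article: hypothetical model
    print(rel.get_accessor_name(), rel.related_model)  # e.g. 'tagged_items' ArticleTag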
test
|
get_all_child_m2m_relations
|
Return a list of ParentalManyToManyFields on the given model,
including ones attached to ancestors of the model
|
modelcluster/models.py
|
def get_all_child_m2m_relations(model):
"""
Return a list of ParentalManyToManyFields on the given model,
including ones attached to ancestors of the model
"""
return [
field for field in model._meta.get_fields()
if isinstance(field, ParentalManyToManyField)
]
|
def get_all_child_m2m_relations(model):
"""
Return a list of ParentalManyToManyFields on the given model,
including ones attached to ancestors of the model
"""
return [
field for field in model._meta.get_fields()
if isinstance(field, ParentalManyToManyField)
]
|
[
"Return",
"a",
"list",
"of",
"ParentalManyToManyFields",
"on",
"the",
"given",
"model",
"including",
"ones",
"attached",
"to",
"ancestors",
"of",
"the",
"model"
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/models.py#L135-L143
|
[
"def",
"get_all_child_m2m_relations",
"(",
"model",
")",
":",
"return",
"[",
"field",
"for",
"field",
"in",
"model",
".",
"_meta",
".",
"get_fields",
"(",
")",
"if",
"isinstance",
"(",
"field",
",",
"ParentalManyToManyField",
")",
"]"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
|
test
|
ClusterableModel.save
|
Save the model and commit all child relations.
|
modelcluster/models.py
|
def save(self, **kwargs):
"""
Save the model and commit all child relations.
"""
child_relation_names = [rel.get_accessor_name() for rel in get_all_child_relations(self)]
child_m2m_field_names = [field.name for field in get_all_child_m2m_relations(self)]
update_fields = kwargs.pop('update_fields', None)
if update_fields is None:
real_update_fields = None
relations_to_commit = child_relation_names
m2m_fields_to_commit = child_m2m_field_names
else:
real_update_fields = []
relations_to_commit = []
m2m_fields_to_commit = []
for field in update_fields:
if field in child_relation_names:
relations_to_commit.append(field)
elif field in child_m2m_field_names:
m2m_fields_to_commit.append(field)
else:
real_update_fields.append(field)
super(ClusterableModel, self).save(update_fields=real_update_fields, **kwargs)
for relation in relations_to_commit:
getattr(self, relation).commit()
for field in m2m_fields_to_commit:
getattr(self, field).commit()
|
def save(self, **kwargs):
"""
Save the model and commit all child relations.
"""
child_relation_names = [rel.get_accessor_name() for rel in get_all_child_relations(self)]
child_m2m_field_names = [field.name for field in get_all_child_m2m_relations(self)]
update_fields = kwargs.pop('update_fields', None)
if update_fields is None:
real_update_fields = None
relations_to_commit = child_relation_names
m2m_fields_to_commit = child_m2m_field_names
else:
real_update_fields = []
relations_to_commit = []
m2m_fields_to_commit = []
for field in update_fields:
if field in child_relation_names:
relations_to_commit.append(field)
elif field in child_m2m_field_names:
m2m_fields_to_commit.append(field)
else:
real_update_fields.append(field)
super(ClusterableModel, self).save(update_fields=real_update_fields, **kwargs)
for relation in relations_to_commit:
getattr(self, relation).commit()
for field in m2m_fields_to_commit:
getattr(self, field).commit()
|
[
"Save",
"the",
"model",
"and",
"commit",
"all",
"child",
"relations",
"."
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/models.py#L172-L202
|
[
"def",
"save",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"child_relation_names",
"=",
"[",
"rel",
".",
"get_accessor_name",
"(",
")",
"for",
"rel",
"in",
"get_all_child_relations",
"(",
"self",
")",
"]",
"child_m2m_field_names",
"=",
"[",
"field",
".",
"name",
"for",
"field",
"in",
"get_all_child_m2m_relations",
"(",
"self",
")",
"]",
"update_fields",
"=",
"kwargs",
".",
"pop",
"(",
"'update_fields'",
",",
"None",
")",
"if",
"update_fields",
"is",
"None",
":",
"real_update_fields",
"=",
"None",
"relations_to_commit",
"=",
"child_relation_names",
"m2m_fields_to_commit",
"=",
"child_m2m_field_names",
"else",
":",
"real_update_fields",
"=",
"[",
"]",
"relations_to_commit",
"=",
"[",
"]",
"m2m_fields_to_commit",
"=",
"[",
"]",
"for",
"field",
"in",
"update_fields",
":",
"if",
"field",
"in",
"child_relation_names",
":",
"relations_to_commit",
".",
"append",
"(",
"field",
")",
"elif",
"field",
"in",
"child_m2m_field_names",
":",
"m2m_fields_to_commit",
".",
"append",
"(",
"field",
")",
"else",
":",
"real_update_fields",
".",
"append",
"(",
"field",
")",
"super",
"(",
"ClusterableModel",
",",
"self",
")",
".",
"save",
"(",
"update_fields",
"=",
"real_update_fields",
",",
"*",
"*",
"kwargs",
")",
"for",
"relation",
"in",
"relations_to_commit",
":",
"getattr",
"(",
"self",
",",
"relation",
")",
".",
"commit",
"(",
")",
"for",
"field",
"in",
"m2m_fields_to_commit",
":",
"getattr",
"(",
"self",
",",
"field",
")",
".",
"commit",
"(",
")"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
|
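A hedged sketch of the deferred-commit behaviour documented above; Article, Section and the 'sections' ParentalKey relation are hypothetical.

article = Article(title="Hello")
article.sections.add(Section(heading="Intro"))    # held in memory, nothing hits the database
article.save()                                    # saves the article, then commits 'sections'
article.save(update_fields=['sections'])          # later: commit only the named child relation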
test
|
ClusterableModel.from_serializable_data
|
Build an instance of this model from the JSON-like structure passed in,
recursing into related objects as required.
If check_fks is true, it will check whether referenced foreign keys still
exist in the database.
- dangling foreign keys on related objects are dealt with by either nullifying the key or
dropping the related object, according to the 'on_delete' setting.
- dangling foreign keys on the base object will be nullified, unless strict_fks is true,
in which case any dangling foreign keys with on_delete=CASCADE will cause None to be
returned for the entire object.
|
modelcluster/models.py
|
def from_serializable_data(cls, data, check_fks=True, strict_fks=False):
"""
Build an instance of this model from the JSON-like structure passed in,
recursing into related objects as required.
If check_fks is true, it will check whether referenced foreign keys still
exist in the database.
- dangling foreign keys on related objects are dealt with by either nullifying the key or
dropping the related object, according to the 'on_delete' setting.
- dangling foreign keys on the base object will be nullified, unless strict_fks is true,
in which case any dangling foreign keys with on_delete=CASCADE will cause None to be
returned for the entire object.
"""
obj = model_from_serializable_data(cls, data, check_fks=check_fks, strict_fks=strict_fks)
if obj is None:
return None
child_relations = get_all_child_relations(cls)
for rel in child_relations:
rel_name = rel.get_accessor_name()
try:
child_data_list = data[rel_name]
except KeyError:
continue
related_model = rel.related_model
if hasattr(related_model, 'from_serializable_data'):
children = [
related_model.from_serializable_data(child_data, check_fks=check_fks, strict_fks=True)
for child_data in child_data_list
]
else:
children = [
model_from_serializable_data(related_model, child_data, check_fks=check_fks, strict_fks=True)
for child_data in child_data_list
]
children = filter(lambda child: child is not None, children)
setattr(obj, rel_name, children)
return obj
|
def from_serializable_data(cls, data, check_fks=True, strict_fks=False):
"""
Build an instance of this model from the JSON-like structure passed in,
recursing into related objects as required.
If check_fks is true, it will check whether referenced foreign keys still
exist in the database.
- dangling foreign keys on related objects are dealt with by either nullifying the key or
dropping the related object, according to the 'on_delete' setting.
- dangling foreign keys on the base object will be nullified, unless strict_fks is true,
in which case any dangling foreign keys with on_delete=CASCADE will cause None to be
returned for the entire object.
"""
obj = model_from_serializable_data(cls, data, check_fks=check_fks, strict_fks=strict_fks)
if obj is None:
return None
child_relations = get_all_child_relations(cls)
for rel in child_relations:
rel_name = rel.get_accessor_name()
try:
child_data_list = data[rel_name]
except KeyError:
continue
related_model = rel.related_model
if hasattr(related_model, 'from_serializable_data'):
children = [
related_model.from_serializable_data(child_data, check_fks=check_fks, strict_fks=True)
for child_data in child_data_list
]
else:
children = [
model_from_serializable_data(related_model, child_data, check_fks=check_fks, strict_fks=True)
for child_data in child_data_list
]
children = filter(lambda child: child is not None, children)
setattr(obj, rel_name, children)
return obj
|
[
"Build",
"an",
"instance",
"of",
"this",
"model",
"from",
"the",
"JSON",
"-",
"like",
"structure",
"passed",
"in",
"recursing",
"into",
"related",
"objects",
"as",
"required",
".",
"If",
"check_fks",
"is",
"true",
"it",
"will",
"check",
"whether",
"referenced",
"foreign",
"keys",
"still",
"exist",
"in",
"the",
"database",
".",
"-",
"dangling",
"foreign",
"keys",
"on",
"related",
"objects",
"are",
"dealt",
"with",
"by",
"either",
"nullifying",
"the",
"key",
"or",
"dropping",
"the",
"related",
"object",
"according",
"to",
"the",
"on_delete",
"setting",
".",
"-",
"dangling",
"foreign",
"keys",
"on",
"the",
"base",
"object",
"will",
"be",
"nullified",
"unless",
"strict_fks",
"is",
"true",
"in",
"which",
"case",
"any",
"dangling",
"foreign",
"keys",
"with",
"on_delete",
"=",
"CASCADE",
"will",
"cause",
"None",
"to",
"be",
"returned",
"for",
"the",
"entire",
"object",
"."
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/models.py#L227-L268
|
[
"def",
"from_serializable_data",
"(",
"cls",
",",
"data",
",",
"check_fks",
"=",
"True",
",",
"strict_fks",
"=",
"False",
")",
":",
"obj",
"=",
"model_from_serializable_data",
"(",
"cls",
",",
"data",
",",
"check_fks",
"=",
"check_fks",
",",
"strict_fks",
"=",
"strict_fks",
")",
"if",
"obj",
"is",
"None",
":",
"return",
"None",
"child_relations",
"=",
"get_all_child_relations",
"(",
"cls",
")",
"for",
"rel",
"in",
"child_relations",
":",
"rel_name",
"=",
"rel",
".",
"get_accessor_name",
"(",
")",
"try",
":",
"child_data_list",
"=",
"data",
"[",
"rel_name",
"]",
"except",
"KeyError",
":",
"continue",
"related_model",
"=",
"rel",
".",
"related_model",
"if",
"hasattr",
"(",
"related_model",
",",
"'from_serializable_data'",
")",
":",
"children",
"=",
"[",
"related_model",
".",
"from_serializable_data",
"(",
"child_data",
",",
"check_fks",
"=",
"check_fks",
",",
"strict_fks",
"=",
"True",
")",
"for",
"child_data",
"in",
"child_data_list",
"]",
"else",
":",
"children",
"=",
"[",
"model_from_serializable_data",
"(",
"related_model",
",",
"child_data",
",",
"check_fks",
"=",
"check_fks",
",",
"strict_fks",
"=",
"True",
")",
"for",
"child_data",
"in",
"child_data_list",
"]",
"children",
"=",
"filter",
"(",
"lambda",
"child",
":",
"child",
"is",
"not",
"None",
",",
"children",
")",
"setattr",
"(",
"obj",
",",
"rel_name",
",",
"children",
")",
"return",
"obj"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
|
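A round-trip sketch for the deserialiser above; serializable_data() is assumed to be the complementary method on ClusterableModel, and Article is hypothetical.

data = article.serializable_data()             # nested dict, children included (assumed method)
clone = Article.from_serializable_data(data)   # rebuilds the object graph in memory
if clone is not None:                          # None only when strict_fks drops the base object
    clone.save()                               # nothing is written to the database before this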
test
|
BaseChildFormSet.validate_unique
|
This clean method will check for unique_together condition
|
modelcluster/forms.py
|
def validate_unique(self):
'''This clean method will check for unique_together condition'''
# Collect unique_checks and date_checks to run from all the forms.
all_unique_checks = set()
all_date_checks = set()
forms_to_delete = self.deleted_forms
valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
for form in valid_forms:
unique_checks, date_checks = form.instance._get_unique_checks()
all_unique_checks.update(unique_checks)
all_date_checks.update(date_checks)
errors = []
# Do each of the unique checks (unique and unique_together)
for uclass, unique_check in all_unique_checks:
seen_data = set()
for form in valid_forms:
# Get the data for the set of fields that must be unique among the forms.
row_data = (
field if field in self.unique_fields else form.cleaned_data[field]
for field in unique_check if field in form.cleaned_data
)
# Reduce Model instances to their primary key values
row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
for d in row_data)
if row_data and None not in row_data:
# if we've already seen it then we have a uniqueness failure
if row_data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_unique_error_message(unique_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
for field in unique_check:
if field in form.cleaned_data:
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(row_data)
if errors:
raise ValidationError(errors)
|
def validate_unique(self):
'''This clean method will check for unique_together condition'''
# Collect unique_checks and date_checks to run from all the forms.
all_unique_checks = set()
all_date_checks = set()
forms_to_delete = self.deleted_forms
valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
for form in valid_forms:
unique_checks, date_checks = form.instance._get_unique_checks()
all_unique_checks.update(unique_checks)
all_date_checks.update(date_checks)
errors = []
# Do each of the unique checks (unique and unique_together)
for uclass, unique_check in all_unique_checks:
seen_data = set()
for form in valid_forms:
# Get the data for the set of fields that must be unique among the forms.
row_data = (
field if field in self.unique_fields else form.cleaned_data[field]
for field in unique_check if field in form.cleaned_data
)
# Reduce Model instances to their primary key values
row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
for d in row_data)
if row_data and None not in row_data:
# if we've already seen it then we have a uniqueness failure
if row_data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_unique_error_message(unique_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
for field in unique_check:
if field in form.cleaned_data:
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(row_data)
if errors:
raise ValidationError(errors)
|
[
"This",
"clean",
"method",
"will",
"check",
"for",
"unique_together",
"condition"
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/forms.py#L125-L165
|
[
"def",
"validate_unique",
"(",
"self",
")",
":",
"# Collect unique_checks and to run from all the forms.",
"all_unique_checks",
"=",
"set",
"(",
")",
"all_date_checks",
"=",
"set",
"(",
")",
"forms_to_delete",
"=",
"self",
".",
"deleted_forms",
"valid_forms",
"=",
"[",
"form",
"for",
"form",
"in",
"self",
".",
"forms",
"if",
"form",
".",
"is_valid",
"(",
")",
"and",
"form",
"not",
"in",
"forms_to_delete",
"]",
"for",
"form",
"in",
"valid_forms",
":",
"unique_checks",
",",
"date_checks",
"=",
"form",
".",
"instance",
".",
"_get_unique_checks",
"(",
")",
"all_unique_checks",
".",
"update",
"(",
"unique_checks",
")",
"all_date_checks",
".",
"update",
"(",
"date_checks",
")",
"errors",
"=",
"[",
"]",
"# Do each of the unique checks (unique and unique_together)",
"for",
"uclass",
",",
"unique_check",
"in",
"all_unique_checks",
":",
"seen_data",
"=",
"set",
"(",
")",
"for",
"form",
"in",
"valid_forms",
":",
"# Get the data for the set of fields that must be unique among the forms.",
"row_data",
"=",
"(",
"field",
"if",
"field",
"in",
"self",
".",
"unique_fields",
"else",
"form",
".",
"cleaned_data",
"[",
"field",
"]",
"for",
"field",
"in",
"unique_check",
"if",
"field",
"in",
"form",
".",
"cleaned_data",
")",
"# Reduce Model instances to their primary key values",
"row_data",
"=",
"tuple",
"(",
"d",
".",
"_get_pk_val",
"(",
")",
"if",
"hasattr",
"(",
"d",
",",
"'_get_pk_val'",
")",
"else",
"d",
"for",
"d",
"in",
"row_data",
")",
"if",
"row_data",
"and",
"None",
"not",
"in",
"row_data",
":",
"# if we've already seen it then we have a uniqueness failure",
"if",
"row_data",
"in",
"seen_data",
":",
"# poke error messages into the right places and mark",
"# the form as invalid",
"errors",
".",
"append",
"(",
"self",
".",
"get_unique_error_message",
"(",
"unique_check",
")",
")",
"form",
".",
"_errors",
"[",
"NON_FIELD_ERRORS",
"]",
"=",
"self",
".",
"error_class",
"(",
"[",
"self",
".",
"get_form_error",
"(",
")",
"]",
")",
"# remove the data from the cleaned_data dict since it was invalid",
"for",
"field",
"in",
"unique_check",
":",
"if",
"field",
"in",
"form",
".",
"cleaned_data",
":",
"del",
"form",
".",
"cleaned_data",
"[",
"field",
"]",
"# mark the data as seen",
"seen_data",
".",
"add",
"(",
"row_data",
")",
"if",
"errors",
":",
"raise",
"ValidationError",
"(",
"errors",
")"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
|
test
|
ClusterForm.has_changed
|
Return True if data differs from initial.
|
modelcluster/forms.py
|
def has_changed(self):
"""Return True if data differs from initial."""
# Need to recurse over nested formsets so that the form is saved if there are changes
# to child forms but not the parent
if self.formsets:
for formset in self.formsets.values():
for form in formset.forms:
if form.has_changed():
return True
return bool(self.changed_data)
|
def has_changed(self):
"""Return True if data differs from initial."""
# Need to recurse over nested formsets so that the form is saved if there are changes
# to child forms but not the parent
if self.formsets:
for formset in self.formsets.values():
for form in formset.forms:
if form.has_changed():
return True
return bool(self.changed_data)
|
[
"Return",
"True",
"if",
"data",
"differs",
"from",
"initial",
"."
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/forms.py#L361-L371
|
[
"def",
"has_changed",
"(",
"self",
")",
":",
"# Need to recurse over nested formsets so that the form is saved if there are changes",
"# to child forms but not the parent",
"if",
"self",
".",
"formsets",
":",
"for",
"formset",
"in",
"self",
".",
"formsets",
".",
"values",
"(",
")",
":",
"for",
"form",
"in",
"formset",
".",
"forms",
":",
"if",
"form",
".",
"has_changed",
"(",
")",
":",
"return",
"True",
"return",
"bool",
"(",
"self",
".",
"changed_data",
")"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
|
test
|
create_deferring_foreign_related_manager
|
Create a DeferringRelatedManager class that wraps an ordinary RelatedManager
with 'deferring' behaviour: any updates to the object set (via e.g. add() or clear())
are written to a holding area rather than committed to the database immediately.
Writing to the database is deferred until the model is saved.
|
modelcluster/fields.py
|
def create_deferring_foreign_related_manager(related, original_manager_cls):
"""
Create a DeferringRelatedManager class that wraps an ordinary RelatedManager
with 'deferring' behaviour: any updates to the object set (via e.g. add() or clear())
are written to a holding area rather than committed to the database immediately.
Writing to the database is deferred until the model is saved.
"""
relation_name = related.get_accessor_name()
rel_field = related.field
rel_model = related.related_model
superclass = rel_model._default_manager.__class__
class DeferringRelatedManager(superclass):
def __init__(self, instance):
super(DeferringRelatedManager, self).__init__()
self.model = rel_model
self.instance = instance
def _get_cluster_related_objects(self):
# Helper to retrieve the instance's _cluster_related_objects dict,
# creating it if it does not already exist
try:
return self.instance._cluster_related_objects
except AttributeError:
cluster_related_objects = {}
self.instance._cluster_related_objects = cluster_related_objects
return cluster_related_objects
def get_live_query_set(self):
# deprecated; renamed to get_live_queryset to match the move from
# get_query_set to get_queryset in Django 1.6
return self.get_live_queryset()
def get_live_queryset(self):
"""
return the original manager's queryset, which reflects the live database
"""
return original_manager_cls(self.instance).get_queryset()
def get_queryset(self):
"""
return the current object set with any updates applied,
wrapped up in a FakeQuerySet if it doesn't match the database state
"""
try:
results = self.instance._cluster_related_objects[relation_name]
except (AttributeError, KeyError):
return self.get_live_queryset()
return FakeQuerySet(related.related_model, results)
def _apply_rel_filters(self, queryset):
# Implemented as empty for compatibility sake
# But there is probably a better implementation of this function
return queryset._next_is_sticky()
def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
db = self._db or router.db_for_read(self.model, instance=instances[0])
queryset = super(DeferringRelatedManager, self).get_queryset().using(db)
rel_obj_attr = rel_field.get_local_related_value
instance_attr = rel_field.get_foreign_related_value
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
query = {'%s__in' % rel_field.name: instances}
qs = queryset.filter(**query)
# Since we just bypassed this class' get_queryset(), we must manage
# the reverse relation manually.
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_field.name, instance)
cache_name = rel_field.related_query_name()
return qs, rel_obj_attr, instance_attr, False, cache_name, False
def get_object_list(self):
"""
return the mutable list that forms the current in-memory state of
this relation. If there is no such list (i.e. the manager is returning
querysets from the live database instead), one is created, populating it
with the live database state
"""
cluster_related_objects = self._get_cluster_related_objects()
try:
object_list = cluster_related_objects[relation_name]
except KeyError:
object_list = list(self.get_live_queryset())
cluster_related_objects[relation_name] = object_list
return object_list
def add(self, *new_items):
"""
Add the passed items to the stored object set, but do not commit them
to the database
"""
items = self.get_object_list()
for target in new_items:
item_matched = False
for i, item in enumerate(items):
if item == target:
# Replace the matched item with the new one. This ensures that any
# modifications to that item's fields take effect within the recordset -
# i.e. we can perform a virtual UPDATE to an object in the list
# by calling add(updated_object). Which is semantically a bit dubious,
# but it does the job...
items[i] = target
item_matched = True
break
if not item_matched:
items.append(target)
# update the foreign key on the added item to point back to the parent instance
setattr(target, related.field.name, self.instance)
# Sort list
if rel_model._meta.ordering and len(items) > 1:
sort_by_fields(items, rel_model._meta.ordering)
def remove(self, *items_to_remove):
"""
Remove the passed items from the stored object set, but do not commit the change
to the database
"""
items = self.get_object_list()
# filter items list in place: see http://stackoverflow.com/a/1208792/1853523
items[:] = [item for item in items if item not in items_to_remove]
def create(self, **kwargs):
items = self.get_object_list()
new_item = related.related_model(**kwargs)
items.append(new_item)
return new_item
def clear(self):
"""
Clear the stored object set, without affecting the database
"""
self.set([])
def set(self, objs, bulk=True, clear=False):
# cast objs to a list so that:
# 1) we can call len() on it (which we can't do on, say, a queryset)
# 2) if we need to sort it, we can do so without mutating the original
objs = list(objs)
cluster_related_objects = self._get_cluster_related_objects()
for obj in objs:
# update the foreign key on the added item to point back to the parent instance
setattr(obj, related.field.name, self.instance)
# Clone and sort the 'objs' list, if necessary
if rel_model._meta.ordering and len(objs) > 1:
sort_by_fields(objs, rel_model._meta.ordering)
cluster_related_objects[relation_name] = objs
def commit(self):
"""
Apply any changes made to the stored object set to the database.
Any objects removed from the initial set will be deleted entirely
from the database.
"""
if self.instance.pk is None:
raise IntegrityError("Cannot commit relation %r on an unsaved model" % relation_name)
try:
final_items = self.instance._cluster_related_objects[relation_name]
except (AttributeError, KeyError):
# _cluster_related_objects entry never created => no changes to make
return
original_manager = original_manager_cls(self.instance)
live_items = list(original_manager.get_queryset())
for item in live_items:
if item not in final_items:
item.delete()
for item in final_items:
# Django 1.9+ bulk updates items by default which assumes
# that they have already been saved to the database.
# Disable this behaviour.
# https://code.djangoproject.com/ticket/18556
# https://github.com/django/django/commit/adc0c4fbac98f9cb975e8fa8220323b2de638b46
original_manager.add(item, bulk=False)
# purge the _cluster_related_objects entry, so we switch back to live SQL
del self.instance._cluster_related_objects[relation_name]
return DeferringRelatedManager
|
def create_deferring_foreign_related_manager(related, original_manager_cls):
"""
Create a DeferringRelatedManager class that wraps an ordinary RelatedManager
with 'deferring' behaviour: any updates to the object set (via e.g. add() or clear())
are written to a holding area rather than committed to the database immediately.
Writing to the database is deferred until the model is saved.
"""
relation_name = related.get_accessor_name()
rel_field = related.field
rel_model = related.related_model
superclass = rel_model._default_manager.__class__
class DeferringRelatedManager(superclass):
def __init__(self, instance):
super(DeferringRelatedManager, self).__init__()
self.model = rel_model
self.instance = instance
def _get_cluster_related_objects(self):
# Helper to retrieve the instance's _cluster_related_objects dict,
# creating it if it does not already exist
try:
return self.instance._cluster_related_objects
except AttributeError:
cluster_related_objects = {}
self.instance._cluster_related_objects = cluster_related_objects
return cluster_related_objects
def get_live_query_set(self):
# deprecated; renamed to get_live_queryset to match the move from
# get_query_set to get_queryset in Django 1.6
return self.get_live_queryset()
def get_live_queryset(self):
"""
return the original manager's queryset, which reflects the live database
"""
return original_manager_cls(self.instance).get_queryset()
def get_queryset(self):
"""
return the current object set with any updates applied,
wrapped up in a FakeQuerySet if it doesn't match the database state
"""
try:
results = self.instance._cluster_related_objects[relation_name]
except (AttributeError, KeyError):
return self.get_live_queryset()
return FakeQuerySet(related.related_model, results)
def _apply_rel_filters(self, queryset):
# Implemented as empty for compatibility sake
# But there is probably a better implementation of this function
return queryset._next_is_sticky()
def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
db = self._db or router.db_for_read(self.model, instance=instances[0])
queryset = super(DeferringRelatedManager, self).get_queryset().using(db)
rel_obj_attr = rel_field.get_local_related_value
instance_attr = rel_field.get_foreign_related_value
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
query = {'%s__in' % rel_field.name: instances}
qs = queryset.filter(**query)
# Since we just bypassed this class' get_queryset(), we must manage
# the reverse relation manually.
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_field.name, instance)
cache_name = rel_field.related_query_name()
return qs, rel_obj_attr, instance_attr, False, cache_name, False
def get_object_list(self):
"""
return the mutable list that forms the current in-memory state of
this relation. If there is no such list (i.e. the manager is returning
querysets from the live database instead), one is created, populating it
with the live database state
"""
cluster_related_objects = self._get_cluster_related_objects()
try:
object_list = cluster_related_objects[relation_name]
except KeyError:
object_list = list(self.get_live_queryset())
cluster_related_objects[relation_name] = object_list
return object_list
def add(self, *new_items):
"""
Add the passed items to the stored object set, but do not commit them
to the database
"""
items = self.get_object_list()
for target in new_items:
item_matched = False
for i, item in enumerate(items):
if item == target:
# Replace the matched item with the new one. This ensures that any
# modifications to that item's fields take effect within the recordset -
# i.e. we can perform a virtual UPDATE to an object in the list
# by calling add(updated_object). Which is semantically a bit dubious,
# but it does the job...
items[i] = target
item_matched = True
break
if not item_matched:
items.append(target)
# update the foreign key on the added item to point back to the parent instance
setattr(target, related.field.name, self.instance)
# Sort list
if rel_model._meta.ordering and len(items) > 1:
sort_by_fields(items, rel_model._meta.ordering)
def remove(self, *items_to_remove):
"""
Remove the passed items from the stored object set, but do not commit the change
to the database
"""
items = self.get_object_list()
# filter items list in place: see http://stackoverflow.com/a/1208792/1853523
items[:] = [item for item in items if item not in items_to_remove]
def create(self, **kwargs):
items = self.get_object_list()
new_item = related.related_model(**kwargs)
items.append(new_item)
return new_item
def clear(self):
"""
Clear the stored object set, without affecting the database
"""
self.set([])
def set(self, objs, bulk=True, clear=False):
# cast objs to a list so that:
# 1) we can call len() on it (which we can't do on, say, a queryset)
# 2) if we need to sort it, we can do so without mutating the original
objs = list(objs)
cluster_related_objects = self._get_cluster_related_objects()
for obj in objs:
# update the foreign key on the added item to point back to the parent instance
setattr(obj, related.field.name, self.instance)
# Clone and sort the 'objs' list, if necessary
if rel_model._meta.ordering and len(objs) > 1:
sort_by_fields(objs, rel_model._meta.ordering)
cluster_related_objects[relation_name] = objs
def commit(self):
"""
Apply any changes made to the stored object set to the database.
Any objects removed from the initial set will be deleted entirely
from the database.
"""
if self.instance.pk is None:
raise IntegrityError("Cannot commit relation %r on an unsaved model" % relation_name)
try:
final_items = self.instance._cluster_related_objects[relation_name]
except (AttributeError, KeyError):
# _cluster_related_objects entry never created => no changes to make
return
original_manager = original_manager_cls(self.instance)
live_items = list(original_manager.get_queryset())
for item in live_items:
if item not in final_items:
item.delete()
for item in final_items:
# Django 1.9+ bulk updates items by default which assumes
# that they have already been saved to the database.
# Disable this behaviour.
# https://code.djangoproject.com/ticket/18556
# https://github.com/django/django/commit/adc0c4fbac98f9cb975e8fa8220323b2de638b46
original_manager.add(item, bulk=False)
# purge the _cluster_related_objects entry, so we switch back to live SQL
del self.instance._cluster_related_objects[relation_name]
return DeferringRelatedManager
|
[
"Create",
"a",
"DeferringRelatedManager",
"class",
"that",
"wraps",
"an",
"ordinary",
"RelatedManager",
"with",
"deferring",
"behaviour",
":",
"any",
"updates",
"to",
"the",
"object",
"set",
"(",
"via",
"e",
".",
"g",
".",
"add",
"()",
"or",
"clear",
"()",
")",
"are",
"written",
"to",
"a",
"holding",
"area",
"rather",
"than",
"committed",
"to",
"the",
"database",
"immediately",
".",
"Writing",
"to",
"the",
"database",
"is",
"deferred",
"until",
"the",
"model",
"is",
"saved",
"."
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/fields.py#L17-L212
|
[
"def",
"create_deferring_foreign_related_manager",
"(",
"related",
",",
"original_manager_cls",
")",
":",
"relation_name",
"=",
"related",
".",
"get_accessor_name",
"(",
")",
"rel_field",
"=",
"related",
".",
"field",
"rel_model",
"=",
"related",
".",
"related_model",
"superclass",
"=",
"rel_model",
".",
"_default_manager",
".",
"__class__",
"class",
"DeferringRelatedManager",
"(",
"superclass",
")",
":",
"def",
"__init__",
"(",
"self",
",",
"instance",
")",
":",
"super",
"(",
"DeferringRelatedManager",
",",
"self",
")",
".",
"__init__",
"(",
")",
"self",
".",
"model",
"=",
"rel_model",
"self",
".",
"instance",
"=",
"instance",
"def",
"_get_cluster_related_objects",
"(",
"self",
")",
":",
"# Helper to retrieve the instance's _cluster_related_objects dict,",
"# creating it if it does not already exist",
"try",
":",
"return",
"self",
".",
"instance",
".",
"_cluster_related_objects",
"except",
"AttributeError",
":",
"cluster_related_objects",
"=",
"{",
"}",
"self",
".",
"instance",
".",
"_cluster_related_objects",
"=",
"cluster_related_objects",
"return",
"cluster_related_objects",
"def",
"get_live_query_set",
"(",
"self",
")",
":",
"# deprecated; renamed to get_live_queryset to match the move from",
"# get_query_set to get_queryset in Django 1.6",
"return",
"self",
".",
"get_live_queryset",
"(",
")",
"def",
"get_live_queryset",
"(",
"self",
")",
":",
"\"\"\"\n return the original manager's queryset, which reflects the live database\n \"\"\"",
"return",
"original_manager_cls",
"(",
"self",
".",
"instance",
")",
".",
"get_queryset",
"(",
")",
"def",
"get_queryset",
"(",
"self",
")",
":",
"\"\"\"\n return the current object set with any updates applied,\n wrapped up in a FakeQuerySet if it doesn't match the database state\n \"\"\"",
"try",
":",
"results",
"=",
"self",
".",
"instance",
".",
"_cluster_related_objects",
"[",
"relation_name",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"return",
"self",
".",
"get_live_queryset",
"(",
")",
"return",
"FakeQuerySet",
"(",
"related",
".",
"related_model",
",",
"results",
")",
"def",
"_apply_rel_filters",
"(",
"self",
",",
"queryset",
")",
":",
"# Implemented as empty for compatibility sake",
"# But there is probably a better implementation of this function",
"return",
"queryset",
".",
"_next_is_sticky",
"(",
")",
"def",
"get_prefetch_queryset",
"(",
"self",
",",
"instances",
",",
"queryset",
"=",
"None",
")",
":",
"if",
"queryset",
"is",
"None",
":",
"db",
"=",
"self",
".",
"_db",
"or",
"router",
".",
"db_for_read",
"(",
"self",
".",
"model",
",",
"instance",
"=",
"instances",
"[",
"0",
"]",
")",
"queryset",
"=",
"super",
"(",
"DeferringRelatedManager",
",",
"self",
")",
".",
"get_queryset",
"(",
")",
".",
"using",
"(",
"db",
")",
"rel_obj_attr",
"=",
"rel_field",
".",
"get_local_related_value",
"instance_attr",
"=",
"rel_field",
".",
"get_foreign_related_value",
"instances_dict",
"=",
"dict",
"(",
"(",
"instance_attr",
"(",
"inst",
")",
",",
"inst",
")",
"for",
"inst",
"in",
"instances",
")",
"query",
"=",
"{",
"'%s__in'",
"%",
"rel_field",
".",
"name",
":",
"instances",
"}",
"qs",
"=",
"queryset",
".",
"filter",
"(",
"*",
"*",
"query",
")",
"# Since we just bypassed this class' get_queryset(), we must manage",
"# the reverse relation manually.",
"for",
"rel_obj",
"in",
"qs",
":",
"instance",
"=",
"instances_dict",
"[",
"rel_obj_attr",
"(",
"rel_obj",
")",
"]",
"setattr",
"(",
"rel_obj",
",",
"rel_field",
".",
"name",
",",
"instance",
")",
"cache_name",
"=",
"rel_field",
".",
"related_query_name",
"(",
")",
"return",
"qs",
",",
"rel_obj_attr",
",",
"instance_attr",
",",
"False",
",",
"cache_name",
",",
"False",
"def",
"get_object_list",
"(",
"self",
")",
":",
"\"\"\"\n return the mutable list that forms the current in-memory state of\n this relation. If there is no such list (i.e. the manager is returning\n querysets from the live database instead), one is created, populating it\n with the live database state\n \"\"\"",
"cluster_related_objects",
"=",
"self",
".",
"_get_cluster_related_objects",
"(",
")",
"try",
":",
"object_list",
"=",
"cluster_related_objects",
"[",
"relation_name",
"]",
"except",
"KeyError",
":",
"object_list",
"=",
"list",
"(",
"self",
".",
"get_live_queryset",
"(",
")",
")",
"cluster_related_objects",
"[",
"relation_name",
"]",
"=",
"object_list",
"return",
"object_list",
"def",
"add",
"(",
"self",
",",
"*",
"new_items",
")",
":",
"\"\"\"\n Add the passed items to the stored object set, but do not commit them\n to the database\n \"\"\"",
"items",
"=",
"self",
".",
"get_object_list",
"(",
")",
"for",
"target",
"in",
"new_items",
":",
"item_matched",
"=",
"False",
"for",
"i",
",",
"item",
"in",
"enumerate",
"(",
"items",
")",
":",
"if",
"item",
"==",
"target",
":",
"# Replace the matched item with the new one. This ensures that any",
"# modifications to that item's fields take effect within the recordset -",
"# i.e. we can perform a virtual UPDATE to an object in the list",
"# by calling add(updated_object). Which is semantically a bit dubious,",
"# but it does the job...",
"items",
"[",
"i",
"]",
"=",
"target",
"item_matched",
"=",
"True",
"break",
"if",
"not",
"item_matched",
":",
"items",
".",
"append",
"(",
"target",
")",
"# update the foreign key on the added item to point back to the parent instance",
"setattr",
"(",
"target",
",",
"related",
".",
"field",
".",
"name",
",",
"self",
".",
"instance",
")",
"# Sort list",
"if",
"rel_model",
".",
"_meta",
".",
"ordering",
"and",
"len",
"(",
"items",
")",
">",
"1",
":",
"sort_by_fields",
"(",
"items",
",",
"rel_model",
".",
"_meta",
".",
"ordering",
")",
"def",
"remove",
"(",
"self",
",",
"*",
"items_to_remove",
")",
":",
"\"\"\"\n Remove the passed items from the stored object set, but do not commit the change\n to the database\n \"\"\"",
"items",
"=",
"self",
".",
"get_object_list",
"(",
")",
"# filter items list in place: see http://stackoverflow.com/a/1208792/1853523",
"items",
"[",
":",
"]",
"=",
"[",
"item",
"for",
"item",
"in",
"items",
"if",
"item",
"not",
"in",
"items_to_remove",
"]",
"def",
"create",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"items",
"=",
"self",
".",
"get_object_list",
"(",
")",
"new_item",
"=",
"related",
".",
"related_model",
"(",
"*",
"*",
"kwargs",
")",
"items",
".",
"append",
"(",
"new_item",
")",
"return",
"new_item",
"def",
"clear",
"(",
"self",
")",
":",
"\"\"\"\n Clear the stored object set, without affecting the database\n \"\"\"",
"self",
".",
"set",
"(",
"[",
"]",
")",
"def",
"set",
"(",
"self",
",",
"objs",
",",
"bulk",
"=",
"True",
",",
"clear",
"=",
"False",
")",
":",
"# cast objs to a list so that:",
"# 1) we can call len() on it (which we can't do on, say, a queryset)",
"# 2) if we need to sort it, we can do so without mutating the original",
"objs",
"=",
"list",
"(",
"objs",
")",
"cluster_related_objects",
"=",
"self",
".",
"_get_cluster_related_objects",
"(",
")",
"for",
"obj",
"in",
"objs",
":",
"# update the foreign key on the added item to point back to the parent instance",
"setattr",
"(",
"obj",
",",
"related",
".",
"field",
".",
"name",
",",
"self",
".",
"instance",
")",
"# Clone and sort the 'objs' list, if necessary",
"if",
"rel_model",
".",
"_meta",
".",
"ordering",
"and",
"len",
"(",
"objs",
")",
">",
"1",
":",
"sort_by_fields",
"(",
"objs",
",",
"rel_model",
".",
"_meta",
".",
"ordering",
")",
"cluster_related_objects",
"[",
"relation_name",
"]",
"=",
"objs",
"def",
"commit",
"(",
"self",
")",
":",
"\"\"\"\n Apply any changes made to the stored object set to the database.\n Any objects removed from the initial set will be deleted entirely\n from the database.\n \"\"\"",
"if",
"self",
".",
"instance",
".",
"pk",
"is",
"None",
":",
"raise",
"IntegrityError",
"(",
"\"Cannot commit relation %r on an unsaved model\"",
"%",
"relation_name",
")",
"try",
":",
"final_items",
"=",
"self",
".",
"instance",
".",
"_cluster_related_objects",
"[",
"relation_name",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"# _cluster_related_objects entry never created => no changes to make",
"return",
"original_manager",
"=",
"original_manager_cls",
"(",
"self",
".",
"instance",
")",
"live_items",
"=",
"list",
"(",
"original_manager",
".",
"get_queryset",
"(",
")",
")",
"for",
"item",
"in",
"live_items",
":",
"if",
"item",
"not",
"in",
"final_items",
":",
"item",
".",
"delete",
"(",
")",
"for",
"item",
"in",
"final_items",
":",
"# Django 1.9+ bulk updates items by default which assumes",
"# that they have already been saved to the database.",
"# Disable this behaviour.",
"# https://code.djangoproject.com/ticket/18556",
"# https://github.com/django/django/commit/adc0c4fbac98f9cb975e8fa8220323b2de638b46",
"original_manager",
".",
"add",
"(",
"item",
",",
"bulk",
"=",
"False",
")",
"# purge the _cluster_related_objects entry, so we switch back to live SQL",
"del",
"self",
".",
"instance",
".",
"_cluster_related_objects",
"[",
"relation_name",
"]",
"return",
"DeferringRelatedManager"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
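For orientation, a brief usage sketch of the deferred related manager produced by the factory above. It is hedged: it assumes a configured Django project with django-modelcluster installed (the models must live in an installed app), and the Band/BandMember models are invented for illustration, while ClusterableModel and ParentalKey are the package's own building blocks.

from django.db import models
from modelcluster.fields import ParentalKey
from modelcluster.models import ClusterableModel


class Band(ClusterableModel):
    name = models.CharField(max_length=255)


class BandMember(models.Model):
    band = ParentalKey(Band, related_name='members', on_delete=models.CASCADE)
    name = models.CharField(max_length=255)


band = Band(name='The Beatles')

# add() only updates the in-memory object list; nothing is written yet.
band.members.add(BandMember(name='John Lennon'))

# Saving the parent commits the deferred changes: removed children are
# deleted and the remaining ones are saved (with bulk=False), mirroring the
# commit() logic shown in the tokens above.
band.save()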
|
test
|
sort_by_fields
|
Sort a list of objects on the given fields. The field list works analogously to
queryset.order_by(*fields): each field is either a property of the object,
or is prefixed by '-' (e.g. '-name') to indicate reverse ordering.
|
modelcluster/utils.py
|
def sort_by_fields(items, fields):
"""
Sort a list of objects on the given fields. The field list works analogously to
queryset.order_by(*fields): each field is either a property of the object,
or is prefixed by '-' (e.g. '-name') to indicate reverse ordering.
"""
# To get the desired behaviour, we need to order by keys in reverse order
# See: https://docs.python.org/2/howto/sorting.html#sort-stability-and-complex-sorts
for key in reversed(fields):
# Check if this key has been reversed
reverse = False
if key[0] == '-':
reverse = True
key = key[1:]
# Sort
# Use a tuple of (v is not None, v) as the key, to ensure that None sorts before other values,
# as comparing directly with None breaks on python3
items.sort(key=lambda x: (getattr(x, key) is not None, getattr(x, key)), reverse=reverse)
|
def sort_by_fields(items, fields):
"""
Sort a list of objects on the given fields. The field list works analogously to
queryset.order_by(*fields): each field is either a property of the object,
or is prefixed by '-' (e.g. '-name') to indicate reverse ordering.
"""
# To get the desired behaviour, we need to order by keys in reverse order
# See: https://docs.python.org/2/howto/sorting.html#sort-stability-and-complex-sorts
for key in reversed(fields):
# Check if this key has been reversed
reverse = False
if key[0] == '-':
reverse = True
key = key[1:]
# Sort
# Use a tuple of (v is not None, v) as the key, to ensure that None sorts before other values,
# as comparing directly with None breaks on python3
items.sort(key=lambda x: (getattr(x, key) is not None, getattr(x, key)), reverse=reverse)
|
[
"Sort",
"a",
"list",
"of",
"objects",
"on",
"the",
"given",
"fields",
".",
"The",
"field",
"list",
"works",
"analogously",
"to",
"queryset",
".",
"order_by",
"(",
"*",
"fields",
")",
":",
"each",
"field",
"is",
"either",
"a",
"property",
"of",
"the",
"object",
"or",
"is",
"prefixed",
"by",
"-",
"(",
"e",
".",
"g",
".",
"-",
"name",
")",
"to",
"indicate",
"reverse",
"ordering",
"."
] |
wagtail/django-modelcluster
|
python
|
https://github.com/wagtail/django-modelcluster/blob/bfc8bd755af0ddd49e2aee2f2ca126921573d38b/modelcluster/utils.py#L1-L19
|
[
"def",
"sort_by_fields",
"(",
"items",
",",
"fields",
")",
":",
"# To get the desired behaviour, we need to order by keys in reverse order",
"# See: https://docs.python.org/2/howto/sorting.html#sort-stability-and-complex-sorts",
"for",
"key",
"in",
"reversed",
"(",
"fields",
")",
":",
"# Check if this key has been reversed",
"reverse",
"=",
"False",
"if",
"key",
"[",
"0",
"]",
"==",
"'-'",
":",
"reverse",
"=",
"True",
"key",
"=",
"key",
"[",
"1",
":",
"]",
"# Sort",
"# Use a tuple of (v is not None, v) as the key, to ensure that None sorts before other values,",
"# as comparing directly with None breaks on python3",
"items",
".",
"sort",
"(",
"key",
"=",
"lambda",
"x",
":",
"(",
"getattr",
"(",
"x",
",",
"key",
")",
"is",
"not",
"None",
",",
"getattr",
"(",
"x",
",",
"key",
")",
")",
",",
"reverse",
"=",
"reverse",
")"
] |
bfc8bd755af0ddd49e2aee2f2ca126921573d38b
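A small, self-contained example of sort_by_fields; it only assumes that the django-modelcluster package is importable, since the function itself relies on nothing but getattr and list.sort.

from collections import namedtuple

from modelcluster.utils import sort_by_fields

Book = namedtuple('Book', ['title', 'year'])

books = [
    Book('Beta', 2001),
    Book('Alpha', 2001),
    Book('Gamma', None),
]

# Equivalent in spirit to queryset.order_by('-year', 'title'):
# newest year first, ties broken by title, None years pushed last.
sort_by_fields(books, ['-year', 'title'])

print([b.title for b in books])  # ['Alpha', 'Beta', 'Gamma']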
|
test
|
Address.with_valid_checksum
|
Returns the address with a valid checksum attached.
|
iota/types.py
|
def with_valid_checksum(self):
# type: () -> Address
"""
Returns the address with a valid checksum attached.
"""
return Address(
trytes=self.address + self._generate_checksum(),
# Make sure to copy all of the ancillary attributes, too!
balance=self.balance,
key_index=self.key_index,
security_level=self.security_level,
)
|
def with_valid_checksum(self):
# type: () -> Address
"""
Returns the address with a valid checksum attached.
"""
return Address(
trytes=self.address + self._generate_checksum(),
# Make sure to copy all of the ancillary attributes, too!
balance=self.balance,
key_index=self.key_index,
security_level=self.security_level,
)
|
[
"Returns",
"the",
"address",
"with",
"a",
"valid",
"checksum",
"attached",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/types.py#L844-L856
|
[
"def",
"with_valid_checksum",
"(",
"self",
")",
":",
"# type: () -> Address",
"return",
"Address",
"(",
"trytes",
"=",
"self",
".",
"address",
"+",
"self",
".",
"_generate_checksum",
"(",
")",
",",
"# Make sure to copy all of the ancillary attributes, too!",
"balance",
"=",
"self",
".",
"balance",
",",
"key_index",
"=",
"self",
".",
"key_index",
",",
"security_level",
"=",
"self",
".",
"security_level",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
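A hedged usage sketch of with_valid_checksum(): the all-'9' value below is merely a syntactically valid 81-tryte placeholder, not a real wallet address, and the printed length assumes the usual 9-tryte IOTA address checksum.

from iota import Address

addr = Address(b'9' * 81)
checksummed = addr.with_valid_checksum()

print(len(checksummed))                      # 90 trytes (81 + 9-tryte checksum)
print(checksummed.address == addr.address)   # True; only the checksum was added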
|
test
|
Address._generate_checksum
|
Generates the correct checksum for this address.
|
iota/types.py
|
def _generate_checksum(self):
# type: () -> AddressChecksum
"""
Generates the correct checksum for this address.
"""
checksum_trits = [] # type: MutableSequence[int]
sponge = Kerl()
sponge.absorb(self.address.as_trits())
sponge.squeeze(checksum_trits)
checksum_length = AddressChecksum.LEN * TRITS_PER_TRYTE
return AddressChecksum.from_trits(checksum_trits[-checksum_length:])
|
def _generate_checksum(self):
# type: () -> AddressChecksum
"""
Generates the correct checksum for this address.
"""
checksum_trits = [] # type: MutableSequence[int]
sponge = Kerl()
sponge.absorb(self.address.as_trits())
sponge.squeeze(checksum_trits)
checksum_length = AddressChecksum.LEN * TRITS_PER_TRYTE
return AddressChecksum.from_trits(checksum_trits[-checksum_length:])
|
[
"Generates",
"the",
"correct",
"checksum",
"for",
"this",
"address",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/types.py#L858-L871
|
[
"def",
"_generate_checksum",
"(",
"self",
")",
":",
"# type: () -> AddressChecksum",
"checksum_trits",
"=",
"[",
"]",
"# type: MutableSequence[int]",
"sponge",
"=",
"Kerl",
"(",
")",
"sponge",
".",
"absorb",
"(",
"self",
".",
"address",
".",
"as_trits",
"(",
")",
")",
"sponge",
".",
"squeeze",
"(",
"checksum_trits",
")",
"checksum_length",
"=",
"AddressChecksum",
".",
"LEN",
"*",
"TRITS_PER_TRYTE",
"return",
"AddressChecksum",
".",
"from_trits",
"(",
"checksum_trits",
"[",
"-",
"checksum_length",
":",
"]",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
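The private helper's output is visible as the trailing trytes that with_valid_checksum() appends. This sketch assumes TryteString slicing behaves like ordinary sequence slicing (returning the selected trytes).

from iota import Address

addr = Address(b'9' * 81)

# The checksum is the last AddressChecksum.LEN (9) trytes of the
# checksummed address.
checksum = addr.with_valid_checksum()[-9:]
print(len(checksum))  # 9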
|
test
|
IotaCommandLineApp.execute
|
Executes the command and (optionally) returns an exit code (used by
the shell to determine if the application exited cleanly).
:param api:
The API object used to communicate with the node.
:param arguments:
Command-line arguments parsed by the argument parser.
|
iota/bin/__init__.py
|
def execute(self, api, **arguments):
# type: (Iota, **Any) -> Optional[int]
"""
Executes the command and (optionally) returns an exit code (used by
the shell to determine if the application exited cleanly).
:param api:
The API object used to communicate with the node.
:param arguments:
Command-line arguments parsed by the argument parser.
"""
raise NotImplementedError(
'Not implemented in {cls}.'.format(cls=type(self).__name__),
)
|
def execute(self, api, **arguments):
# type: (Iota, **Any) -> Optional[int]
"""
Executes the command and (optionally) returns an exit code (used by
the shell to determine if the application exited cleanly).
:param api:
The API object used to communicate with the node.
:param arguments:
Command-line arguments parsed by the argument parser.
"""
raise NotImplementedError(
'Not implemented in {cls}.'.format(cls=type(self).__name__),
)
|
[
"Executes",
"the",
"command",
"and",
"(",
"optionally",
")",
"returns",
"an",
"exit",
"code",
"(",
"used",
"by",
"the",
"shell",
"to",
"determine",
"if",
"the",
"application",
"exited",
"cleanly",
")",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/bin/__init__.py#L41-L55
|
[
"def",
"execute",
"(",
"self",
",",
"api",
",",
"*",
"*",
"arguments",
")",
":",
"# type: (Iota, **Any) -> Optional[int]",
"raise",
"NotImplementedError",
"(",
"'Not implemented in {cls}.'",
".",
"format",
"(",
"cls",
"=",
"type",
"(",
"self",
")",
".",
"__name__",
")",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
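execute() is the hook that concrete PyOTA command-line scripts override. A minimal, hypothetical subclass might look like this; NodeInfoCommand and its behaviour are invented for illustration, get_node_info() is a standard read-only API call, and requires_seed is assumed to be a class-level switch, as the parse_argv() record below suggests.

from iota.bin import IotaCommandLineApp


class NodeInfoCommand(IotaCommandLineApp):
    """
    Hypothetical command that prints basic node information.
    """
    requires_seed = False  # read-only call; no seed needed

    def execute(self, api, **arguments):
        # ``api`` is the ready-made Iota instance built by parse_argv().
        print(api.get_node_info())
        return 0

Wired up through run_from_argv() (next record), the shell would then receive 0 as the exit status.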
|
test
|
IotaCommandLineApp.run_from_argv
|
Executes the command from a collection of arguments (e.g.,
:py:data:`sys.argv`) and returns the exit code.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
|
iota/bin/__init__.py
|
def run_from_argv(self, argv=None):
# type: (Optional[tuple]) -> int
"""
Executes the command from a collection of arguments (e.g.,
:py:data:`sys.argv`) and returns the exit code.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
"""
exit_code = self.execute(**self.parse_argv(argv))
if exit_code is None:
exit_code = 0
return exit_code
|
def run_from_argv(self, argv=None):
# type: (Optional[tuple]) -> int
"""
Executes the command from a collection of arguments (e.g.,
:py:data:`sys.argv`) and returns the exit code.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
"""
exit_code = self.execute(**self.parse_argv(argv))
if exit_code is None:
exit_code = 0
return exit_code
|
[
"Executes",
"the",
"command",
"from",
"a",
"collection",
"of",
"arguments",
"(",
"e",
".",
"g",
".",
":",
"py",
":",
"data",
"sys",
".",
"argv",
")",
"and",
"returns",
"the",
"exit",
"code",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/bin/__init__.py#L63-L78
|
[
"def",
"run_from_argv",
"(",
"self",
",",
"argv",
"=",
"None",
")",
":",
"# type: (Optional[tuple]) -> int",
"exit_code",
"=",
"self",
".",
"execute",
"(",
"*",
"*",
"self",
".",
"parse_argv",
"(",
"argv",
")",
")",
"if",
"exit_code",
"is",
"None",
":",
"exit_code",
"=",
"0",
"return",
"exit_code"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
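Because a ``None`` return is normalised to 0, execute() can simply fall off the end on success. A tiny stub demonstrates this; it assumes the application class can be instantiated without arguments, and no network traffic occurs because the stub never uses the api object (constructing the Iota wrapper alone does not contact the node).

from iota.bin import IotaCommandLineApp


class NoOpCommand(IotaCommandLineApp):
    """Hypothetical command used only to illustrate exit codes."""
    requires_seed = False

    def execute(self, api, **arguments):
        pass  # implicitly returns None


print(NoOpCommand().run_from_argv([]))  # 0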
|
test
|
IotaCommandLineApp.parse_argv
|
Parses arguments for the command.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
|
iota/bin/__init__.py
|
def parse_argv(self, argv=None):
# type: (Optional[tuple]) -> dict
"""
Parses arguments for the command.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
"""
arguments = vars(self.create_argument_parser().parse_args(argv))
seed = None
if self.requires_seed:
seed_filepath = arguments.pop('seed_file')
seed = (
self.seed_from_filepath(seed_filepath)
if seed_filepath
else self.prompt_for_seed()
)
arguments['api'] = Iota(
adapter=arguments.pop('uri'),
seed=seed,
testnet=arguments.pop('testnet'),
)
return arguments
|
def parse_argv(self, argv=None):
# type: (Optional[tuple]) -> dict
"""
Parses arguments for the command.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
"""
arguments = vars(self.create_argument_parser().parse_args(argv))
seed = None
if self.requires_seed:
seed_filepath = arguments.pop('seed_file')
seed = (
self.seed_from_filepath(seed_filepath)
if seed_filepath
else self.prompt_for_seed()
)
arguments['api'] = Iota(
adapter=arguments.pop('uri'),
seed=seed,
testnet=arguments.pop('testnet'),
)
return arguments
|
[
"Parses",
"arguments",
"for",
"the",
"command",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/bin/__init__.py#L80-L107
|
[
"def",
"parse_argv",
"(",
"self",
",",
"argv",
"=",
"None",
")",
":",
"# type: (Optional[tuple]) -> dict",
"arguments",
"=",
"vars",
"(",
"self",
".",
"create_argument_parser",
"(",
")",
".",
"parse_args",
"(",
"argv",
")",
")",
"seed",
"=",
"None",
"if",
"self",
".",
"requires_seed",
":",
"seed_filepath",
"=",
"arguments",
".",
"pop",
"(",
"'seed_file'",
")",
"seed",
"=",
"(",
"self",
".",
"seed_from_filepath",
"(",
"seed_filepath",
")",
"if",
"seed_filepath",
"else",
"self",
".",
"prompt_for_seed",
"(",
")",
")",
"arguments",
"[",
"'api'",
"]",
"=",
"Iota",
"(",
"adapter",
"=",
"arguments",
".",
"pop",
"(",
"'uri'",
")",
",",
"seed",
"=",
"seed",
",",
"testnet",
"=",
"arguments",
".",
"pop",
"(",
"'testnet'",
")",
",",
")",
"return",
"arguments"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
IotaCommandLineApp.create_argument_parser
|
Returns the argument parser that will be used to interpret
arguments and options from argv.
|
iota/bin/__init__.py
|
def create_argument_parser(self):
# type: () -> ArgumentParser
"""
Returns the argument parser that will be used to interpret
arguments and options from argv.
"""
parser = ArgumentParser(
description=self.__doc__,
epilog='PyOTA v{version}'.format(version=__version__),
)
parser.add_argument(
'--uri',
type=text_type,
default='http://localhost:14265/',
help=(
'URI of the node to connect to '
'(defaults to http://localhost:14265/).'
),
)
if self.requires_seed:
parser.add_argument(
'--seed-file',
type=text_type,
dest='seed_file',
help=(
'Path to a file containing your seed in cleartext. '
'If not provided, you will be prompted to enter '
'your seed via stdin.'
),
)
parser.add_argument(
'--testnet',
action='store_true',
default=False,
help='If set, use testnet settings (e.g., for PoW).',
)
return parser
|
def create_argument_parser(self):
# type: () -> ArgumentParser
"""
Returns the argument parser that will be used to interpret
arguments and options from argv.
"""
parser = ArgumentParser(
description=self.__doc__,
epilog='PyOTA v{version}'.format(version=__version__),
)
parser.add_argument(
'--uri',
type=text_type,
default='http://localhost:14265/',
help=(
'URI of the node to connect to '
'(defaults to http://localhost:14265/).'
),
)
if self.requires_seed:
parser.add_argument(
'--seed-file',
type=text_type,
dest='seed_file',
help=(
'Path to a file containing your seed in cleartext. '
'If not provided, you will be prompted to enter '
'your seed via stdin.'
),
)
parser.add_argument(
'--testnet',
action='store_true',
default=False,
help='If set, use testnet settings (e.g., for PoW).',
)
return parser
|
[
"Returns",
"the",
"argument",
"parser",
"that",
"will",
"be",
"used",
"to",
"interpret",
"arguments",
"and",
"options",
"from",
"argv",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/bin/__init__.py#L109-L151
|
[
"def",
"create_argument_parser",
"(",
"self",
")",
":",
"# type: () -> ArgumentParser",
"parser",
"=",
"ArgumentParser",
"(",
"description",
"=",
"self",
".",
"__doc__",
",",
"epilog",
"=",
"'PyOTA v{version}'",
".",
"format",
"(",
"version",
"=",
"__version__",
")",
",",
")",
"parser",
".",
"add_argument",
"(",
"'--uri'",
",",
"type",
"=",
"text_type",
",",
"default",
"=",
"'http://localhost:14265/'",
",",
"help",
"=",
"(",
"'URI of the node to connect to '",
"'(defaults to http://localhost:14265/).'",
")",
",",
")",
"if",
"self",
".",
"requires_seed",
":",
"parser",
".",
"add_argument",
"(",
"'--seed-file'",
",",
"type",
"=",
"text_type",
",",
"dest",
"=",
"'seed_file'",
",",
"help",
"=",
"(",
"'Path to a file containing your seed in cleartext. '",
"'If not provided, you will be prompted to enter '",
"'your seed via stdin.'",
")",
",",
")",
"parser",
".",
"add_argument",
"(",
"'--testnet'",
",",
"action",
"=",
"'store_true'",
",",
"default",
"=",
"False",
",",
"help",
"=",
"'If set, use testnet settings (e.g., for PoW).'",
",",
")",
"return",
"parser"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
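Subclasses that need extra options typically extend the parser returned by the base class rather than build their own. A hedged sketch follows; the ``--depth`` flag is invented purely for illustration.

from iota.bin import IotaCommandLineApp


class HypotheticalCommand(IotaCommandLineApp):
    """Example command with one extra CLI flag."""
    requires_seed = False

    def create_argument_parser(self):
        parser = super(HypotheticalCommand, self).create_argument_parser()

        # Added on top of the standard --uri / --testnet (and, when a seed
        # is required, --seed-file) arguments.
        parser.add_argument('--depth', type=int, default=3)

        return parser

    def execute(self, api, **arguments):
        # parse_argv() pops uri/testnet/seed_file, so only custom options
        # such as 'depth' arrive here.
        print(arguments['depth'])
        return 0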
|
test
|
IotaCommandLineApp.prompt_for_seed
|
Prompts the user to enter their seed via stdin.
|
iota/bin/__init__.py
|
def prompt_for_seed():
# type: () -> Seed
"""
Prompts the user to enter their seed via stdin.
"""
seed = secure_input(
'Enter seed and press return (typing will not be shown).\n'
'If no seed is specified, a random one will be used instead.\n'
)
if isinstance(seed, text_type):
seed = seed.encode('ascii')
return Seed(seed) if seed else Seed.random()
|
def prompt_for_seed():
# type: () -> Seed
"""
Prompts the user to enter their seed via stdin.
"""
seed = secure_input(
'Enter seed and press return (typing will not be shown).\n'
'If no seed is specified, a random one will be used instead.\n'
)
if isinstance(seed, text_type):
seed = seed.encode('ascii')
return Seed(seed) if seed else Seed.random()
|
[
"Prompts",
"the",
"user",
"to",
"enter",
"their",
"seed",
"via",
"stdin",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/bin/__init__.py#L165-L178
|
[
"def",
"prompt_for_seed",
"(",
")",
":",
"# type: () -> Seed",
"seed",
"=",
"secure_input",
"(",
"'Enter seed and press return (typing will not be shown).\\n'",
"'If no seed is specified, a random one will be used instead.\\n'",
")",
"if",
"isinstance",
"(",
"seed",
",",
"text_type",
")",
":",
"seed",
"=",
"seed",
".",
"encode",
"(",
"'ascii'",
")",
"return",
"Seed",
"(",
"seed",
")",
"if",
"seed",
"else",
"Seed",
".",
"random",
"(",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
normalize
|
"Normalizes" a hash, converting it into a sequence of integers
(not trits!) suitable for use in signature generation/validation.
The hash is divided up into 3 parts, each of which is "balanced"
(sum of all the values is equal to zero).
|
iota/crypto/signing.py
|
def normalize(hash_):
# type: (Hash) -> List[List[int]]
"""
"Normalizes" a hash, converting it into a sequence of integers
(not trits!) suitable for use in signature generation/validation.
The hash is divided up into 3 parts, each of which is "balanced"
(sum of all the values is equal to zero).
"""
normalized = []
source = hash_.as_integers()
chunk_size = 27
for i in range(Hash.LEN // chunk_size):
start = i * chunk_size
stop = start + chunk_size
chunk = source[start:stop]
chunk_sum = sum(chunk)
while chunk_sum > 0:
chunk_sum -= 1
for j in range(chunk_size):
if chunk[j] > -13:
chunk[j] -= 1
break
while chunk_sum < 0:
chunk_sum += 1
for j in range(chunk_size):
if chunk[j] < 13:
chunk[j] += 1
break
normalized.append(chunk)
return normalized
|
def normalize(hash_):
# type: (Hash) -> List[List[int]]
"""
"Normalizes" a hash, converting it into a sequence of integers
(not trits!) suitable for use in signature generation/validation.
The hash is divided up into 3 parts, each of which is "balanced"
(sum of all the values is equal to zero).
"""
normalized = []
source = hash_.as_integers()
chunk_size = 27
for i in range(Hash.LEN // chunk_size):
start = i * chunk_size
stop = start + chunk_size
chunk = source[start:stop]
chunk_sum = sum(chunk)
while chunk_sum > 0:
chunk_sum -= 1
for j in range(chunk_size):
if chunk[j] > -13:
chunk[j] -= 1
break
while chunk_sum < 0:
chunk_sum += 1
for j in range(chunk_size):
if chunk[j] < 13:
chunk[j] += 1
break
normalized.append(chunk)
return normalized
|
[
"Normalizes",
"a",
"hash",
"converting",
"it",
"into",
"a",
"sequence",
"of",
"integers",
"(",
"not",
"trits!",
")",
"suitable",
"for",
"use",
"in",
"signature",
"generation",
"/",
"validation",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/signing.py#L25-L62
|
[
"def",
"normalize",
"(",
"hash_",
")",
":",
"# type: (Hash) -> List[List[int]]",
"normalized",
"=",
"[",
"]",
"source",
"=",
"hash_",
".",
"as_integers",
"(",
")",
"chunk_size",
"=",
"27",
"for",
"i",
"in",
"range",
"(",
"Hash",
".",
"LEN",
"//",
"chunk_size",
")",
":",
"start",
"=",
"i",
"*",
"chunk_size",
"stop",
"=",
"start",
"+",
"chunk_size",
"chunk",
"=",
"source",
"[",
"start",
":",
"stop",
"]",
"chunk_sum",
"=",
"sum",
"(",
"chunk",
")",
"while",
"chunk_sum",
">",
"0",
":",
"chunk_sum",
"-=",
"1",
"for",
"j",
"in",
"range",
"(",
"chunk_size",
")",
":",
"if",
"chunk",
"[",
"j",
"]",
">",
"-",
"13",
":",
"chunk",
"[",
"j",
"]",
"-=",
"1",
"break",
"while",
"chunk_sum",
"<",
"0",
":",
"chunk_sum",
"+=",
"1",
"for",
"j",
"in",
"range",
"(",
"chunk_size",
")",
":",
"if",
"chunk",
"[",
"j",
"]",
"<",
"13",
":",
"chunk",
"[",
"j",
"]",
"+=",
"1",
"break",
"normalized",
".",
"append",
"(",
"chunk",
")",
"return",
"normalized"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
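The balancing property described above is easy to check directly. The all-'A' hash below is an arbitrary, syntactically valid 81-tryte value chosen only so the chunks start out unbalanced; the top-level Hash export from the iota package is assumed.

from iota import Hash
from iota.crypto.signing import normalize

h = Hash(b'A' * 81)
chunks = normalize(h)

print(len(chunks))                       # 3 chunks of 27 values each
print([sum(chunk) for chunk in chunks])  # [0, 0, 0]; every chunk is balanced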
|
test
|
validate_signature_fragments
|
Returns whether a sequence of signature fragments is valid.
:param fragments:
Sequence of signature fragments (usually
:py:class:`iota.transaction.Fragment` instances).
:param hash_:
Hash used to generate the signature fragments (usually a
:py:class:`iota.transaction.BundleHash` instance).
:param public_key:
The public key value used to verify the signature digest (usually a
:py:class:`iota.types.Address` instance).
:param sponge_type:
The class used to create the cryptographic sponge (i.e., Curl or Kerl).
|
iota/crypto/signing.py
|
def validate_signature_fragments(
fragments,
hash_,
public_key,
sponge_type=Kerl,
):
# type: (Sequence[TryteString], Hash, TryteString, type) -> bool
"""
Returns whether a sequence of signature fragments is valid.
:param fragments:
Sequence of signature fragments (usually
:py:class:`iota.transaction.Fragment` instances).
:param hash_:
Hash used to generate the signature fragments (usually a
:py:class:`iota.transaction.BundleHash` instance).
:param public_key:
The public key value used to verify the signature digest (usually a
:py:class:`iota.types.Address` instance).
:param sponge_type:
The class used to create the cryptographic sponge (i.e., Curl or Kerl).
"""
checksum = [0] * (HASH_LENGTH * len(fragments))
normalized_hash = normalize(hash_)
for i, fragment in enumerate(fragments):
outer_sponge = sponge_type()
# If there are more than 3 iterations, loop back around to the
# start.
normalized_chunk = normalized_hash[i % len(normalized_hash)]
buffer = []
for j, hash_trytes in enumerate(fragment.iter_chunks(Hash.LEN)):
buffer = hash_trytes.as_trits() # type: List[int]
inner_sponge = sponge_type()
# Note the sign flip compared to
# :py:class:`SignatureFragmentGenerator`.
for _ in range(13 + normalized_chunk[j]):
inner_sponge.reset()
inner_sponge.absorb(buffer)
inner_sponge.squeeze(buffer)
outer_sponge.absorb(buffer)
outer_sponge.squeeze(buffer)
checksum[i * HASH_LENGTH:(i + 1) * HASH_LENGTH] = buffer
actual_public_key = [0] * HASH_LENGTH
addy_sponge = sponge_type()
addy_sponge.absorb(checksum)
addy_sponge.squeeze(actual_public_key)
return actual_public_key == public_key.as_trits()
|
def validate_signature_fragments(
fragments,
hash_,
public_key,
sponge_type=Kerl,
):
# type: (Sequence[TryteString], Hash, TryteString, type) -> bool
"""
Returns whether a sequence of signature fragments is valid.
:param fragments:
Sequence of signature fragments (usually
:py:class:`iota.transaction.Fragment` instances).
:param hash_:
Hash used to generate the signature fragments (usually a
:py:class:`iota.transaction.BundleHash` instance).
:param public_key:
The public key value used to verify the signature digest (usually a
:py:class:`iota.types.Address` instance).
:param sponge_type:
The class used to create the cryptographic sponge (i.e., Curl or Kerl).
"""
checksum = [0] * (HASH_LENGTH * len(fragments))
normalized_hash = normalize(hash_)
for i, fragment in enumerate(fragments):
outer_sponge = sponge_type()
# If there are more than 3 iterations, loop back around to the
# start.
normalized_chunk = normalized_hash[i % len(normalized_hash)]
buffer = []
for j, hash_trytes in enumerate(fragment.iter_chunks(Hash.LEN)):
buffer = hash_trytes.as_trits() # type: List[int]
inner_sponge = sponge_type()
# Note the sign flip compared to
# :py:class:`SignatureFragmentGenerator`.
for _ in range(13 + normalized_chunk[j]):
inner_sponge.reset()
inner_sponge.absorb(buffer)
inner_sponge.squeeze(buffer)
outer_sponge.absorb(buffer)
outer_sponge.squeeze(buffer)
checksum[i * HASH_LENGTH:(i + 1) * HASH_LENGTH] = buffer
actual_public_key = [0] * HASH_LENGTH
addy_sponge = sponge_type()
addy_sponge.absorb(checksum)
addy_sponge.squeeze(actual_public_key)
return actual_public_key == public_key.as_trits()
|
[
"Returns",
"whether",
"a",
"sequence",
"of",
"signature",
"fragments",
"is",
"valid",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/signing.py#L403-L460
|
[
"def",
"validate_signature_fragments",
"(",
"fragments",
",",
"hash_",
",",
"public_key",
",",
"sponge_type",
"=",
"Kerl",
",",
")",
":",
"# type: (Sequence[TryteString], Hash, TryteString, type) -> bool",
"checksum",
"=",
"[",
"0",
"]",
"*",
"(",
"HASH_LENGTH",
"*",
"len",
"(",
"fragments",
")",
")",
"normalized_hash",
"=",
"normalize",
"(",
"hash_",
")",
"for",
"i",
",",
"fragment",
"in",
"enumerate",
"(",
"fragments",
")",
":",
"outer_sponge",
"=",
"sponge_type",
"(",
")",
"# If there are more than 3 iterations, loop back around to the",
"# start.",
"normalized_chunk",
"=",
"normalized_hash",
"[",
"i",
"%",
"len",
"(",
"normalized_hash",
")",
"]",
"buffer",
"=",
"[",
"]",
"for",
"j",
",",
"hash_trytes",
"in",
"enumerate",
"(",
"fragment",
".",
"iter_chunks",
"(",
"Hash",
".",
"LEN",
")",
")",
":",
"buffer",
"=",
"hash_trytes",
".",
"as_trits",
"(",
")",
"# type: List[int]",
"inner_sponge",
"=",
"sponge_type",
"(",
")",
"# Note the sign flip compared to",
"# :py;class:`SignatureFragmentGenerator`.",
"for",
"_",
"in",
"range",
"(",
"13",
"+",
"normalized_chunk",
"[",
"j",
"]",
")",
":",
"inner_sponge",
".",
"reset",
"(",
")",
"inner_sponge",
".",
"absorb",
"(",
"buffer",
")",
"inner_sponge",
".",
"squeeze",
"(",
"buffer",
")",
"outer_sponge",
".",
"absorb",
"(",
"buffer",
")",
"outer_sponge",
".",
"squeeze",
"(",
"buffer",
")",
"checksum",
"[",
"i",
"*",
"HASH_LENGTH",
":",
"(",
"i",
"+",
"1",
")",
"*",
"HASH_LENGTH",
"]",
"=",
"buffer",
"actual_public_key",
"=",
"[",
"0",
"]",
"*",
"HASH_LENGTH",
"addy_sponge",
"=",
"sponge_type",
"(",
")",
"addy_sponge",
".",
"absorb",
"(",
"checksum",
")",
"addy_sponge",
".",
"squeeze",
"(",
"actual_public_key",
")",
"return",
"actual_public_key",
"==",
"public_key",
".",
"as_trits",
"(",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
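A hedged smoke-test of the validator: the inputs below are dummy all-'9' values of the right shape (a 2187-tryte fragment, an 81-tryte bundle hash and address), so validation is expected to fail. The top-level Fragment and BundleHash exports from the iota package are assumed.

from iota import Address, BundleHash, Fragment
from iota.crypto.signing import validate_signature_fragments

fragments = [Fragment(b'9' * 2187)]
bundle_hash = BundleHash(b'9' * 81)
address = Address(b'9' * 81)

# Not a real signature, so this should print False.
print(validate_signature_fragments(fragments, bundle_hash, address))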
|
test
|
KeyGenerator.get_key
|
Generates a single key.
:param index:
The key index.
:param iterations:
Number of transform iterations to apply to the key, also
known as security level.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
|
iota/crypto/signing.py
|
def get_key(self, index, iterations):
# type: (int, int) -> PrivateKey
"""
Generates a single key.
:param index:
The key index.
:param iterations:
Number of transform iterations to apply to the key, also
known as security level.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
"""
return (
self.get_keys(
start=index,
count=1,
step=1,
iterations=iterations,
)[0]
)
|
def get_key(self, index, iterations):
# type: (int, int) -> PrivateKey
"""
Generates a single key.
:param index:
The key index.
:param iterations:
Number of transform iterations to apply to the key, also
known as security level.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
"""
return (
self.get_keys(
start=index,
count=1,
step=1,
iterations=iterations,
)[0]
)
|
[
"Generates",
"a",
"single",
"key",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/signing.py#L76-L99
|
[
"def",
"get_key",
"(",
"self",
",",
"index",
",",
"iterations",
")",
":",
"# type: (int, int) -> PrivateKey",
"return",
"(",
"self",
".",
"get_keys",
"(",
"start",
"=",
"index",
",",
"count",
"=",
"1",
",",
"step",
"=",
"1",
",",
"iterations",
"=",
"iterations",
",",
")",
"[",
"0",
"]",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
KeyGenerator.get_key_for
|
Generates the key associated with the specified address.
Note that this method will generate the wrong key if the input
address was generated from a different key!
|
iota/crypto/signing.py
|
def get_key_for(self, address):
"""
Generates the key associated with the specified address.
Note that this method will generate the wrong key if the input
address was generated from a different key!
"""
return self.get_key(
index=address.key_index,
iterations=address.security_level,
)
|
def get_key_for(self, address):
"""
Generates the key associated with the specified address.
Note that this method will generate the wrong key if the input
address was generated from a different key!
"""
return self.get_key(
index=address.key_index,
iterations=address.security_level,
)
|
[
"Generates",
"the",
"key",
"associated",
"with",
"the",
"specified",
"address",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/signing.py#L101-L111
|
[
"def",
"get_key_for",
"(",
"self",
",",
"address",
")",
":",
"return",
"self",
".",
"get_key",
"(",
"index",
"=",
"address",
".",
"key_index",
",",
"iterations",
"=",
"address",
".",
"security_level",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
KeyGenerator.get_keys
|
Generates and returns one or more keys at the specified
index(es).
This is a one-time operation; if you want to create lots of keys
across multiple contexts, consider invoking
:py:meth:`create_iterator` and sharing the resulting generator
object instead.
Warning: This method may take awhile to run if the starting
index and/or the number of requested keys is a large number!
:param start:
Starting index.
Must be >= 0.
:param count:
Number of keys to generate.
Must be > 0.
:param step:
Number of indexes to advance after each key.
This may be any non-zero (positive or negative) integer.
:param iterations:
Number of transform iterations to apply to each key, also
known as security level.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
:return:
Always returns a list, even if only one key is generated.
The returned list will contain ``count`` keys, except when
``step * count < start`` (only applies when ``step`` is
negative).
|
iota/crypto/signing.py
|
def get_keys(self, start, count=1, step=1, iterations=1):
# type: (int, int, int, int) -> List[PrivateKey]
"""
Generates and returns one or more keys at the specified
index(es).
This is a one-time operation; if you want to create lots of keys
across multiple contexts, consider invoking
:py:meth:`create_iterator` and sharing the resulting generator
object instead.
Warning: This method may take awhile to run if the starting
index and/or the number of requested keys is a large number!
:param start:
Starting index.
Must be >= 0.
:param count:
Number of keys to generate.
Must be > 0.
:param step:
Number of indexes to advance after each key.
This may be any non-zero (positive or negative) integer.
:param iterations:
Number of transform iterations to apply to each key, also
known as security level.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
:return:
Always returns a list, even if only one key is generated.
The returned list will contain ``count`` keys, except when
``step * count < start`` (only applies when ``step`` is
negative).
"""
if count < 1:
raise with_context(
exc=ValueError('``count`` must be positive.'),
context={
'start': start,
'count': count,
'step': step,
'iterations': iterations,
},
)
if not step:
raise with_context(
exc=ValueError('``step`` must not be zero.'),
context={
'start': start,
'count': count,
'step': step,
'iterations': iterations,
},
)
iterator = self.create_iterator(start, step, iterations)
keys = []
for _ in range(count):
try:
next_key = next(iterator)
except StopIteration:
break
else:
keys.append(next_key)
return keys
|
def get_keys(self, start, count=1, step=1, iterations=1):
# type: (int, int, int, int) -> List[PrivateKey]
"""
Generates and returns one or more keys at the specified
index(es).
This is a one-time operation; if you want to create lots of keys
across multiple contexts, consider invoking
:py:meth:`create_iterator` and sharing the resulting generator
object instead.
Warning: This method may take awhile to run if the starting
index and/or the number of requested keys is a large number!
:param start:
Starting index.
Must be >= 0.
:param count:
Number of keys to generate.
Must be > 0.
:param step:
Number of indexes to advance after each key.
This may be any non-zero (positive or negative) integer.
:param iterations:
Number of transform iterations to apply to each key, also
known as security level.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
:return:
Always returns a list, even if only one key is generated.
The returned list will contain ``count`` keys, except when
``step * count < start`` (only applies when ``step`` is
negative).
"""
if count < 1:
raise with_context(
exc=ValueError('``count`` must be positive.'),
context={
'start': start,
'count': count,
'step': step,
'iterations': iterations,
},
)
if not step:
raise with_context(
exc=ValueError('``step`` must not be zero.'),
context={
'start': start,
'count': count,
'step': step,
'iterations': iterations,
},
)
iterator = self.create_iterator(start, step, iterations)
keys = []
for _ in range(count):
try:
next_key = next(iterator)
except StopIteration:
break
else:
keys.append(next_key)
return keys
|
[
"Generates",
"and",
"returns",
"one",
"or",
"more",
"keys",
"at",
"the",
"specified",
"index",
"(",
"es",
")",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/signing.py#L113-L189
|
[
"def",
"get_keys",
"(",
"self",
",",
"start",
",",
"count",
"=",
"1",
",",
"step",
"=",
"1",
",",
"iterations",
"=",
"1",
")",
":",
"# type: (int, int, int, int) -> List[PrivateKey]",
"if",
"count",
"<",
"1",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'``count`` must be positive.'",
")",
",",
"context",
"=",
"{",
"'start'",
":",
"start",
",",
"'count'",
":",
"count",
",",
"'step'",
":",
"step",
",",
"'iterations'",
":",
"iterations",
",",
"}",
",",
")",
"if",
"not",
"step",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'``step`` must not be zero.'",
")",
",",
"context",
"=",
"{",
"'start'",
":",
"start",
",",
"'count'",
":",
"count",
",",
"'step'",
":",
"step",
",",
"'iterations'",
":",
"iterations",
",",
"}",
",",
")",
"iterator",
"=",
"self",
".",
"create_iterator",
"(",
"start",
",",
"step",
",",
"iterations",
")",
"keys",
"=",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"count",
")",
":",
"try",
":",
"next_key",
"=",
"next",
"(",
"iterator",
")",
"except",
"StopIteration",
":",
"break",
"else",
":",
"keys",
".",
"append",
"(",
"next_key",
")",
"return",
"keys"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
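A short sketch of batch key generation. It assumes KeyGenerator is constructed directly from a Seed and that Seed lives in iota.crypto.types; the throwaway random seed is used only for illustration.

from iota.crypto.signing import KeyGenerator
from iota.crypto.types import Seed

generator = KeyGenerator(Seed.random())

# Private keys for indexes 0, 1 and 2 at security level 2.
keys = generator.get_keys(start=0, count=3, step=1, iterations=2)
print(len(keys))  # 3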
|
test
|
KeyGenerator.create_iterator
|
Creates a generator that can be used to progressively generate
new keys.
:param start:
Starting index.
Warning: This method may take awhile to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each key.
This value can be negative; the generator will exit if it
reaches an index < 0.
Warning: The generator may take awhile to advance between
iterations if ``step`` is a large number!
:param security_level:
Number of _transform iterations to apply to each key.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
|
iota/crypto/signing.py
|
def create_iterator(self, start=0, step=1, security_level=1):
# type: (int, int, int) -> KeyIterator
"""
Creates a generator that can be used to progressively generate
new keys.
:param start:
Starting index.
Warning: This method may take awhile to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each key.
This value can be negative; the generator will exit if it
reaches an index < 0.
Warning: The generator may take awhile to advance between
iterations if ``step`` is a large number!
:param security_level:
Number of _transform iterations to apply to each key.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
"""
return KeyIterator(self.seed, start, step, security_level)
|
def create_iterator(self, start=0, step=1, security_level=1):
# type: (int, int, int) -> KeyIterator
"""
Creates a generator that can be used to progressively generate
new keys.
:param start:
Starting index.
Warning: This method may take awhile to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each key.
This value can be negative; the generator will exit if it
reaches an index < 0.
Warning: The generator may take awhile to advance between
iterations if ``step`` is a large number!
:param security_level:
Number of _transform iterations to apply to each key.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
"""
return KeyIterator(self.seed, start, step, security_level)
|
[
"Creates",
"a",
"generator",
"that",
"can",
"be",
"used",
"to",
"progressively",
"generate",
"new",
"keys",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/signing.py#L191-L219
|
[
"def",
"create_iterator",
"(",
"self",
",",
"start",
"=",
"0",
",",
"step",
"=",
"1",
",",
"security_level",
"=",
"1",
")",
":",
"# type: (int, int, int) -> KeyIterator",
"return",
"KeyIterator",
"(",
"self",
".",
"seed",
",",
"start",
",",
"step",
",",
"security_level",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
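When an open-ended stream of keys is needed, the iterator form avoids requesting them in fixed batches: keys are produced lazily, one per next() call, advancing the index by ``step`` each time. Same assumptions as the get_keys() sketch above.

from iota.crypto.signing import KeyGenerator
from iota.crypto.types import Seed

iterator = KeyGenerator(Seed.random()).create_iterator(
    start=0,
    step=1,
    security_level=1,
)

first_key = next(iterator)   # key for index 0
second_key = next(iterator)  # key for index 1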
|
test
|
KeyIterator._create_sponge
|
Prepares the hash sponge for the generator.
|
iota/crypto/signing.py
|
def _create_sponge(self, index):
# type: (int) -> Kerl
"""
Prepares the hash sponge for the generator.
"""
seed = self.seed_as_trits[:]
sponge = Kerl()
sponge.absorb(add_trits(seed, trits_from_int(index)))
# Squeeze all of the trits out of the sponge and re-absorb them.
# Note that the sponge transforms several times per operation,
# so this sequence is not as redundant as it looks at first
# glance.
sponge.squeeze(seed)
sponge.reset()
sponge.absorb(seed)
return sponge
|
def _create_sponge(self, index):
# type: (int) -> Kerl
"""
Prepares the hash sponge for the generator.
"""
seed = self.seed_as_trits[:]
sponge = Kerl()
sponge.absorb(add_trits(seed, trits_from_int(index)))
# Squeeze all of the trits out of the sponge and re-absorb them.
# Note that the sponge transforms several times per operation,
# so this sequence is not as redundant as it looks at first
# glance.
sponge.squeeze(seed)
sponge.reset()
sponge.absorb(seed)
return sponge
|
[
"Prepares",
"the",
"hash",
"sponge",
"for",
"the",
"generator",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/signing.py#L316-L334
|
[
"def",
"_create_sponge",
"(",
"self",
",",
"index",
")",
":",
"# type: (int) -> Kerl",
"seed",
"=",
"self",
".",
"seed_as_trits",
"[",
":",
"]",
"sponge",
"=",
"Kerl",
"(",
")",
"sponge",
".",
"absorb",
"(",
"add_trits",
"(",
"seed",
",",
"trits_from_int",
"(",
"index",
")",
")",
")",
"# Squeeze all of the trits out of the sponge and re-absorb them.",
"# Note that the sponge transforms several times per operation,",
"# so this sequence is not as redundant as it looks at first",
"# glance.",
"sponge",
".",
"squeeze",
"(",
"seed",
")",
"sponge",
".",
"reset",
"(",
")",
"sponge",
".",
"absorb",
"(",
"seed",
")",
"return",
"sponge"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
Curl.absorb
|
Absorb trits into the sponge.
:param trits:
Sequence of trits to absorb.
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to absorb. Defaults to ``len(trits)``.
|
iota/crypto/pycurl.py
|
def absorb(self, trits, offset=0, length=None):
# type: (Sequence[int], Optional[int], Optional[int]) -> None
"""
Absorb trits into the sponge.
:param trits:
Sequence of trits to absorb.
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to absorb. Defaults to ``len(trits)``.
"""
pad = ((len(trits) % HASH_LENGTH) or HASH_LENGTH)
trits += [0] * (HASH_LENGTH - pad)
if length is None:
length = len(trits)
if length < 1:
raise with_context(
exc=ValueError('Invalid length passed to ``absorb``.'),
context={
'trits': trits,
'offset': offset,
'length': length,
},
)
# Copy trits from ``trits`` into internal state, one hash at a
# time, transforming internal state in between hashes.
while offset < length:
start = offset
stop = min(start + HASH_LENGTH, length)
# Copy the next hash worth of trits to internal state.
#
# Note that we always copy the trits to the start of the
# state. ``self._state`` is 3 hashes long, but only the
# first hash is "public"; the other 2 are only accessible to
# :py:meth:`_transform`.
self._state[0:stop - start] = trits[start:stop]
# Transform.
self._transform()
# Move on to the next hash.
offset += HASH_LENGTH
|
def absorb(self, trits, offset=0, length=None):
# type: (Sequence[int], Optional[int], Optional[int]) -> None
"""
Absorb trits into the sponge.
:param trits:
Sequence of trits to absorb.
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to absorb. Defaults to ``len(trits)``.
"""
pad = ((len(trits) % HASH_LENGTH) or HASH_LENGTH)
trits += [0] * (HASH_LENGTH - pad)
if length is None:
length = len(trits)
if length < 1:
raise with_context(
exc=ValueError('Invalid length passed to ``absorb``.'),
context={
'trits': trits,
'offset': offset,
'length': length,
},
)
# Copy trits from ``trits`` into internal state, one hash at a
# time, transforming internal state in between hashes.
while offset < length:
start = offset
stop = min(start + HASH_LENGTH, length)
# Copy the next hash worth of trits to internal state.
#
# Note that we always copy the trits to the start of the
# state. ``self._state`` is 3 hashes long, but only the
# first hash is "public"; the other 2 are only accessible to
# :py:meth:`_transform`.
self._state[0:stop - start] = trits[start:stop]
# Transform.
self._transform()
# Move on to the next hash.
offset += HASH_LENGTH
|
[
"Absorb",
"trits",
"into",
"the",
"sponge",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/pycurl.py#L64-L113
|
[
"def",
"absorb",
"(",
"self",
",",
"trits",
",",
"offset",
"=",
"0",
",",
"length",
"=",
"None",
")",
":",
"# type: (Sequence[int], Optional[int], Optional[int]) -> None",
"pad",
"=",
"(",
"(",
"len",
"(",
"trits",
")",
"%",
"HASH_LENGTH",
")",
"or",
"HASH_LENGTH",
")",
"trits",
"+=",
"[",
"0",
"]",
"*",
"(",
"HASH_LENGTH",
"-",
"pad",
")",
"if",
"length",
"is",
"None",
":",
"length",
"=",
"len",
"(",
"trits",
")",
"if",
"length",
"<",
"1",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'Invalid length passed to ``absorb``.'",
")",
",",
"context",
"=",
"{",
"'trits'",
":",
"trits",
",",
"'offset'",
":",
"offset",
",",
"'length'",
":",
"length",
",",
"}",
",",
")",
"# Copy trits from ``trits`` into internal state, one hash at a",
"# time, transforming internal state in between hashes.",
"while",
"offset",
"<",
"length",
":",
"start",
"=",
"offset",
"stop",
"=",
"min",
"(",
"start",
"+",
"HASH_LENGTH",
",",
"length",
")",
"# Copy the next hash worth of trits to internal state.",
"#",
"# Note that we always copy the trits to the start of the",
"# state. ``self._state`` is 3 hashes long, but only the",
"# first hash is \"public\"; the other 2 are only accessible to",
"# :py:meth:`_transform`.",
"self",
".",
"_state",
"[",
"0",
":",
"stop",
"-",
"start",
"]",
"=",
"trits",
"[",
"start",
":",
"stop",
"]",
"# Transform.",
"self",
".",
"_transform",
"(",
")",
"# Move on to the next hash.",
"offset",
"+=",
"HASH_LENGTH"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
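A minimal absorb/squeeze round-trip with the pure-Python Curl sponge; the all-zero input trits are used only to keep the example self-contained.

from iota.crypto.pycurl import Curl, HASH_LENGTH

sponge = Curl()

# Absorb exactly one hash worth of trits (HASH_LENGTH == 243).
sponge.absorb([0] * HASH_LENGTH)

# Squeeze one hash back out; ``digest`` is filled in place.
digest = []
sponge.squeeze(digest)
print(len(digest))  # 243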
|
test
|
Curl.squeeze
|
Squeeze trits from the sponge.
:param trits:
Sequence that the squeezed trits will be copied to.
Note: this object will be modified!
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to squeeze. Defaults to ``HASH_LENGTH``.
|
iota/crypto/pycurl.py
|
def squeeze(self, trits, offset=0, length=HASH_LENGTH):
# type: (MutableSequence[int], Optional[int], Optional[int]) -> None
"""
Squeeze trits from the sponge.
:param trits:
Sequence that the squeezed trits will be copied to.
Note: this object will be modified!
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to squeeze. Defaults to ``HASH_LENGTH``.
"""
# Squeeze is kind of like the opposite of absorb; it copies
# trits from internal state to the ``trits`` parameter, one hash
# at a time, and transforming internal state in between hashes.
#
# However, only the first hash of the state is "public", so we
# can simplify the implementation somewhat.
# Ensure length can be mod by HASH_LENGTH
if length % HASH_LENGTH != 0:
raise with_context(
exc=ValueError('Invalid length passed to ``squeeze``.'),
context={
'trits': trits,
'offset': offset,
'length': length,
})
# Ensure that ``trits`` can hold at least one hash worth of
# trits.
trits.extend([0] * max(0, length - len(trits)))
# Check trits with offset can handle hash length
if len(trits) - offset < HASH_LENGTH:
raise with_context(
exc=ValueError('Invalid offset passed to ``squeeze``.'),
context={
'trits': trits,
'offset': offset,
'length': length
},
)
while length >= HASH_LENGTH:
# Copy exactly one hash.
trits[offset:offset + HASH_LENGTH] = self._state[0:HASH_LENGTH]
# One hash worth of trits copied; now transform.
self._transform()
offset += HASH_LENGTH
length -= HASH_LENGTH
|
def squeeze(self, trits, offset=0, length=HASH_LENGTH):
# type: (MutableSequence[int], Optional[int], Optional[int]) -> None
"""
Squeeze trits from the sponge.
:param trits:
Sequence that the squeezed trits will be copied to.
Note: this object will be modified!
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to squeeze. Defaults to ``HASH_LENGTH``.
"""
# Squeeze is kind of like the opposite of absorb; it copies
# trits from internal state to the ``trits`` parameter, one hash
# at a time, and transforming internal state in between hashes.
#
# However, only the first hash of the state is "public", so we
# can simplify the implementation somewhat.
# Ensure length can be mod by HASH_LENGTH
if length % HASH_LENGTH != 0:
raise with_context(
exc=ValueError('Invalid length passed to ``squeeze``.'),
context={
'trits': trits,
'offset': offset,
'length': length,
})
# Ensure that ``trits`` can hold at least one hash worth of
# trits.
trits.extend([0] * max(0, length - len(trits)))
# Check trits with offset can handle hash length
if len(trits) - offset < HASH_LENGTH:
raise with_context(
exc=ValueError('Invalid offset passed to ``squeeze``.'),
context={
'trits': trits,
'offset': offset,
'length': length
},
)
while length >= HASH_LENGTH:
# Copy exactly one hash.
trits[offset:offset + HASH_LENGTH] = self._state[0:HASH_LENGTH]
# One hash worth of trits copied; now transform.
self._transform()
offset += HASH_LENGTH
length -= HASH_LENGTH
|
[
"Squeeze",
"trits",
"from",
"the",
"sponge",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/pycurl.py#L115-L172
|
[
"def",
"squeeze",
"(",
"self",
",",
"trits",
",",
"offset",
"=",
"0",
",",
"length",
"=",
"HASH_LENGTH",
")",
":",
"# type: (MutableSequence[int], Optional[int], Optional[int]) -> None",
"# Squeeze is kind of like the opposite of absorb; it copies",
"# trits from internal state to the ``trits`` parameter, one hash",
"# at a time, and transforming internal state in between hashes.",
"#",
"# However, only the first hash of the state is \"public\", so we",
"# can simplify the implementation somewhat.",
"# Ensure length can be mod by HASH_LENGTH",
"if",
"length",
"%",
"HASH_LENGTH",
"!=",
"0",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'Invalid length passed to ``squeeze`.'",
")",
",",
"context",
"=",
"{",
"'trits'",
":",
"trits",
",",
"'offset'",
":",
"offset",
",",
"'length'",
":",
"length",
",",
"}",
")",
"# Ensure that ``trits`` can hold at least one hash worth of",
"# trits.",
"trits",
".",
"extend",
"(",
"[",
"0",
"]",
"*",
"max",
"(",
"0",
",",
"length",
"-",
"len",
"(",
"trits",
")",
")",
")",
"# Check trits with offset can handle hash length",
"if",
"len",
"(",
"trits",
")",
"-",
"offset",
"<",
"HASH_LENGTH",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'Invalid offset passed to ``squeeze``.'",
")",
",",
"context",
"=",
"{",
"'trits'",
":",
"trits",
",",
"'offset'",
":",
"offset",
",",
"'length'",
":",
"length",
"}",
",",
")",
"while",
"length",
">=",
"HASH_LENGTH",
":",
"# Copy exactly one hash.",
"trits",
"[",
"offset",
":",
"offset",
"+",
"HASH_LENGTH",
"]",
"=",
"self",
".",
"_state",
"[",
"0",
":",
"HASH_LENGTH",
"]",
"# One hash worth of trits copied; now transform.",
"self",
".",
"_transform",
"(",
")",
"offset",
"+=",
"HASH_LENGTH",
"length",
"-=",
"HASH_LENGTH"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
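squeeze() accepts any multiple of HASH_LENGTH and rejects everything else, as the validation above shows:

from iota.crypto.pycurl import Curl, HASH_LENGTH

sponge = Curl()
sponge.absorb([0] * HASH_LENGTH)

out = []
sponge.squeeze(out, length=2 * HASH_LENGTH)  # two hashes in one call
print(len(out))  # 486

try:
    sponge.squeeze([], length=100)  # not a multiple of HASH_LENGTH
except ValueError as e:
    print(e)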
|
test
|
Curl._transform
|
Transforms internal state.
|
iota/crypto/pycurl.py
|
def _transform(self):
# type: () -> None
"""
Transforms internal state.
"""
# Copy some values locally so we can avoid global lookups in the
# inner loop.
#
# References:
#
# - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Local_Variables
state_length = STATE_LENGTH
truth_table = TRUTH_TABLE
# Operate on a copy of ``self._state`` to eliminate dot lookups
# in the inner loop.
#
# References:
#
# - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Avoiding_dots...
# - http://stackoverflow.com/a/2612990/
prev_state = self._state[:]
new_state = prev_state[:]
# Note: This code looks significantly different from the C
# implementation because it has been optimized to limit the
# number of list item lookups (these are relatively slow in
# Python).
index = 0
for _ in range(NUMBER_OF_ROUNDS):
prev_trit = prev_state[index]
for pos in range(state_length):
index += (364 if index < 365 else -365)
new_trit = prev_state[index]
new_state[pos] = truth_table[prev_trit + (3 * new_trit) + 4]
prev_trit = new_trit
prev_state = new_state
new_state = new_state[:]
self._state = new_state
|
def _transform(self):
# type: () -> None
"""
Transforms internal state.
"""
# Copy some values locally so we can avoid global lookups in the
# inner loop.
#
# References:
#
# - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Local_Variables
state_length = STATE_LENGTH
truth_table = TRUTH_TABLE
# Operate on a copy of ``self._state`` to eliminate dot lookups
# in the inner loop.
#
# References:
#
# - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Avoiding_dots...
# - http://stackoverflow.com/a/2612990/
prev_state = self._state[:]
new_state = prev_state[:]
# Note: This code looks significantly different from the C
# implementation because it has been optimized to limit the
# number of list item lookups (these are relatively slow in
# Python).
index = 0
for _ in range(NUMBER_OF_ROUNDS):
prev_trit = prev_state[index]
for pos in range(state_length):
index += (364 if index < 365 else -365)
new_trit = prev_state[index]
new_state[pos] = truth_table[prev_trit + (3 * new_trit) + 4]
prev_trit = new_trit
prev_state = new_state
new_state = new_state[:]
self._state = new_state
|
[
"Transforms",
"internal",
"state",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/pycurl.py#L174-L218
|
[
"def",
"_transform",
"(",
"self",
")",
":",
"# type: () -> None",
"# Copy some values locally so we can avoid global lookups in the",
"# inner loop.",
"#",
"# References:",
"#",
"# - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Local_Variables",
"state_length",
"=",
"STATE_LENGTH",
"truth_table",
"=",
"TRUTH_TABLE",
"# Operate on a copy of ``self._state`` to eliminate dot lookups",
"# in the inner loop.",
"#",
"# References:",
"#",
"# - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Avoiding_dots...",
"# - http://stackoverflow.com/a/2612990/",
"prev_state",
"=",
"self",
".",
"_state",
"[",
":",
"]",
"new_state",
"=",
"prev_state",
"[",
":",
"]",
"# Note: This code looks significantly different from the C",
"# implementation because it has been optimized to limit the",
"# number of list item lookups (these are relatively slow in",
"# Python).",
"index",
"=",
"0",
"for",
"_",
"in",
"range",
"(",
"NUMBER_OF_ROUNDS",
")",
":",
"prev_trit",
"=",
"prev_state",
"[",
"index",
"]",
"for",
"pos",
"in",
"range",
"(",
"state_length",
")",
":",
"index",
"+=",
"(",
"364",
"if",
"index",
"<",
"365",
"else",
"-",
"365",
")",
"new_trit",
"=",
"prev_state",
"[",
"index",
"]",
"new_state",
"[",
"pos",
"]",
"=",
"truth_table",
"[",
"prev_trit",
"+",
"(",
"3",
"*",
"new_trit",
")",
"+",
"4",
"]",
"prev_trit",
"=",
"new_trit",
"prev_state",
"=",
"new_state",
"new_state",
"=",
"new_state",
"[",
":",
"]",
"self",
".",
"_state",
"=",
"new_state"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
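The _transform method in the record above is the permutation at the core of the Curl sponge: absorb() and squeeze() each call it once per HASH_LENGTH chunk of trits. A minimal driving sketch, assuming the Curl class in iota.crypto.pycurl accepts plain lists of trits for absorb() and squeeze() (as the surrounding module code suggests); the input trytes are placeholders:

from iota import TryteString
from iota.crypto.pycurl import Curl, HASH_LENGTH

# Placeholder input; any valid trytes work.
trits_in = TryteString(b'HELLOWORLD').as_trits()

curl = Curl()
curl.absorb(trits_in)            # each absorbed chunk triggers _transform()
digest = [0] * HASH_LENGTH       # squeeze() writes one hash worth of trits here
curl.squeeze(digest)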
test
|
MultisigIota.get_digests
|
Generates one or more key digests from the seed.
Digests are safe to share; use them to generate multisig
addresses.
:param index:
The starting key index.
:param count:
Number of digests to generate.
:param security_level:
Number of iterations to use when generating new addresses.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:return:
Dict with the following items::
{
'digests': List[Digest],
Always contains a list, even if only one digest
was generated.
}
|
iota/multisig/api.py
|
def get_digests(
self,
index=0,
count=1,
security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
):
# type: (int, int, int) -> dict
"""
Generates one or more key digests from the seed.
Digests are safe to share; use them to generate multisig
addresses.
:param index:
The starting key index.
:param count:
Number of digests to generate.
:param security_level:
Number of iterations to use when generating new addresses.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:return:
Dict with the following items::
{
'digests': List[Digest],
Always contains a list, even if only one digest
was generated.
}
"""
return commands.GetDigestsCommand(self.adapter)(
seed=self.seed,
index=index,
count=count,
securityLevel=security_level,
)
|
def get_digests(
self,
index=0,
count=1,
security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
):
# type: (int, int, int) -> dict
"""
Generates one or more key digests from the seed.
Digests are safe to share; use them to generate multisig
addresses.
:param index:
The starting key index.
:param count:
Number of digests to generate.
:param security_level:
Number of iterations to use when generating new addresses.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:return:
Dict with the following items::
{
'digests': List[Digest],
Always contains a list, even if only one digest
was generated.
}
"""
return commands.GetDigestsCommand(self.adapter)(
seed=self.seed,
index=index,
count=count,
securityLevel=security_level,
)
|
[
"Generates",
"one",
"or",
"more",
"key",
"digests",
"from",
"the",
"seed",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/multisig/api.py#L61-L102
|
[
"def",
"get_digests",
"(",
"self",
",",
"index",
"=",
"0",
",",
"count",
"=",
"1",
",",
"security_level",
"=",
"AddressGenerator",
".",
"DEFAULT_SECURITY_LEVEL",
",",
")",
":",
"# type: (int, int, int) -> dict",
"return",
"commands",
".",
"GetDigestsCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"seed",
"=",
"self",
".",
"seed",
",",
"index",
"=",
"index",
",",
"count",
"=",
"count",
",",
"securityLevel",
"=",
"security_level",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
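A minimal usage sketch for the get_digests record above. The node URI and seed are placeholders; the call itself mirrors the documented parameters:

from iota import Seed
from iota.multisig import MultisigIota

api = MultisigIota('http://localhost:14265', seed=Seed(b'COSIGNER9ONE9SEED'))  # placeholders
response = api.get_digests(index=0, count=1, security_level=3)
digest = response['digests'][0]  # safe to share with the other co-signers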
test
|
MultisigIota.get_private_keys
|
Generates one or more private keys from the seed.
As the name implies, private keys should not be shared.
However, in a few cases it may be necessary (e.g., for M-of-N
transactions).
:param index:
The starting key index.
:param count:
Number of keys to generate.
:param security_level:
Number of iterations to use when generating new keys.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:return:
Dict with the following items::
{
'keys': List[PrivateKey],
Always contains a list, even if only one key was
generated.
}
References:
- :py:class:`iota.crypto.signing.KeyGenerator`
- https://github.com/iotaledger/wiki/blob/master/multisigs.md#how-m-of-n-works
|
iota/multisig/api.py
|
def get_private_keys(
self,
index=0,
count=1,
security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
):
# type: (int, int, int) -> dict
"""
Generates one or more private keys from the seed.
As the name implies, private keys should not be shared.
However, in a few cases it may be necessary (e.g., for M-of-N
transactions).
:param index:
The starting key index.
:param count:
Number of keys to generate.
:param security_level:
Number of iterations to use when generating new keys.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:return:
Dict with the following items::
{
'keys': List[PrivateKey],
Always contains a list, even if only one key was
generated.
}
References:
- :py:class:`iota.crypto.signing.KeyGenerator`
- https://github.com/iotaledger/wiki/blob/master/multisigs.md#how-m-of-n-works
"""
return commands.GetPrivateKeysCommand(self.adapter)(
seed=self.seed,
index=index,
count=count,
securityLevel=security_level,
)
|
def get_private_keys(
self,
index=0,
count=1,
security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
):
# type: (int, int, int) -> dict
"""
Generates one or more private keys from the seed.
As the name implies, private keys should not be shared.
However, in a few cases it may be necessary (e.g., for M-of-N
transactions).
:param index:
The starting key index.
:param count:
Number of keys to generate.
:param security_level:
Number of iterations to use when generating new keys.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:return:
Dict with the following items::
{
'keys': List[PrivateKey],
Always contains a list, even if only one key was
generated.
}
References:
- :py:class:`iota.crypto.signing.KeyGenerator`
- https://github.com/iotaledger/wiki/blob/master/multisigs.md#how-m-of-n-works
"""
return commands.GetPrivateKeysCommand(self.adapter)(
seed=self.seed,
index=index,
count=count,
securityLevel=security_level,
)
|
[
"Generates",
"one",
"or",
"more",
"private",
"keys",
"from",
"the",
"seed",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/multisig/api.py#L104-L151
|
[
"def",
"get_private_keys",
"(",
"self",
",",
"index",
"=",
"0",
",",
"count",
"=",
"1",
",",
"security_level",
"=",
"AddressGenerator",
".",
"DEFAULT_SECURITY_LEVEL",
",",
")",
":",
"# type: (int, int, int) -> dict",
"return",
"commands",
".",
"GetPrivateKeysCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"seed",
"=",
"self",
".",
"seed",
",",
"index",
"=",
"index",
",",
"count",
"=",
"count",
",",
"securityLevel",
"=",
"security_level",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
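The same pattern applies to get_private_keys; a brief sketch with placeholder node URI and seed, keeping in mind that the resulting keys must never be shared:

from iota import Seed
from iota.multisig import MultisigIota

api = MultisigIota('http://localhost:14265', seed=Seed(b'COSIGNER9ONE9SEED'))  # placeholders
response = api.get_private_keys(index=0, count=1, security_level=3)
private_key = response['keys'][0]  # keep secret; used later to sign the multisig input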
test
|
MultisigIota.prepare_multisig_transfer
|
Prepares a bundle that authorizes the spending of IOTAs from a
multisig address.
.. note::
This method is used exclusively to spend IOTAs from a
multisig address.
If you want to spend IOTAs from non-multisig addresses, or
if you want to create 0-value transfers (i.e., that don't
require inputs), use
:py:meth:`iota.api.Iota.prepare_transfer` instead.
:param transfers:
Transaction objects to prepare.
.. important::
Must include at least one transaction that spends IOTAs
(i.e., has a nonzero ``value``). If you want to prepare
a bundle that does not spend any IOTAs, use
:py:meth:`iota.api.prepare_transfer` instead.
:param multisig_input:
The multisig address to use as the input for the transfers.
.. note::
This method only supports creating a bundle with a
single multisig input.
If you would like to spend from multiple multisig
addresses in the same bundle, create the
:py:class:`iota.multisig.transaction.ProposedMultisigBundle`
object manually.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If the bundle has no unspent inputs, ``change_address`` is
ignored.
.. important::
Unlike :py:meth:`iota.api.Iota.prepare_transfer`, this
method will NOT generate a change address automatically.
If there are unspent inputs and ``change_address`` is
empty, an exception will be raised.
This is because multisig transactions typically involve
multiple individuals, and it would be unfair to the
participants if we generated a change address
automatically using the seed of whoever happened to run
the ``prepare_multisig_transfer`` method!
.. danger::
Note that this protective measure is not a
substitute for due diligence!
Always verify the details of every transaction in a
bundle (including the change transaction) before
signing the input(s)!
:return:
Dict containing the following values::
{
'trytes': List[TransactionTrytes],
Finalized bundle, as trytes.
The input transactions are not signed.
}
In order to authorize the spending of IOTAs from the multisig
input, you must generate the correct private keys and invoke
the :py:meth:`iota.crypto.types.PrivateKey.sign_input_at`
method for each key, in the correct order.
Once the correct signatures are applied, you can then perform
proof of work (``attachToTangle``) and broadcast the bundle
using :py:meth:`iota.api.Iota.send_trytes`.
|
iota/multisig/api.py
|
def prepare_multisig_transfer(
self,
transfers, # type: Iterable[ProposedTransaction]
multisig_input, # type: MultisigAddress
change_address=None, # type: Optional[Address]
):
# type: (...) -> dict
"""
Prepares a bundle that authorizes the spending of IOTAs from a
multisig address.
.. note::
This method is used exclusively to spend IOTAs from a
multisig address.
If you want to spend IOTAs from non-multisig addresses, or
if you want to create 0-value transfers (i.e., that don't
require inputs), use
:py:meth:`iota.api.Iota.prepare_transfer` instead.
:param transfers:
Transaction objects to prepare.
.. important::
Must include at least one transaction that spends IOTAs
(i.e., has a nonzero ``value``). If you want to prepare
a bundle that does not spend any IOTAs, use
:py:meth:`iota.api.prepare_transfer` instead.
:param multisig_input:
The multisig address to use as the input for the transfers.
.. note::
This method only supports creating a bundle with a
single multisig input.
If you would like to spend from multiple multisig
addresses in the same bundle, create the
:py:class:`iota.multisig.transaction.ProposedMultisigBundle`
object manually.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If the bundle has no unspent inputs, ``change_address`` is
ignored.
.. important::
Unlike :py:meth:`iota.api.Iota.prepare_transfer`, this
method will NOT generate a change address automatically.
If there are unspent inputs and ``change_address`` is
empty, an exception will be raised.
This is because multisig transactions typically involve
multiple individuals, and it would be unfair to the
participants if we generated a change address
automatically using the seed of whoever happened to run
the ``prepare_multisig_transfer`` method!
.. danger::
Note that this protective measure is not a
substitute for due diligence!
Always verify the details of every transaction in a
bundle (including the change transaction) before
signing the input(s)!
:return:
Dict containing the following values::
{
'trytes': List[TransactionTrytes],
Finalized bundle, as trytes.
The input transactions are not signed.
}
In order to authorize the spending of IOTAs from the multisig
input, you must generate the correct private keys and invoke
the :py:meth:`iota.crypto.types.PrivateKey.sign_input_at`
method for each key, in the correct order.
Once the correct signatures are applied, you can then perform
proof of work (``attachToTangle``) and broadcast the bundle
using :py:meth:`iota.api.Iota.send_trytes`.
"""
return commands.PrepareMultisigTransferCommand(self.adapter)(
changeAddress=change_address,
multisigInput=multisig_input,
transfers=transfers,
)
|
def prepare_multisig_transfer(
self,
transfers, # type: Iterable[ProposedTransaction]
multisig_input, # type: MultisigAddress
change_address=None, # type: Optional[Address]
):
# type: (...) -> dict
"""
Prepares a bundle that authorizes the spending of IOTAs from a
multisig address.
.. note::
This method is used exclusively to spend IOTAs from a
multisig address.
If you want to spend IOTAs from non-multisig addresses, or
if you want to create 0-value transfers (i.e., that don't
require inputs), use
:py:meth:`iota.api.Iota.prepare_transfer` instead.
:param transfers:
Transaction objects to prepare.
.. important::
Must include at least one transaction that spends IOTAs
(i.e., has a nonzero ``value``). If you want to prepare
a bundle that does not spend any IOTAs, use
:py:meth:`iota.api.prepare_transfer` instead.
:param multisig_input:
The multisig address to use as the input for the transfers.
.. note::
This method only supports creating a bundle with a
single multisig input.
If you would like to spend from multiple multisig
addresses in the same bundle, create the
:py:class:`iota.multisig.transaction.ProposedMultisigBundle`
object manually.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If the bundle has no unspent inputs, ``change_address`` is
ignored.
.. important::
Unlike :py:meth:`iota.api.Iota.prepare_transfer`, this
method will NOT generate a change address automatically.
If there are unspent inputs and ``change_address`` is
empty, an exception will be raised.
This is because multisig transactions typically involve
multiple individuals, and it would be unfair to the
participants if we generated a change address
automatically using the seed of whoever happened to run
the ``prepare_multisig_transfer`` method!
.. danger::
Note that this protective measure is not a
substitute for due diligence!
Always verify the details of every transaction in a
bundle (including the change transaction) before
signing the input(s)!
:return:
Dict containing the following values::
{
'trytes': List[TransactionTrytes],
Finalized bundle, as trytes.
The input transactions are not signed.
}
In order to authorize the spending of IOTAs from the multisig
input, you must generate the correct private keys and invoke
the :py:meth:`iota.crypto.types.PrivateKey.sign_input_at`
method for each key, in the correct order.
Once the correct signatures are applied, you can then perform
proof of work (``attachToTangle``) and broadcast the bundle
using :py:meth:`iota.api.Iota.send_trytes`.
"""
return commands.PrepareMultisigTransferCommand(self.adapter)(
changeAddress=change_address,
multisigInput=multisig_input,
transfers=transfers,
)
|
[
"Prepares",
"a",
"bundle",
"that",
"authorizes",
"the",
"spending",
"of",
"IOTAs",
"from",
"a",
"multisig",
"address",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/multisig/api.py#L153-L243
|
[
"def",
"prepare_multisig_transfer",
"(",
"self",
",",
"transfers",
",",
"# type: Iterable[ProposedTransaction]",
"multisig_input",
",",
"# type: MultisigAddress",
"change_address",
"=",
"None",
",",
"# type: Optional[Address]",
")",
":",
"# type: (...) -> dict",
"return",
"commands",
".",
"PrepareMultisigTransferCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"changeAddress",
"=",
"change_address",
",",
"multisigInput",
"=",
"multisig_input",
",",
"transfers",
"=",
"transfers",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
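A hedged sketch of the spending flow described in the record above. The digests, addresses, and node URI are placeholders, and the multisig input is assumed to have been built beforehand from the co-signers' exchanged digests:

from iota import Address, ProposedTransaction, Seed
from iota.multisig import MultisigIota

api = MultisigIota('http://localhost:14265', seed=Seed(b'COSIGNER9ONE9SEED'))  # placeholders

# Assumed prior step: digest_1 and digest_2 were exchanged by the co-signers;
# create_multisig_address is assumed to return {'address': MultisigAddress}.
multisig_input = api.create_multisig_address(digests=[digest_1, digest_2])['address']

response = api.prepare_multisig_transfer(
    transfers=[
        ProposedTransaction(address=Address(b'RECEIVER9ADDRESS'), value=42),
    ],
    multisig_input=multisig_input,
    change_address=Address(b'CHANGE9ADDRESS'),  # required whenever there is change
)
unsigned_trytes = response['trytes']  # still needs signatures, PoW, and broadcast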
test
|
add_trits
|
Adds two sequences of trits together.
The result is a list of trits equal in length to the longer of the
two sequences.
.. note::
Overflow is possible.
For example, ``add_trits([1], [1])`` returns ``[-1]``.
|
iota/trits.py
|
def add_trits(left, right):
# type: (Sequence[int], Sequence[int]) -> List[int]
"""
Adds two sequences of trits together.
The result is a list of trits equal in length to the longer of the
two sequences.
.. note::
Overflow is possible.
For example, ``add_trits([1], [1])`` returns ``[-1]``.
"""
target_len = max(len(left), len(right))
res = [0] * target_len
left += [0] * (target_len - len(left))
right += [0] * (target_len - len(right))
carry = 0
for i in range(len(res)):
res[i], carry = _full_add_trits(left[i], right[i], carry)
return res
|
def add_trits(left, right):
# type: (Sequence[int], Sequence[int]) -> List[int]
"""
Adds two sequences of trits together.
The result is a list of trits equal in length to the longer of the
two sequences.
.. note::
Overflow is possible.
For example, ``add_trits([1], [1])`` returns ``[-1]``.
"""
target_len = max(len(left), len(right))
res = [0] * target_len
left += [0] * (target_len - len(left))
right += [0] * (target_len - len(right))
carry = 0
for i in range(len(res)):
res[i], carry = _full_add_trits(left[i], right[i], carry)
return res
|
[
"Adds",
"two",
"sequences",
"of",
"trits",
"together",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/trits.py#L21-L44
|
[
"def",
"add_trits",
"(",
"left",
",",
"right",
")",
":",
"# type: (Sequence[int], Sequence[int]) -> List[int]",
"target_len",
"=",
"max",
"(",
"len",
"(",
"left",
")",
",",
"len",
"(",
"right",
")",
")",
"res",
"=",
"[",
"0",
"]",
"*",
"target_len",
"left",
"+=",
"[",
"0",
"]",
"*",
"(",
"target_len",
"-",
"len",
"(",
"left",
")",
")",
"right",
"+=",
"[",
"0",
"]",
"*",
"(",
"target_len",
"-",
"len",
"(",
"right",
")",
")",
"carry",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"res",
")",
")",
":",
"res",
"[",
"i",
"]",
",",
"carry",
"=",
"_full_add_trits",
"(",
"left",
"[",
"i",
"]",
",",
"right",
"[",
"i",
"]",
",",
"carry",
")",
"return",
"res"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
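A small worked example for add_trits. The trit lists are little-endian (least significant trit first), so the first result below encodes 4 + 1 = 5; note that the result length equals the longer input, which is why the single-trit case overflows:

from iota.trits import add_trits

result = add_trits([1, 1, 0], [1])                # 4 + 1, with room for the carry
assert result == [-1, -1, 1]                      # -1 - 3 + 9 == 5
assert sum(t * 3 ** i for i, t in enumerate(result)) == 5

assert add_trits([1], [1]) == [-1]                # overflow case from the docstring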
test
|
trits_from_int
|
Returns a trit representation of an integer value.
:param n:
Integer value to convert.
:param pad:
Ensure the result has at least this many trits.
References:
- https://dev.to/buntine/the-balanced-ternary-machines-of-soviet-russia
- https://en.wikipedia.org/wiki/Balanced_ternary
- https://rosettacode.org/wiki/Balanced_ternary#Python
|
iota/trits.py
|
def trits_from_int(n, pad=1):
# type: (int, Optional[int]) -> List[int]
"""
Returns a trit representation of an integer value.
:param n:
Integer value to convert.
:param pad:
Ensure the result has at least this many trits.
References:
- https://dev.to/buntine/the-balanced-ternary-machines-of-soviet-russia
- https://en.wikipedia.org/wiki/Balanced_ternary
- https://rosettacode.org/wiki/Balanced_ternary#Python
"""
if n == 0:
trits = []
else:
quotient, remainder = divmod(n, 3)
if remainder == 2:
# Lend 1 to the next place so we can make this trit
# negative.
quotient += 1
remainder = -1
trits = [remainder] + trits_from_int(quotient, pad=0)
if pad:
trits += [0] * max(0, pad - len(trits))
return trits
|
def trits_from_int(n, pad=1):
# type: (int, Optional[int]) -> List[int]
"""
Returns a trit representation of an integer value.
:param n:
Integer value to convert.
:param pad:
Ensure the result has at least this many trits.
References:
- https://dev.to/buntine/the-balanced-ternary-machines-of-soviet-russia
- https://en.wikipedia.org/wiki/Balanced_ternary
- https://rosettacode.org/wiki/Balanced_ternary#Python
"""
if n == 0:
trits = []
else:
quotient, remainder = divmod(n, 3)
if remainder == 2:
# Lend 1 to the next place so we can make this trit
# negative.
quotient += 1
remainder = -1
trits = [remainder] + trits_from_int(quotient, pad=0)
if pad:
trits += [0] * max(0, pad - len(trits))
return trits
|
[
"Returns",
"a",
"trit",
"representation",
"of",
"an",
"integer",
"value",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/trits.py#L57-L90
|
[
"def",
"trits_from_int",
"(",
"n",
",",
"pad",
"=",
"1",
")",
":",
"# type: (int, Optional[int]) -> List[int]",
"if",
"n",
"==",
"0",
":",
"trits",
"=",
"[",
"]",
"else",
":",
"quotient",
",",
"remainder",
"=",
"divmod",
"(",
"n",
",",
"3",
")",
"if",
"remainder",
"==",
"2",
":",
"# Lend 1 to the next place so we can make this trit",
"# negative.",
"quotient",
"+=",
"1",
"remainder",
"=",
"-",
"1",
"trits",
"=",
"[",
"remainder",
"]",
"+",
"trits_from_int",
"(",
"quotient",
",",
"pad",
"=",
"0",
")",
"if",
"pad",
":",
"trits",
"+=",
"[",
"0",
"]",
"*",
"max",
"(",
"0",
",",
"pad",
"-",
"len",
"(",
"trits",
")",
")",
"return",
"trits"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
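A quick worked example for trits_from_int. The output is little-endian balanced ternary, and pad only appends trailing zero trits:

from iota.trits import trits_from_int

assert trits_from_int(8) == [-1, 0, 1]                 # -1 + 0*3 + 1*9 == 8
assert trits_from_int(8, pad=6) == [-1, 0, 1, 0, 0, 0]
assert trits_from_int(0) == [0]                        # pad=1 guarantees at least one trit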
test
|
_add_trits
|
Adds two individual trits together.
The result is always a single trit.
|
iota/trits.py
|
def _add_trits(left, right):
# type: (int, int) -> int
"""
Adds two individual trits together.
The result is always a single trit.
"""
res = left + right
return res if -2 < res < 2 else (res < 0) - (res > 0)
|
def _add_trits(left, right):
# type: (int, int) -> int
"""
Adds two individual trits together.
The result is always a single trit.
"""
res = left + right
return res if -2 < res < 2 else (res < 0) - (res > 0)
|
[
"Adds",
"two",
"individual",
"trits",
"together",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/trits.py#L102-L110
|
[
"def",
"_add_trits",
"(",
"left",
",",
"right",
")",
":",
"# type: (int, int) -> int",
"res",
"=",
"left",
"+",
"right",
"return",
"res",
"if",
"-",
"2",
"<",
"res",
"<",
"2",
"else",
"(",
"res",
"<",
"0",
")",
"-",
"(",
"res",
">",
"0",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
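The one-liner above is a branchless way of wrapping the sum back into {-1, 0, 1}. A few spot checks (importing the private helper directly, which is assumed acceptable for illustration only):

from iota.trits import _add_trits

assert _add_trits(1, -1) == 0    # in range, returned unchanged
assert _add_trits(1, 1) == -1    # 2 wraps around to -1
assert _add_trits(-1, -1) == 1   # -2 wraps around to +1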
test
|
_full_add_trits
|
Adds two trits together, with support for a carry trit.
|
iota/trits.py
|
def _full_add_trits(left, right, carry):
# type: (int, int, int) -> Tuple[int, int]
"""
Adds two trits together, with support for a carry trit.
"""
sum_both = _add_trits(left, right)
cons_left = _cons_trits(left, right)
cons_right = _cons_trits(sum_both, carry)
return _add_trits(sum_both, carry), _any_trits(cons_left, cons_right)
|
def _full_add_trits(left, right, carry):
# type: (int, int, int) -> Tuple[int, int]
"""
Adds two trits together, with support for a carry trit.
"""
sum_both = _add_trits(left, right)
cons_left = _cons_trits(left, right)
cons_right = _cons_trits(sum_both, carry)
return _add_trits(sum_both, carry), _any_trits(cons_left, cons_right)
|
[
"Adds",
"two",
"trits",
"together",
"with",
"support",
"for",
"a",
"carry",
"trit",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/trits.py#L123-L132
|
[
"def",
"_full_add_trits",
"(",
"left",
",",
"right",
",",
"carry",
")",
":",
"# type: (int, int, int) -> Tuple[int, int]",
"sum_both",
"=",
"_add_trits",
"(",
"left",
",",
"right",
")",
"cons_left",
"=",
"_cons_trits",
"(",
"left",
",",
"right",
")",
"cons_right",
"=",
"_cons_trits",
"(",
"sum_both",
",",
"carry",
")",
"return",
"_add_trits",
"(",
"sum_both",
",",
"carry",
")",
",",
"_any_trits",
"(",
"cons_left",
",",
"cons_right",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
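The helper above behaves like a balanced-ternary full adder: the returned (sum, carry) pair should satisfy sum + 3*carry == left + right + carry_in for every input combination. A brief sketch checking that identity (again importing private helpers purely for illustration):

from itertools import product
from iota.trits import _full_add_trits

for left, right, carry_in in product((-1, 0, 1), repeat=3):
    trit, carry_out = _full_add_trits(left, right, carry_in)
    assert trit + 3 * carry_out == left + right + carry_in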
test
|
output_seed
|
Outputs the user's seed to stdout, along with lots of warnings
about security.
|
examples/address_generator.py
|
def output_seed(seed):
# type: (Seed) -> None
"""
Outputs the user's seed to stdout, along with lots of warnings
about security.
"""
print(
'WARNING: Anyone who has your seed can spend your IOTAs! '
'Clear the screen after recording your seed!'
)
compat.input('')
print('Your seed is:')
print('')
print(binary_type(seed).decode('ascii'))
print('')
print(
'Clear the screen to prevent shoulder surfing, '
'and press return to continue.'
)
print('https://en.wikipedia.org/wiki/Shoulder_surfing_(computer_security)')
compat.input('')
|
def output_seed(seed):
# type: (Seed) -> None
"""
Outputs the user's seed to stdout, along with lots of warnings
about security.
"""
print(
'WARNING: Anyone who has your seed can spend your IOTAs! '
'Clear the screen after recording your seed!'
)
compat.input('')
print('Your seed is:')
print('')
print(binary_type(seed).decode('ascii'))
print('')
print(
'Clear the screen to prevent shoulder surfing, '
'and press return to continue.'
)
print('https://en.wikipedia.org/wiki/Shoulder_surfing_(computer_security)')
compat.input('')
|
[
"Outputs",
"the",
"user",
"s",
"seed",
"to",
"stdout",
"along",
"with",
"lots",
"of",
"warnings",
"about",
"security",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/examples/address_generator.py#L59-L80
|
[
"def",
"output_seed",
"(",
"seed",
")",
":",
"# type: (Seed) -> None",
"print",
"(",
"'WARNING: Anyone who has your seed can spend your IOTAs! '",
"'Clear the screen after recording your seed!'",
")",
"compat",
".",
"input",
"(",
"''",
")",
"print",
"(",
"'Your seed is:'",
")",
"print",
"(",
"''",
")",
"print",
"(",
"binary_type",
"(",
"seed",
")",
".",
"decode",
"(",
"'ascii'",
")",
")",
"print",
"(",
"''",
")",
"print",
"(",
"'Clear the screen to prevent shoulder surfing, '",
"'and press return to continue.'",
")",
"print",
"(",
"'https://en.wikipedia.org/wiki/Shoulder_surfing_(computer_security)'",
")",
"compat",
".",
"input",
"(",
"''",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
StrictIota.attach_to_tangle
|
Attaches the specified transactions (trytes) to the Tangle by
doing Proof of Work. You need to supply branchTransaction as
well as trunkTransaction (basically the tips which you're going
to validate and reference with this transaction) - both of which
you'll get through the getTransactionsToApprove API call.
The returned value is a different set of tryte values which you
can input into :py:meth:`broadcast_transactions` and
:py:meth:`store_transactions`.
References:
- https://iota.readme.io/docs/attachtotangle
|
iota/api.py
|
def attach_to_tangle(
self,
trunk_transaction, # type: TransactionHash
branch_transaction, # type: TransactionHash
trytes, # type: Iterable[TryteString]
min_weight_magnitude=None, # type: Optional[int]
):
# type: (...) -> dict
"""
Attaches the specified transactions (trytes) to the Tangle by
doing Proof of Work. You need to supply branchTransaction as
well as trunkTransaction (basically the tips which you're going
to validate and reference with this transaction) - both of which
you'll get through the getTransactionsToApprove API call.
The returned value is a different set of tryte values which you
can input into :py:meth:`broadcast_transactions` and
:py:meth:`store_transactions`.
References:
- https://iota.readme.io/docs/attachtotangle
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return core.AttachToTangleCommand(self.adapter)(
trunkTransaction=trunk_transaction,
branchTransaction=branch_transaction,
minWeightMagnitude=min_weight_magnitude,
trytes=trytes,
)
|
def attach_to_tangle(
self,
trunk_transaction, # type: TransactionHash
branch_transaction, # type: TransactionHash
trytes, # type: Iterable[TryteString]
min_weight_magnitude=None, # type: Optional[int]
):
# type: (...) -> dict
"""
Attaches the specified transactions (trytes) to the Tangle by
doing Proof of Work. You need to supply branchTransaction as
well as trunkTransaction (basically the tips which you're going
to validate and reference with this transaction) - both of which
you'll get through the getTransactionsToApprove API call.
The returned value is a different set of tryte values which you
can input into :py:meth:`broadcast_transactions` and
:py:meth:`store_transactions`.
References:
- https://iota.readme.io/docs/attachtotangle
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return core.AttachToTangleCommand(self.adapter)(
trunkTransaction=trunk_transaction,
branchTransaction=branch_transaction,
minWeightMagnitude=min_weight_magnitude,
trytes=trytes,
)
|
[
"Attaches",
"the",
"specified",
"transactions",
"(",
"trytes",
")",
"to",
"the",
"Tangle",
"by",
"doing",
"Proof",
"of",
"Work",
".",
"You",
"need",
"to",
"supply",
"branchTransaction",
"as",
"well",
"as",
"trunkTransaction",
"(",
"basically",
"the",
"tips",
"which",
"you",
"re",
"going",
"to",
"validate",
"and",
"reference",
"with",
"this",
"transaction",
")",
"-",
"both",
"of",
"which",
"you",
"ll",
"get",
"through",
"the",
"getTransactionsToApprove",
"API",
"call",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L171-L202
|
[
"def",
"attach_to_tangle",
"(",
"self",
",",
"trunk_transaction",
",",
"# type: TransactionHash",
"branch_transaction",
",",
"# type: TransactionHash",
"trytes",
",",
"# type: Iterable[TryteString]",
"min_weight_magnitude",
"=",
"None",
",",
"# type: Optional[int]",
")",
":",
"# type: (...) -> dict",
"if",
"min_weight_magnitude",
"is",
"None",
":",
"min_weight_magnitude",
"=",
"self",
".",
"default_min_weight_magnitude",
"return",
"core",
".",
"AttachToTangleCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"trunkTransaction",
"=",
"trunk_transaction",
",",
"branchTransaction",
"=",
"branch_transaction",
",",
"minWeightMagnitude",
"=",
"min_weight_magnitude",
",",
"trytes",
"=",
"trytes",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
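A sketch of the workflow the docstring above describes, wiring get_transactions_to_approve into attach_to_tangle and then broadcasting. The node URI, depth, minWeightMagnitude, and prepared_trytes are placeholders:

from iota import Iota

api = Iota('http://localhost:14265')  # placeholder node URI

# prepared_trytes: unattached transaction trytes, e.g. from prepare_transfer().
gtta = api.get_transactions_to_approve(depth=3)
attached = api.attach_to_tangle(
    trunk_transaction=gtta['trunkTransaction'],
    branch_transaction=gtta['branchTransaction'],
    trytes=prepared_trytes,
    min_weight_magnitude=14,  # network-dependent; 14 was the mainnet value at the time
)
api.broadcast_transactions(attached['trytes'])
api.store_transactions(attached['trytes'])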
test
|
StrictIota.find_transactions
|
Find the transactions which match the specified input and
return.
All input values are lists, for which a list of return values
(transaction hashes), in the same order, is returned for all
individual elements.
Using multiple of these input fields returns the intersection of
the values.
:param bundles:
List of bundle IDs.
:param addresses:
List of addresses.
:param tags:
List of tags.
:param approvees:
List of approvee transaction IDs.
References:
- https://iota.readme.io/docs/findtransactions
|
iota/api.py
|
def find_transactions(
self,
bundles=None, # type: Optional[Iterable[BundleHash]]
addresses=None, # type: Optional[Iterable[Address]]
tags=None, # type: Optional[Iterable[Tag]]
approvees=None, # type: Optional[Iterable[TransactionHash]]
):
# type: (...) -> dict
"""
Find the transactions which match the specified input and
return.
All input values are lists, for which a list of return values
(transaction hashes), in the same order, is returned for all
individual elements.
Using multiple of these input fields returns the intersection of
the values.
:param bundles:
List of bundle IDs.
:param addresses:
List of addresses.
:param tags:
List of tags.
:param approvees:
List of approvee transaction IDs.
References:
- https://iota.readme.io/docs/findtransactions
"""
return core.FindTransactionsCommand(self.adapter)(
bundles=bundles,
addresses=addresses,
tags=tags,
approvees=approvees,
)
|
def find_transactions(
self,
bundles=None, # type: Optional[Iterable[BundleHash]]
addresses=None, # type: Optional[Iterable[Address]]
tags=None, # type: Optional[Iterable[Tag]]
approvees=None, # type: Optional[Iterable[TransactionHash]]
):
# type: (...) -> dict
"""
Find the transactions which match the specified input and
return.
All input values are lists, for which a list of return values
(transaction hashes), in the same order, is returned for all
individual elements.
Using multiple of these input fields returns the intersection of
the values.
:param bundles:
List of bundle IDs.
:param addresses:
List of addresses.
:param tags:
List of tags.
:param approvees:
List of approvee transaction IDs.
References:
- https://iota.readme.io/docs/findtransactions
"""
return core.FindTransactionsCommand(self.adapter)(
bundles=bundles,
addresses=addresses,
tags=tags,
approvees=approvees,
)
|
[
"Find",
"the",
"transactions",
"which",
"match",
"the",
"specified",
"input",
"and",
"return",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L249-L289
|
[
"def",
"find_transactions",
"(",
"self",
",",
"bundles",
"=",
"None",
",",
"# type: Optional[Iterable[BundleHash]]",
"addresses",
"=",
"None",
",",
"# type: Optional[Iterable[Address]]",
"tags",
"=",
"None",
",",
"# type: Optional[Iterable[Tag]]",
"approvees",
"=",
"None",
",",
"# type: Optional[Iterable[TransactionHash]]",
")",
":",
"# type: (...) -> dict",
"return",
"core",
".",
"FindTransactionsCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"bundles",
"=",
"bundles",
",",
"addresses",
"=",
"addresses",
",",
"tags",
"=",
"tags",
",",
"approvees",
"=",
"approvees",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
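A short usage sketch for find_transactions. The node URI, address, and tag are placeholders (Address and Tag pad short values with 9s), and combining both fields returns the intersection as described:

from iota import Address, Iota, Tag

api = Iota('http://localhost:14265')  # placeholder node URI
response = api.find_transactions(
    addresses=[Address(b'SOMEADDRESS9TO9SEARCH')],
    tags=[Tag(b'EXAMPLE9TAG')],
)
hashes = response['hashes']  # list of transaction hashes; empty if nothing matches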
test
|
StrictIota.get_balances
|
Similar to :py:meth:`get_inclusion_states`. Returns the
confirmed balance which a list of addresses have at the latest
confirmed milestone.
In addition to the balances, it also returns the milestone as
well as the index with which the confirmed balance was
determined. The balances are returned as a list in the same
order as the addresses were provided as input.
:param addresses:
List of addresses to get the confirmed balance for.
:param threshold:
Confirmation threshold.
References:
- https://iota.readme.io/docs/getbalances
|
iota/api.py
|
def get_balances(self, addresses, threshold=100):
# type: (Iterable[Address], int) -> dict
"""
Similar to :py:meth:`get_inclusion_states`. Returns the
confirmed balance which a list of addresses have at the latest
confirmed milestone.
In addition to the balances, it also returns the milestone as
well as the index with which the confirmed balance was
determined. The balances are returned as a list in the same
order as the addresses were provided as input.
:param addresses:
List of addresses to get the confirmed balance for.
:param threshold:
Confirmation threshold.
References:
- https://iota.readme.io/docs/getbalances
"""
return core.GetBalancesCommand(self.adapter)(
addresses=addresses,
threshold=threshold,
)
|
def get_balances(self, addresses, threshold=100):
# type: (Iterable[Address], int) -> dict
"""
Similar to :py:meth:`get_inclusion_states`. Returns the
confirmed balance which a list of addresses have at the latest
confirmed milestone.
In addition to the balances, it also returns the milestone as
well as the index with which the confirmed balance was
determined. The balances are returned as a list in the same
order as the addresses were provided as input.
:param addresses:
List of addresses to get the confirmed balance for.
:param threshold:
Confirmation threshold.
References:
- https://iota.readme.io/docs/getbalances
"""
return core.GetBalancesCommand(self.adapter)(
addresses=addresses,
threshold=threshold,
)
|
[
"Similar",
"to",
":",
"py",
":",
"meth",
":",
"get_inclusion_states",
".",
"Returns",
"the",
"confirmed",
"balance",
"which",
"a",
"list",
"of",
"addresses",
"have",
"at",
"the",
"latest",
"confirmed",
"milestone",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L291-L316
|
[
"def",
"get_balances",
"(",
"self",
",",
"addresses",
",",
"threshold",
"=",
"100",
")",
":",
"# type: (Iterable[Address], int) -> dict",
"return",
"core",
".",
"GetBalancesCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"addresses",
"=",
"addresses",
",",
"threshold",
"=",
"threshold",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
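A minimal sketch for get_balances, with placeholder node URI and address:

from iota import Address, Iota

api = Iota('http://localhost:14265')  # placeholder node URI
addresses = [Address(b'SOMEADDRESS9TO9CHECK')]
response = api.get_balances(addresses=addresses, threshold=100)

# Balances come back in the same order as the input addresses.
for address, balance in zip(addresses, response['balances']):
    print(address, balance)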
test
|
StrictIota.get_inclusion_states
|
Get the inclusion states of a set of transactions. This is for
determining if a transaction was accepted and confirmed by the
network or not. You can search for multiple tips (and thus,
milestones) to get past inclusion states of transactions.
:param transactions:
List of transactions you want to get the inclusion state
for.
:param tips:
List of tips (including milestones) you want to search for
the inclusion state.
References:
- https://iota.readme.io/docs/getinclusionstates
|
iota/api.py
|
def get_inclusion_states(self, transactions, tips):
# type: (Iterable[TransactionHash], Iterable[TransactionHash]) -> dict
"""
Get the inclusion states of a set of transactions. This is for
determining if a transaction was accepted and confirmed by the
network or not. You can search for multiple tips (and thus,
milestones) to get past inclusion states of transactions.
:param transactions:
List of transactions you want to get the inclusion state
for.
:param tips:
List of tips (including milestones) you want to search for
the inclusion state.
References:
- https://iota.readme.io/docs/getinclusionstates
"""
return core.GetInclusionStatesCommand(self.adapter)(
transactions=transactions,
tips=tips,
)
|
def get_inclusion_states(self, transactions, tips):
# type: (Iterable[TransactionHash], Iterable[TransactionHash]) -> dict
"""
Get the inclusion states of a set of transactions. This is for
determining if a transaction was accepted and confirmed by the
network or not. You can search for multiple tips (and thus,
milestones) to get past inclusion states of transactions.
:param transactions:
List of transactions you want to get the inclusion state
for.
:param tips:
List of tips (including milestones) you want to search for
the inclusion state.
References:
- https://iota.readme.io/docs/getinclusionstates
"""
return core.GetInclusionStatesCommand(self.adapter)(
transactions=transactions,
tips=tips,
)
|
[
"Get",
"the",
"inclusion",
"states",
"of",
"a",
"set",
"of",
"transactions",
".",
"This",
"is",
"for",
"determining",
"if",
"a",
"transaction",
"was",
"accepted",
"and",
"confirmed",
"by",
"the",
"network",
"or",
"not",
".",
"You",
"can",
"search",
"for",
"multiple",
"tips",
"(",
"and",
"thus",
"milestones",
")",
"to",
"get",
"past",
"inclusion",
"states",
"of",
"transactions",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L318-L341
|
[
"def",
"get_inclusion_states",
"(",
"self",
",",
"transactions",
",",
"tips",
")",
":",
"# type: (Iterable[TransactionHash], Iterable[TransactionHash]) -> dict",
"return",
"core",
".",
"GetInclusionStatesCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"transactions",
"=",
"transactions",
",",
"tips",
"=",
"tips",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
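A sketch for get_inclusion_states, using the latest solid milestone from get_node_info as the tip to search. The transaction hash is a placeholder, and the milestone field name is assumed from the IRI getNodeInfo response:

from iota import Iota, TransactionHash

api = Iota('http://localhost:14265')  # placeholder node URI
tx_hash = TransactionHash(b'SOMETRANSACTION9HASH')  # placeholder, padded with 9s

milestone = api.get_node_info()['latestSolidSubtangleMilestone']
response = api.get_inclusion_states(transactions=[tx_hash], tips=[milestone])
confirmed = response['states'][0]  # True if the milestone references the transaction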
test
|
Iota.get_account_data
|
More comprehensive version of :py:meth:`get_transfers` that
returns addresses and account balance in addition to bundles.
This function is useful in getting all the relevant information
of your account.
:param start:
Starting key index.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will check every
address until it finds one without any transfers.
:param inclusion_states:
Whether to also fetch the inclusion states of the transfers.
This requires an additional API call to the node, so it is
disabled by default.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'addresses': List[Address],
List of generated addresses.
Note that this list may include unused
addresses.
'balance': int,
Total account balance. Might be 0.
'bundles': List[Bundle],
List of bundles with transactions to/from this
account.
}
|
iota/api.py
|
def get_account_data(self, start=0, stop=None, inclusion_states=False, security_level=None):
# type: (int, Optional[int], bool, Optional[int]) -> dict
"""
More comprehensive version of :py:meth:`get_transfers` that
returns addresses and account balance in addition to bundles.
This function is useful in getting all the relevant information
of your account.
:param start:
Starting key index.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will check every
address until it finds one without any transfers.
:param inclusion_states:
Whether to also fetch the inclusion states of the transfers.
This requires an additional API call to the node, so it is
disabled by default.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'addresses': List[Address],
List of generated addresses.
Note that this list may include unused
addresses.
'balance': int,
Total account balance. Might be 0.
'bundles': List[Bundle],
List of bundles with transactions to/from this
account.
}
"""
return extended.GetAccountDataCommand(self.adapter)(
seed=self.seed,
start=start,
stop=stop,
inclusionStates=inclusion_states,
security_level=security_level
)
|
def get_account_data(self, start=0, stop=None, inclusion_states=False, security_level=None):
# type: (int, Optional[int], bool, Optional[int]) -> dict
"""
More comprehensive version of :py:meth:`get_transfers` that
returns addresses and account balance in addition to bundles.
This function is useful in getting all the relevant information
of your account.
:param start:
Starting key index.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will check every
address until it finds one without any transfers.
:param inclusion_states:
Whether to also fetch the inclusion states of the transfers.
This requires an additional API call to the node, so it is
disabled by default.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'addresses': List[Address],
List of generated addresses.
Note that this list may include unused
addresses.
'balance': int,
Total account balance. Might be 0.
'bundles': List[Bundle],
List of bundles with transactions to/from this
account.
}
"""
return extended.GetAccountDataCommand(self.adapter)(
seed=self.seed,
start=start,
stop=stop,
inclusionStates=inclusion_states,
security_level=security_level
)
|
[
"More",
"comprehensive",
"version",
"of",
":",
"py",
":",
"meth",
":",
"get_transfers",
"that",
"returns",
"addresses",
"and",
"account",
"balance",
"in",
"addition",
"to",
"bundles",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L520-L581
|
[
"def",
"get_account_data",
"(",
"self",
",",
"start",
"=",
"0",
",",
"stop",
"=",
"None",
",",
"inclusion_states",
"=",
"False",
",",
"security_level",
"=",
"None",
")",
":",
"# type: (int, Optional[int], bool, Optional[int]) -> dict",
"return",
"extended",
".",
"GetAccountDataCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"seed",
"=",
"self",
".",
"seed",
",",
"start",
"=",
"start",
",",
"stop",
"=",
"stop",
",",
"inclusionStates",
"=",
"inclusion_states",
",",
"security_level",
"=",
"security_level",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
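A brief sketch for get_account_data, with placeholder node URI and seed:

from iota import Iota, Seed

api = Iota('http://localhost:14265', seed=Seed(b'YOURSEED9HERE'))  # placeholders
data = api.get_account_data(start=0, inclusion_states=False)

print('Balance:', data['balance'])
print('Addresses checked:', len(data['addresses']))
print('Bundles found:', len(data['bundles']))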
test
|
Iota.get_inputs
|
Gets all possible inputs of a seed and returns them, along with
the total balance.
This is either done deterministically (by generating all
addresses until :py:meth:`find_transactions` returns an empty
result), or by providing a key range to search.
:param start:
Starting key index.
Defaults to 0.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will not stop until
it finds an unused address.
:param threshold:
If set, determines the minimum threshold for a successful
result:
- As soon as this threshold is reached, iteration will stop.
- If the command runs out of addresses before the threshold
is reached, an exception is raised.
.. note::
This method does not attempt to "optimize" the result
(e.g., smallest number of inputs, get as close to
``threshold`` as possible, etc.); it simply accumulates
inputs in order until the threshold is met.
If ``threshold`` is 0, the first address in the key range
with a non-zero balance will be returned (if it exists).
If ``threshold`` is ``None`` (default), this method will
return **all** inputs in the specified key range.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'inputs': List[Address],
Addresses with nonzero balances that can be used
as inputs.
'totalBalance': int,
Aggregate balance from all matching addresses.
}
Note that each Address in the result has its ``balance``
attribute set.
Example:
.. code-block:: python
response = iota.get_inputs(...)
input0 = response['inputs'][0] # type: Address
input0.balance # 42
:raise:
- :py:class:`iota.adapter.BadApiResponse` if ``threshold``
is not met. Not applicable if ``threshold`` is ``None``.
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getinputs
|
iota/api.py
|
def get_inputs(
self,
start=0,
stop=None,
threshold=None,
security_level=None,
):
# type: (int, Optional[int], Optional[int], Optional[int]) -> dict
"""
Gets all possible inputs of a seed and returns them, along with
the total balance.
This is either done deterministically (by generating all
addresses until :py:meth:`find_transactions` returns an empty
result), or by providing a key range to search.
:param start:
Starting key index.
Defaults to 0.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will not stop until
it finds an unused address.
:param threshold:
If set, determines the minimum threshold for a successful
result:
- As soon as this threshold is reached, iteration will stop.
- If the command runs out of addresses before the threshold
is reached, an exception is raised.
.. note::
This method does not attempt to "optimize" the result
(e.g., smallest number of inputs, get as close to
``threshold`` as possible, etc.); it simply accumulates
inputs in order until the threshold is met.
If ``threshold`` is 0, the first address in the key range
with a non-zero balance will be returned (if it exists).
If ``threshold`` is ``None`` (default), this method will
return **all** inputs in the specified key range.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'inputs': List[Address],
Addresses with nonzero balances that can be used
as inputs.
'totalBalance': int,
Aggregate balance from all matching addresses.
}
Note that each Address in the result has its ``balance``
attribute set.
Example:
.. code-block:: python
response = iota.get_inputs(...)
input0 = response['inputs'][0] # type: Address
input0.balance # 42
:raise:
- :py:class:`iota.adapter.BadApiResponse` if ``threshold``
is not met. Not applicable if ``threshold`` is ``None``.
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getinputs
"""
return extended.GetInputsCommand(self.adapter)(
seed=self.seed,
start=start,
stop=stop,
threshold=threshold,
securityLevel=security_level
)
|
def get_inputs(
self,
start=0,
stop=None,
threshold=None,
security_level=None,
):
# type: (int, Optional[int], Optional[int], Optional[int]) -> dict
"""
Gets all possible inputs of a seed and returns them, along with
the total balance.
This is either done deterministically (by generating all
addresses until :py:meth:`find_transactions` returns an empty
result), or by providing a key range to search.
:param start:
Starting key index.
Defaults to 0.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will not stop until
it finds an unused address.
:param threshold:
If set, determines the minimum threshold for a successful
result:
- As soon as this threshold is reached, iteration will stop.
- If the command runs out of addresses before the threshold
is reached, an exception is raised.
.. note::
This method does not attempt to "optimize" the result
(e.g., smallest number of inputs, get as close to
``threshold`` as possible, etc.); it simply accumulates
inputs in order until the threshold is met.
If ``threshold`` is 0, the first address in the key range
with a non-zero balance will be returned (if it exists).
If ``threshold`` is ``None`` (default), this method will
return **all** inputs in the specified key range.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'inputs': List[Address],
Addresses with nonzero balances that can be used
as inputs.
'totalBalance': int,
Aggregate balance from all matching addresses.
}
Note that each Address in the result has its ``balance``
attribute set.
Example:
.. code-block:: python
response = iota.get_inputs(...)
input0 = response['inputs'][0] # type: Address
input0.balance # 42
:raise:
- :py:class:`iota.adapter.BadApiResponse` if ``threshold``
is not met. Not applicable if ``threshold`` is ``None``.
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getinputs
"""
return extended.GetInputsCommand(self.adapter)(
seed=self.seed,
start=start,
stop=stop,
threshold=threshold,
securityLevel=security_level
)
|
[
"Gets",
"all",
"possible",
"inputs",
"of",
"a",
"seed",
"and",
"returns",
"them",
"along",
"with",
"the",
"total",
"balance",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L611-L708
|
[
"def",
"get_inputs",
"(",
"self",
",",
"start",
"=",
"0",
",",
"stop",
"=",
"None",
",",
"threshold",
"=",
"None",
",",
"security_level",
"=",
"None",
",",
")",
":",
"# type: (int, Optional[int], Optional[int], Optional[int]) -> dict",
"return",
"extended",
".",
"GetInputsCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"seed",
"=",
"self",
".",
"seed",
",",
"start",
"=",
"start",
",",
"stop",
"=",
"stop",
",",
"threshold",
"=",
"threshold",
",",
"securityLevel",
"=",
"security_level",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
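A sketch for get_inputs, asking for enough inputs to cover a hypothetical 100i payment; node URI and seed are placeholders:

from iota import Iota, Seed

api = Iota('http://localhost:14265', seed=Seed(b'YOURSEED9HERE'))  # placeholders
response = api.get_inputs(start=0, threshold=100)

for address in response['inputs']:
    print(address, address.balance)   # each input has its balance attribute set
print('Total:', response['totalBalance'])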
test
|
Iota.get_new_addresses
|
Generates one or more new addresses from the seed.
:param index:
The key index of the first new address to generate (must be
>= 1).
:param count:
Number of addresses to generate (must be >= 1).
.. tip::
This is more efficient than calling ``get_new_address``
inside a loop.
If ``None``, this method will progressively generate
addresses and scan the Tangle until it finds one that has no
transactions referencing it.
:param security_level:
Number of iterations to use when generating new addresses.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:param checksum:
Specify whether to return the address with the checksum.
Defaults to ``False``.
:return:
Dict with the following structure::
{
'addresses': List[Address],
Always a list, even if only one address was
generated.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getnewaddress
|
iota/api.py
|
def get_new_addresses(
self,
index=0,
count=1,
security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
checksum=False,
):
# type: (int, Optional[int], int, bool) -> dict
"""
Generates one or more new addresses from the seed.
:param index:
The key index of the first new address to generate (must be
>= 1).
:param count:
Number of addresses to generate (must be >= 1).
.. tip::
This is more efficient than calling ``get_new_address``
inside a loop.
If ``None``, this method will progressively generate
addresses and scan the Tangle until it finds one that has no
transactions referencing it.
:param security_level:
Number of iterations to use when generating new addresses.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:param checksum:
Specify whether to return the address with the checksum.
Defaults to ``False``.
:return:
Dict with the following structure::
{
'addresses': List[Address],
Always a list, even if only one address was
generated.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getnewaddress
"""
return extended.GetNewAddressesCommand(self.adapter)(
count=count,
index=index,
securityLevel=security_level,
checksum=checksum,
seed=self.seed,
)
|
def get_new_addresses(
self,
index=0,
count=1,
security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
checksum=False,
):
# type: (int, Optional[int], int, bool) -> dict
"""
Generates one or more new addresses from the seed.
:param index:
The key index of the first new address to generate (must be
>= 1).
:param count:
Number of addresses to generate (must be >= 1).
.. tip::
This is more efficient than calling ``get_new_address``
inside a loop.
If ``None``, this method will progressively generate
addresses and scan the Tangle until it finds one that has no
transactions referencing it.
:param security_level:
Number of iterations to use when generating new addresses.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:param checksum:
Specify whether to return the address with the checksum.
Defaults to ``False``.
:return:
Dict with the following structure::
{
'addresses': List[Address],
Always a list, even if only one address was
generated.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getnewaddress
"""
return extended.GetNewAddressesCommand(self.adapter)(
count=count,
index=index,
securityLevel=security_level,
checksum=checksum,
seed=self.seed,
)
|
[
"Generates",
"one",
"or",
"more",
"new",
"addresses",
"from",
"the",
"seed",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L732-L789
|
[
"def",
"get_new_addresses",
"(",
"self",
",",
"index",
"=",
"0",
",",
"count",
"=",
"1",
",",
"security_level",
"=",
"AddressGenerator",
".",
"DEFAULT_SECURITY_LEVEL",
",",
"checksum",
"=",
"False",
",",
")",
":",
"# type: (int, Optional[int], int, bool) -> dict",
"return",
"extended",
".",
"GetNewAddressesCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"count",
"=",
"count",
",",
"index",
"=",
"index",
",",
"securityLevel",
"=",
"security_level",
",",
"checksum",
"=",
"checksum",
",",
"seed",
"=",
"self",
".",
"seed",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
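A sketch for get_new_addresses, with placeholder node URI and seed; passing count=None instead switches to the scan-until-unused behaviour described above:

from iota import Iota, Seed

api = Iota('http://localhost:14265', seed=Seed(b'YOURSEED9HERE'))  # placeholders
response = api.get_new_addresses(index=0, count=5, security_level=2, checksum=False)

for address in response['addresses']:
    print(address)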
test
|
Iota.get_transfers
|
Returns all transfers associated with the seed.
:param start:
Starting key index.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will check every
address until it finds one without any transfers.
:param inclusion_states:
Whether to also fetch the inclusion states of the transfers.
This requires an additional API call to the node, so it is
disabled by default.
:return:
Dict with the following structure::
{
'bundles': List[Bundle],
Matching bundles, sorted by tail transaction
timestamp.
This value is always a list, even if only one
bundle was found.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#gettransfers
|
iota/api.py
|
def get_transfers(self, start=0, stop=None, inclusion_states=False):
# type: (int, Optional[int], bool) -> dict
"""
Returns all transfers associated with the seed.
:param start:
Starting key index.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will check every
address until it finds one without any transfers.
:param inclusion_states:
Whether to also fetch the inclusion states of the transfers.
This requires an additional API call to the node, so it is
disabled by default.
:return:
Dict with the following structure::
{
'bundles': List[Bundle],
Matching bundles, sorted by tail transaction
timestamp.
This value is always a list, even if only one
bundle was found.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#gettransfers
"""
return extended.GetTransfersCommand(self.adapter)(
seed=self.seed,
start=start,
stop=stop,
inclusionStates=inclusion_states,
)
|
def get_transfers(self, start=0, stop=None, inclusion_states=False):
# type: (int, Optional[int], bool) -> dict
"""
Returns all transfers associated with the seed.
:param start:
Starting key index.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will check every
address until it finds one without any transfers.
:param inclusion_states:
Whether to also fetch the inclusion states of the transfers.
This requires an additional API call to the node, so it is
disabled by default.
:return:
Dict with the following structure::
{
'bundles': List[Bundle],
Matching bundles, sorted by tail transaction
timestamp.
This value is always a list, even if only one
bundle was found.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#gettransfers
"""
return extended.GetTransfersCommand(self.adapter)(
seed=self.seed,
start=start,
stop=stop,
inclusionStates=inclusion_states,
)
|
[
"Returns",
"all",
"transfers",
"associated",
"with",
"the",
"seed",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L791-L836
|
[
"def",
"get_transfers",
"(",
"self",
",",
"start",
"=",
"0",
",",
"stop",
"=",
"None",
",",
"inclusion_states",
"=",
"False",
")",
":",
"# type: (int, Optional[int], bool) -> dict",
"return",
"extended",
".",
"GetTransfersCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"seed",
"=",
"self",
".",
"seed",
",",
"start",
"=",
"start",
",",
"stop",
"=",
"stop",
",",
"inclusionStates",
"=",
"inclusion_states",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
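A short sketch of how ``get_transfers`` might be called. The node URI and seed are placeholder assumptions; with ``stop=None`` every address is scanned until an unused one is found, which can be slow for heavily used seeds.

from iota import Iota

# Placeholder node URI and seed (assumptions, not from the source).
api = Iota('http://localhost:14265', seed=b'9' * 81)

# Fetch every bundle associated with the seed's addresses, starting at
# key index 0.
response = api.get_transfers(start=0, stop=None, inclusion_states=False)

print('Found {0} bundle(s)'.format(len(response['bundles'])))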
test
|
Iota.prepare_transfer
|
Prepares transactions to be broadcast to the Tangle, by
generating the correct bundle, as well as choosing and signing
the inputs (for value transfers).
:param transfers:
Transaction objects to prepare.
:param inputs:
List of addresses used to fund the transfer.
Ignored for zero-value transfers.
If not provided, addresses will be selected automatically by
scanning the Tangle for unspent inputs. Depending on how
many transfers you've already sent with your seed, this
process could take awhile.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If not specified, a change address will be generated
automatically.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes for the transactions in the bundle,
ready to be provided to :py:meth:`send_trytes`.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#preparetransfers
|
iota/api.py
|
def prepare_transfer(
self,
transfers, # type: Iterable[ProposedTransaction]
inputs=None, # type: Optional[Iterable[Address]]
change_address=None, # type: Optional[Address]
security_level=None, # type: Optional[int]
):
# type: (...) -> dict
"""
Prepares transactions to be broadcast to the Tangle, by
generating the correct bundle, as well as choosing and signing
the inputs (for value transfers).
:param transfers:
Transaction objects to prepare.
:param inputs:
List of addresses used to fund the transfer.
Ignored for zero-value transfers.
If not provided, addresses will be selected automatically by
scanning the Tangle for unspent inputs. Depending on how
many transfers you've already sent with your seed, this
process could take awhile.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If not specified, a change address will be generated
automatically.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes for the transactions in the bundle,
ready to be provided to :py:meth:`send_trytes`.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#preparetransfers
"""
return extended.PrepareTransferCommand(self.adapter)(
seed=self.seed,
transfers=transfers,
inputs=inputs,
changeAddress=change_address,
securityLevel=security_level,
)
|
def prepare_transfer(
self,
transfers, # type: Iterable[ProposedTransaction]
inputs=None, # type: Optional[Iterable[Address]]
change_address=None, # type: Optional[Address]
security_level=None, # type: Optional[int]
):
# type: (...) -> dict
"""
Prepares transactions to be broadcast to the Tangle, by
generating the correct bundle, as well as choosing and signing
the inputs (for value transfers).
:param transfers:
Transaction objects to prepare.
:param inputs:
List of addresses used to fund the transfer.
Ignored for zero-value transfers.
If not provided, addresses will be selected automatically by
scanning the Tangle for unspent inputs. Depending on how
many transfers you've already sent with your seed, this
process could take awhile.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If not specified, a change address will be generated
automatically.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes for the transactions in the bundle,
ready to be provided to :py:meth:`send_trytes`.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#preparetransfers
"""
return extended.PrepareTransferCommand(self.adapter)(
seed=self.seed,
transfers=transfers,
inputs=inputs,
changeAddress=change_address,
securityLevel=security_level,
)
|
[
"Prepares",
"transactions",
"to",
"be",
"broadcast",
"to",
"the",
"Tangle",
"by",
"generating",
"the",
"correct",
"bundle",
"as",
"well",
"as",
"choosing",
"and",
"signing",
"the",
"inputs",
"(",
"for",
"value",
"transfers",
")",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L838-L898
|
[
"def",
"prepare_transfer",
"(",
"self",
",",
"transfers",
",",
"# type: Iterable[ProposedTransaction]",
"inputs",
"=",
"None",
",",
"# type: Optional[Iterable[Address]]",
"change_address",
"=",
"None",
",",
"# type: Optional[Address]",
"security_level",
"=",
"None",
",",
"# type: Optional[int]",
")",
":",
"# type: (...) -> dict",
"return",
"extended",
".",
"PrepareTransferCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"seed",
"=",
"self",
".",
"seed",
",",
"transfers",
"=",
"transfers",
",",
"inputs",
"=",
"inputs",
",",
"changeAddress",
"=",
"change_address",
",",
"securityLevel",
"=",
"security_level",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
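A hedged sketch of the two-step flow implied above: ``prepare_transfer`` builds the bundle trytes, which are then handed to ``send_trytes``. The node URI, seed, and recipient address are placeholders, not values from the source.

from iota import Address, Iota, ProposedTransaction

# Placeholder node URI, seed and recipient address (assumptions).
api = Iota('http://localhost:14265', seed=b'9' * 81)
receiver = Address(b'9' * 81)

# A zero-value transfer needs no inputs or change address.
transfer = ProposedTransaction(address=receiver, value=0)

prepared = api.prepare_transfer(transfers=[transfer])

# The resulting trytes are ready to hand off to ``send_trytes``.
result = api.send_trytes(trytes=prepared['trytes'], depth=3)
print('Attached {0} transaction(s)'.format(len(result['trytes'])))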
test
|
Iota.promote_transaction
|
Promotes a transaction by adding spam on top of it.
:return:
Dict with the following structure::
{
'bundle': Bundle,
The newly-published bundle.
}
|
iota/api.py
|
def promote_transaction(
self,
transaction,
depth=3,
min_weight_magnitude=None,
):
# type: (TransactionHash, int, Optional[int]) -> dict
"""
Promotes a transaction by adding spam on top of it.
:return:
Dict with the following structure::
{
'bundle': Bundle,
The newly-published bundle.
}
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.PromoteTransactionCommand(self.adapter)(
transaction=transaction,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
)
|
def promote_transaction(
self,
transaction,
depth=3,
min_weight_magnitude=None,
):
# type: (TransactionHash, int, Optional[int]) -> dict
"""
Promotes a transaction by adding spam on top of it.
:return:
Dict with the following structure::
{
'bundle': Bundle,
The newly-published bundle.
}
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.PromoteTransactionCommand(self.adapter)(
transaction=transaction,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
)
|
[
"Promotes",
"a",
"transaction",
"by",
"adding",
"spam",
"on",
"top",
"of",
"it",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L900-L925
|
[
"def",
"promote_transaction",
"(",
"self",
",",
"transaction",
",",
"depth",
"=",
"3",
",",
"min_weight_magnitude",
"=",
"None",
",",
")",
":",
"# type: (TransactionHash, int, Optional[int]) -> dict",
"if",
"min_weight_magnitude",
"is",
"None",
":",
"min_weight_magnitude",
"=",
"self",
".",
"default_min_weight_magnitude",
"return",
"extended",
".",
"PromoteTransactionCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"transaction",
"=",
"transaction",
",",
"depth",
"=",
"depth",
",",
"minWeightMagnitude",
"=",
"min_weight_magnitude",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
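A sketch of how a pending transfer might be promoted. It assumes the seed already has at least one bundle on the Tangle; the node URI and seed are placeholder assumptions, not values from the source.

from iota import Iota

# Placeholder node URI and seed (assumptions, not from the source).
api = Iota('http://localhost:14265', seed=b'9' * 81)

# Take the tail transaction of a previously sent bundle and promote it
# by publishing a zero-value spam bundle on top of it.
bundles = api.get_transfers()['bundles']
if bundles:
    tail_hash = bundles[0].tail_transaction.hash
    promoted = api.promote_transaction(transaction=tail_hash, depth=3)
    print('Promoted via bundle', promoted['bundle'].hash)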
test
|
Iota.replay_bundle
|
Takes a tail transaction hash as input, gets the bundle
associated with the transaction and then replays the bundle by
attaching it to the Tangle.
:param transaction:
Transaction hash. Must be a tail.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer
|
iota/api.py
|
def replay_bundle(
self,
transaction,
depth=3,
min_weight_magnitude=None,
):
# type: (TransactionHash, int, Optional[int]) -> dict
"""
Takes a tail transaction hash as input, gets the bundle
associated with the transaction and then replays the bundle by
attaching it to the Tangle.
:param transaction:
Transaction hash. Must be a tail.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.ReplayBundleCommand(self.adapter)(
transaction=transaction,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
)
|
def replay_bundle(
self,
transaction,
depth=3,
min_weight_magnitude=None,
):
# type: (TransactionHash, int, Optional[int]) -> dict
"""
Takes a tail transaction hash as input, gets the bundle
associated with the transaction and then replays the bundle by
attaching it to the Tangle.
:param transaction:
Transaction hash. Must be a tail.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.ReplayBundleCommand(self.adapter)(
transaction=transaction,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
)
|
[
"Takes",
"a",
"tail",
"transaction",
"hash",
"as",
"input",
"gets",
"the",
"bundle",
"associated",
"with",
"the",
"transaction",
"and",
"then",
"replays",
"the",
"bundle",
"by",
"attaching",
"it",
"to",
"the",
"Tangle",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L927-L971
|
[
"def",
"replay_bundle",
"(",
"self",
",",
"transaction",
",",
"depth",
"=",
"3",
",",
"min_weight_magnitude",
"=",
"None",
",",
")",
":",
"# type: (TransactionHash, int, Optional[int]) -> dict",
"if",
"min_weight_magnitude",
"is",
"None",
":",
"min_weight_magnitude",
"=",
"self",
".",
"default_min_weight_magnitude",
"return",
"extended",
".",
"ReplayBundleCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"transaction",
"=",
"transaction",
",",
"depth",
"=",
"depth",
",",
"minWeightMagnitude",
"=",
"min_weight_magnitude",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
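A sketch of reattaching an unconfirmed bundle with ``replay_bundle``; the tail transaction hash is taken from ``get_transfers``. The node URI and seed are placeholder assumptions.

from iota import Iota

# Placeholder node URI and seed (assumptions, not from the source).
api = Iota('http://localhost:14265', seed=b'9' * 81)

# The hash passed to replay_bundle must belong to the bundle's tail
# transaction (current_index == 0).
bundles = api.get_transfers()['bundles']
if bundles:
    tail_hash = bundles[0].tail_transaction.hash
    result = api.replay_bundle(transaction=tail_hash, depth=3)
    print('Reattached {0} transaction(s)'.format(len(result['trytes'])))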
test
|
Iota.send_transfer
|
Prepares a set of transfers and creates the bundle, then
attaches the bundle to the Tangle, and broadcasts and stores the
transactions.
:param transfers:
Transfers to include in the bundle.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param inputs:
List of inputs used to fund the transfer.
Not needed for zero-value transfers.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If not specified, a change address will be generated
automatically.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'bundle': Bundle,
The newly-published bundle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtransfer
|
iota/api.py
|
def send_transfer(
self,
transfers, # type: Iterable[ProposedTransaction]
depth=3, # type: int
inputs=None, # type: Optional[Iterable[Address]]
change_address=None, # type: Optional[Address]
min_weight_magnitude=None, # type: Optional[int]
security_level=None, # type: Optional[int]
):
# type: (...) -> dict
"""
Prepares a set of transfers and creates the bundle, then
attaches the bundle to the Tangle, and broadcasts and stores the
transactions.
:param transfers:
Transfers to include in the bundle.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param inputs:
List of inputs used to fund the transfer.
Not needed for zero-value transfers.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If not specified, a change address will be generated
automatically.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'bundle': Bundle,
The newly-published bundle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtransfer
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.SendTransferCommand(self.adapter)(
seed=self.seed,
depth=depth,
transfers=transfers,
inputs=inputs,
changeAddress=change_address,
minWeightMagnitude=min_weight_magnitude,
securityLevel=security_level,
)
|
def send_transfer(
self,
transfers, # type: Iterable[ProposedTransaction]
depth=3, # type: int
inputs=None, # type: Optional[Iterable[Address]]
change_address=None, # type: Optional[Address]
min_weight_magnitude=None, # type: Optional[int]
security_level=None, # type: Optional[int]
):
# type: (...) -> dict
"""
Prepares a set of transfers and creates the bundle, then
attaches the bundle to the Tangle, and broadcasts and stores the
transactions.
:param transfers:
Transfers to include in the bundle.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param inputs:
List of inputs used to fund the transfer.
Not needed for zero-value transfers.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If not specified, a change address will be generated
automatically.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'bundle': Bundle,
The newly-published bundle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtransfer
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.SendTransferCommand(self.adapter)(
seed=self.seed,
depth=depth,
transfers=transfers,
inputs=inputs,
changeAddress=change_address,
minWeightMagnitude=min_weight_magnitude,
securityLevel=security_level,
)
|
[
"Prepares",
"a",
"set",
"of",
"transfers",
"and",
"creates",
"the",
"bundle",
"then",
"attaches",
"the",
"bundle",
"to",
"the",
"Tangle",
"and",
"broadcasts",
"and",
"stores",
"the",
"transactions",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L973-L1044
|
[
"def",
"send_transfer",
"(",
"self",
",",
"transfers",
",",
"# type: Iterable[ProposedTransaction]",
"depth",
"=",
"3",
",",
"# type: int",
"inputs",
"=",
"None",
",",
"# type: Optional[Iterable[Address]]",
"change_address",
"=",
"None",
",",
"# type: Optional[Address]",
"min_weight_magnitude",
"=",
"None",
",",
"# type: Optional[int]",
"security_level",
"=",
"None",
",",
"# type: Optional[int]",
")",
":",
"# type: (...) -> dict",
"if",
"min_weight_magnitude",
"is",
"None",
":",
"min_weight_magnitude",
"=",
"self",
".",
"default_min_weight_magnitude",
"return",
"extended",
".",
"SendTransferCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"seed",
"=",
"self",
".",
"seed",
",",
"depth",
"=",
"depth",
",",
"transfers",
"=",
"transfers",
",",
"inputs",
"=",
"inputs",
",",
"changeAddress",
"=",
"change_address",
",",
"minWeightMagnitude",
"=",
"min_weight_magnitude",
",",
"securityLevel",
"=",
"security_level",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
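A one-call sketch using ``send_transfer`` for a zero-value transaction, combining the prepare/attach/broadcast/store steps described above. The node URI, seed, and recipient address are placeholders, not from the source.

from iota import Address, Iota, ProposedTransaction

# Placeholder node URI, seed and recipient (assumptions, not from the
# source); never use an all-9s seed for real funds.
api = Iota('http://localhost:14265', seed=b'9' * 81)

transfer = ProposedTransaction(
    address=Address(b'9' * 81),
    value=0,  # zero-value transfers need no inputs
)

# One call prepares, attaches, broadcasts and stores the bundle.
result = api.send_transfer(transfers=[transfer], depth=3)
print('Published bundle', result['bundle'].hash)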
test
|
Iota.send_trytes
|
Attaches transaction trytes to the Tangle, then broadcasts and
stores them.
:param trytes:
Transaction encoded as a tryte sequence.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtrytes
|
iota/api.py
|
def send_trytes(self, trytes, depth=3, min_weight_magnitude=None):
# type: (Iterable[TransactionTrytes], int, Optional[int]) -> dict
"""
Attaches transaction trytes to the Tangle, then broadcasts and
stores them.
:param trytes:
Transaction encoded as a tryte sequence.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtrytes
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.SendTrytesCommand(self.adapter)(
trytes=trytes,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
)
|
def send_trytes(self, trytes, depth=3, min_weight_magnitude=None):
# type: (Iterable[TransactionTrytes], int, Optional[int]) -> dict
"""
Attaches transaction trytes to the Tangle, then broadcasts and
stores them.
:param trytes:
Transaction encoded as a tryte sequence.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtrytes
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.SendTrytesCommand(self.adapter)(
trytes=trytes,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
)
|
[
"Attaches",
"transaction",
"trytes",
"to",
"the",
"Tangle",
"then",
"broadcasts",
"and",
"stores",
"them",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/api.py#L1046-L1084
|
[
"def",
"send_trytes",
"(",
"self",
",",
"trytes",
",",
"depth",
"=",
"3",
",",
"min_weight_magnitude",
"=",
"None",
")",
":",
"# type: (Iterable[TransactionTrytes], int, Optional[int]) -> dict",
"if",
"min_weight_magnitude",
"is",
"None",
":",
"min_weight_magnitude",
"=",
"self",
".",
"default_min_weight_magnitude",
"return",
"extended",
".",
"SendTrytesCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"trytes",
"=",
"trytes",
",",
"depth",
"=",
"depth",
",",
"minWeightMagnitude",
"=",
"min_weight_magnitude",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
resolve_adapter
|
Given a URI, returns a properly-configured adapter instance.
|
iota/adapter/__init__.py
|
def resolve_adapter(uri):
# type: (AdapterSpec) -> BaseAdapter
"""
Given a URI, returns a properly-configured adapter instance.
"""
if isinstance(uri, BaseAdapter):
return uri
parsed = compat.urllib_parse.urlsplit(uri) # type: SplitResult
if not parsed.scheme:
raise with_context(
exc=InvalidUri(
'URI must begin with "<protocol>://" (e.g., "udp://").',
),
context={
'parsed': parsed,
'uri': uri,
},
)
try:
adapter_type = adapter_registry[parsed.scheme]
except KeyError:
raise with_context(
exc=InvalidUri('Unrecognized protocol {protocol!r}.'.format(
protocol=parsed.scheme,
)),
context={
'parsed': parsed,
'uri': uri,
},
)
return adapter_type.configure(parsed)
|
def resolve_adapter(uri):
# type: (AdapterSpec) -> BaseAdapter
"""
Given a URI, returns a properly-configured adapter instance.
"""
if isinstance(uri, BaseAdapter):
return uri
parsed = compat.urllib_parse.urlsplit(uri) # type: SplitResult
if not parsed.scheme:
raise with_context(
exc=InvalidUri(
'URI must begin with "<protocol>://" (e.g., "udp://").',
),
context={
'parsed': parsed,
'uri': uri,
},
)
try:
adapter_type = adapter_registry[parsed.scheme]
except KeyError:
raise with_context(
exc=InvalidUri('Unrecognized protocol {protocol!r}.'.format(
protocol=parsed.scheme,
)),
context={
'parsed': parsed,
'uri': uri,
},
)
return adapter_type.configure(parsed)
|
[
"Given",
"a",
"URI",
"returns",
"a",
"properly",
"-",
"configured",
"adapter",
"instance",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/adapter/__init__.py#L77-L113
|
[
"def",
"resolve_adapter",
"(",
"uri",
")",
":",
"# type: (AdapterSpec) -> BaseAdapter",
"if",
"isinstance",
"(",
"uri",
",",
"BaseAdapter",
")",
":",
"return",
"uri",
"parsed",
"=",
"compat",
".",
"urllib_parse",
".",
"urlsplit",
"(",
"uri",
")",
"# type: SplitResult",
"if",
"not",
"parsed",
".",
"scheme",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"InvalidUri",
"(",
"'URI must begin with \"<protocol>://\" (e.g., \"udp://\").'",
",",
")",
",",
"context",
"=",
"{",
"'parsed'",
":",
"parsed",
",",
"'uri'",
":",
"uri",
",",
"}",
",",
")",
"try",
":",
"adapter_type",
"=",
"adapter_registry",
"[",
"parsed",
".",
"scheme",
"]",
"except",
"KeyError",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"InvalidUri",
"(",
"'Unrecognized protocol {protocol!r}.'",
".",
"format",
"(",
"protocol",
"=",
"parsed",
".",
"scheme",
",",
")",
")",
",",
"context",
"=",
"{",
"'parsed'",
":",
"parsed",
",",
"'uri'",
":",
"uri",
",",
"}",
",",
")",
"return",
"adapter_type",
".",
"configure",
"(",
"parsed",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
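A small sketch exercising ``resolve_adapter`` against the behaviour visible in the code above: HTTP URIs resolve to ``HttpAdapter``, adapter instances pass through unchanged, and malformed URIs raise ``InvalidUri``. The node URI is a placeholder.

from iota.adapter import HttpAdapter, InvalidUri, resolve_adapter

# 'http://' and 'https://' URIs are looked up in the adapter registry.
adapter = resolve_adapter('http://localhost:14265')
assert isinstance(adapter, HttpAdapter)

# Passing an adapter instance is a no-op; it is returned unchanged.
assert resolve_adapter(adapter) is adapter

# A URI without a recognized "<protocol>://" prefix is rejected.
try:
    resolve_adapter('localhost:14265')
except InvalidUri as e:
    print('Rejected:', e)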
test
|
BaseAdapter.send_request
|
Sends an API request to the node.
:param payload:
JSON payload.
:param kwargs:
Additional keyword arguments for the adapter.
:return:
Decoded response from the node.
:raise:
- :py:class:`BadApiResponse` if a non-success response was
received.
|
iota/adapter/__init__.py
|
def send_request(self, payload, **kwargs):
# type: (dict, dict) -> dict
"""
Sends an API request to the node.
:param payload:
JSON payload.
:param kwargs:
Additional keyword arguments for the adapter.
:return:
Decoded response from the node.
:raise:
- :py:class:`BadApiResponse` if a non-success response was
received.
"""
raise NotImplementedError(
'Not implemented in {cls}.'.format(cls=type(self).__name__),
)
|
def send_request(self, payload, **kwargs):
# type: (dict, dict) -> dict
"""
Sends an API request to the node.
:param payload:
JSON payload.
:param kwargs:
Additional keyword arguments for the adapter.
:return:
Decoded response from the node.
:raise:
- :py:class:`BadApiResponse` if a non-success response was
received.
"""
raise NotImplementedError(
'Not implemented in {cls}.'.format(cls=type(self).__name__),
)
|
[
"Sends",
"an",
"API",
"request",
"to",
"the",
"node",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/adapter/__init__.py#L171-L191
|
[
"def",
"send_request",
"(",
"self",
",",
"payload",
",",
"*",
"*",
"kwargs",
")",
":",
"# type: (dict, dict) -> dict",
"raise",
"NotImplementedError",
"(",
"'Not implemented in {cls}.'",
".",
"format",
"(",
"cls",
"=",
"type",
"(",
"self",
")",
".",
"__name__",
")",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
BaseAdapter._log
|
Sends a message to the instance's logger, if configured.
|
iota/adapter/__init__.py
|
def _log(self, level, message, context=None):
# type: (int, Text, Optional[dict]) -> None
"""
Sends a message to the instance's logger, if configured.
"""
if self._logger:
self._logger.log(level, message, extra={'context': context or {}})
|
def _log(self, level, message, context=None):
# type: (int, Text, Optional[dict]) -> None
"""
Sends a message to the instance's logger, if configured.
"""
if self._logger:
self._logger.log(level, message, extra={'context': context or {}})
|
[
"Sends",
"a",
"message",
"to",
"the",
"instance",
"s",
"logger",
"if",
"configured",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/adapter/__init__.py#L203-L209
|
[
"def",
"_log",
"(",
"self",
",",
"level",
",",
"message",
",",
"context",
"=",
"None",
")",
":",
"# type: (int, Text, Optional[dict]) -> None",
"if",
"self",
".",
"_logger",
":",
"self",
".",
"_logger",
".",
"log",
"(",
"level",
",",
"message",
",",
"extra",
"=",
"{",
"'context'",
":",
"context",
"or",
"{",
"}",
"}",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
HttpAdapter._send_http_request
|
Sends the actual HTTP request.
Split into its own method so that it can be mocked during unit
tests.
|
iota/adapter/__init__.py
|
def _send_http_request(self, url, payload, method='post', **kwargs):
# type: (Text, Optional[Text], Text, dict) -> Response
"""
Sends the actual HTTP request.
Split into its own method so that it can be mocked during unit
tests.
"""
kwargs.setdefault(
'timeout',
self.timeout if self.timeout else get_default_timeout(),
)
if self.authentication:
kwargs.setdefault('auth', auth.HTTPBasicAuth(*self.authentication))
self._log(
level=DEBUG,
message='Sending {method} to {url}: {payload!r}'.format(
method=method,
payload=payload,
url=url,
),
context={
'request_method': method,
'request_kwargs': kwargs,
'request_payload': payload,
'request_url': url,
},
)
response = request(method=method, url=url, data=payload, **kwargs)
self._log(
level=DEBUG,
message='Receiving {method} from {url}: {response!r}'.format(
method=method,
response=response.content,
url=url,
),
context={
'request_method': method,
'request_kwargs': kwargs,
'request_payload': payload,
'request_url': url,
'response_headers': response.headers,
'response_content': response.content,
},
)
return response
|
def _send_http_request(self, url, payload, method='post', **kwargs):
# type: (Text, Optional[Text], Text, dict) -> Response
"""
Sends the actual HTTP request.
Split into its own method so that it can be mocked during unit
tests.
"""
kwargs.setdefault(
'timeout',
self.timeout if self.timeout else get_default_timeout(),
)
if self.authentication:
kwargs.setdefault('auth', auth.HTTPBasicAuth(*self.authentication))
self._log(
level=DEBUG,
message='Sending {method} to {url}: {payload!r}'.format(
method=method,
payload=payload,
url=url,
),
context={
'request_method': method,
'request_kwargs': kwargs,
'request_payload': payload,
'request_url': url,
},
)
response = request(method=method, url=url, data=payload, **kwargs)
self._log(
level=DEBUG,
message='Receiving {method} from {url}: {response!r}'.format(
method=method,
response=response.content,
url=url,
),
context={
'request_method': method,
'request_kwargs': kwargs,
'request_payload': payload,
'request_url': url,
'response_headers': response.headers,
'response_content': response.content,
},
)
return response
|
[
"Sends",
"the",
"actual",
"HTTP",
"request",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/adapter/__init__.py#L311-L366
|
[
"def",
"_send_http_request",
"(",
"self",
",",
"url",
",",
"payload",
",",
"method",
"=",
"'post'",
",",
"*",
"*",
"kwargs",
")",
":",
"# type: (Text, Optional[Text], Text, dict) -> Response",
"kwargs",
".",
"setdefault",
"(",
"'timeout'",
",",
"self",
".",
"timeout",
"if",
"self",
".",
"timeout",
"else",
"get_default_timeout",
"(",
")",
",",
")",
"if",
"self",
".",
"authentication",
":",
"kwargs",
".",
"setdefault",
"(",
"'auth'",
",",
"auth",
".",
"HTTPBasicAuth",
"(",
"*",
"self",
".",
"authentication",
")",
")",
"self",
".",
"_log",
"(",
"level",
"=",
"DEBUG",
",",
"message",
"=",
"'Sending {method} to {url}: {payload!r}'",
".",
"format",
"(",
"method",
"=",
"method",
",",
"payload",
"=",
"payload",
",",
"url",
"=",
"url",
",",
")",
",",
"context",
"=",
"{",
"'request_method'",
":",
"method",
",",
"'request_kwargs'",
":",
"kwargs",
",",
"'request_payload'",
":",
"payload",
",",
"'request_url'",
":",
"url",
",",
"}",
",",
")",
"response",
"=",
"request",
"(",
"method",
"=",
"method",
",",
"url",
"=",
"url",
",",
"data",
"=",
"payload",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"_log",
"(",
"level",
"=",
"DEBUG",
",",
"message",
"=",
"'Receiving {method} from {url}: {response!r}'",
".",
"format",
"(",
"method",
"=",
"method",
",",
"response",
"=",
"response",
".",
"content",
",",
"url",
"=",
"url",
",",
")",
",",
"context",
"=",
"{",
"'request_method'",
":",
"method",
",",
"'request_kwargs'",
":",
"kwargs",
",",
"'request_payload'",
":",
"payload",
",",
"'request_url'",
":",
"url",
",",
"'response_headers'",
":",
"response",
".",
"headers",
",",
"'response_content'",
":",
"response",
".",
"content",
",",
"}",
",",
")",
"return",
"response"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
HttpAdapter._interpret_response
|
Interprets the HTTP response from the node.
:param response:
The response object received from
:py:meth:`_send_http_request`.
:param payload:
The request payload that was sent (used for debugging).
:param expected_status:
The response should match one of these status codes to be
considered valid.
|
iota/adapter/__init__.py
|
def _interpret_response(self, response, payload, expected_status):
# type: (Response, dict, Container[int]) -> dict
"""
Interprets the HTTP response from the node.
:param response:
The response object received from
:py:meth:`_send_http_request`.
:param payload:
The request payload that was sent (used for debugging).
:param expected_status:
The response should match one of these status codes to be
considered valid.
"""
raw_content = response.text
if not raw_content:
raise with_context(
exc=BadApiResponse(
'Empty {status} response from node.'.format(
status=response.status_code,
),
),
context={
'request': payload,
},
)
try:
decoded = json.loads(raw_content) # type: dict
# :bc: py2k doesn't have JSONDecodeError
except ValueError:
raise with_context(
exc=BadApiResponse(
'Non-JSON {status} response from node: '
'{raw_content}'.format(
status=response.status_code,
raw_content=raw_content,
)
),
context={
'request': payload,
'raw_response': raw_content,
},
)
if not isinstance(decoded, dict):
raise with_context(
exc=BadApiResponse(
'Malformed {status} response from node: {decoded!r}'.format(
status=response.status_code,
decoded=decoded,
),
),
context={
'request': payload,
'response': decoded,
},
)
if response.status_code in expected_status:
return decoded
error = None
try:
if response.status_code == codes['bad_request']:
error = decoded['error']
elif response.status_code == codes['internal_server_error']:
error = decoded['exception']
except KeyError:
pass
raise with_context(
exc=BadApiResponse(
'{status} response from node: {error}'.format(
error=error or decoded,
status=response.status_code,
),
),
context={
'request': payload,
'response': decoded,
},
)
|
def _interpret_response(self, response, payload, expected_status):
# type: (Response, dict, Container[int]) -> dict
"""
Interprets the HTTP response from the node.
:param response:
The response object received from
:py:meth:`_send_http_request`.
:param payload:
The request payload that was sent (used for debugging).
:param expected_status:
The response should match one of these status codes to be
considered valid.
"""
raw_content = response.text
if not raw_content:
raise with_context(
exc=BadApiResponse(
'Empty {status} response from node.'.format(
status=response.status_code,
),
),
context={
'request': payload,
},
)
try:
decoded = json.loads(raw_content) # type: dict
# :bc: py2k doesn't have JSONDecodeError
except ValueError:
raise with_context(
exc=BadApiResponse(
'Non-JSON {status} response from node: '
'{raw_content}'.format(
status=response.status_code,
raw_content=raw_content,
)
),
context={
'request': payload,
'raw_response': raw_content,
},
)
if not isinstance(decoded, dict):
raise with_context(
exc=BadApiResponse(
'Malformed {status} response from node: {decoded!r}'.format(
status=response.status_code,
decoded=decoded,
),
),
context={
'request': payload,
'response': decoded,
},
)
if response.status_code in expected_status:
return decoded
error = None
try:
if response.status_code == codes['bad_request']:
error = decoded['error']
elif response.status_code == codes['internal_server_error']:
error = decoded['exception']
except KeyError:
pass
raise with_context(
exc=BadApiResponse(
'{status} response from node: {error}'.format(
error=error or decoded,
status=response.status_code,
),
),
context={
'request': payload,
'response': decoded,
},
)
|
[
"Interprets",
"the",
"HTTP",
"response",
"from",
"the",
"node",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/adapter/__init__.py#L368-L456
|
[
"def",
"_interpret_response",
"(",
"self",
",",
"response",
",",
"payload",
",",
"expected_status",
")",
":",
"# type: (Response, dict, Container[int]) -> dict",
"raw_content",
"=",
"response",
".",
"text",
"if",
"not",
"raw_content",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"BadApiResponse",
"(",
"'Empty {status} response from node.'",
".",
"format",
"(",
"status",
"=",
"response",
".",
"status_code",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'request'",
":",
"payload",
",",
"}",
",",
")",
"try",
":",
"decoded",
"=",
"json",
".",
"loads",
"(",
"raw_content",
")",
"# type: dict",
"# :bc: py2k doesn't have JSONDecodeError",
"except",
"ValueError",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"BadApiResponse",
"(",
"'Non-JSON {status} response from node: '",
"'{raw_content}'",
".",
"format",
"(",
"status",
"=",
"response",
".",
"status_code",
",",
"raw_content",
"=",
"raw_content",
",",
")",
")",
",",
"context",
"=",
"{",
"'request'",
":",
"payload",
",",
"'raw_response'",
":",
"raw_content",
",",
"}",
",",
")",
"if",
"not",
"isinstance",
"(",
"decoded",
",",
"dict",
")",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"BadApiResponse",
"(",
"'Malformed {status} response from node: {decoded!r}'",
".",
"format",
"(",
"status",
"=",
"response",
".",
"status_code",
",",
"decoded",
"=",
"decoded",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'request'",
":",
"payload",
",",
"'response'",
":",
"decoded",
",",
"}",
",",
")",
"if",
"response",
".",
"status_code",
"in",
"expected_status",
":",
"return",
"decoded",
"error",
"=",
"None",
"try",
":",
"if",
"response",
".",
"status_code",
"==",
"codes",
"[",
"'bad_request'",
"]",
":",
"error",
"=",
"decoded",
"[",
"'error'",
"]",
"elif",
"response",
".",
"status_code",
"==",
"codes",
"[",
"'internal_server_error'",
"]",
":",
"error",
"=",
"decoded",
"[",
"'exception'",
"]",
"except",
"KeyError",
":",
"pass",
"raise",
"with_context",
"(",
"exc",
"=",
"BadApiResponse",
"(",
"'{status} response from node: {error}'",
".",
"format",
"(",
"error",
"=",
"error",
"or",
"decoded",
",",
"status",
"=",
"response",
".",
"status_code",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'request'",
":",
"payload",
",",
"'response'",
":",
"decoded",
",",
"}",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
MockAdapter.seed_response
|
Sets the response that the adapter will return for the specified
command.
You can seed multiple responses per command; the adapter will
put them into a FIFO queue. When a request comes in, the
adapter will pop the corresponding response off of the queue.
Example:
.. code-block:: python
adapter.seed_response('sayHello', {'message': 'Hi!'})
adapter.seed_response('sayHello', {'message': 'Hello!'})
adapter.send_request({'command': 'sayHello'})
# {'message': 'Hi!'}
adapter.send_request({'command': 'sayHello'})
# {'message': 'Hello!'}
|
iota/adapter/__init__.py
|
def seed_response(self, command, response):
# type: (Text, dict) -> MockAdapter
"""
Sets the response that the adapter will return for the specified
command.
You can seed multiple responses per command; the adapter will
put them into a FIFO queue. When a request comes in, the
adapter will pop the corresponding response off of the queue.
Example:
.. code-block:: python
adapter.seed_response('sayHello', {'message': 'Hi!'})
adapter.seed_response('sayHello', {'message': 'Hello!'})
adapter.send_request({'command': 'sayHello'})
# {'message': 'Hi!'}
adapter.send_request({'command': 'sayHello'})
# {'message': 'Hello!'}
"""
if command not in self.responses:
self.responses[command] = deque()
self.responses[command].append(response)
return self
|
def seed_response(self, command, response):
# type: (Text, dict) -> MockAdapter
"""
Sets the response that the adapter will return for the specified
command.
You can seed multiple responses per command; the adapter will
put them into a FIFO queue. When a request comes in, the
adapter will pop the corresponding response off of the queue.
Example:
.. code-block:: python
adapter.seed_response('sayHello', {'message': 'Hi!'})
adapter.seed_response('sayHello', {'message': 'Hello!'})
adapter.send_request({'command': 'sayHello'})
# {'message': 'Hi!'}
adapter.send_request({'command': 'sayHello'})
# {'message': 'Hello!'}
"""
if command not in self.responses:
self.responses[command] = deque()
self.responses[command].append(response)
return self
|
[
"Sets",
"the",
"response",
"that",
"the",
"adapter",
"will",
"return",
"for",
"the",
"specified",
"command",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/adapter/__init__.py#L483-L510
|
[
"def",
"seed_response",
"(",
"self",
",",
"command",
",",
"response",
")",
":",
"# type: (Text, dict) -> MockAdapter",
"if",
"command",
"not",
"in",
"self",
".",
"responses",
":",
"self",
".",
"responses",
"[",
"command",
"]",
"=",
"deque",
"(",
")",
"self",
".",
"responses",
"[",
"command",
"]",
".",
"append",
"(",
"response",
")",
"return",
"self"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
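Since ``MockAdapter`` stands in for a live node in unit tests, a sketch along the lines of the docstring's own example, showing the FIFO behaviour. As in the docstring, ``sayHello`` is a hypothetical command, not part of the real node API.

from iota.adapter import MockAdapter

adapter = MockAdapter()

# Queue two responses for the same (hypothetical) command; they are
# returned in FIFO order.
adapter.seed_response('sayHello', {'message': 'Hi!'})
adapter.seed_response('sayHello', {'message': 'Hello!'})

assert adapter.send_request({'command': 'sayHello'}) == {'message': 'Hi!'}
assert adapter.send_request({'command': 'sayHello'}) == {'message': 'Hello!'}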
test
|
MultisigAddressBuilder.add_digest
|
Absorbs a digest into the sponge.
.. important::
Keep track of the order that digests are added!
To spend inputs from a multisig address, you must provide
the private keys in the same order!
References:
- https://github.com/iotaledger/wiki/blob/master/multisigs.md#spending-inputs
|
iota/multisig/crypto/addresses.py
|
def add_digest(self, digest):
# type: (Digest) -> None
"""
Absorbs a digest into the sponge.
.. important::
Keep track of the order that digests are added!
To spend inputs from a multisig address, you must provide
the private keys in the same order!
References:
- https://github.com/iotaledger/wiki/blob/master/multisigs.md#spending-inputs
"""
if self._address:
raise ValueError('Cannot add digests once an address is extracted.')
self._sponge.absorb(digest.as_trits())
self._digests.append(digest)
|
def add_digest(self, digest):
# type: (Digest) -> None
"""
Absorbs a digest into the sponge.
.. important::
Keep track of the order that digests are added!
To spend inputs from a multisig address, you must provide
the private keys in the same order!
References:
- https://github.com/iotaledger/wiki/blob/master/multisigs.md#spending-inputs
"""
if self._address:
raise ValueError('Cannot add digests once an address is extracted.')
self._sponge.absorb(digest.as_trits())
self._digests.append(digest)
|
[
"Absorbs",
"a",
"digest",
"into",
"the",
"sponge",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/multisig/crypto/addresses.py#L47-L66
|
[
"def",
"add_digest",
"(",
"self",
",",
"digest",
")",
":",
"# type: (Digest) -> None",
"if",
"self",
".",
"_address",
":",
"raise",
"ValueError",
"(",
"'Cannot add digests once an address is extracted.'",
")",
"self",
".",
"_sponge",
".",
"absorb",
"(",
"digest",
".",
"as_trits",
"(",
")",
")",
"self",
".",
"_digests",
".",
"append",
"(",
"digest",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
MultisigAddressBuilder.get_address
|
Returns the new multisig address.
Note that you can continue to add digests after extracting an
address; the next address will use *all* of the digests that
have been added so far.
|
iota/multisig/crypto/addresses.py
|
def get_address(self):
# type: () -> MultisigAddress
"""
Returns the new multisig address.
Note that you can continue to add digests after extracting an
address; the next address will use *all* of the digests that
have been added so far.
"""
if not self._digests:
raise ValueError(
'Must call ``add_digest`` at least once '
'before calling ``get_address``.',
)
if not self._address:
address_trits = [0] * HASH_LENGTH
self._sponge.squeeze(address_trits)
self._address = MultisigAddress.from_trits(
address_trits,
digests=self._digests[:],
)
return self._address
|
def get_address(self):
# type: () -> MultisigAddress
"""
Returns the new multisig address.
Note that you can continue to add digests after extracting an
address; the next address will use *all* of the digests that
have been added so far.
"""
if not self._digests:
raise ValueError(
'Must call ``add_digest`` at least once '
'before calling ``get_address``.',
)
if not self._address:
address_trits = [0] * HASH_LENGTH
self._sponge.squeeze(address_trits)
self._address = MultisigAddress.from_trits(
address_trits,
digests=self._digests[:],
)
return self._address
|
[
"Returns",
"the",
"new",
"multisig",
"address",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/multisig/crypto/addresses.py#L68-L92
|
[
"def",
"get_address",
"(",
"self",
")",
":",
"# type: () -> MultisigAddress",
"if",
"not",
"self",
".",
"_digests",
":",
"raise",
"ValueError",
"(",
"'Must call ``add_digest`` at least once '",
"'before calling ``get_address``.'",
",",
")",
"if",
"not",
"self",
".",
"_address",
":",
"address_trits",
"=",
"[",
"0",
"]",
"*",
"HASH_LENGTH",
"self",
".",
"_sponge",
".",
"squeeze",
"(",
"address_trits",
")",
"self",
".",
"_address",
"=",
"MultisigAddress",
".",
"from_trits",
"(",
"address_trits",
",",
"digests",
"=",
"self",
".",
"_digests",
"[",
":",
"]",
",",
")",
"return",
"self",
".",
"_address"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
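A hedged sketch of the multisig address flow shown by the two methods above: digests are absorbed in order, then the address is extracted. The all-9s ``Digest`` values are placeholders standing in for real digests derived from each co-signer's private key (real digests also carry a key index).

from iota.crypto.types import Digest
from iota.multisig.crypto.addresses import MultisigAddressBuilder

# Placeholder digests (all-9s trytes); in practice each co-signer
# derives a real digest from their own private key.
digest_alice = Digest(b'9' * 81)
digest_bob = Digest(b'9' * 81)

builder = MultisigAddressBuilder()

# Order matters: inputs must later be signed in the same order.
builder.add_digest(digest_alice)
builder.add_digest(digest_bob)

multisig_address = builder.get_address()
print(multisig_address)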
test
|
AddressGenerator.get_addresses
|
Generates and returns one or more addresses at the specified
index(es).
This is a one-time operation; if you want to create lots of
addresses across multiple contexts, consider invoking
:py:meth:`create_iterator` and sharing the resulting generator
object instead.
Warning: This method may take awhile to run if the starting
index and/or the number of requested addresses is a large
number!
:param start:
Starting index.
Must be >= 0.
:param count:
Number of addresses to generate.
Must be > 0.
:param step:
Number of indexes to advance after each address.
This may be any non-zero (positive or negative) integer.
:return:
Always returns a list, even if only one address is generated.
The returned list will contain ``count`` addresses, except
when ``step * count < start`` (only applies when ``step`` is
negative).
|
iota/crypto/addresses.py
|
def get_addresses(self, start, count=1, step=1):
# type: (int, int, int) -> List[Address]
"""
Generates and returns one or more addresses at the specified
index(es).
This is a one-time operation; if you want to create lots of
addresses across multiple contexts, consider invoking
:py:meth:`create_iterator` and sharing the resulting generator
object instead.
Warning: This method may take awhile to run if the starting
index and/or the number of requested addresses is a large
number!
:param start:
Starting index.
Must be >= 0.
:param count:
Number of addresses to generate.
Must be > 0.
:param step:
Number of indexes to advance after each address.
This may be any non-zero (positive or negative) integer.
:return:
Always returns a list, even if only one address is generated.
The returned list will contain ``count`` addresses, except
when ``step * count < start`` (only applies when ``step`` is
negative).
"""
if count < 1:
raise with_context(
exc=ValueError('``count`` must be positive.'),
context={
'start': start,
'count': count,
'step': step,
},
)
if not step:
raise with_context(
exc=ValueError('``step`` must not be zero.'),
context={
'start': start,
'count': count,
'step': step,
},
)
generator = self.create_iterator(start, step)
addresses = []
for _ in range(count):
try:
next_addy = next(generator)
except StopIteration:
break
else:
addresses.append(next_addy)
return addresses
|
def get_addresses(self, start, count=1, step=1):
# type: (int, int, int) -> List[Address]
"""
Generates and returns one or more addresses at the specified
index(es).
This is a one-time operation; if you want to create lots of
addresses across multiple contexts, consider invoking
:py:meth:`create_iterator` and sharing the resulting generator
object instead.
Warning: This method may take awhile to run if the starting
index and/or the number of requested addresses is a large
number!
:param start:
Starting index.
Must be >= 0.
:param count:
Number of addresses to generate.
Must be > 0.
:param step:
Number of indexes to advance after each address.
This may be any non-zero (positive or negative) integer.
:return:
Always returns a list, even if only one address is generated.
The returned list will contain ``count`` addresses, except
when ``step * count < start`` (only applies when ``step`` is
negative).
"""
if count < 1:
raise with_context(
exc=ValueError('``count`` must be positive.'),
context={
'start': start,
'count': count,
'step': step,
},
)
if not step:
raise with_context(
exc=ValueError('``step`` must not be zero.'),
context={
'start': start,
'count': count,
'step': step,
},
)
generator = self.create_iterator(start, step)
addresses = []
for _ in range(count):
try:
next_addy = next(generator)
except StopIteration:
break
else:
addresses.append(next_addy)
return addresses
|
[
"Generates",
"and",
"returns",
"one",
"or",
"more",
"addresses",
"at",
"the",
"specified",
"index",
"(",
"es",
")",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/addresses.py#L65-L132
|
[
"def",
"get_addresses",
"(",
"self",
",",
"start",
",",
"count",
"=",
"1",
",",
"step",
"=",
"1",
")",
":",
"# type: (int, int, int) -> List[Address]",
"if",
"count",
"<",
"1",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'``count`` must be positive.'",
")",
",",
"context",
"=",
"{",
"'start'",
":",
"start",
",",
"'count'",
":",
"count",
",",
"'step'",
":",
"step",
",",
"}",
",",
")",
"if",
"not",
"step",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'``step`` must not be zero.'",
")",
",",
"context",
"=",
"{",
"'start'",
":",
"start",
",",
"'count'",
":",
"count",
",",
"'step'",
":",
"step",
",",
"}",
",",
")",
"generator",
"=",
"self",
".",
"create_iterator",
"(",
"start",
",",
"step",
")",
"addresses",
"=",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"count",
")",
":",
"try",
":",
"next_addy",
"=",
"next",
"(",
"generator",
")",
"except",
"StopIteration",
":",
"break",
"else",
":",
"addresses",
".",
"append",
"(",
"next_addy",
")",
"return",
"addresses"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
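An offline sketch of ``get_addresses``; the all-9s seed is a placeholder assumption. Address generation happens locally, so no node connection is needed.

from iota.crypto.addresses import AddressGenerator
from iota.crypto.types import Seed

# Placeholder all-9s seed (assumption, not from the source).
generator = AddressGenerator(Seed(b'9' * 81), security_level=2)

# Three consecutive addresses starting at key index 10.
addresses = generator.get_addresses(start=10, count=3)
for address in addresses:
    print(address.key_index, address)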
test
|
AddressGenerator.create_iterator
|
Creates an iterator that can be used to progressively generate new
addresses.
:param start:
Starting index.
Warning: This method may take awhile to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each address.
Warning: The generator may take awhile to advance between
iterations if ``step`` is a large number!
|
iota/crypto/addresses.py
|
def create_iterator(self, start=0, step=1):
# type: (int, int) -> Generator[Address, None, None]
"""
Creates an iterator that can be used to progressively generate new
addresses.
:param start:
Starting index.
Warning: This method may take awhile to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each address.
Warning: The generator may take awhile to advance between
iterations if ``step`` is a large number!
"""
key_iterator = (
KeyGenerator(self.seed).create_iterator(
start,
step,
self.security_level,
)
)
while True:
yield self._generate_address(key_iterator)
|
def create_iterator(self, start=0, step=1):
# type: (int, int) -> Generator[Address, None, None]
"""
Creates an iterator that can be used to progressively generate new
addresses.
:param start:
Starting index.
Warning: This method may take awhile to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each address.
Warning: The generator may take awhile to advance between
iterations if ``step`` is a large number!
"""
key_iterator = (
KeyGenerator(self.seed).create_iterator(
start,
step,
self.security_level,
)
)
while True:
yield self._generate_address(key_iterator)
|
[
"Creates",
"an",
"iterator",
"that",
"can",
"be",
"used",
"to",
"progressively",
"generate",
"new",
"addresses",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/addresses.py#L134-L161
|
[
"def",
"create_iterator",
"(",
"self",
",",
"start",
"=",
"0",
",",
"step",
"=",
"1",
")",
":",
"# type: (int, int) -> Generator[Address, None, None]",
"key_iterator",
"=",
"(",
"KeyGenerator",
"(",
"self",
".",
"seed",
")",
".",
"create_iterator",
"(",
"start",
",",
"step",
",",
"self",
".",
"security_level",
",",
")",
")",
"while",
"True",
":",
"yield",
"self",
".",
"_generate_address",
"(",
"key_iterator",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
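A sketch showing how the infinite iterator from ``create_iterator`` might be consumed with ``itertools.islice``; the seed is a placeholder assumption.

from itertools import islice

from iota.crypto.addresses import AddressGenerator
from iota.crypto.types import Seed

# Placeholder all-9s seed (assumption, not from the source).
generator = AddressGenerator(Seed(b'9' * 81))

# The iterator never terminates on its own; islice() takes just the
# first few addresses.  Sharing one iterator avoids re-deriving keys
# when addresses are needed in several places.
addresses = list(islice(generator.create_iterator(start=0, step=1), 5))
print(len(addresses), 'addresses generated')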
test
|
AddressGenerator.address_from_digest
|
Generates an address from a private key digest.
|
iota/crypto/addresses.py
|
def address_from_digest(digest):
# type: (Digest) -> Address
"""
Generates an address from a private key digest.
"""
address_trits = [0] * (Address.LEN * TRITS_PER_TRYTE) # type: List[int]
sponge = Kerl()
sponge.absorb(digest.as_trits())
sponge.squeeze(address_trits)
return Address.from_trits(
trits=address_trits,
key_index=digest.key_index,
security_level=digest.security_level,
)
|
def address_from_digest(digest):
# type: (Digest) -> Address
"""
Generates an address from a private key digest.
"""
address_trits = [0] * (Address.LEN * TRITS_PER_TRYTE) # type: List[int]
sponge = Kerl()
sponge.absorb(digest.as_trits())
sponge.squeeze(address_trits)
return Address.from_trits(
trits=address_trits,
key_index=digest.key_index,
security_level=digest.security_level,
)
|
[
"Generates",
"an",
"address",
"from",
"a",
"private",
"key",
"digest",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/addresses.py#L164-L180
|
[
"def",
"address_from_digest",
"(",
"digest",
")",
":",
"# type: (Digest) -> Address",
"address_trits",
"=",
"[",
"0",
"]",
"*",
"(",
"Address",
".",
"LEN",
"*",
"TRITS_PER_TRYTE",
")",
"# type: List[int]",
"sponge",
"=",
"Kerl",
"(",
")",
"sponge",
".",
"absorb",
"(",
"digest",
".",
"as_trits",
"(",
")",
")",
"sponge",
".",
"squeeze",
"(",
"address_trits",
")",
"return",
"Address",
".",
"from_trits",
"(",
"trits",
"=",
"address_trits",
",",
"key_index",
"=",
"digest",
".",
"key_index",
",",
"security_level",
"=",
"digest",
".",
"security_level",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
AddressGenerator._generate_address
|
Generates a new address.
Used in the event of a cache miss.
|
iota/crypto/addresses.py
|
def _generate_address(self, key_iterator):
# type: (KeyIterator) -> Address
"""
Generates a new address.
Used in the event of a cache miss.
"""
if self.checksum:
return (
self.address_from_digest(
digest=self._get_digest(key_iterator),
).with_valid_checksum()
)
else:
return self.address_from_digest(self._get_digest(key_iterator))
|
def _generate_address(self, key_iterator):
# type: (KeyIterator) -> Address
"""
Generates a new address.
Used in the event of a cache miss.
"""
if self.checksum:
return (
self.address_from_digest(
digest=self._get_digest(key_iterator),
).with_valid_checksum()
)
else:
return self.address_from_digest(self._get_digest(key_iterator))
|
[
"Generates",
"a",
"new",
"address",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/crypto/addresses.py#L182-L196
|
[
"def",
"_generate_address",
"(",
"self",
",",
"key_iterator",
")",
":",
"# type: (KeyIterator) -> Address",
"if",
"self",
".",
"checksum",
":",
"return",
"(",
"self",
".",
"address_from_digest",
"(",
"digest",
"=",
"self",
".",
"_get_digest",
"(",
"key_iterator",
")",
",",
")",
".",
"with_valid_checksum",
"(",
")",
")",
"else",
":",
"return",
"self",
".",
"address_from_digest",
"(",
"self",
".",
"_get_digest",
"(",
"key_iterator",
")",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
find_transaction_objects
|
Finds transactions matching the specified criteria, fetches the
corresponding trytes and converts them into Transaction objects.
|
iota/commands/extended/utils.py
|
def find_transaction_objects(adapter, **kwargs):
# type: (BaseAdapter, **Iterable) -> List[Transaction]
"""
Finds transactions matching the specified criteria, fetches the
corresponding trytes and converts them into Transaction objects.
"""
ft_response = FindTransactionsCommand(adapter)(**kwargs)
hashes = ft_response['hashes']
if hashes:
gt_response = GetTrytesCommand(adapter)(hashes=hashes)
return list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return []
|
def find_transaction_objects(adapter, **kwargs):
# type: (BaseAdapter, **Iterable) -> List[Transaction]
"""
Finds transactions matching the specified criteria, fetches the
corresponding trytes and converts them into Transaction objects.
"""
ft_response = FindTransactionsCommand(adapter)(**kwargs)
hashes = ft_response['hashes']
if hashes:
gt_response = GetTrytesCommand(adapter)(hashes=hashes)
return list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return []
|
[
"Finds",
"transactions",
"matching",
"the",
"specified",
"criteria",
"fetches",
"the",
"corresponding",
"trytes",
"and",
"converts",
"them",
"into",
"Transaction",
"objects",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/commands/extended/utils.py#L19-L37
|
[
"def",
"find_transaction_objects",
"(",
"adapter",
",",
"*",
"*",
"kwargs",
")",
":",
"# type: (BaseAdapter, **Iterable) -> List[Transaction]",
"ft_response",
"=",
"FindTransactionsCommand",
"(",
"adapter",
")",
"(",
"*",
"*",
"kwargs",
")",
"hashes",
"=",
"ft_response",
"[",
"'hashes'",
"]",
"if",
"hashes",
":",
"gt_response",
"=",
"GetTrytesCommand",
"(",
"adapter",
")",
"(",
"hashes",
"=",
"hashes",
")",
"return",
"list",
"(",
"map",
"(",
"Transaction",
".",
"from_tryte_string",
",",
"gt_response",
".",
"get",
"(",
"'trytes'",
")",
"or",
"[",
"]",
",",
")",
")",
"# type: List[Transaction]",
"return",
"[",
"]"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
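A hedged sketch of driving the helper above without a live node. MockAdapter and its seed_response method are assumed from PyOTA's testing utilities (they are not shown in this record); the canned response and the EXAMPLE tag are made up for illustration.

from iota import Tag
from iota.adapter import MockAdapter  # assumed testing adapter
from iota.commands.extended.utils import find_transaction_objects

adapter = MockAdapter()
# Pretend the node found no transactions for this tag.
adapter.seed_response('findTransactions', {'hashes': []})

transactions = find_transaction_objects(adapter, tags=[Tag(b'EXAMPLE')])
assert transactions == []  # empty hashes short-circuits before getTrytes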
test
|
iter_used_addresses
|
Scans the Tangle for used addresses.
This is basically the opposite of invoking ``getNewAddresses`` with
``stop=None``.
|
iota/commands/extended/utils.py
|
def iter_used_addresses(
adapter, # type: BaseAdapter
seed, # type: Seed
start, # type: int
security_level=None, # type: Optional[int]
):
# type: (...) -> Generator[Tuple[Address, List[TransactionHash]], None, None]
"""
Scans the Tangle for used addresses.
This is basically the opposite of invoking ``getNewAddresses`` with
``stop=None``.
"""
if security_level is None:
security_level = AddressGenerator.DEFAULT_SECURITY_LEVEL
ft_command = FindTransactionsCommand(adapter)
for addy in AddressGenerator(seed, security_level).create_iterator(start):
ft_response = ft_command(addresses=[addy])
if ft_response['hashes']:
yield addy, ft_response['hashes']
else:
break
# Reset the command so that we can call it again.
ft_command.reset()
|
def iter_used_addresses(
adapter, # type: BaseAdapter
seed, # type: Seed
start, # type: int
security_level=None, # type: Optional[int]
):
# type: (...) -> Generator[Tuple[Address, List[TransactionHash]], None, None]
"""
Scans the Tangle for used addresses.
This is basically the opposite of invoking ``getNewAddresses`` with
``stop=None``.
"""
if security_level is None:
security_level = AddressGenerator.DEFAULT_SECURITY_LEVEL
ft_command = FindTransactionsCommand(adapter)
for addy in AddressGenerator(seed, security_level).create_iterator(start):
ft_response = ft_command(addresses=[addy])
if ft_response['hashes']:
yield addy, ft_response['hashes']
else:
break
# Reset the command so that we can call it again.
ft_command.reset()
|
[
"Scans",
"the",
"Tangle",
"for",
"used",
"addresses",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/commands/extended/utils.py#L40-L67
|
[
"def",
"iter_used_addresses",
"(",
"adapter",
",",
"# type: BaseAdapter",
"seed",
",",
"# type: Seed",
"start",
",",
"# type: int",
"security_level",
"=",
"None",
",",
"# type: Optional[int]",
")",
":",
"# type: (...) -> Generator[Tuple[Address, List[TransactionHash]], None, None]",
"if",
"security_level",
"is",
"None",
":",
"security_level",
"=",
"AddressGenerator",
".",
"DEFAULT_SECURITY_LEVEL",
"ft_command",
"=",
"FindTransactionsCommand",
"(",
"adapter",
")",
"for",
"addy",
"in",
"AddressGenerator",
"(",
"seed",
",",
"security_level",
")",
".",
"create_iterator",
"(",
"start",
")",
":",
"ft_response",
"=",
"ft_command",
"(",
"addresses",
"=",
"[",
"addy",
"]",
")",
"if",
"ft_response",
"[",
"'hashes'",
"]",
":",
"yield",
"addy",
",",
"ft_response",
"[",
"'hashes'",
"]",
"else",
":",
"break",
"# Reset the command so that we can call it again.",
"ft_command",
".",
"reset",
"(",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
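The generator above is normally consumed with a for loop or list(); a sketch under the same MockAdapter assumption as before, with Seed.random() assumed to produce a fresh 81-tryte seed.

from iota import Seed
from iota.adapter import MockAdapter  # assumed testing adapter
from iota.commands.extended.utils import iter_used_addresses

adapter = MockAdapter()
# The very first derived address has no transactions, so iteration stops at once.
adapter.seed_response('findTransactions', {'hashes': []})

used = list(iter_used_addresses(adapter, Seed.random(), start=0))
assert used == []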
test
|
get_bundles_from_transaction_hashes
|
Given a set of transaction hashes, returns the corresponding bundles,
sorted by tail transaction timestamp.
|
iota/commands/extended/utils.py
|
def get_bundles_from_transaction_hashes(
adapter,
transaction_hashes,
inclusion_states,
):
# type: (BaseAdapter, Iterable[TransactionHash], bool) -> List[Bundle]
"""
Given a set of transaction hashes, returns the corresponding bundles,
sorted by tail transaction timestamp.
"""
transaction_hashes = list(transaction_hashes)
if not transaction_hashes:
return []
my_bundles = [] # type: List[Bundle]
# Sort transactions into tail and non-tail.
tail_transaction_hashes = set()
non_tail_bundle_hashes = set()
gt_response = GetTrytesCommand(adapter)(hashes=transaction_hashes)
all_transactions = list(map(
Transaction.from_tryte_string,
gt_response['trytes'],
)) # type: List[Transaction]
for txn in all_transactions:
if txn.is_tail:
tail_transaction_hashes.add(txn.hash)
else:
# Capture the bundle ID instead of the transaction hash so
# that we can query the node to find the tail transaction
# for that bundle.
non_tail_bundle_hashes.add(txn.bundle_hash)
if non_tail_bundle_hashes:
for txn in find_transaction_objects(
adapter=adapter,
bundles=list(non_tail_bundle_hashes),
):
if txn.is_tail:
if txn.hash not in tail_transaction_hashes:
all_transactions.append(txn)
tail_transaction_hashes.add(txn.hash)
# Filter out all non-tail transactions.
tail_transactions = [
txn
for txn in all_transactions
if txn.hash in tail_transaction_hashes
]
# Attach inclusion states, if requested.
if inclusion_states:
gli_response = GetLatestInclusionCommand(adapter)(
hashes=list(tail_transaction_hashes),
)
for txn in tail_transactions:
txn.is_confirmed = gli_response['states'].get(txn.hash)
# Find the bundles for each transaction.
for txn in tail_transactions:
gb_response = GetBundlesCommand(adapter)(transaction=txn.hash)
txn_bundles = gb_response['bundles'] # type: List[Bundle]
if inclusion_states:
for bundle in txn_bundles:
bundle.is_confirmed = txn.is_confirmed
my_bundles.extend(txn_bundles)
return list(sorted(
my_bundles,
key=lambda bundle_: bundle_.tail_transaction.timestamp,
))
|
def get_bundles_from_transaction_hashes(
adapter,
transaction_hashes,
inclusion_states,
):
# type: (BaseAdapter, Iterable[TransactionHash], bool) -> List[Bundle]
"""
Given a set of transaction hashes, returns the corresponding bundles,
sorted by tail transaction timestamp.
"""
transaction_hashes = list(transaction_hashes)
if not transaction_hashes:
return []
my_bundles = [] # type: List[Bundle]
# Sort transactions into tail and non-tail.
tail_transaction_hashes = set()
non_tail_bundle_hashes = set()
gt_response = GetTrytesCommand(adapter)(hashes=transaction_hashes)
all_transactions = list(map(
Transaction.from_tryte_string,
gt_response['trytes'],
)) # type: List[Transaction]
for txn in all_transactions:
if txn.is_tail:
tail_transaction_hashes.add(txn.hash)
else:
# Capture the bundle ID instead of the transaction hash so
# that we can query the node to find the tail transaction
# for that bundle.
non_tail_bundle_hashes.add(txn.bundle_hash)
if non_tail_bundle_hashes:
for txn in find_transaction_objects(
adapter=adapter,
bundles=list(non_tail_bundle_hashes),
):
if txn.is_tail:
if txn.hash not in tail_transaction_hashes:
all_transactions.append(txn)
tail_transaction_hashes.add(txn.hash)
# Filter out all non-tail transactions.
tail_transactions = [
txn
for txn in all_transactions
if txn.hash in tail_transaction_hashes
]
# Attach inclusion states, if requested.
if inclusion_states:
gli_response = GetLatestInclusionCommand(adapter)(
hashes=list(tail_transaction_hashes),
)
for txn in tail_transactions:
txn.is_confirmed = gli_response['states'].get(txn.hash)
# Find the bundles for each transaction.
for txn in tail_transactions:
gb_response = GetBundlesCommand(adapter)(transaction=txn.hash)
txn_bundles = gb_response['bundles'] # type: List[Bundle]
if inclusion_states:
for bundle in txn_bundles:
bundle.is_confirmed = txn.is_confirmed
my_bundles.extend(txn_bundles)
return list(sorted(
my_bundles,
key=lambda bundle_: bundle_.tail_transaction.timestamp,
))
|
[
"Given",
"a",
"set",
"of",
"transaction",
"hashes",
"returns",
"the",
"corresponding",
"bundles",
"sorted",
"by",
"tail",
"transaction",
"timestamp",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/commands/extended/utils.py#L70-L145
|
[
"def",
"get_bundles_from_transaction_hashes",
"(",
"adapter",
",",
"transaction_hashes",
",",
"inclusion_states",
",",
")",
":",
"# type: (BaseAdapter, Iterable[TransactionHash], bool) -> List[Bundle]",
"transaction_hashes",
"=",
"list",
"(",
"transaction_hashes",
")",
"if",
"not",
"transaction_hashes",
":",
"return",
"[",
"]",
"my_bundles",
"=",
"[",
"]",
"# type: List[Bundle]",
"# Sort transactions into tail and non-tail.",
"tail_transaction_hashes",
"=",
"set",
"(",
")",
"non_tail_bundle_hashes",
"=",
"set",
"(",
")",
"gt_response",
"=",
"GetTrytesCommand",
"(",
"adapter",
")",
"(",
"hashes",
"=",
"transaction_hashes",
")",
"all_transactions",
"=",
"list",
"(",
"map",
"(",
"Transaction",
".",
"from_tryte_string",
",",
"gt_response",
"[",
"'trytes'",
"]",
",",
")",
")",
"# type: List[Transaction]",
"for",
"txn",
"in",
"all_transactions",
":",
"if",
"txn",
".",
"is_tail",
":",
"tail_transaction_hashes",
".",
"add",
"(",
"txn",
".",
"hash",
")",
"else",
":",
"# Capture the bundle ID instead of the transaction hash so",
"# that we can query the node to find the tail transaction",
"# for that bundle.",
"non_tail_bundle_hashes",
".",
"add",
"(",
"txn",
".",
"bundle_hash",
")",
"if",
"non_tail_bundle_hashes",
":",
"for",
"txn",
"in",
"find_transaction_objects",
"(",
"adapter",
"=",
"adapter",
",",
"bundles",
"=",
"list",
"(",
"non_tail_bundle_hashes",
")",
",",
")",
":",
"if",
"txn",
".",
"is_tail",
":",
"if",
"txn",
".",
"hash",
"not",
"in",
"tail_transaction_hashes",
":",
"all_transactions",
".",
"append",
"(",
"txn",
")",
"tail_transaction_hashes",
".",
"add",
"(",
"txn",
".",
"hash",
")",
"# Filter out all non-tail transactions.",
"tail_transactions",
"=",
"[",
"txn",
"for",
"txn",
"in",
"all_transactions",
"if",
"txn",
".",
"hash",
"in",
"tail_transaction_hashes",
"]",
"# Attach inclusion states, if requested.",
"if",
"inclusion_states",
":",
"gli_response",
"=",
"GetLatestInclusionCommand",
"(",
"adapter",
")",
"(",
"hashes",
"=",
"list",
"(",
"tail_transaction_hashes",
")",
",",
")",
"for",
"txn",
"in",
"tail_transactions",
":",
"txn",
".",
"is_confirmed",
"=",
"gli_response",
"[",
"'states'",
"]",
".",
"get",
"(",
"txn",
".",
"hash",
")",
"# Find the bundles for each transaction.",
"for",
"txn",
"in",
"tail_transactions",
":",
"gb_response",
"=",
"GetBundlesCommand",
"(",
"adapter",
")",
"(",
"transaction",
"=",
"txn",
".",
"hash",
")",
"txn_bundles",
"=",
"gb_response",
"[",
"'bundles'",
"]",
"# type: List[Bundle]",
"if",
"inclusion_states",
":",
"for",
"bundle",
"in",
"txn_bundles",
":",
"bundle",
".",
"is_confirmed",
"=",
"txn",
".",
"is_confirmed",
"my_bundles",
".",
"extend",
"(",
"txn_bundles",
")",
"return",
"list",
"(",
"sorted",
"(",
"my_bundles",
",",
"key",
"=",
"lambda",
"bundle_",
":",
"bundle_",
".",
"tail_transaction",
".",
"timestamp",
",",
")",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
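A small sketch of the early-return path above: with no transaction hashes the helper never touches the node, so the placeholder URL is never contacted. HttpAdapter is assumed to be PyOTA's standard HTTP adapter; the utility import path is the one given in the record.

from iota.adapter import HttpAdapter  # assumed standard HTTP adapter
from iota.commands.extended.utils import get_bundles_from_transaction_hashes

adapter = HttpAdapter('http://localhost:14265')  # placeholder node URL

bundles = get_bundles_from_transaction_hashes(
    adapter=adapter,
    transaction_hashes=[],
    inclusion_states=False,
)
assert bundles == []  # empty input short-circuits before any API call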
test
|
ProposedMultisigBundle.add_inputs
|
Adds inputs to spend in the bundle.
Note that each input may require multiple transactions, in order to
hold the entire signature.
:param inputs:
MultisigAddresses to use as the inputs for this bundle.
Note: at this time, only a single multisig input is supported.
|
iota/multisig/transaction.py
|
def add_inputs(self, inputs):
# type: (Iterable[MultisigAddress]) -> None
"""
Adds inputs to spend in the bundle.
Note that each input may require multiple transactions, in order to
hold the entire signature.
:param inputs:
MultisigAddresses to use as the inputs for this bundle.
Note: at this time, only a single multisig input is supported.
"""
if self.hash:
raise RuntimeError('Bundle is already finalized.')
count = 0
for addy in inputs:
if count > 0:
raise ValueError(
'{cls} only supports 1 input.'.format(cls=type(self).__name__),
)
if not isinstance(addy, MultisigAddress):
raise with_context(
exc =
TypeError(
'Incorrect input type for {cls} '
'(expected {expected}, actual {actual}).'.format(
actual = type(addy).__name__,
cls = type(self).__name__,
expected = MultisigAddress.__name__,
),
),
context = {
'actual_input': addy,
},
)
security_level = addy.security_level
if security_level < 1:
raise with_context(
exc =
ValueError(
'Unable to determine security level for {type} '
'(is ``digests`` populated correctly?).'.format(
type = type(addy).__name__,
),
),
context = {
'actual_input': addy,
'security_level': security_level,
},
)
if not addy.balance:
raise with_context(
exc =
ValueError(
'Cannot add input with empty/unknown balance to {type} '
'(use ``Iota.get_balances`` to get balance first).'.format(
type = type(self).__name__,
),
),
context = {
'actual_input': addy,
},
)
self._create_input_transactions(addy)
count += 1
|
def add_inputs(self, inputs):
# type: (Iterable[MultisigAddress]) -> None
"""
Adds inputs to spend in the bundle.
Note that each input may require multiple transactions, in order to
hold the entire signature.
:param inputs:
MultisigAddresses to use as the inputs for this bundle.
Note: at this time, only a single multisig input is supported.
"""
if self.hash:
raise RuntimeError('Bundle is already finalized.')
count = 0
for addy in inputs:
if count > 0:
raise ValueError(
'{cls} only supports 1 input.'.format(cls=type(self).__name__),
)
if not isinstance(addy, MultisigAddress):
raise with_context(
exc =
TypeError(
'Incorrect input type for {cls} '
'(expected {expected}, actual {actual}).'.format(
actual = type(addy).__name__,
cls = type(self).__name__,
expected = MultisigAddress.__name__,
),
),
context = {
'actual_input': addy,
},
)
security_level = addy.security_level
if security_level < 1:
raise with_context(
exc =
ValueError(
'Unable to determine security level for {type} '
'(is ``digests`` populated correctly?).'.format(
type = type(addy).__name__,
),
),
context = {
'actual_input': addy,
'security_level': security_level,
},
)
if not addy.balance:
raise with_context(
exc =
ValueError(
'Cannot add input with empty/unknown balance to {type} '
'(use ``Iota.get_balances`` to get balance first).'.format(
type = type(self).__name__,
),
),
context = {
'actual_input': addy,
},
)
self._create_input_transactions(addy)
count += 1
|
[
"Adds",
"inputs",
"to",
"spend",
"in",
"the",
"bundle",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/multisig/transaction.py#L22-L96
|
[
"def",
"add_inputs",
"(",
"self",
",",
"inputs",
")",
":",
"# type: (Iterable[MultisigAddress]) -> None",
"if",
"self",
".",
"hash",
":",
"raise",
"RuntimeError",
"(",
"'Bundle is already finalized.'",
")",
"count",
"=",
"0",
"for",
"addy",
"in",
"inputs",
":",
"if",
"count",
">",
"0",
":",
"raise",
"ValueError",
"(",
"'{cls} only supports 1 input.'",
".",
"format",
"(",
"cls",
"=",
"type",
"(",
"self",
")",
".",
"__name__",
")",
",",
")",
"if",
"not",
"isinstance",
"(",
"addy",
",",
"MultisigAddress",
")",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"TypeError",
"(",
"'Incorrect input type for {cls} '",
"'(expected {expected}, actual {actual}).'",
".",
"format",
"(",
"actual",
"=",
"type",
"(",
"addy",
")",
".",
"__name__",
",",
"cls",
"=",
"type",
"(",
"self",
")",
".",
"__name__",
",",
"expected",
"=",
"MultisigAddress",
".",
"__name__",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'actual_input'",
":",
"addy",
",",
"}",
",",
")",
"security_level",
"=",
"addy",
".",
"security_level",
"if",
"security_level",
"<",
"1",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'Unable to determine security level for {type} '",
"'(is ``digests`` populated correctly?).'",
".",
"format",
"(",
"type",
"=",
"type",
"(",
"addy",
")",
".",
"__name__",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'actual_input'",
":",
"addy",
",",
"'security_level'",
":",
"security_level",
",",
"}",
",",
")",
"if",
"not",
"addy",
".",
"balance",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"ValueError",
"(",
"'Cannot add input with empty/unknown balance to {type} '",
"'(use ``Iota.get_balances`` to get balance first).'",
".",
"format",
"(",
"type",
"=",
"type",
"(",
"self",
")",
".",
"__name__",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'actual_input'",
":",
"addy",
",",
"}",
",",
")",
"self",
".",
"_create_input_transactions",
"(",
"addy",
")",
"count",
"+=",
"1"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
check_trytes_codec
|
Determines which codec to use for the specified encoding.
References:
- https://docs.python.org/3/library/codecs.html#codecs.register
|
iota/codecs.py
|
def check_trytes_codec(encoding):
"""
Determines which codec to use for the specified encoding.
References:
- https://docs.python.org/3/library/codecs.html#codecs.register
"""
if encoding == AsciiTrytesCodec.name:
return AsciiTrytesCodec.get_codec_info()
elif encoding == AsciiTrytesCodec.compat_name:
warn(
'"{old_codec}" codec will be removed in PyOTA v2.1. '
'Use "{new_codec}" instead.'.format(
new_codec=AsciiTrytesCodec.name,
old_codec=AsciiTrytesCodec.compat_name,
),
DeprecationWarning,
)
return AsciiTrytesCodec.get_codec_info()
return None
|
def check_trytes_codec(encoding):
"""
Determines which codec to use for the specified encoding.
References:
- https://docs.python.org/3/library/codecs.html#codecs.register
"""
if encoding == AsciiTrytesCodec.name:
return AsciiTrytesCodec.get_codec_info()
elif encoding == AsciiTrytesCodec.compat_name:
warn(
'"{old_codec}" codec will be removed in PyOTA v2.1. '
'Use "{new_codec}" instead.'.format(
new_codec=AsciiTrytesCodec.name,
old_codec=AsciiTrytesCodec.compat_name,
),
DeprecationWarning,
)
return AsciiTrytesCodec.get_codec_info()
return None
|
[
"Determines",
"which",
"codec",
"to",
"use",
"for",
"the",
"specified",
"encoding",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/codecs.py#L197-L220
|
[
"def",
"check_trytes_codec",
"(",
"encoding",
")",
":",
"if",
"encoding",
"==",
"AsciiTrytesCodec",
".",
"name",
":",
"return",
"AsciiTrytesCodec",
".",
"get_codec_info",
"(",
")",
"elif",
"encoding",
"==",
"AsciiTrytesCodec",
".",
"compat_name",
":",
"warn",
"(",
"'\"{old_codec}\" codec will be removed in PyOTA v2.1. '",
"'Use \"{new_codec}\" instead.'",
".",
"format",
"(",
"new_codec",
"=",
"AsciiTrytesCodec",
".",
"name",
",",
"old_codec",
"=",
"AsciiTrytesCodec",
".",
"compat_name",
",",
")",
",",
"DeprecationWarning",
",",
")",
"return",
"AsciiTrytesCodec",
".",
"get_codec_info",
"(",
")",
"return",
"None"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
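A sketch of plugging check_trytes_codec into Python's codec machinery. Whether the iota package already registers it at import time is not shown in this record, so the explicit codecs.register call below is defensive (registering a search function twice is harmless), and the literal codec name is avoided by reading AsciiTrytesCodec.name.

import codecs

from iota.codecs import AsciiTrytesCodec, check_trytes_codec  # module path from the record

codecs.register(check_trytes_codec)

encoded = codecs.encode(b'Hello, IOTA!', AsciiTrytesCodec.name)
decoded = codecs.decode(encoded, AsciiTrytesCodec.name)
assert decoded == b'Hello, IOTA!'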
test
|
AsciiTrytesCodec.get_codec_info
|
Returns information used by the codecs library to configure the
codec for use.
|
iota/codecs.py
|
def get_codec_info(cls):
"""
Returns information used by the codecs library to configure the
codec for use.
"""
codec = cls()
codec_info = {
'encode': codec.encode,
'decode': codec.decode,
}
# In Python 2, all codecs are made equal.
# In Python 3, some codecs are more equal than others.
if PY3:
codec_info['_is_text_encoding'] = False
return CodecInfo(**codec_info)
|
def get_codec_info(cls):
"""
Returns information used by the codecs library to configure the
codec for use.
"""
codec = cls()
codec_info = {
'encode': codec.encode,
'decode': codec.decode,
}
# In Python 2, all codecs are made equal.
# In Python 3, some codecs are more equal than others.
if PY3:
codec_info['_is_text_encoding'] = False
return CodecInfo(**codec_info)
|
[
"Returns",
"information",
"used",
"by",
"the",
"codecs",
"library",
"to",
"configure",
"the",
"codec",
"for",
"use",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/codecs.py#L61-L78
|
[
"def",
"get_codec_info",
"(",
"cls",
")",
":",
"codec",
"=",
"cls",
"(",
")",
"codec_info",
"=",
"{",
"'encode'",
":",
"codec",
".",
"encode",
",",
"'decode'",
":",
"codec",
".",
"decode",
",",
"}",
"# In Python 2, all codecs are made equal.",
"# In Python 3, some codecs are more equal than others.",
"if",
"PY3",
":",
"codec_info",
"[",
"'_is_text_encoding'",
"]",
"=",
"False",
"return",
"CodecInfo",
"(",
"*",
"*",
"codec_info",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
AsciiTrytesCodec.encode
|
Encodes a byte string into trytes.
|
iota/codecs.py
|
def encode(self, input, errors='strict'):
"""
Encodes a byte string into trytes.
"""
if isinstance(input, memoryview):
input = input.tobytes()
if not isinstance(input, (binary_type, bytearray)):
raise with_context(
exc=TypeError(
"Can't encode {type}; byte string expected.".format(
type=type(input).__name__,
)),
context={
'input': input,
},
)
# :bc: In Python 2, iterating over a byte string yields
# characters instead of integers.
if not isinstance(input, bytearray):
input = bytearray(input)
trytes = bytearray()
for c in input:
second, first = divmod(c, len(self.alphabet))
trytes.append(self.alphabet[first])
trytes.append(self.alphabet[second])
return binary_type(trytes), len(input)
|
def encode(self, input, errors='strict'):
"""
Encodes a byte string into trytes.
"""
if isinstance(input, memoryview):
input = input.tobytes()
if not isinstance(input, (binary_type, bytearray)):
raise with_context(
exc=TypeError(
"Can't encode {type}; byte string expected.".format(
type=type(input).__name__,
)),
context={
'input': input,
},
)
# :bc: In Python 2, iterating over a byte string yields
# characters instead of integers.
if not isinstance(input, bytearray):
input = bytearray(input)
trytes = bytearray()
for c in input:
second, first = divmod(c, len(self.alphabet))
trytes.append(self.alphabet[first])
trytes.append(self.alphabet[second])
return binary_type(trytes), len(input)
|
[
"Encodes",
"a",
"byte",
"string",
"into",
"trytes",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/codecs.py#L81-L113
|
[
"def",
"encode",
"(",
"self",
",",
"input",
",",
"errors",
"=",
"'strict'",
")",
":",
"if",
"isinstance",
"(",
"input",
",",
"memoryview",
")",
":",
"input",
"=",
"input",
".",
"tobytes",
"(",
")",
"if",
"not",
"isinstance",
"(",
"input",
",",
"(",
"binary_type",
",",
"bytearray",
")",
")",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"TypeError",
"(",
"\"Can't encode {type}; byte string expected.\"",
".",
"format",
"(",
"type",
"=",
"type",
"(",
"input",
")",
".",
"__name__",
",",
")",
")",
",",
"context",
"=",
"{",
"'input'",
":",
"input",
",",
"}",
",",
")",
"# :bc: In Python 2, iterating over a byte string yields",
"# characters instead of integers.",
"if",
"not",
"isinstance",
"(",
"input",
",",
"bytearray",
")",
":",
"input",
"=",
"bytearray",
"(",
"input",
")",
"trytes",
"=",
"bytearray",
"(",
")",
"for",
"c",
"in",
"input",
":",
"second",
",",
"first",
"=",
"divmod",
"(",
"c",
",",
"len",
"(",
"self",
".",
"alphabet",
")",
")",
"trytes",
".",
"append",
"(",
"self",
".",
"alphabet",
"[",
"first",
"]",
")",
"trytes",
".",
"append",
"(",
"self",
".",
"alphabet",
"[",
"second",
"]",
")",
"return",
"binary_type",
"(",
"trytes",
")",
",",
"len",
"(",
"input",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
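The encode loop above is just a base-27 digit split per byte. A dependency-free re-sketch of that arithmetic follows; the 27-character alphabet is the standard IOTA tryte alphabet and is assumed to match AsciiTrytesCodec.alphabet.

ALPHABET = b'9ABCDEFGHIJKLMNOPQRSTUVWXYZ'  # 27 trytes (assumed standard alphabet)

def bytes_to_trytes(data: bytes) -> bytes:
    trytes = bytearray()
    for c in data:
        # Each byte (0..255) becomes two base-27 digits, low digit first.
        second, first = divmod(c, len(ALPHABET))
        trytes.append(ALPHABET[first])
        trytes.append(ALPHABET[second])
    return bytes(trytes)

print(bytes_to_trytes(b'Hi'))  # b'RBXC' -- two trytes per input byte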
test
|
AsciiTrytesCodec.decode
|
Decodes a tryte string into bytes.
|
iota/codecs.py
|
def decode(self, input, errors='strict'):
"""
Decodes a tryte string into bytes.
"""
if isinstance(input, memoryview):
input = input.tobytes()
if not isinstance(input, (binary_type, bytearray)):
raise with_context(
exc=TypeError(
"Can't decode {type}; byte string expected.".format(
type=type(input).__name__,
)),
context={
'input': input,
},
)
# :bc: In Python 2, iterating over a byte string yields
# characters instead of integers.
if not isinstance(input, bytearray):
input = bytearray(input)
bytes_ = bytearray()
for i in range(0, len(input), 2):
try:
first, second = input[i:i + 2]
except ValueError:
if errors == 'strict':
raise with_context(
exc=TrytesDecodeError(
"'{name}' codec can't decode value; "
"tryte sequence has odd length.".format(
name=self.name,
),
),
context={
'input': input,
},
)
elif errors == 'replace':
bytes_ += b'?'
continue
try:
bytes_.append(
self.index[first]
+ (self.index[second] * len(self.index))
)
except ValueError:
# This combination of trytes yields a value > 255 when
# decoded.
# Naturally, we can't represent this using ASCII.
if errors == 'strict':
raise with_context(
exc=TrytesDecodeError(
"'{name}' codec can't decode trytes {pair} "
"at position {i}-{j}: "
"ordinal not in range(255)".format(
name=self.name,
pair=chr(first) + chr(second),
i=i,
j=i + 1,
),
),
context={
'input': input,
}
)
elif errors == 'replace':
bytes_ += b'?'
return binary_type(bytes_), len(input)
|
def decode(self, input, errors='strict'):
"""
Decodes a tryte string into bytes.
"""
if isinstance(input, memoryview):
input = input.tobytes()
if not isinstance(input, (binary_type, bytearray)):
raise with_context(
exc=TypeError(
"Can't decode {type}; byte string expected.".format(
type=type(input).__name__,
)),
context={
'input': input,
},
)
# :bc: In Python 2, iterating over a byte string yields
# characters instead of integers.
if not isinstance(input, bytearray):
input = bytearray(input)
bytes_ = bytearray()
for i in range(0, len(input), 2):
try:
first, second = input[i:i + 2]
except ValueError:
if errors == 'strict':
raise with_context(
exc=TrytesDecodeError(
"'{name}' codec can't decode value; "
"tryte sequence has odd length.".format(
name=self.name,
),
),
context={
'input': input,
},
)
elif errors == 'replace':
bytes_ += b'?'
continue
try:
bytes_.append(
self.index[first]
+ (self.index[second] * len(self.index))
)
except ValueError:
# This combination of trytes yields a value > 255 when
# decoded.
# Naturally, we can't represent this using ASCII.
if errors == 'strict':
raise with_context(
exc=TrytesDecodeError(
"'{name}' codec can't decode trytes {pair} "
"at position {i}-{j}: "
"ordinal not in range(255)".format(
name=self.name,
pair=chr(first) + chr(second),
i=i,
j=i + 1,
),
),
context={
'input': input,
}
)
elif errors == 'replace':
bytes_ += b'?'
return binary_type(bytes_), len(input)
|
[
"Decodes",
"a",
"tryte",
"string",
"into",
"bytes",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/codecs.py#L116-L193
|
[
"def",
"decode",
"(",
"self",
",",
"input",
",",
"errors",
"=",
"'strict'",
")",
":",
"if",
"isinstance",
"(",
"input",
",",
"memoryview",
")",
":",
"input",
"=",
"input",
".",
"tobytes",
"(",
")",
"if",
"not",
"isinstance",
"(",
"input",
",",
"(",
"binary_type",
",",
"bytearray",
")",
")",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"TypeError",
"(",
"\"Can't decode {type}; byte string expected.\"",
".",
"format",
"(",
"type",
"=",
"type",
"(",
"input",
")",
".",
"__name__",
",",
")",
")",
",",
"context",
"=",
"{",
"'input'",
":",
"input",
",",
"}",
",",
")",
"# :bc: In Python 2, iterating over a byte string yields",
"# characters instead of integers.",
"if",
"not",
"isinstance",
"(",
"input",
",",
"bytearray",
")",
":",
"input",
"=",
"bytearray",
"(",
"input",
")",
"bytes_",
"=",
"bytearray",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"input",
")",
",",
"2",
")",
":",
"try",
":",
"first",
",",
"second",
"=",
"input",
"[",
"i",
":",
"i",
"+",
"2",
"]",
"except",
"ValueError",
":",
"if",
"errors",
"==",
"'strict'",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"TrytesDecodeError",
"(",
"\"'{name}' codec can't decode value; \"",
"\"tryte sequence has odd length.\"",
".",
"format",
"(",
"name",
"=",
"self",
".",
"name",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'input'",
":",
"input",
",",
"}",
",",
")",
"elif",
"errors",
"==",
"'replace'",
":",
"bytes_",
"+=",
"b'?'",
"continue",
"try",
":",
"bytes_",
".",
"append",
"(",
"self",
".",
"index",
"[",
"first",
"]",
"+",
"(",
"self",
".",
"index",
"[",
"second",
"]",
"*",
"len",
"(",
"self",
".",
"index",
")",
")",
")",
"except",
"ValueError",
":",
"# This combination of trytes yields a value > 255 when",
"# decoded.",
"# Naturally, we can't represent this using ASCII.",
"if",
"errors",
"==",
"'strict'",
":",
"raise",
"with_context",
"(",
"exc",
"=",
"TrytesDecodeError",
"(",
"\"'{name}' codec can't decode trytes {pair} \"",
"\"at position {i}-{j}: \"",
"\"ordinal not in range(255)\"",
".",
"format",
"(",
"name",
"=",
"self",
".",
"name",
",",
"pair",
"=",
"chr",
"(",
"first",
")",
"+",
"chr",
"(",
"second",
")",
",",
"i",
"=",
"i",
",",
"j",
"=",
"i",
"+",
"1",
",",
")",
",",
")",
",",
"context",
"=",
"{",
"'input'",
":",
"input",
",",
"}",
")",
"elif",
"errors",
"==",
"'replace'",
":",
"bytes_",
"+=",
"b'?'",
"return",
"binary_type",
"(",
"bytes_",
")",
",",
"len",
"(",
"input",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
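And the inverse direction as a dependency-free sketch: look each tryte up in the alphabet and recombine the base-27 digits. The explicit value > 255 check stands in for the try/except that the real decode uses, and the 'replace' branch mirrors the b'?' substitution above.

ALPHABET = b'9ABCDEFGHIJKLMNOPQRSTUVWXYZ'
INDEX = {tryte: i for i, tryte in enumerate(ALPHABET)}

def trytes_to_bytes(trytes: bytes, errors: str = 'strict') -> bytes:
    out = bytearray()
    for i in range(0, len(trytes), 2):
        pair = trytes[i:i + 2]
        if len(pair) < 2:  # odd-length tryte sequence
            if errors == 'strict':
                raise ValueError('tryte sequence has odd length')
            out += b'?'
            continue
        first, second = pair
        value = INDEX[first] + INDEX[second] * len(INDEX)
        if value > 255:  # this pair does not map back to a single byte
            if errors == 'strict':
                raise ValueError('ordinal not in range(255)')
            out += b'?'
            continue
        out.append(value)
    return bytes(out)

assert trytes_to_bytes(b'RBXC') == b'Hi'
print(trytes_to_bytes(b'RBX', errors='replace'))  # b'H?'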
test
|
GetNewAddressesCommand._find_addresses
|
Find addresses matching the command parameters.
|
iota/commands/extended/get_new_addresses.py
|
def _find_addresses(self, seed, index, count, security_level, checksum):
# type: (Seed, int, Optional[int], int, bool) -> List[Address]
"""
Find addresses matching the command parameters.
"""
generator = AddressGenerator(seed, security_level, checksum)
if count is None:
# Connect to Tangle and find the first address without any
# transactions.
for addy in generator.create_iterator(start=index):
# We use addy.address here because FindTransactions does
# not work on an address with a checksum
response = FindTransactionsCommand(self.adapter)(
addresses=[addy.address],
)
if not response.get('hashes'):
return [addy]
return generator.get_addresses(start=index, count=count)
|
def _find_addresses(self, seed, index, count, security_level, checksum):
# type: (Seed, int, Optional[int], int, bool) -> List[Address]
"""
Find addresses matching the command parameters.
"""
generator = AddressGenerator(seed, security_level, checksum)
if count is None:
# Connect to Tangle and find the first address without any
# transactions.
for addy in generator.create_iterator(start=index):
# We use addy.address here because FindTransactions does
# not work on an address with a checksum
response = FindTransactionsCommand(self.adapter)(
addresses=[addy.address],
)
if not response.get('hashes'):
return [addy]
return generator.get_addresses(start=index, count=count)
|
[
"Find",
"addresses",
"matching",
"the",
"command",
"parameters",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/commands/extended/get_new_addresses.py#L53-L73
|
[
"def",
"_find_addresses",
"(",
"self",
",",
"seed",
",",
"index",
",",
"count",
",",
"security_level",
",",
"checksum",
")",
":",
"# type: (Seed, int, Optional[int], int, bool) -> List[Address]",
"generator",
"=",
"AddressGenerator",
"(",
"seed",
",",
"security_level",
",",
"checksum",
")",
"if",
"count",
"is",
"None",
":",
"# Connect to Tangle and find the first address without any",
"# transactions.",
"for",
"addy",
"in",
"generator",
".",
"create_iterator",
"(",
"start",
"=",
"index",
")",
":",
"# We use addy.address here because FindTransactions does",
"# not work on an address with a checksum",
"response",
"=",
"FindTransactionsCommand",
"(",
"self",
".",
"adapter",
")",
"(",
"addresses",
"=",
"[",
"addy",
".",
"address",
"]",
",",
")",
"if",
"not",
"response",
".",
"get",
"(",
"'hashes'",
")",
":",
"return",
"[",
"addy",
"]",
"return",
"generator",
".",
"get_addresses",
"(",
"start",
"=",
"index",
",",
"count",
"=",
"count",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
test
|
RoutingWrapper.add_route
|
Adds a route to the wrapper.
:param command:
The name of the command to route (e.g., "attachToTangle").
:param adapter:
The adapter object or URI to route requests to.
|
iota/adapter/wrappers.py
|
def add_route(self, command, adapter):
# type: (Text, AdapterSpec) -> RoutingWrapper
"""
Adds a route to the wrapper.
:param command:
The name of the command to route (e.g., "attachToTangle").
:param adapter:
The adapter object or URI to route requests to.
"""
if not isinstance(adapter, BaseAdapter):
try:
adapter = self.adapter_aliases[adapter]
except KeyError:
self.adapter_aliases[adapter] = adapter = resolve_adapter(
adapter
)
self.routes[command] = adapter
return self
|
def add_route(self, command, adapter):
# type: (Text, AdapterSpec) -> RoutingWrapper
"""
Adds a route to the wrapper.
:param command:
The name of the command to route (e.g., "attachToTangle").
:param adapter:
The adapter object or URI to route requests to.
"""
if not isinstance(adapter, BaseAdapter):
try:
adapter = self.adapter_aliases[adapter]
except KeyError:
self.adapter_aliases[adapter] = adapter = resolve_adapter(
adapter
)
self.routes[command] = adapter
return self
|
[
"Adds",
"a",
"route",
"to",
"the",
"wrapper",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/adapter/wrappers.py#L77-L98
|
[
"def",
"add_route",
"(",
"self",
",",
"command",
",",
"adapter",
")",
":",
"# type: (Text, AdapterSpec) -> RoutingWrapper",
"if",
"not",
"isinstance",
"(",
"adapter",
",",
"BaseAdapter",
")",
":",
"try",
":",
"adapter",
"=",
"self",
".",
"adapter_aliases",
"[",
"adapter",
"]",
"except",
"KeyError",
":",
"self",
".",
"adapter_aliases",
"[",
"adapter",
"]",
"=",
"adapter",
"=",
"resolve_adapter",
"(",
"adapter",
")",
"self",
".",
"routes",
"[",
"command",
"]",
"=",
"adapter",
"return",
"self"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
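A sketch of the routing pattern this method enables: chainable add_route calls that divert selected commands (here attachToTangle, the example from the docstring) to a different node. Both URLs are placeholders, and passing a URI directly to RoutingWrapper and Iota is assumed from PyOTA's usual adapter resolution.

from iota import Iota
from iota.adapter.wrappers import RoutingWrapper  # module path from the record

router = (
    RoutingWrapper('http://main-node.example:14265')        # default for all commands
    .add_route('attachToTangle', 'http://localhost:14265')  # PoW goes to a local node
)
api = Iota(router)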
test
|
Transaction.from_tryte_string
|
Creates a Transaction object from a sequence of trytes.
:param trytes:
Raw trytes. Should be exactly 2673 trytes long.
:param hash_:
The transaction hash, if available.
If not provided, it will be computed from the transaction
trytes.
|
iota/transaction/base.py
|
def from_tryte_string(cls, trytes, hash_=None):
# type: (TrytesCompatible, Optional[TransactionHash]) -> Transaction
"""
Creates a Transaction object from a sequence of trytes.
:param trytes:
Raw trytes. Should be exactly 2673 trytes long.
:param hash_:
The transaction hash, if available.
If not provided, it will be computed from the transaction
trytes.
"""
tryte_string = TransactionTrytes(trytes)
if not hash_:
hash_trits = [0] * HASH_LENGTH # type: MutableSequence[int]
sponge = Curl()
sponge.absorb(tryte_string.as_trits())
sponge.squeeze(hash_trits)
hash_ = TransactionHash.from_trits(hash_trits)
return cls(
hash_=hash_,
signature_message_fragment=Fragment(tryte_string[0:2187]),
address=Address(tryte_string[2187:2268]),
value=int_from_trits(tryte_string[2268:2295].as_trits()),
legacy_tag=Tag(tryte_string[2295:2322]),
timestamp=int_from_trits(tryte_string[2322:2331].as_trits()),
current_index=int_from_trits(tryte_string[2331:2340].as_trits()),
last_index=int_from_trits(tryte_string[2340:2349].as_trits()),
bundle_hash=BundleHash(tryte_string[2349:2430]),
trunk_transaction_hash=TransactionHash(tryte_string[2430:2511]),
branch_transaction_hash=TransactionHash(tryte_string[2511:2592]),
tag=Tag(tryte_string[2592:2619]),
attachment_timestamp=int_from_trits(
tryte_string[2619:2628].as_trits()),
attachment_timestamp_lower_bound=int_from_trits(
tryte_string[2628:2637].as_trits()),
attachment_timestamp_upper_bound=int_from_trits(
tryte_string[2637:2646].as_trits()),
nonce=Nonce(tryte_string[2646:2673]),
)
|
def from_tryte_string(cls, trytes, hash_=None):
# type: (TrytesCompatible, Optional[TransactionHash]) -> Transaction
"""
Creates a Transaction object from a sequence of trytes.
:param trytes:
Raw trytes. Should be exactly 2673 trytes long.
:param hash_:
The transaction hash, if available.
If not provided, it will be computed from the transaction
trytes.
"""
tryte_string = TransactionTrytes(trytes)
if not hash_:
hash_trits = [0] * HASH_LENGTH # type: MutableSequence[int]
sponge = Curl()
sponge.absorb(tryte_string.as_trits())
sponge.squeeze(hash_trits)
hash_ = TransactionHash.from_trits(hash_trits)
return cls(
hash_=hash_,
signature_message_fragment=Fragment(tryte_string[0:2187]),
address=Address(tryte_string[2187:2268]),
value=int_from_trits(tryte_string[2268:2295].as_trits()),
legacy_tag=Tag(tryte_string[2295:2322]),
timestamp=int_from_trits(tryte_string[2322:2331].as_trits()),
current_index=int_from_trits(tryte_string[2331:2340].as_trits()),
last_index=int_from_trits(tryte_string[2340:2349].as_trits()),
bundle_hash=BundleHash(tryte_string[2349:2430]),
trunk_transaction_hash=TransactionHash(tryte_string[2430:2511]),
branch_transaction_hash=TransactionHash(tryte_string[2511:2592]),
tag=Tag(tryte_string[2592:2619]),
attachment_timestamp=int_from_trits(
tryte_string[2619:2628].as_trits()),
attachment_timestamp_lower_bound=int_from_trits(
tryte_string[2628:2637].as_trits()),
attachment_timestamp_upper_bound=int_from_trits(
tryte_string[2637:2646].as_trits()),
nonce=Nonce(tryte_string[2646:2673]),
)
|
[
"Creates",
"a",
"Transaction",
"object",
"from",
"a",
"sequence",
"of",
"trytes",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/transaction/base.py#L29-L78
|
[
"def",
"from_tryte_string",
"(",
"cls",
",",
"trytes",
",",
"hash_",
"=",
"None",
")",
":",
"# type: (TrytesCompatible, Optional[TransactionHash]) -> Transaction",
"tryte_string",
"=",
"TransactionTrytes",
"(",
"trytes",
")",
"if",
"not",
"hash_",
":",
"hash_trits",
"=",
"[",
"0",
"]",
"*",
"HASH_LENGTH",
"# type: MutableSequence[int]",
"sponge",
"=",
"Curl",
"(",
")",
"sponge",
".",
"absorb",
"(",
"tryte_string",
".",
"as_trits",
"(",
")",
")",
"sponge",
".",
"squeeze",
"(",
"hash_trits",
")",
"hash_",
"=",
"TransactionHash",
".",
"from_trits",
"(",
"hash_trits",
")",
"return",
"cls",
"(",
"hash_",
"=",
"hash_",
",",
"signature_message_fragment",
"=",
"Fragment",
"(",
"tryte_string",
"[",
"0",
":",
"2187",
"]",
")",
",",
"address",
"=",
"Address",
"(",
"tryte_string",
"[",
"2187",
":",
"2268",
"]",
")",
",",
"value",
"=",
"int_from_trits",
"(",
"tryte_string",
"[",
"2268",
":",
"2295",
"]",
".",
"as_trits",
"(",
")",
")",
",",
"legacy_tag",
"=",
"Tag",
"(",
"tryte_string",
"[",
"2295",
":",
"2322",
"]",
")",
",",
"timestamp",
"=",
"int_from_trits",
"(",
"tryte_string",
"[",
"2322",
":",
"2331",
"]",
".",
"as_trits",
"(",
")",
")",
",",
"current_index",
"=",
"int_from_trits",
"(",
"tryte_string",
"[",
"2331",
":",
"2340",
"]",
".",
"as_trits",
"(",
")",
")",
",",
"last_index",
"=",
"int_from_trits",
"(",
"tryte_string",
"[",
"2340",
":",
"2349",
"]",
".",
"as_trits",
"(",
")",
")",
",",
"bundle_hash",
"=",
"BundleHash",
"(",
"tryte_string",
"[",
"2349",
":",
"2430",
"]",
")",
",",
"trunk_transaction_hash",
"=",
"TransactionHash",
"(",
"tryte_string",
"[",
"2430",
":",
"2511",
"]",
")",
",",
"branch_transaction_hash",
"=",
"TransactionHash",
"(",
"tryte_string",
"[",
"2511",
":",
"2592",
"]",
")",
",",
"tag",
"=",
"Tag",
"(",
"tryte_string",
"[",
"2592",
":",
"2619",
"]",
")",
",",
"attachment_timestamp",
"=",
"int_from_trits",
"(",
"tryte_string",
"[",
"2619",
":",
"2628",
"]",
".",
"as_trits",
"(",
")",
")",
",",
"attachment_timestamp_lower_bound",
"=",
"int_from_trits",
"(",
"tryte_string",
"[",
"2628",
":",
"2637",
"]",
".",
"as_trits",
"(",
")",
")",
",",
"attachment_timestamp_upper_bound",
"=",
"int_from_trits",
"(",
"tryte_string",
"[",
"2637",
":",
"2646",
"]",
".",
"as_trits",
"(",
")",
")",
",",
"nonce",
"=",
"Nonce",
"(",
"tryte_string",
"[",
"2646",
":",
"2673",
"]",
")",
",",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
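The slice offsets above define the fixed 2673-tryte transaction layout; they are collected here as a quick reference, copied verbatim from the slicing in the record rather than derived independently.

# Field -> (start, stop) tryte offsets used by Transaction.from_tryte_string.
TRANSACTION_TRYTE_LAYOUT = {
    'signature_message_fragment': (0, 2187),
    'address': (2187, 2268),
    'value': (2268, 2295),
    'legacy_tag': (2295, 2322),
    'timestamp': (2322, 2331),
    'current_index': (2331, 2340),
    'last_index': (2340, 2349),
    'bundle_hash': (2349, 2430),
    'trunk_transaction_hash': (2430, 2511),
    'branch_transaction_hash': (2511, 2592),
    'tag': (2592, 2619),
    'attachment_timestamp': (2619, 2628),
    'attachment_timestamp_lower_bound': (2628, 2637),
    'attachment_timestamp_upper_bound': (2637, 2646),
    'nonce': (2646, 2673),
}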
test
|
Transaction.as_json_compatible
|
Returns a JSON-compatible representation of the object.
References:
- :py:class:`iota.json.JsonEncoder`.
|
iota/transaction/base.py
|
def as_json_compatible(self):
# type: () -> dict
"""
Returns a JSON-compatible representation of the object.
References:
- :py:class:`iota.json.JsonEncoder`.
"""
return {
'hash_': self.hash,
'signature_message_fragment': self.signature_message_fragment,
'address': self.address,
'value': self.value,
'legacy_tag': self.legacy_tag,
'timestamp': self.timestamp,
'current_index': self.current_index,
'last_index': self.last_index,
'bundle_hash': self.bundle_hash,
'trunk_transaction_hash': self.trunk_transaction_hash,
'branch_transaction_hash': self.branch_transaction_hash,
'tag': self.tag,
'attachment_timestamp': self.attachment_timestamp,
'attachment_timestamp_lower_bound':
self.attachment_timestamp_lower_bound,
'attachment_timestamp_upper_bound':
self.attachment_timestamp_upper_bound,
'nonce': self.nonce,
}
|
def as_json_compatible(self):
# type: () -> dict
"""
Returns a JSON-compatible representation of the object.
References:
- :py:class:`iota.json.JsonEncoder`.
"""
return {
'hash_': self.hash,
'signature_message_fragment': self.signature_message_fragment,
'address': self.address,
'value': self.value,
'legacy_tag': self.legacy_tag,
'timestamp': self.timestamp,
'current_index': self.current_index,
'last_index': self.last_index,
'bundle_hash': self.bundle_hash,
'trunk_transaction_hash': self.trunk_transaction_hash,
'branch_transaction_hash': self.branch_transaction_hash,
'tag': self.tag,
'attachment_timestamp': self.attachment_timestamp,
'attachment_timestamp_lower_bound':
self.attachment_timestamp_lower_bound,
'attachment_timestamp_upper_bound':
self.attachment_timestamp_upper_bound,
'nonce': self.nonce,
}
|
[
"Returns",
"a",
"JSON",
"-",
"compatible",
"representation",
"of",
"the",
"object",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/transaction/base.py#L310-L341
|
[
"def",
"as_json_compatible",
"(",
"self",
")",
":",
"# type: () -> dict",
"return",
"{",
"'hash_'",
":",
"self",
".",
"hash",
",",
"'signature_message_fragment'",
":",
"self",
".",
"signature_message_fragment",
",",
"'address'",
":",
"self",
".",
"address",
",",
"'value'",
":",
"self",
".",
"value",
",",
"'legacy_tag'",
":",
"self",
".",
"legacy_tag",
",",
"'timestamp'",
":",
"self",
".",
"timestamp",
",",
"'current_index'",
":",
"self",
".",
"current_index",
",",
"'last_index'",
":",
"self",
".",
"last_index",
",",
"'bundle_hash'",
":",
"self",
".",
"bundle_hash",
",",
"'trunk_transaction_hash'",
":",
"self",
".",
"trunk_transaction_hash",
",",
"'branch_transaction_hash'",
":",
"self",
".",
"branch_transaction_hash",
",",
"'tag'",
":",
"self",
".",
"tag",
",",
"'attachment_timestamp'",
":",
"self",
".",
"attachment_timestamp",
",",
"'attachment_timestamp_lower_bound'",
":",
"self",
".",
"attachment_timestamp_lower_bound",
",",
"'attachment_timestamp_upper_bound'",
":",
"self",
".",
"attachment_timestamp_upper_bound",
",",
"'nonce'",
":",
"self",
".",
"nonce",
",",
"}"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|
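A sketch of feeding the dict above to json.dumps. The JsonEncoder import path is the one the docstring itself references; the all-'9' transaction is only a stand-in so the snippet is self-contained, and it is assumed that JsonEncoder knows how to serialise PyOTA's trytes types.

import json

from iota import Transaction
from iota.json import JsonEncoder  # path taken from the docstring's reference

# 2673 '9' trytes decode to an all-zero transaction; purely a placeholder.
txn = Transaction.from_tryte_string(b'9' * 2673)
print(json.dumps(txn.as_json_compatible(), cls=JsonEncoder, indent=2))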
test
|
Transaction.as_tryte_string
|
Returns a TryteString representation of the transaction.
|
iota/transaction/base.py
|
def as_tryte_string(self):
# type: () -> TransactionTrytes
"""
Returns a TryteString representation of the transaction.
"""
return TransactionTrytes(
self.signature_message_fragment
+ self.address.address
+ self.value_as_trytes
+ self.legacy_tag
+ self.timestamp_as_trytes
+ self.current_index_as_trytes
+ self.last_index_as_trytes
+ self.bundle_hash
+ self.trunk_transaction_hash
+ self.branch_transaction_hash
+ self.tag
+ self.attachment_timestamp_as_trytes
+ self.attachment_timestamp_lower_bound_as_trytes
+ self.attachment_timestamp_upper_bound_as_trytes
+ self.nonce
)
|
def as_tryte_string(self):
# type: () -> TransactionTrytes
"""
Returns a TryteString representation of the transaction.
"""
return TransactionTrytes(
self.signature_message_fragment
+ self.address.address
+ self.value_as_trytes
+ self.legacy_tag
+ self.timestamp_as_trytes
+ self.current_index_as_trytes
+ self.last_index_as_trytes
+ self.bundle_hash
+ self.trunk_transaction_hash
+ self.branch_transaction_hash
+ self.tag
+ self.attachment_timestamp_as_trytes
+ self.attachment_timestamp_lower_bound_as_trytes
+ self.attachment_timestamp_upper_bound_as_trytes
+ self.nonce
)
|
[
"Returns",
"a",
"TryteString",
"representation",
"of",
"the",
"transaction",
"."
] |
iotaledger/iota.lib.py
|
python
|
https://github.com/iotaledger/iota.lib.py/blob/97cdd1e241498446b46157b79b2a1ea2ec6d387a/iota/transaction/base.py#L343-L364
|
[
"def",
"as_tryte_string",
"(",
"self",
")",
":",
"# type: () -> TransactionTrytes",
"return",
"TransactionTrytes",
"(",
"self",
".",
"signature_message_fragment",
"+",
"self",
".",
"address",
".",
"address",
"+",
"self",
".",
"value_as_trytes",
"+",
"self",
".",
"legacy_tag",
"+",
"self",
".",
"timestamp_as_trytes",
"+",
"self",
".",
"current_index_as_trytes",
"+",
"self",
".",
"last_index_as_trytes",
"+",
"self",
".",
"bundle_hash",
"+",
"self",
".",
"trunk_transaction_hash",
"+",
"self",
".",
"branch_transaction_hash",
"+",
"self",
".",
"tag",
"+",
"self",
".",
"attachment_timestamp_as_trytes",
"+",
"self",
".",
"attachment_timestamp_lower_bound_as_trytes",
"+",
"self",
".",
"attachment_timestamp_upper_bound_as_trytes",
"+",
"self",
".",
"nonce",
")"
] |
97cdd1e241498446b46157b79b2a1ea2ec6d387a
|