**Description** Find the image with the tag <image> and return its metadata.
def query_image_metadata(self, image, metadata_type=""): '''**Description** Find the image with the tag <image> and return its metadata. **Arguments** - image: Input image can be in the following formats: registry/repo:tag - metadata_type: The metadata type can be one of the types returned by running without a type specified **Success Return Value** A JSON object representing the image metadata. ''' return self._query_image(image, query_group='metadata', query_type=metadata_type)
**Description** Find the image with the tag <image> and return its vulnerabilities.
def query_image_vuln(self, image, vuln_type="", vendor_only=True): '''**Description** Find the image with the tag <image> and return its vulnerabilities. **Arguments** - image: Input image can be in the following formats: registry/repo:tag - vuln_type: Vulnerability type can be one of the following types: - os: CVE/distro vulnerabilities against operating system packages **Success Return Value** A JSON object representing the image vulnerabilities. ''' return self._query_image(image, query_group='vuln', query_type=vuln_type, vendor_only=vendor_only)
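A minimal usage sketch for the two query methods above. The client class name and constructor are assumptions for illustration (the snippets only show instance methods); the [ok, payload] two-element return convention comes directly from the code, where payload is the parsed JSON on success and the error string self.lasterr on failure.

client = SdScanningClient("YOUR-API-TOKEN", "https://secure.example.com")  # hypothetical constructor

# Each call returns a [success, payload] pair.
ok, metadata = client.query_image_metadata("docker.io/library/alpine:latest")
if ok:
    print(metadata)

ok, vulns = client.query_image_vuln("docker.io/library/alpine:latest", vuln_type="os")
if not ok:
    print("query failed:", vulns)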
**Description** Delete image from the scanner.
def delete_image(self, image, force=False): '''**Description** Delete image from the scanner. **Arguments** - image: Input image can be in the following formats: registry/repo:tag - force: Force image deletion ''' _, _, image_digest = self._discover_inputimage(image) if not image_digest: return [False, "cannot use input image string: no discovered imageDigest"] url = self.url + "/api/scanning/v1/anchore/images/" + image_digest res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Check the latest policy evaluation for an image
def check_image_evaluation(self, image, show_history=False, detail=False, tag=None, policy=None): '''**Description** Check the latest policy evaluation for an image **Arguments** - image: Input image can be in the following formats: registry/repo:tag - show_history: Show all previous policy evaluations - detail: Show detailed policy evaluation report - tag: Specify which TAG is evaluated for a given image ID or Image Digest - policy: Specify which POLICY to use for evaluate (defaults currently active policy) **Success Return Value** A JSON object representing the evaluation status. ''' itype, _, image_digest = self._discover_inputimage(image) if not image_digest: return [False, "could not get image record from anchore"] if not tag and itype != 'tag': return [False, "input image name is not a tag, and no --tag is specified"] thetag = tag if tag else image url = "{base_url}/api/scanning/v1/anchore/images/{image_digest}/check?history={history}&detail={detail}&tag={tag}{policy_id}" url = url.format( base_url=self.url, image_digest=image_digest, history=str(show_history).lower(), detail=str(detail).lower(), tag=thetag, policy_id=("&policyId=%s" % policy) if policy else "") res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Add image registry
def add_registry(self, registry, registry_user, registry_pass, insecure=False, registry_type="docker_v2", validate=True): '''**Description** Add image registry **Arguments** - registry: Full hostname/port of registry. Eg. myrepo.example.com:5000 - registry_user: Username - registry_pass: Password - insecure: Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate) - registry_type: Specify the registry type. 'docker_v2' and 'awsecr' are supported (default='docker_v2') - validate: If set to 'False' will not attempt to validate registry/creds on registry add **Success Return Value** A JSON object representing the registry. ''' registry_types = ['docker_v2', 'awsecr'] if registry_type and registry_type not in registry_types: return [False, "input registry type not supported (supported registry_types: " + str(registry_types)] if self._registry_string_is_valid(registry): return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"] if not registry_type: registry_type = self._get_registry_type(registry) payload = { 'registry': registry, 'registry_user': registry_user, 'registry_pass': registry_pass, 'registry_type': registry_type, 'registry_verify': not insecure} url = "{base_url}/api/scanning/v1/anchore/registries?validate={validate}".format( base_url=self.url, validate=validate) res = requests.post(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Update an existing image registry.
def update_registry(self, registry, registry_user, registry_pass, insecure=False, registry_type="docker_v2", validate=True): '''**Description** Update an existing image registry. **Arguments** - registry: Full hostname/port of registry. Eg. myrepo.example.com:5000 - registry_user: Username - registry_pass: Password - insecure: Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate) - registry_type: Specify the registry type. 'docker_v2' and 'awsecr' are supported (default='docker_v2') - validate: If set to 'False' will not attempt to validate registry/creds on registry add **Success Return Value** A JSON object representing the registry. ''' if self._registry_string_is_valid(registry): return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"] payload = { 'registry': registry, 'registry_user': registry_user, 'registry_pass': registry_pass, 'registry_type': registry_type, 'registry_verify': not insecure} url = "{base_url}/api/scanning/v1/anchore/registries/{registry}?validate={validate}".format( base_url=self.url, registry=registry, validate=validate) res = requests.put(url, data=json.dumps(payload), headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Delete an existing image registry
def delete_registry(self, registry): '''**Description** Delete an existing image registry **Arguments** - registry: Full hostname/port of registry. Eg. myrepo.example.com:5000 ''' # do some input string checking if re.match(".*\\/.*", registry): return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"] url = self.url + "/api/scanning/v1/anchore/registries/" + registry res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Find the registry and return its json description
def get_registry(self, registry): '''**Description** Find the registry and return its json description **Arguments** - registry: Full hostname/port of registry. Eg. myrepo.example.com:5000 **Success Return Value** A JSON object representing the registry. ''' if self._registry_string_is_valid(registry): return [False, "input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional"] url = self.url + "/api/scanning/v1/anchore/registries/" + registry res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Create a new policy
def add_policy(self, name, rules, comment="", bundleid=None): '''**Description** Create a new policy **Arguments** - name: The name of the policy. - rules: A list of Anchore PolicyRule elements (while creating/updating a policy, new rule IDs will be created backend side) - comment: A human-readable description. - bundleid: Target bundle. If not specified, the currently active bundle will be used. **Success Return Value** A JSON object containing the policy description. ''' policy = { 'name': name, 'comment': comment, 'rules': rules, 'version': '1_0' } if bundleid: policy['policyBundleId'] = bundleid url = self.url + '/api/scanning/v1/policies' data = json.dumps(policy) res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Retrieve the policy with the given id in the targeted policy bundle
def get_policy(self, policyid, bundleid=None): '''**Description** Retrieve the policy with the given id in the targeted policy bundle **Arguments** - policyid: Unique identifier associated with this policy. - bundleid: Target bundle. If not specified, the currently active bundle will be used. **Success Return Value** A JSON object containing the policy description. ''' url = self.url + '/api/scanning/v1/policies/' + policyid if bundleid: url += '?bundleId=' + bundleid res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Update the policy with the given id
def update_policy(self, policyid, policy_description): '''**Description** Update the policy with the given id **Arguments** - policyid: Unique identifier associated with this policy. - policy_description: A dictionary with the policy description. **Success Return Value** A JSON object containing the policy description. ''' url = self.url + '/api/scanning/v1/policies/' + policyid data = json.dumps(policy_description) res = requests.put(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Create a new alert
def add_alert(self, name, description=None, scope="", triggers={'failed': True, 'unscanned': True}, enabled=False, notification_channels=[]): '''**Description** Create a new alert **Arguments** - name: The name of the alert. - description: The description of the alert. - scope: An AND-composed string of predicates that selects the scope in which the alert will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"') - triggers: A dict {str: bool} indicating which triggers should be enabled/disabled. (default: {'failed': True, 'unscanned': True}) - enabled: Whether this alert should actually be applied. - notification_channels: A list of notification channel ids. **Success Return Value** A JSON object containing the alert description. ''' alert = { 'name': name, 'description': description, 'triggers': triggers, 'scope': scope, 'enabled': enabled, 'autoscan': True, 'notificationChannelIds': notification_channels, } url = self.url + '/api/scanning/v1/alerts' data = json.dumps(alert) res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** List the current set of scanning alerts.
def list_alerts(self, limit=None, cursor=None): '''**Description** List the current set of scanning alerts. **Arguments** - limit: Maximum number of alerts in the response. - cursor: An opaque string representing the current position in the list of alerts. It's provided in the 'responseMetadata' of the list_alerts response. **Success Return Value** A JSON object containing the list of alerts. ''' url = self.url + '/api/scanning/v1/alerts' params = [] if limit: params.append('limit=' + str(limit)) if cursor: params.append('cursor=' + cursor) if params: url += '?' + '&'.join(params) res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
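Because list_alerts pages with an opaque cursor, callers typically loop until no cursor comes back. A sketch, reusing the hypothetical client from earlier; the exact key holding the next cursor inside 'responseMetadata' is an assumption based on the docstring.

# Drain all pages of alerts, 100 at a time.
alerts, cursor = [], None
while True:
    ok, res = client.list_alerts(limit=100, cursor=cursor)
    if not ok:
        raise RuntimeError(res)
    alerts.extend(res.get("alerts", []))
    # 'next_cursor' is a hypothetical key name inside responseMetadata.
    cursor = res.get("responseMetadata", {}).get("next_cursor")
    if not cursor:
        break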
**Description** Update the alert with the given id
def update_alert(self, alertid, alert_description): '''**Description** Update the alert with the given id **Arguments** - alertid: Unique identifier associated with this alert. - alert_description: A dictionary with the alert description. **Success Return Value** A JSON object containing the alert description. ''' url = self.url + '/api/scanning/v1/alerts/' + alertid data = json.dumps(alert_description) res = requests.put(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** Delete the alert with the given id
def delete_alert(self, alertid): '''**Description** Delete the alert with the given id **Arguments** - alertid: Unique identifier associated with this alert. ''' url = self.url + '/api/scanning/v1/alerts/' + alertid res = requests.delete(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.text]
**Description** List all subscriptions
def list_subscription(self): '''**Description** List all subscriptions **Arguments** - None **Success Return Value** A JSON object representing the list of subscriptions. ''' url = self.url + "/api/scanning/v1/anchore/subscriptions" res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
**Description** List runtime containers
def list_runtime(self, scope="", skip_policy_evaluation=True, start_time=None, end_time=None): '''**Description** List runtime containers **Arguments** - scope: An AND-composed string of predicates that selects the scope in which the query will be applied. (like: 'host.domain = "example.com" and container.image != "alpine:latest"') - skip_policy_evaluation: If true, no policy evaluations will be triggered for the images. - start_time: Start of the time range (integer of unix time). - end_time: End of the time range (integer of unix time). **Success Return Value** A JSON object representing the list of runtime containers. ''' containers = { 'scope': scope, 'skipPolicyEvaluation': skip_policy_evaluation } if start_time or end_time: containers['time'] = {} containers['time']['from'] = int(start_time * 1000000) if start_time else 0 end_time = end_time if end_time else time.time() containers['time']['to'] = int(end_time * 1000000) url = self.url + '/api/scanning/v1/query/containers' data = json.dumps(containers) res = requests.post(url, headers=self.hdrs, data=data, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
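Note that start_time and end_time are plain Unix seconds; the method converts them to microseconds internally. A usage sketch, again assuming the hypothetical client instance from earlier:

import time

# Runtime containers seen in the last hour, skipping policy evaluation.
now = time.time()
ok, res = client.list_runtime(scope='host.hostName = "my-node"', start_time=now - 3600, end_time=now)
if ok:
    print(res)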
None means system default
def addSourceAddr(self, addr): """None means 'system default'""" try: self._multiInSocket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, self._makeMreq(addr)) except socket.error: # if 1 interface has more than 1 address, exception is raised for the second pass sock = self._createMulticastOutSocket(addr, self._observer.ttl) self._multiOutUniInSockets[addr] = sock self._poll.register(sock, select.POLLIN)
Method sleeps if nothing to do
def _sendPendingMessages(self): """Method sleeps, if nothing to do""" if len(self._queue) == 0: time.sleep(0.1) return msg = self._queue.pop(0) if msg.canSend(): self._sendMsg(msg) msg.refresh() if not (msg.isFinished()): self._queue.append(msg) else: self._queue.append(msg) time.sleep(0.01)
Set a callback which will be called when a new service appears online and sends a Hello message
def setRemoteServiceHelloCallback(self, cb, types=None, scopes=None): """Set a callback to be called when a new service appears online and sends a Hello message. types and scopes may be lists of types and scopes; if a filter is set, the callback is called only for Hello messages which match the filter. Set cb to None to disable the callback. """ self._remoteServiceHelloCallback = cb self._remoteServiceHelloCallbackTypesFilter = types self._remoteServiceHelloCallbackScopesFilter = scopes
cleans up and stops the discovery server
def stop(self): 'cleans up and stops the discovery server' self.clearRemoteServices() self.clearLocalServices() self._stopThreads() self._serverStarted = False
send Bye messages for the services and remove them
def clearLocalServices(self): 'send Bye messages for the services and remove them' for service in list(self._localServices.values()): self._sendBye(service) self._localServices.clear()
search for services given the TYPES and SCOPES in a given TIMEOUT
def searchServices(self, types=None, scopes=None, timeout=3): 'search for services given the TYPES and SCOPES in a given TIMEOUT' if not self._serverStarted: raise Exception("Server not started") self._sendProbe(types, scopes) time.sleep(timeout) return self._filterServices(list(self._remoteServices.values()), types, scopes)
Publish a service with the given TYPES, SCOPES and XAddrs (service addresses)
def publishService(self, types, scopes, xAddrs): """Publish a service with the given TYPES, SCOPES and XAddrs (service addresses) if an xAddrs item includes the {ip} pattern, one item per IP address will be sent """ if not self._serverStarted: raise Exception("Server not started") instanceId = _generateInstanceId() service = Service(types, scopes, xAddrs, self.uuid, instanceId) self._localServices[self.uuid] = service self._sendHello(service) time.sleep(0.001)
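Putting the WS-Discovery lifecycle methods above together. A sketch: the import path, the start() method, and the getXAddrs() accessor are assumptions (only stop(), searchServices() and publishService() appear above).

from wsdiscovery import WSDiscovery  # hypothetical import path

wsd = WSDiscovery()
wsd.start()  # assumed counterpart of the stop() method shown above

# One xAddr per local IP address is sent when the '{ip}' pattern is used.
wsd.publishService(types=[], scopes=[], xAddrs=["http://{ip}:8080/service"])

# Sends a Probe, sleeps for `timeout` seconds, then returns matching services.
for service in wsd.searchServices(timeout=3):
    print(service.getXAddrs())  # hypothetical accessor

wsd.stop()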
construct a raw SOAP XML string, given a prepared SoapEnvelope object
def createSOAPMessage(env): "construct a raw SOAP XML string, given a prepared SoapEnvelope object" if env.getAction() == ACTION_PROBE: return createProbeMessage(env) if env.getAction() == ACTION_PROBE_MATCH: return createProbeMatchMessage(env) if env.getAction() == ACTION_RESOLVE: return createResolveMessage(env) if env.getAction() == ACTION_RESOLVE_MATCH: return createResolveMatchMessage(env) if env.getAction() == ACTION_HELLO: return createHelloMessage(env) if env.getAction() == ACTION_BYE: return createByeMessage(env)
parse raw XML data string, return a (minidom) xml document
def parseSOAPMessage(data, ipAddr): "parse raw XML data string, return a (minidom) xml document" try: dom = minidom.parseString(data) except Exception: #print('Failed to parse message from %s\n"%s": %s' % (ipAddr, data, ex), file=sys.stderr) return None if dom.getElementsByTagNameNS(NS_S, "Fault"): #print('Fault received from %s:' % (ipAddr, data), file=sys.stderr) return None soapAction = dom.getElementsByTagNameNS(NS_A, "Action")[0].firstChild.data.strip() if soapAction == ACTION_PROBE: return parseProbeMessage(dom) elif soapAction == ACTION_PROBE_MATCH: return parseProbeMatchMessage(dom) elif soapAction == ACTION_RESOLVE: return parseResolveMessage(dom) elif soapAction == ACTION_RESOLVE_MATCH: return parseResolveMatchMessage(dom) elif soapAction == ACTION_BYE: return parseByeMessage(dom) elif soapAction == ACTION_HELLO: return parseHelloMessage(dom)
Discover systems using WS-Discovery
def discover(scope, loglevel, capture): "Discover systems using WS-Discovery" if loglevel: level = getattr(logging, loglevel, None) if not level: print("Invalid log level '%s'" % loglevel) return logger.setLevel(level) run(scope=scope, capture=capture)
Return the manager that handles the relation from this instance to the tagged_item class. If content_object on the tagged_item class is defined as a ParentalKey, this will be a DeferringRelatedManager which allows writing related objects without committing them to the database.
def get_tagged_item_manager(self): """Return the manager that handles the relation from this instance to the tagged_item class. If content_object on the tagged_item class is defined as a ParentalKey, this will be a DeferringRelatedManager which allows writing related objects without committing them to the database. """ rel_name = self.through._meta.get_field('content_object').remote_field.get_accessor_name() return getattr(self.instance, rel_name)
Return a serialised version of the model's fields which exist as local database columns (i.e. excluding m2m and incoming foreign key relations)
def get_serializable_data_for_fields(model): """ Return a serialised version of the model's fields which exist as local database columns (i.e. excluding m2m and incoming foreign key relations) """ pk_field = model._meta.pk # If model is a child via multitable inheritance, use parent's pk while pk_field.remote_field and pk_field.remote_field.parent_link: pk_field = pk_field.remote_field.model._meta.pk obj = {'pk': get_field_value(pk_field, model)} for field in model._meta.fields: if field.serialize: obj[field.name] = get_field_value(field, model) return obj
Return a list of RelatedObject records for child relations of the given model, including ones attached to ancestors of the model
def get_all_child_relations(model): """ Return a list of RelatedObject records for child relations of the given model, including ones attached to ancestors of the model """ return [ field for field in model._meta.get_fields() if isinstance(field.remote_field, ParentalKey) ]
Return a list of ParentalManyToManyFields on the given model, including ones attached to ancestors of the model
def get_all_child_m2m_relations(model): """ Return a list of ParentalManyToManyFields on the given model, including ones attached to ancestors of the model """ return [ field for field in model._meta.get_fields() if isinstance(field, ParentalManyToManyField) ]
Save the model and commit all child relations.
def save(self, **kwargs): """ Save the model and commit all child relations. """ child_relation_names = [rel.get_accessor_name() for rel in get_all_child_relations(self)] child_m2m_field_names = [field.name for field in get_all_child_m2m_relations(self)] update_fields = kwargs.pop('update_fields', None) if update_fields is None: real_update_fields = None relations_to_commit = child_relation_names m2m_fields_to_commit = child_m2m_field_names else: real_update_fields = [] relations_to_commit = [] m2m_fields_to_commit = [] for field in update_fields: if field in child_relation_names: relations_to_commit.append(field) elif field in child_m2m_field_names: m2m_fields_to_commit.append(field) else: real_update_fields.append(field) super(ClusterableModel, self).save(update_fields=real_update_fields, **kwargs) for relation in relations_to_commit: getattr(self, relation).commit() for field in m2m_fields_to_commit: getattr(self, field).commit()
Build an instance of this model from the JSON-like structure passed in, recursing into related objects as required. If check_fks is true, it will check whether referenced foreign keys still exist in the database. - dangling foreign keys on related objects are dealt with by either nullifying the key or dropping the related object, according to the 'on_delete' setting. - dangling foreign keys on the base object will be nullified, unless strict_fks is true, in which case any dangling foreign keys with on_delete=CASCADE will cause None to be returned for the entire object.
def from_serializable_data(cls, data, check_fks=True, strict_fks=False): """ Build an instance of this model from the JSON-like structure passed in, recursing into related objects as required. If check_fks is true, it will check whether referenced foreign keys still exist in the database. - dangling foreign keys on related objects are dealt with by either nullifying the key or dropping the related object, according to the 'on_delete' setting. - dangling foreign keys on the base object will be nullified, unless strict_fks is true, in which case any dangling foreign keys with on_delete=CASCADE will cause None to be returned for the entire object. """ obj = model_from_serializable_data(cls, data, check_fks=check_fks, strict_fks=strict_fks) if obj is None: return None child_relations = get_all_child_relations(cls) for rel in child_relations: rel_name = rel.get_accessor_name() try: child_data_list = data[rel_name] except KeyError: continue related_model = rel.related_model if hasattr(related_model, 'from_serializable_data'): children = [ related_model.from_serializable_data(child_data, check_fks=check_fks, strict_fks=True) for child_data in child_data_list ] else: children = [ model_from_serializable_data(related_model, child_data, check_fks=check_fks, strict_fks=True) for child_data in child_data_list ] children = filter(lambda child: child is not None, children) setattr(obj, rel_name, children) return obj
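The serialized shape consumed by from_serializable_data follows get_serializable_data_for_fields above ('pk' plus local fields), with child relations keyed by their accessor name, as read from data[rel_name] in the code. A sketch with hypothetical Band/BandMember models that are not part of the source:

# 'members' is the assumed accessor name of a ParentalKey from
# BandMember to Band; both models are purely illustrative.
data = {
    'pk': 1,
    'name': 'The Beatles',
    'members': [
        {'pk': 10, 'name': 'John'},
        {'pk': 11, 'name': 'Paul'},
    ],
}

# check_fks=False skips the dangling-foreign-key checks described above.
band = Band.from_serializable_data(data, check_fks=False)
print([m.name for m in band.members.all()])  # ['John', 'Paul']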
This clean method will check for unique_together condition
def validate_unique(self): '''This clean method will check for unique_together condition''' # Collect unique_checks and to run from all the forms. all_unique_checks = set() all_date_checks = set() forms_to_delete = self.deleted_forms valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete] for form in valid_forms: unique_checks, date_checks = form.instance._get_unique_checks() all_unique_checks.update(unique_checks) all_date_checks.update(date_checks) errors = [] # Do each of the unique checks (unique and unique_together) for uclass, unique_check in all_unique_checks: seen_data = set() for form in valid_forms: # Get the data for the set of fields that must be unique among the forms. row_data = ( field if field in self.unique_fields else form.cleaned_data[field] for field in unique_check if field in form.cleaned_data ) # Reduce Model instances to their primary key values row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d for d in row_data) if row_data and None not in row_data: # if we've already seen it then we have a uniqueness failure if row_data in seen_data: # poke error messages into the right places and mark # the form as invalid errors.append(self.get_unique_error_message(unique_check)) form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()]) # remove the data from the cleaned_data dict since it was invalid for field in unique_check: if field in form.cleaned_data: del form.cleaned_data[field] # mark the data as seen seen_data.add(row_data) if errors: raise ValidationError(errors)
Return True if data differs from initial.
def has_changed(self): """Return True if data differs from initial.""" # Need to recurse over nested formsets so that the form is saved if there are changes # to child forms but not the parent if self.formsets: for formset in self.formsets.values(): for form in formset.forms: if form.has_changed(): return True return bool(self.changed_data)
Create a DeferringRelatedManager class that wraps an ordinary RelatedManager with 'deferring' behaviour: any updates to the object set (via e.g. add() or clear()) are written to a holding area rather than committed to the database immediately. Writing to the database is deferred until the model is saved.
def create_deferring_foreign_related_manager(related, original_manager_cls): """ Create a DeferringRelatedManager class that wraps an ordinary RelatedManager with 'deferring' behaviour: any updates to the object set (via e.g. add() or clear()) are written to a holding area rather than committed to the database immediately. Writing to the database is deferred until the model is saved. """ relation_name = related.get_accessor_name() rel_field = related.field rel_model = related.related_model superclass = rel_model._default_manager.__class__ class DeferringRelatedManager(superclass): def __init__(self, instance): super(DeferringRelatedManager, self).__init__() self.model = rel_model self.instance = instance def _get_cluster_related_objects(self): # Helper to retrieve the instance's _cluster_related_objects dict, # creating it if it does not already exist try: return self.instance._cluster_related_objects except AttributeError: cluster_related_objects = {} self.instance._cluster_related_objects = cluster_related_objects return cluster_related_objects def get_live_query_set(self): # deprecated; renamed to get_live_queryset to match the move from # get_query_set to get_queryset in Django 1.6 return self.get_live_queryset() def get_live_queryset(self): """ return the original manager's queryset, which reflects the live database """ return original_manager_cls(self.instance).get_queryset() def get_queryset(self): """ return the current object set with any updates applied, wrapped up in a FakeQuerySet if it doesn't match the database state """ try: results = self.instance._cluster_related_objects[relation_name] except (AttributeError, KeyError): return self.get_live_queryset() return FakeQuerySet(related.related_model, results) def _apply_rel_filters(self, queryset): # Implemented as empty for compatibility sake # But there is probably a better implementation of this function return queryset._next_is_sticky() def get_prefetch_queryset(self, instances, queryset=None): if queryset is None: db = self._db or router.db_for_read(self.model, instance=instances[0]) queryset = super(DeferringRelatedManager, self).get_queryset().using(db) rel_obj_attr = rel_field.get_local_related_value instance_attr = rel_field.get_foreign_related_value instances_dict = dict((instance_attr(inst), inst) for inst in instances) query = {'%s__in' % rel_field.name: instances} qs = queryset.filter(**query) # Since we just bypassed this class' get_queryset(), we must manage # the reverse relation manually. for rel_obj in qs: instance = instances_dict[rel_obj_attr(rel_obj)] setattr(rel_obj, rel_field.name, instance) cache_name = rel_field.related_query_name() return qs, rel_obj_attr, instance_attr, False, cache_name, False def get_object_list(self): """ return the mutable list that forms the current in-memory state of this relation. If there is no such list (i.e. the manager is returning querysets from the live database instead), one is created, populating it with the live database state """ cluster_related_objects = self._get_cluster_related_objects() try: object_list = cluster_related_objects[relation_name] except KeyError: object_list = list(self.get_live_queryset()) cluster_related_objects[relation_name] = object_list return object_list def add(self, *new_items): """ Add the passed items to the stored object set, but do not commit them to the database """ items = self.get_object_list() for target in new_items: item_matched = False for i, item in enumerate(items): if item == target: # Replace the matched item with the new one. This ensures that any # modifications to that item's fields take effect within the recordset - # i.e. we can perform a virtual UPDATE to an object in the list # by calling add(updated_object). Which is semantically a bit dubious, # but it does the job... items[i] = target item_matched = True break if not item_matched: items.append(target) # update the foreign key on the added item to point back to the parent instance setattr(target, related.field.name, self.instance) # Sort list if rel_model._meta.ordering and len(items) > 1: sort_by_fields(items, rel_model._meta.ordering) def remove(self, *items_to_remove): """ Remove the passed items from the stored object set, but do not commit the change to the database """ items = self.get_object_list() # filter items list in place: see http://stackoverflow.com/a/1208792/1853523 items[:] = [item for item in items if item not in items_to_remove] def create(self, **kwargs): items = self.get_object_list() new_item = related.related_model(**kwargs) items.append(new_item) return new_item def clear(self): """ Clear the stored object set, without affecting the database """ self.set([]) def set(self, objs, bulk=True, clear=False): # cast objs to a list so that: # 1) we can call len() on it (which we can't do on, say, a queryset) # 2) if we need to sort it, we can do so without mutating the original objs = list(objs) cluster_related_objects = self._get_cluster_related_objects() for obj in objs: # update the foreign key on the added item to point back to the parent instance setattr(obj, related.field.name, self.instance) # Clone and sort the 'objs' list, if necessary if rel_model._meta.ordering and len(objs) > 1: sort_by_fields(objs, rel_model._meta.ordering) cluster_related_objects[relation_name] = objs def commit(self): """ Apply any changes made to the stored object set to the database. Any objects removed from the initial set will be deleted entirely from the database. """ if self.instance.pk is None: raise IntegrityError("Cannot commit relation %r on an unsaved model" % relation_name) try: final_items = self.instance._cluster_related_objects[relation_name] except (AttributeError, KeyError): # _cluster_related_objects entry never created => no changes to make return original_manager = original_manager_cls(self.instance) live_items = list(original_manager.get_queryset()) for item in live_items: if item not in final_items: item.delete() for item in final_items: # Django 1.9+ bulk updates items by default which assumes # that they have already been saved to the database. # Disable this behaviour. # https://code.djangoproject.com/ticket/18556 # https://github.com/django/django/commit/adc0c4fbac98f9cb975e8fa8220323b2de638b46 original_manager.add(item, bulk=False) # purge the _cluster_related_objects entry, so we switch back to live SQL del self.instance._cluster_related_objects[relation_name] return DeferringRelatedManager
Sort a list of objects on the given fields. The field list works analogously to queryset.order_by(*fields): each field is either a property of the object, or is prefixed by '-' (e.g. '-name') to indicate reverse ordering.
def sort_by_fields(items, fields): """ Sort a list of objects on the given fields. The field list works analogously to queryset.order_by(*fields): each field is either a property of the object, or is prefixed by '-' (e.g. '-name') to indicate reverse ordering. """ # To get the desired behaviour, we need to order by keys in reverse order # See: https://docs.python.org/2/howto/sorting.html#sort-stability-and-complex-sorts for key in reversed(fields): # Check if this key has been reversed reverse = False if key[0] == '-': reverse = True key = key[1:] # Sort # Use a tuple of (v is not None, v) as the key, to ensure that None sorts before other values, # as comparing directly with None breaks on python3 items.sort(key=lambda x: (getattr(x, key) is not None, getattr(x, key)), reverse=reverse)
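The reversed-keys loop works because Python's sort is stable: sorting by the last key first means earlier keys dominate the final order. A self-contained check, using SimpleNamespace objects in place of model instances:

from types import SimpleNamespace

people = [
    SimpleNamespace(name='bob', age=30),
    SimpleNamespace(name='alice', age=30),
    SimpleNamespace(name='carol', age=25),
]

# Sort by age ascending, then by name descending within equal ages.
sort_by_fields(people, ['age', '-name'])
print([(p.age, p.name) for p in people])
# [(25, 'carol'), (30, 'bob'), (30, 'alice')]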
Returns the address with a valid checksum attached.
def with_valid_checksum(self): # type: () -> Address """ Returns the address with a valid checksum attached. """ return Address( trytes=self.address + self._generate_checksum(), # Make sure to copy all of the ancillary attributes, too! balance=self.balance, key_index=self.key_index, security_level=self.security_level, )
Generates the correct checksum for this address.
def _generate_checksum(self): # type: () -> AddressChecksum """ Generates the correct checksum for this address. """ checksum_trits = [] # type: MutableSequence[int] sponge = Kerl() sponge.absorb(self.address.as_trits()) sponge.squeeze(checksum_trits) checksum_length = AddressChecksum.LEN * TRITS_PER_TRYTE return AddressChecksum.from_trits(checksum_trits[-checksum_length:])
Executes the command and (optionally) returns an exit code (used by the shell to determine if the application exited cleanly).
def execute(self, api, **arguments): # type: (Iota, **Any) -> Optional[int] """ Executes the command and (optionally) returns an exit code (used by the shell to determine if the application exited cleanly). :param api: The API object used to communicate with the node. :param arguments: Command-line arguments parsed by the argument parser. """ raise NotImplementedError( 'Not implemented in {cls}.'.format(cls=type(self).__name__), )
Executes the command from a collection of arguments (e.g., :py:data:`sys.argv`) and returns the exit code.
def run_from_argv(self, argv=None): # type: (Optional[tuple]) -> int """ Executes the command from a collection of arguments (e.g., :py:data:`sys.argv`) and returns the exit code. :param argv: Arguments to pass to the argument parser. If ``None``, defaults to ``sys.argv[1:]``. """ exit_code = self.execute(**self.parse_argv(argv)) if exit_code is None: exit_code = 0 return exit_code
Parses arguments for the command.
def parse_argv(self, argv=None): # type: (Optional[tuple]) -> dict """ Parses arguments for the command. :param argv: Arguments to pass to the argument parser. If ``None``, defaults to ``sys.argv[1:]``. """ arguments = vars(self.create_argument_parser().parse_args(argv)) seed = None if self.requires_seed: seed_filepath = arguments.pop('seed_file') seed = ( self.seed_from_filepath(seed_filepath) if seed_filepath else self.prompt_for_seed() ) arguments['api'] = Iota( adapter=arguments.pop('uri'), seed=seed, testnet=arguments.pop('testnet'), ) return arguments
Returns the argument parser that will be used to interpret arguments and options from argv.
def create_argument_parser(self): # type: () -> ArgumentParser """ Returns the argument parser that will be used to interpret arguments and options from argv. """ parser = ArgumentParser( description=self.__doc__, epilog='PyOTA v{version}'.format(version=__version__), ) parser.add_argument( '--uri', type=text_type, default='http://localhost:14265/', help=( 'URI of the node to connect to ' '(defaults to http://localhost:14265/).' ), ) if self.requires_seed: parser.add_argument( '--seed-file', type=text_type, dest='seed_file', help=( 'Path to a file containing your seed in cleartext. ' 'If not provided, you will be prompted to enter ' 'your seed via stdin.' ), ) parser.add_argument( '--testnet', action='store_true', default=False, help='If set, use testnet settings (e.g., for PoW).', ) return parser
Prompts the user to enter their seed via stdin.
def prompt_for_seed(): # type: () -> Seed """ Prompts the user to enter their seed via stdin. """ seed = secure_input( 'Enter seed and press return (typing will not be shown).\n' 'If no seed is specified, a random one will be used instead.\n' ) if isinstance(seed, text_type): seed = seed.encode('ascii') return Seed(seed) if seed else Seed.random()
"Normalizes" a hash, converting it into a sequence of integers (not trits!) suitable for use in signature generation/validation.
def normalize(hash_): # type: (Hash) -> List[List[int]] """ "Normalizes" a hash, converting it into a sequence of integers (not trits!) suitable for use in signature generation/validation. The hash is divided up into 3 parts, each of which is "balanced" (sum of all the values is equal to zero). """ normalized = [] source = hash_.as_integers() chunk_size = 27 for i in range(Hash.LEN // chunk_size): start = i * chunk_size stop = start + chunk_size chunk = source[start:stop] chunk_sum = sum(chunk) while chunk_sum > 0: chunk_sum -= 1 for j in range(chunk_size): if chunk[j] > -13: chunk[j] -= 1 break while chunk_sum < 0: chunk_sum += 1 for j in range(chunk_size): if chunk[j] < 13: chunk[j] += 1 break normalized.append(chunk) return normalized
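The per-chunk balancing is easier to see in isolation. Below is a standalone restatement of the same adjustment loop, applied to one toy 27-value chunk; it is a sketch of the logic above, with values clamped to [-13, 13] exactly as in normalize:

def balance_chunk(chunk):
    # Nudge values one step at a time until the chunk sums to zero.
    chunk = list(chunk)
    chunk_sum = sum(chunk)
    while chunk_sum > 0:
        chunk_sum -= 1
        for j, value in enumerate(chunk):
            if value > -13:
                chunk[j] = value - 1
                break
    while chunk_sum < 0:
        chunk_sum += 1
        for j, value in enumerate(chunk):
            if value < 13:
                chunk[j] = value + 1
                break
    return chunk

chunk = [13] * 3 + [0] * 24  # sums to 39
assert sum(balance_chunk(chunk)) == 0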
Returns whether a sequence of signature fragments is valid.
def validate_signature_fragments( fragments, hash_, public_key, sponge_type=Kerl, ): # type: (Sequence[TryteString], Hash, TryteString, type) -> bool """ Returns whether a sequence of signature fragments is valid. :param fragments: Sequence of signature fragments (usually :py:class:`iota.transaction.Fragment` instances). :param hash_: Hash used to generate the signature fragments (usually a :py:class:`iota.transaction.BundleHash` instance). :param public_key: The public key value used to verify the signature digest (usually a :py:class:`iota.types.Address` instance). :param sponge_type: The class used to create the cryptographic sponge (i.e., Curl or Kerl). """ checksum = [0] * (HASH_LENGTH * len(fragments)) normalized_hash = normalize(hash_) for i, fragment in enumerate(fragments): outer_sponge = sponge_type() # If there are more than 3 iterations, loop back around to the # start. normalized_chunk = normalized_hash[i % len(normalized_hash)] buffer = [] for j, hash_trytes in enumerate(fragment.iter_chunks(Hash.LEN)): buffer = hash_trytes.as_trits() # type: List[int] inner_sponge = sponge_type() # Note the sign flip compared to # :py;class:`SignatureFragmentGenerator`. for _ in range(13 + normalized_chunk[j]): inner_sponge.reset() inner_sponge.absorb(buffer) inner_sponge.squeeze(buffer) outer_sponge.absorb(buffer) outer_sponge.squeeze(buffer) checksum[i * HASH_LENGTH:(i + 1) * HASH_LENGTH] = buffer actual_public_key = [0] * HASH_LENGTH addy_sponge = sponge_type() addy_sponge.absorb(checksum) addy_sponge.squeeze(actual_public_key) return actual_public_key == public_key.as_trits()
Generates a single key.
def get_key(self, index, iterations): # type: (int, int) -> PrivateKey """ Generates a single key. :param index: The key index. :param iterations: Number of transform iterations to apply to the key, also known as security level. Must be >= 1. Increasing this value makes key generation slower, but more resistant to brute-forcing. """ return ( self.get_keys( start=index, count=1, step=1, iterations=iterations, )[0] )
Generates the key associated with the specified address.
def get_key_for(self, address): """ Generates the key associated with the specified address. Note that this method will generate the wrong key if the input address was generated from a different key! """ return self.get_key( index=address.key_index, iterations=address.security_level, )
Generates and returns one or more keys at the specified index(es).
def get_keys(self, start, count=1, step=1, iterations=1): # type: (int, int, int, int) -> List[PrivateKey] """ Generates and returns one or more keys at the specified index(es). This is a one-time operation; if you want to create lots of keys across multiple contexts, consider invoking :py:meth:`create_iterator` and sharing the resulting generator object instead. Warning: This method may take awhile to run if the starting index and/or the number of requested keys is a large number! :param start: Starting index. Must be >= 0. :param count: Number of keys to generate. Must be > 0. :param step: Number of indexes to advance after each key. This may be any non-zero (positive or negative) integer. :param iterations: Number of transform iterations to apply to each key, also known as security level. Must be >= 1. Increasing this value makes key generation slower, but more resistant to brute-forcing. :return: Always returns a list, even if only one key is generated. The returned list will contain ``count`` keys, except when ``step * count < start`` (only applies when ``step`` is negative). """ if count < 1: raise with_context( exc=ValueError('``count`` must be positive.'), context={ 'start': start, 'count': count, 'step': step, 'iterations': iterations, }, ) if not step: raise with_context( exc=ValueError('``step`` must not be zero.'), context={ 'start': start, 'count': count, 'step': step, 'iterations': iterations, }, ) iterator = self.create_iterator(start, step, iterations) keys = [] for _ in range(count): try: next_key = next(iterator) except StopIteration: break else: keys.append(next_key) return keys
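A usage sketch for the generator methods above. The KeyGenerator import path follows the reference in the docs below (iota.crypto.signing); the constructor taking a seed is an assumption, since only the methods appear here.

from iota.crypto.signing import KeyGenerator  # path per the reference below
from iota.crypto.types import Seed

generator = KeyGenerator(Seed.random())  # hypothetical constructor

# Three security-level-2 keys at indexes 0, 1, 2.
keys = generator.get_keys(start=0, count=3, step=1, iterations=2)
assert len(keys) == 3

# Equivalent incremental form, per the docstring's advice about sharing
# one iterator when generating many keys across multiple contexts.
iterator = generator.create_iterator(start=0, step=1, security_level=2)
first_key = next(iterator)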
Creates a generator that can be used to progressively generate new keys.
def create_iterator(self, start=0, step=1, security_level=1): # type: (int, int, int) -> KeyIterator """ Creates a generator that can be used to progressively generate new keys. :param start: Starting index. Warning: This method may take awhile to reset if ``start`` is a large number! :param step: Number of indexes to advance after each key. This value can be negative; the generator will exit if it reaches an index < 0. Warning: The generator may take awhile to advance between iterations if ``step`` is a large number! :param security_level: Number of _transform iterations to apply to each key. Must be >= 1. Increasing this value makes key generation slower, but more resistant to brute-forcing. """ return KeyIterator(self.seed, start, step, security_level)
Prepares the hash sponge for the generator.
def _create_sponge(self, index): # type: (int) -> Kerl """ Prepares the hash sponge for the generator. """ seed = self.seed_as_trits[:] sponge = Kerl() sponge.absorb(add_trits(seed, trits_from_int(index))) # Squeeze all of the trits out of the sponge and re-absorb them. # Note that the sponge transforms several times per operation, # so this sequence is not as redundant as it looks at first # glance. sponge.squeeze(seed) sponge.reset() sponge.absorb(seed) return sponge
Absorb trits into the sponge.
def absorb(self, trits, offset=0, length=None): # type: (Sequence[int], Optional[int], Optional[int]) -> None """ Absorb trits into the sponge. :param trits: Sequence of trits to absorb. :param offset: Starting offset in ``trits``. :param length: Number of trits to absorb. Defaults to ``len(trits)``. """ pad = ((len(trits) % HASH_LENGTH) or HASH_LENGTH) trits += [0] * (HASH_LENGTH - pad) if length is None: length = len(trits) if length < 1: raise with_context( exc=ValueError('Invalid length passed to ``absorb``.'), context={ 'trits': trits, 'offset': offset, 'length': length, }, ) # Copy trits from ``trits`` into internal state, one hash at a # time, transforming internal state in between hashes. while offset < length: start = offset stop = min(start + HASH_LENGTH, length) # Copy the next hash worth of trits to internal state. # # Note that we always copy the trits to the start of the # state. ``self._state`` is 3 hashes long, but only the # first hash is "public"; the other 2 are only accessible to # :py:meth:`_transform`. self._state[0:stop - start] = trits[start:stop] # Transform. self._transform() # Move on to the next hash. offset += HASH_LENGTH
Squeeze trits from the sponge.
def squeeze(self, trits, offset=0, length=HASH_LENGTH): # type: (MutableSequence[int], Optional[int], Optional[int]) -> None """ Squeeze trits from the sponge. :param trits: Sequence that the squeezed trits will be copied to. Note: this object will be modified! :param offset: Starting offset in ``trits``. :param length: Number of trits to squeeze, default to ``HASH_LENGTH`` """ # Squeeze is kind of like the opposite of absorb; it copies # trits from internal state to the ``trits`` parameter, one hash # at a time, and transforming internal state in between hashes. # # However, only the first hash of the state is "public", so we # can simplify the implementation somewhat. # Ensure length can be mod by HASH_LENGTH if length % HASH_LENGTH != 0: raise with_context( exc=ValueError('Invalid length passed to ``squeeze`.'), context={ 'trits': trits, 'offset': offset, 'length': length, }) # Ensure that ``trits`` can hold at least one hash worth of # trits. trits.extend([0] * max(0, length - len(trits))) # Check trits with offset can handle hash length if len(trits) - offset < HASH_LENGTH: raise with_context( exc=ValueError('Invalid offset passed to ``squeeze``.'), context={ 'trits': trits, 'offset': offset, 'length': length }, ) while length >= HASH_LENGTH: # Copy exactly one hash. trits[offset:offset + HASH_LENGTH] = self._state[0:HASH_LENGTH] # One hash worth of trits copied; now transform. self._transform() offset += HASH_LENGTH length -= HASH_LENGTH
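An absorb/squeeze round trip mirroring _generate_checksum earlier: absorb one full hash of trits, then squeeze one hash back out. The import path is an assumption, and HASH_LENGTH is restated here as the 243-trit hash size used throughout this module.

from iota.crypto.kerl import Kerl  # assumed import path

HASH_LENGTH = 243  # trits per hash, as used in absorb()/squeeze() above

sponge = Kerl()
sponge.absorb([0] * HASH_LENGTH)

digest = []  # squeeze() extends this list in place
sponge.squeeze(digest)
assert len(digest) == HASH_LENGTH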
Transforms internal state.
def _transform(self): # type: () -> None """ Transforms internal state. """ # Copy some values locally so we can avoid global lookups in the # inner loop. # # References: # # - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Local_Variables state_length = STATE_LENGTH truth_table = TRUTH_TABLE # Operate on a copy of ``self._state`` to eliminate dot lookups # in the inner loop. # # References: # # - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Avoiding_dots... # - http://stackoverflow.com/a/2612990/ prev_state = self._state[:] new_state = prev_state[:] # Note: This code looks significantly different from the C # implementation because it has been optimized to limit the # number of list item lookups (these are relatively slow in # Python). index = 0 for _ in range(NUMBER_OF_ROUNDS): prev_trit = prev_state[index] for pos in range(state_length): index += (364 if index < 365 else -365) new_trit = prev_state[index] new_state[pos] = truth_table[prev_trit + (3 * new_trit) + 4] prev_trit = new_trit prev_state = new_state new_state = new_state[:] self._state = new_state
Generates one or more key digests from the seed.
def get_digests( self, index=0, count=1, security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL, ): # type: (int, int, int) -> dict """ Generates one or more key digests from the seed. Digests are safe to share; use them to generate multisig addresses. :param index: The starting key index. :param count: Number of digests to generate. :param security_level: Number of iterations to use when generating new addresses. Larger values take longer, but the resulting signatures are more secure. This value must be between 1 and 3, inclusive. :return: Dict with the following items:: { 'digests': List[Digest], Always contains a list, even if only one digest was generated. } """ return commands.GetDigestsCommand(self.adapter)( seed=self.seed, index=index, count=count, securityLevel=security_level, )
Generates one or more private keys from the seed.
def get_private_keys( self, index=0, count=1, security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL, ): # type: (int, int, int) -> dict """ Generates one or more private keys from the seed. As the name implies, private keys should not be shared. However, in a few cases it may be necessary (e.g., for M-of-N transactions). :param index: The starting key index. :param count: Number of keys to generate. :param security_level: Number of iterations to use when generating new keys. Larger values take longer, but the resulting signatures are more secure. This value must be between 1 and 3, inclusive. :return: Dict with the following items:: { 'keys': List[PrivateKey], Always contains a list, even if only one key was generated. } References: - :py:class:`iota.crypto.signing.KeyGenerator` - https://github.com/iotaledger/wiki/blob/master/multisigs.md#how-m-of-n-works """ return commands.GetPrivateKeysCommand(self.adapter)( seed=self.seed, index=index, count=count, securityLevel=security_level, )
Prepares a bundle that authorizes the spending of IOTAs from a multisig address.
def prepare_multisig_transfer( self, transfers, # type: Iterable[ProposedTransaction] multisig_input, # type: MultisigAddress change_address=None, # type: Optional[Address] ): # type: (...) -> dict """ Prepares a bundle that authorizes the spending of IOTAs from a multisig address. .. note:: This method is used exclusively to spend IOTAs from a multisig address. If you want to spend IOTAs from non-multisig addresses, or if you want to create 0-value transfers (i.e., that don't require inputs), use :py:meth:`iota.api.Iota.prepare_transfer` instead. :param transfers: Transaction objects to prepare. .. important:: Must include at least one transaction that spends IOTAs (i.e., has a nonzero ``value``). If you want to prepare a bundle that does not spend any IOTAs, use :py:meth:`iota.api.prepare_transfer` instead. :param multisig_input: The multisig address to use as the input for the transfers. .. note:: This method only supports creating a bundle with a single multisig input. If you would like to spend from multiple multisig addresses in the same bundle, create the :py:class:`iota.multisig.transaction.ProposedMultisigBundle` object manually. :param change_address: If inputs are provided, any unspent amount will be sent to this address. If the bundle has no unspent inputs, ``change_address` is ignored. .. important:: Unlike :py:meth:`iota.api.Iota.prepare_transfer`, this method will NOT generate a change address automatically. If there are unspent inputs and ``change_address`` is empty, an exception will be raised. This is because multisig transactions typically involve multiple individuals, and it would be unfair to the participants if we generated a change address automatically using the seed of whoever happened to run the ``prepare_multisig_transfer`` method! .. danger:: Note that this protective measure is not a substitute for due diligence! Always verify the details of every transaction in a bundle (including the change transaction) before signing the input(s)! :return: Dict containing the following values:: { 'trytes': List[TransactionTrytes], Finalized bundle, as trytes. The input transactions are not signed. } In order to authorize the spending of IOTAs from the multisig input, you must generate the correct private keys and invoke the :py:meth:`iota.crypto.types.PrivateKey.sign_input_at` method for each key, in the correct order. Once the correct signatures are applied, you can then perform proof of work (``attachToTangle``) and broadcast the bundle using :py:meth:`iota.api.Iota.send_trytes`. """ return commands.PrepareMultisigTransferCommand(self.adapter)( changeAddress=change_address, multisigInput=multisig_input, transfers=transfers, )
Adds two sequences of trits together.
def add_trits(left, right): # type: (Sequence[int], Sequence[int]) -> List[int] """ Adds two sequences of trits together. The result is a list of trits equal in length to the longer of the two sequences. .. note:: Overflow is possible. For example, ``add_trits([1], [1])`` returns ``[-1]``. """ target_len = max(len(left), len(right)) res = [0] * target_len left += [0] * (target_len - len(left)) right += [0] * (target_len - len(right)) carry = 0 for i in range(len(res)): res[i], carry = _full_add_trits(left[i], right[i], carry) return res
Returns a trit representation of an integer value.
def trits_from_int(n, pad=1): # type: (int, Optional[int]) -> List[int] """ Returns a trit representation of an integer value. :param n: Integer value to convert. :param pad: Ensure the result has at least this many trits. References: - https://dev.to/buntine/the-balanced-ternary-machines-of-soviet-russia - https://en.wikipedia.org/wiki/Balanced_ternary - https://rosettacode.org/wiki/Balanced_ternary#Python """ if n == 0: trits = [] else: quotient, remainder = divmod(n, 3) if remainder == 2: # Lend 1 to the next place so we can make this trit # negative. quotient += 1 remainder = -1 trits = [remainder] + trits_from_int(quotient, pad=0) if pad: trits += [0] * max(0, pad - len(trits)) return trits
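Balanced ternary in action: trits are least-significant first, each position is a power of 3 with digit in {-1, 0, 1}, so the integer is recovered as sum(t * 3**i). A self-contained round trip using trits_from_int above:

for n in (0, 1, 2, 4, -7, 42):
    trits = trits_from_int(n)
    assert n == sum(t * 3 ** i for i, t in enumerate(trits))

print(trits_from_int(2))   # [-1, 1]      i.e. -1 + 3
print(trits_from_int(-7))  # [-1, 1, -1]  i.e. -1 + 3 - 9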
Adds two individual trits together.
def _add_trits(left, right): # type: (int, int) -> int """ Adds two individual trits together. The result is always a single trit. """ res = left + right return res if -2 < res < 2 else (res < 0) - (res > 0)
Adds two trits together with support for a carry trit.
def _full_add_trits(left, right, carry): # type: (int, int, int) -> Tuple[int, int] """ Adds two trits together, with support for a carry trit. """ sum_both = _add_trits(left, right) cons_left = _cons_trits(left, right) cons_right = _cons_trits(sum_both, carry) return _add_trits(sum_both, carry), _any_trits(cons_left, cons_right)
Outputs the user's seed to stdout, along with lots of warnings about security.
def output_seed(seed): # type: (Seed) -> None """ Outputs the user's seed to stdout, along with lots of warnings about security. """ print( 'WARNING: Anyone who has your seed can spend your IOTAs! ' 'Clear the screen after recording your seed!' ) compat.input('') print('Your seed is:') print('') print(binary_type(seed).decode('ascii')) print('') print( 'Clear the screen to prevent shoulder surfing, ' 'and press return to continue.' ) print('https://en.wikipedia.org/wiki/Shoulder_surfing_(computer_security)') compat.input('')
Attaches the specified transactions (trytes) to the Tangle by doing Proof of Work. You need to supply branchTransaction as well as trunkTransaction (basically the tips which you're going to validate and reference with this transaction) - both of which you'll get through the getTransactionsToApprove API call.
def attach_to_tangle( self, trunk_transaction, # type: TransactionHash branch_transaction, # type: TransactionHash trytes, # type: Iterable[TryteString] min_weight_magnitude=None, # type: Optional[int] ): # type: (...) -> dict """ Attaches the specified transactions (trytes) to the Tangle by doing Proof of Work. You need to supply branchTransaction as well as trunkTransaction (basically the tips which you're going to validate and reference with this transaction) - both of which you'll get through the getTransactionsToApprove API call. The returned value is a different set of tryte values which you can input into :py:meth:`broadcast_transactions` and :py:meth:`store_transactions`. References: - https://iota.readme.io/docs/attachtotangle """ if min_weight_magnitude is None: min_weight_magnitude = self.default_min_weight_magnitude return core.AttachToTangleCommand(self.adapter)( trunkTransaction=trunk_transaction, branchTransaction=branch_transaction, minWeightMagnitude=min_weight_magnitude, trytes=trytes, )
Find the transactions which match the specified input and return.
def find_transactions( self, bundles=None, # type: Optional[Iterable[BundleHash]] addresses=None, # type: Optional[Iterable[Address]] tags=None, # type: Optional[Iterable[Tag]] approvees=None, # type: Optional[Iterable[TransactionHash]] ): # type: (...) -> dict """ Find the transactions which match the specified input and return. All input values are lists, for which a list of return values (transaction hashes), in the same order, is returned for all individual elements. Using multiple of these input fields returns the intersection of the values. :param bundles: List of bundle IDs. :param addresses: List of addresses. :param tags: List of tags. :param approvees: List of approvee transaction IDs. References: - https://iota.readme.io/docs/findtransactions """ return core.FindTransactionsCommand(self.adapter)( bundles=bundles, addresses=addresses, tags=tags, approvees=approvees, )
Similar to :py:meth:`get_inclusion_states`. Returns the confirmed balance which a list of addresses have at the latest confirmed milestone.
def get_balances(self, addresses, threshold=100): # type: (Iterable[Address], int) -> dict """ Similar to :py:meth:`get_inclusion_states`. Returns the confirmed balance which a list of addresses have at the latest confirmed milestone. In addition to the balances, it also returns the milestone as well as the index with which the confirmed balance was determined. The balances are returned as a list in the same order as the addresses were provided as input. :param addresses: List of addresses to get the confirmed balance for. :param threshold: Confirmation threshold. References: - https://iota.readme.io/docs/getbalances """ return core.GetBalancesCommand(self.adapter)( addresses=addresses, threshold=threshold, )
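A minimal usage sketch (placeholder node URL, dummy address):

from iota import Address, Iota

api = Iota('http://localhost:14265')  # placeholder node URL

response = api.get_balances(addresses=[Address(b'9' * 81)])

# Balances come back in the same order as the input addresses; the
# response also reports the milestone used to confirm them.
print(response['balances'])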
Get the inclusion states of a set of transactions. This is for determining if a transaction was accepted and confirmed by the network or not. You can search for multiple tips (and thus, milestones) to get past inclusion states of transactions.
def get_inclusion_states(self, transactions, tips): # type: (Iterable[TransactionHash], Iterable[TransactionHash]) -> dict """ Get the inclusion states of a set of transactions. This is for determining if a transaction was accepted and confirmed by the network or not. You can search for multiple tips (and thus, milestones) to get past inclusion states of transactions. :param transactions: List of transactions you want to get the inclusion state for. :param tips: List of tips (including milestones) you want to search for the inclusion state. References: - https://iota.readme.io/docs/getinclusionstates """ return core.GetInclusionStatesCommand(self.adapter)( transactions=transactions, tips=tips, )
More comprehensive version of :py:meth:`get_transfers` that returns addresses and account balance in addition to bundles.
def get_account_data(self, start=0, stop=None, inclusion_states=False, security_level=None): # type: (int, Optional[int], bool, Optional[int]) -> dict """ More comprehensive version of :py:meth:`get_transfers` that returns addresses and account balance in addition to bundles. This function is useful in getting all the relevant information of your account. :param start: Starting key index. :param stop: Stop before this index. Note that this parameter behaves like the ``stop`` attribute in a :py:class:`slice` object; the stop index is *not* included in the result. If ``None`` (default), then this method will check every address until it finds one without any transfers. :param inclusion_states: Whether to also fetch the inclusion states of the transfers. This requires an additional API call to the node, so it is disabled by default. :param security_level: Number of iterations to use when generating new addresses (see :py:meth:`get_new_addresses`). This value must be between 1 and 3, inclusive. If not set, defaults to :py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`. :return: Dict with the following structure:: { 'addresses': List[Address], List of generated addresses. Note that this list may include unused addresses. 'balance': int, Total account balance. Might be 0. 'bundles': List[Bundle], List of bundles with transactions to/from this account. } """ return extended.GetAccountDataCommand(self.adapter)( seed=self.seed, start=start, stop=stop, inclusionStates=inclusion_states, security_level=security_level )
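A usage sketch, with the node URL and the throwaway random seed as placeholders; the result keys follow the structure documented above:

from iota import Iota, Seed

api = Iota('http://localhost:14265', seed=Seed.random())  # placeholders

account = api.get_account_data(start=0)
print('Balance:', account['balance'])
print('Addresses:', len(account['addresses']))
print('Bundles:', len(account['bundles']))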
Gets all possible inputs of a seed and returns them along with the total balance.
def get_inputs( self, start=0, stop=None, threshold=None, security_level=None, ): # type: (int, Optional[int], Optional[int], Optional[int]) -> dict """ Gets all possible inputs of a seed and returns them, along with the total balance. This is either done deterministically (by generating all addresses until :py:meth:`find_transactions` returns an empty result), or by providing a key range to search. :param start: Starting key index. Defaults to 0. :param stop: Stop before this index. Note that this parameter behaves like the ``stop`` attribute in a :py:class:`slice` object; the stop index is *not* included in the result. If ``None`` (default), then this method will not stop until it finds an unused address. :param threshold: If set, determines the minimum threshold for a successful result: - As soon as this threshold is reached, iteration will stop. - If the command runs out of addresses before the threshold is reached, an exception is raised. .. note:: This method does not attempt to "optimize" the result (e.g., smallest number of inputs, get as close to ``threshold`` as possible, etc.); it simply accumulates inputs in order until the threshold is met. If ``threshold`` is 0, the first address in the key range with a non-zero balance will be returned (if it exists). If ``threshold`` is ``None`` (default), this method will return **all** inputs in the specified key range. :param security_level: Number of iterations to use when generating new addresses (see :py:meth:`get_new_addresses`). This value must be between 1 and 3, inclusive. If not set, defaults to :py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`. :return: Dict with the following structure:: { 'inputs': List[Address], Addresses with nonzero balances that can be used as inputs. 'totalBalance': int, Aggregate balance from all matching addresses. } Note that each Address in the result has its ``balance`` attribute set. Example: .. code-block:: python response = iota.get_inputs(...) input0 = response['inputs'][0] # type: Address input0.balance # 42 :raise: - :py:class:`iota.adapter.BadApiResponse` if ``threshold`` is not met. Not applicable if ``threshold`` is ``None``. References: - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getinputs """ return extended.GetInputsCommand(self.adapter)( seed=self.seed, start=start, stop=stop, threshold=threshold, securityLevel=security_level )
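A usage sketch (placeholder URL and seed), relying on the ``inputs`` / ``totalBalance`` structure documented above:

from iota import Iota, Seed

api = Iota('http://localhost:14265', seed=Seed.random())  # placeholders

# Accumulate inputs until at least 100i is covered; raises
# BadApiResponse if the seed's addresses cannot reach the threshold.
response = api.get_inputs(threshold=100)
print(response['totalBalance'])
for input_ in response['inputs']:
    print(input_, input_.balance)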
Generates one or more new addresses from the seed.
def get_new_addresses( self, index=0, count=1, security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL, checksum=False, ): # type: (int, Optional[int], int, bool) -> dict """ Generates one or more new addresses from the seed. :param index: The key index of the first new address to generate (must be >= 1). :param count: Number of addresses to generate (must be >= 1). .. tip:: This is more efficient than calling ``get_new_address`` inside a loop. If ``None``, this method will progressively generate addresses and scan the Tangle until it finds one that has no transactions referencing it. :param security_level: Number of iterations to use when generating new addresses. Larger values take longer, but the resulting signatures are more secure. This value must be between 1 and 3, inclusive. :param checksum: Specify whether to return the address with the checksum. Defaults to ``False``. :return: Dict with the following structure:: { 'addresses': List[Address], Always a list, even if only one address was generated. } References: - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getnewaddress """ return extended.GetNewAddressesCommand(self.adapter)( count=count, index=index, securityLevel=security_level, checksum=checksum, seed=self.seed, )
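A usage sketch (placeholder URL and seed):

from iota import Iota, Seed

api = Iota('http://localhost:14265', seed=Seed.random())  # placeholders

# Generate three consecutive addresses, with checksums attached.
response = api.get_new_addresses(index=0, count=3, checksum=True)
for address in response['addresses']:
    print(address)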
Returns all transfers associated with the seed.
def get_transfers(self, start=0, stop=None, inclusion_states=False): # type: (int, Optional[int], bool) -> dict """ Returns all transfers associated with the seed. :param start: Starting key index. :param stop: Stop before this index. Note that this parameter behaves like the ``stop`` attribute in a :py:class:`slice` object; the stop index is *not* included in the result. If ``None`` (default), then this method will check every address until it finds one without any transfers. :param inclusion_states: Whether to also fetch the inclusion states of the transfers. This requires an additional API call to the node, so it is disabled by default. :return: Dict with the following structure:: { 'bundles': List[Bundle], Matching bundles, sorted by tail transaction timestamp. This value is always a list, even if only one bundle was found. } References: - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#gettransfers """ return extended.GetTransfersCommand(self.adapter)( seed=self.seed, start=start, stop=stop, inclusionStates=inclusion_states, )
Prepares transactions to be broadcast to the Tangle, by generating the correct bundle, as well as choosing and signing the inputs (for value transfers).
def prepare_transfer( self, transfers, # type: Iterable[ProposedTransaction] inputs=None, # type: Optional[Iterable[Address]] change_address=None, # type: Optional[Address] security_level=None, # type: Optional[int] ): # type: (...) -> dict """ Prepares transactions to be broadcast to the Tangle, by generating the correct bundle, as well as choosing and signing the inputs (for value transfers). :param transfers: Transaction objects to prepare. :param inputs: List of addresses used to fund the transfer. Ignored for zero-value transfers. If not provided, addresses will be selected automatically by scanning the Tangle for unspent inputs. Depending on how many transfers you've already sent with your seed, this process could take awhile. :param change_address: If inputs are provided, any unspent amount will be sent to this address. If not specified, a change address will be generated automatically. :param security_level: Number of iterations to use when generating new addresses (see :py:meth:`get_new_addresses`). This value must be between 1 and 3, inclusive. If not set, defaults to :py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`. :return: Dict with the following structure:: { 'trytes': List[TransactionTrytes], Raw trytes for the transactions in the bundle, ready to be provided to :py:meth:`send_trytes`. } References: - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#preparetransfers """ return extended.PrepareTransferCommand(self.adapter)( seed=self.seed, transfers=transfers, inputs=inputs, changeAddress=change_address, securityLevel=security_level, )
Promotes a transaction by adding spam on top of it.
def promote_transaction( self, transaction, depth=3, min_weight_magnitude=None, ): # type: (TransactionHash, int, Optional[int]) -> dict """ Promotes a transaction by adding spam on top of it. :return: Dict with the following structure:: { 'bundle': Bundle, The newly-published bundle. } """ if min_weight_magnitude is None: min_weight_magnitude = self.default_min_weight_magnitude return extended.PromoteTransactionCommand(self.adapter)( transaction=transaction, depth=depth, minWeightMagnitude=min_weight_magnitude, )
Takes a tail transaction hash as input, gets the bundle associated with the transaction, and then replays the bundle by attaching it to the Tangle.
def replay_bundle( self, transaction, depth=3, min_weight_magnitude=None, ): # type: (TransactionHash, int, Optional[int]) -> dict """ Takes a tail transaction hash as input, gets the bundle associated with the transaction and then replays the bundle by attaching it to the Tangle. :param transaction: Transaction hash. Must be a tail. :param depth: Depth at which to attach the bundle. Defaults to 3. :param min_weight_magnitude: Min weight magnitude, used by the node to calibrate Proof of Work. If not provided, a default value will be used. :return: Dict with the following structure:: { 'trytes': List[TransactionTrytes], Raw trytes that were published to the Tangle. } References: - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer """ if min_weight_magnitude is None: min_weight_magnitude = self.default_min_weight_magnitude return extended.ReplayBundleCommand(self.adapter)( transaction=transaction, depth=depth, minWeightMagnitude=min_weight_magnitude, )
Prepares a set of transfers and creates the bundle, then attaches the bundle to the Tangle, and broadcasts and stores the transactions.
def send_transfer( self, transfers, # type: Iterable[ProposedTransaction] depth=3, # type: int inputs=None, # type: Optional[Iterable[Address]] change_address=None, # type: Optional[Address] min_weight_magnitude=None, # type: Optional[int] security_level=None, # type: Optional[int] ): # type: (...) -> dict """ Prepares a set of transfers and creates the bundle, then attaches the bundle to the Tangle, and broadcasts and stores the transactions. :param transfers: Transfers to include in the bundle. :param depth: Depth at which to attach the bundle. Defaults to 3. :param inputs: List of inputs used to fund the transfer. Not needed for zero-value transfers. :param change_address: If inputs are provided, any unspent amount will be sent to this address. If not specified, a change address will be generated automatically. :param min_weight_magnitude: Min weight magnitude, used by the node to calibrate Proof of Work. If not provided, a default value will be used. :param security_level: Number of iterations to use when generating new addresses (see :py:meth:`get_new_addresses`). This value must be between 1 and 3, inclusive. If not set, defaults to :py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`. :return: Dict with the following structure:: { 'bundle': Bundle, The newly-published bundle. } References: - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtransfer """ if min_weight_magnitude is None: min_weight_magnitude = self.default_min_weight_magnitude return extended.SendTransferCommand(self.adapter)( seed=self.seed, depth=depth, transfers=transfers, inputs=inputs, changeAddress=change_address, minWeightMagnitude=min_weight_magnitude, securityLevel=security_level, )
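A usage sketch for a zero-value transfer (no inputs needed). The URL and seed are placeholders, and ``TryteString.from_unicode`` assumes a PyOTA version where that constructor is available:

from iota import Address, Iota, ProposedTransaction, Seed, Tag, TryteString

api = Iota('http://localhost:14265', seed=Seed.random())  # placeholders

transfer = ProposedTransaction(
    address=Address(b'9' * 81),   # dummy recipient
    value=0,
    tag=Tag(b'EXAMPLE9TAG'),
    message=TryteString.from_unicode('Hello, Tangle!'),
)

response = api.send_transfer(transfers=[transfer], depth=3)
print(response['bundle'].hash)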
Attaches transaction trytes to the Tangle, then broadcasts and stores them.
def send_trytes(self, trytes, depth=3, min_weight_magnitude=None): # type: (Iterable[TransactionTrytes], int, Optional[int]) -> dict """ Attaches transaction trytes to the Tangle, then broadcasts and stores them. :param trytes: Transaction encoded as a tryte sequence. :param depth: Depth at which to attach the bundle. Defaults to 3. :param min_weight_magnitude: Min weight magnitude, used by the node to calibrate Proof of Work. If not provided, a default value will be used. :return: Dict with the following structure:: { 'trytes': List[TransactionTrytes], Raw trytes that were published to the Tangle. } References: - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtrytes """ if min_weight_magnitude is None: min_weight_magnitude = self.default_min_weight_magnitude return extended.SendTrytesCommand(self.adapter)( trytes=trytes, depth=depth, minWeightMagnitude=min_weight_magnitude, )
Given a URI, returns a properly-configured adapter instance.
def resolve_adapter(uri): # type: (AdapterSpec) -> BaseAdapter """ Given a URI, returns a properly-configured adapter instance. """ if isinstance(uri, BaseAdapter): return uri parsed = compat.urllib_parse.urlsplit(uri) # type: SplitResult if not parsed.scheme: raise with_context( exc=InvalidUri( 'URI must begin with "<protocol>://" (e.g., "udp://").', ), context={ 'parsed': parsed, 'uri': uri, }, ) try: adapter_type = adapter_registry[parsed.scheme] except KeyError: raise with_context( exc=InvalidUri('Unrecognized protocol {protocol!r}.'.format( protocol=parsed.scheme, )), context={ 'parsed': parsed, 'uri': uri, }, ) return adapter_type.configure(parsed)
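A quick illustration; the URL is a placeholder:

from iota.adapter import HttpAdapter, resolve_adapter

adapter = resolve_adapter('http://localhost:14265')
assert isinstance(adapter, HttpAdapter)

# Adapter instances pass through unchanged.
assert resolve_adapter(adapter) is adapter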
Sends an API request to the node.
def send_request(self, payload, **kwargs): # type: (dict, dict) -> dict """ Sends an API request to the node. :param payload: JSON payload. :param kwargs: Additional keyword arguments for the adapter. :return: Decoded response from the node. :raise: - :py:class:`BadApiResponse` if a non-success response was received. """ raise NotImplementedError( 'Not implemented in {cls}.'.format(cls=type(self).__name__), )
Sends a message to the instance's logger, if configured.
def _log(self, level, message, context=None): # type: (int, Text, Optional[dict]) -> None """ Sends a message to the instance's logger, if configured. """ if self._logger: self._logger.log(level, message, extra={'context': context or {}})
Sends the actual HTTP request.
def _send_http_request(self, url, payload, method='post', **kwargs): # type: (Text, Optional[Text], Text, dict) -> Response """ Sends the actual HTTP request. Split into its own method so that it can be mocked during unit tests. """ kwargs.setdefault( 'timeout', self.timeout if self.timeout else get_default_timeout(), ) if self.authentication: kwargs.setdefault('auth', auth.HTTPBasicAuth(*self.authentication)) self._log( level=DEBUG, message='Sending {method} to {url}: {payload!r}'.format( method=method, payload=payload, url=url, ), context={ 'request_method': method, 'request_kwargs': kwargs, 'request_payload': payload, 'request_url': url, }, ) response = request(method=method, url=url, data=payload, **kwargs) self._log( level=DEBUG, message='Receiving {method} from {url}: {response!r}'.format( method=method, response=response.content, url=url, ), context={ 'request_method': method, 'request_kwargs': kwargs, 'request_payload': payload, 'request_url': url, 'response_headers': response.headers, 'response_content': response.content, }, ) return response
Interprets the HTTP response from the node.
def _interpret_response(self, response, payload, expected_status): # type: (Response, dict, Container[int]) -> dict """ Interprets the HTTP response from the node. :param response: The response object received from :py:meth:`_send_http_request`. :param payload: The request payload that was sent (used for debugging). :param expected_status: The response should match one of these status codes to be considered valid. """ raw_content = response.text if not raw_content: raise with_context( exc=BadApiResponse( 'Empty {status} response from node.'.format( status=response.status_code, ), ), context={ 'request': payload, }, ) try: decoded = json.loads(raw_content) # type: dict # :bc: py2k doesn't have JSONDecodeError except ValueError: raise with_context( exc=BadApiResponse( 'Non-JSON {status} response from node: ' '{raw_content}'.format( status=response.status_code, raw_content=raw_content, ) ), context={ 'request': payload, 'raw_response': raw_content, }, ) if not isinstance(decoded, dict): raise with_context( exc=BadApiResponse( 'Malformed {status} response from node: {decoded!r}'.format( status=response.status_code, decoded=decoded, ), ), context={ 'request': payload, 'response': decoded, }, ) if response.status_code in expected_status: return decoded error = None try: if response.status_code == codes['bad_request']: error = decoded['error'] elif response.status_code == codes['internal_server_error']: error = decoded['exception'] except KeyError: pass raise with_context( exc=BadApiResponse( '{status} response from node: {error}'.format( error=error or decoded, status=response.status_code, ), ), context={ 'request': payload, 'response': decoded, }, )
Sets the response that the adapter will return for the specified command.
def seed_response(self, command, response): # type: (Text, dict) -> MockAdapter """ Sets the response that the adapter will return for the specified command. You can seed multiple responses per command; the adapter will put them into a FIFO queue. When a request comes in, the adapter will pop the corresponding response off of the queue. Example: .. code-block:: python adapter.seed_response('sayHello', {'message': 'Hi!'}) adapter.seed_response('sayHello', {'message': 'Hello!'}) adapter.send_request({'command': 'sayHello'}) # {'message': 'Hi!'} adapter.send_request({'command': 'sayHello'}) # {'message': 'Hello!'} """ if command not in self.responses: self.responses[command] = deque() self.responses[command].append(response) return self
Absorbs a digest into the sponge.
def add_digest(self, digest): # type: (Digest) -> None """ Absorbs a digest into the sponge. .. important:: Keep track of the order that digests are added! To spend inputs from a multisig address, you must provide the private keys in the same order! References: - https://github.com/iotaledger/wiki/blob/master/multisigs.md#spending-inputs """ if self._address: raise ValueError('Cannot add digests once an address is extracted.') self._sponge.absorb(digest.as_trits()) self._digests.append(digest)
Returns the new multisig address.
def get_address(self): # type: () -> MultisigAddress """ Returns the new multisig address. Note that you can continue to add digests after extracting an address; the next address will use *all* of the digests that have been added so far. """ if not self._digests: raise ValueError( 'Must call ``add_digest`` at least once ' 'before calling ``get_address``.', ) if not self._address: address_trits = [0] * HASH_LENGTH self._sponge.squeeze(address_trits) self._address = MultisigAddress.from_trits( address_trits, digests=self._digests[:], ) return self._address
Generates and returns one or more addresses at the specified index(es).
def get_addresses(self, start, count=1, step=1): # type: (int, int, int) -> List[Address] """ Generates and returns one or more addresses at the specified index(es). This is a one-time operation; if you want to create lots of addresses across multiple contexts, consider invoking :py:meth:`create_iterator` and sharing the resulting generator object instead. Warning: This method may take awhile to run if the starting index and/or the number of requested addresses is a large number! :param start: Starting index. Must be >= 0. :param count: Number of addresses to generate. Must be > 0. :param step: Number of indexes to advance after each address. This may be any non-zero (positive or negative) integer. :return: Always returns a list, even if only one address is generated. The returned list will contain ``count`` addresses, except when ``step * count < start`` (only applies when ``step`` is negative). """ if count < 1: raise with_context( exc=ValueError('``count`` must be positive.'), context={ 'start': start, 'count': count, 'step': step, }, ) if not step: raise with_context( exc=ValueError('``step`` must not be zero.'), context={ 'start': start, 'count': count, 'step': step, }, ) generator = self.create_iterator(start, step) addresses = [] for _ in range(count): try: next_addy = next(generator) except StopIteration: break else: addresses.append(next_addy) return addresses
Creates an iterator that can be used to progressively generate new addresses.
def create_iterator(self, start=0, step=1): # type: (int, int) -> Generator[Address, None, None] """ Creates an iterator that can be used to progressively generate new addresses. :param start: Starting index. Warning: This method may take awhile to reset if ``start`` is a large number! :param step: Number of indexes to advance after each address. Warning: The generator may take awhile to advance between iterations if ``step`` is a large number! """ key_iterator = ( KeyGenerator(self.seed).create_iterator( start, step, self.security_level, ) ) while True: yield self._generate_address(key_iterator)
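A usage sketch showing both entry points (the random seed is a placeholder; address generation is purely local, so no node is needed):

from iota import Seed
from iota.crypto.addresses import AddressGenerator

generator = AddressGenerator(seed=Seed.random())

# One-shot batch generation...
addresses = generator.get_addresses(start=0, count=3)

# ...or a shared iterator for progressive generation.
iterator = generator.create_iterator(start=3)
next_address = next(iterator)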
Generates an address from a private key digest.
def address_from_digest(digest): # type: (Digest) -> Address """ Generates an address from a private key digest. """ address_trits = [0] * (Address.LEN * TRITS_PER_TRYTE) # type: List[int] sponge = Kerl() sponge.absorb(digest.as_trits()) sponge.squeeze(address_trits) return Address.from_trits( trits=address_trits, key_index=digest.key_index, security_level=digest.security_level, )
Generates a new address.
def _generate_address(self, key_iterator): # type: (KeyIterator) -> Address """ Generates a new address. Used in the event of a cache miss. """ if self.checksum: return ( self.address_from_digest( digest=self._get_digest(key_iterator), ).with_valid_checksum() ) else: return self.address_from_digest(self._get_digest(key_iterator))
Finds transactions matching the specified criteria, fetches the corresponding trytes, and converts them into Transaction objects.
def find_transaction_objects(adapter, **kwargs): # type: (BaseAdapter, **Iterable) -> List[Transaction] """ Finds transactions matching the specified criteria, fetches the corresponding trytes and converts them into Transaction objects. """ ft_response = FindTransactionsCommand(adapter)(**kwargs) hashes = ft_response['hashes'] if hashes: gt_response = GetTrytesCommand(adapter)(hashes=hashes) return list(map( Transaction.from_tryte_string, gt_response.get('trytes') or [], )) # type: List[Transaction] return []
Scans the Tangle for used addresses.
def iter_used_addresses( adapter, # type: BaseAdapter seed, # type: Seed start, # type: int security_level=None, # type: Optional[int] ): # type: (...) -> Generator[Tuple[Address, List[TransactionHash]], None, None] """ Scans the Tangle for used addresses. This is basically the opposite of invoking ``getNewAddresses`` with ``stop=None``. """ if security_level is None: security_level = AddressGenerator.DEFAULT_SECURITY_LEVEL ft_command = FindTransactionsCommand(adapter) for addy in AddressGenerator(seed, security_level).create_iterator(start): ft_response = ft_command(addresses=[addy]) if ft_response['hashes']: yield addy, ft_response['hashes'] else: break # Reset the command so that we can call it again. ft_command.reset()
Given a set of transaction hashes, returns the corresponding bundles, sorted by tail transaction timestamp.
def get_bundles_from_transaction_hashes( adapter, transaction_hashes, inclusion_states, ): # type: (BaseAdapter, Iterable[TransactionHash], bool) -> List[Bundle] """ Given a set of transaction hashes, returns the corresponding bundles, sorted by tail transaction timestamp. """ transaction_hashes = list(transaction_hashes) if not transaction_hashes: return [] my_bundles = [] # type: List[Bundle] # Sort transactions into tail and non-tail. tail_transaction_hashes = set() non_tail_bundle_hashes = set() gt_response = GetTrytesCommand(adapter)(hashes=transaction_hashes) all_transactions = list(map( Transaction.from_tryte_string, gt_response['trytes'], )) # type: List[Transaction] for txn in all_transactions: if txn.is_tail: tail_transaction_hashes.add(txn.hash) else: # Capture the bundle ID instead of the transaction hash so # that we can query the node to find the tail transaction # for that bundle. non_tail_bundle_hashes.add(txn.bundle_hash) if non_tail_bundle_hashes: for txn in find_transaction_objects( adapter=adapter, bundles=list(non_tail_bundle_hashes), ): if txn.is_tail: if txn.hash not in tail_transaction_hashes: all_transactions.append(txn) tail_transaction_hashes.add(txn.hash) # Filter out all non-tail transactions. tail_transactions = [ txn for txn in all_transactions if txn.hash in tail_transaction_hashes ] # Attach inclusion states, if requested. if inclusion_states: gli_response = GetLatestInclusionCommand(adapter)( hashes=list(tail_transaction_hashes), ) for txn in tail_transactions: txn.is_confirmed = gli_response['states'].get(txn.hash) # Find the bundles for each transaction. for txn in tail_transactions: gb_response = GetBundlesCommand(adapter)(transaction=txn.hash) txn_bundles = gb_response['bundles'] # type: List[Bundle] if inclusion_states: for bundle in txn_bundles: bundle.is_confirmed = txn.is_confirmed my_bundles.extend(txn_bundles) return list(sorted( my_bundles, key=lambda bundle_: bundle_.tail_transaction.timestamp, ))
Adds inputs to spend in the bundle.
def add_inputs(self, inputs): # type: (Iterable[MultisigAddress]) -> None """ Adds inputs to spend in the bundle. Note that each input may require multiple transactions, in order to hold the entire signature. :param inputs: MultisigAddresses to use as the inputs for this bundle. Note: at this time, only a single multisig input is supported. """ if self.hash: raise RuntimeError('Bundle is already finalized.') count = 0 for addy in inputs: if count > 0: raise ValueError( '{cls} only supports 1 input.'.format(cls=type(self).__name__), ) if not isinstance(addy, MultisigAddress): raise with_context( exc = TypeError( 'Incorrect input type for {cls} ' '(expected {expected}, actual {actual}).'.format( actual = type(addy).__name__, cls = type(self).__name__, expected = MultisigAddress.__name__, ), ), context = { 'actual_input': addy, }, ) security_level = addy.security_level if security_level < 1: raise with_context( exc = ValueError( 'Unable to determine security level for {type} ' '(is ``digests`` populated correctly?).'.format( type = type(addy).__name__, ), ), context = { 'actual_input': addy, 'security_level': security_level, }, ) if not addy.balance: raise with_context( exc = ValueError( 'Cannot add input with empty/unknown balance to {type} ' '(use ``Iota.get_balances`` to get balance first).'.format( type = type(self).__name__, ), ), context = { 'actual_input': addy, }, ) self._create_input_transactions(addy) count += 1
Determines which codec to use for the specified encoding.
def check_trytes_codec(encoding): """ Determines which codec to use for the specified encoding. References: - https://docs.python.org/3/library/codecs.html#codecs.register """ if encoding == AsciiTrytesCodec.name: return AsciiTrytesCodec.get_codec_info() elif encoding == AsciiTrytesCodec.compat_name: warn( '"{old_codec}" codec will be removed in PyOTA v2.1. ' 'Use "{new_codec}" instead.'.format( new_codec=AsciiTrytesCodec.name, old_codec=AsciiTrytesCodec.compat_name, ), DeprecationWarning, ) return AsciiTrytesCodec.get_codec_info() return None
Returns information used by the codecs library to configure the codec for use.
def get_codec_info(cls): """ Returns information used by the codecs library to configure the codec for use. """ codec = cls() codec_info = { 'encode': codec.encode, 'decode': codec.decode, } # In Python 2, all codecs are made equal. # In Python 3, some codecs are more equal than others. if PY3: codec_info['_is_text_encoding'] = False return CodecInfo(**codec_info)
Encodes a byte string into trytes.
def encode(self, input, errors='strict'): """ Encodes a byte string into trytes. """ if isinstance(input, memoryview): input = input.tobytes() if not isinstance(input, (binary_type, bytearray)): raise with_context( exc=TypeError( "Can't encode {type}; byte string expected.".format( type=type(input).__name__, )), context={ 'input': input, }, ) # :bc: In Python 2, iterating over a byte string yields # characters instead of integers. if not isinstance(input, bytearray): input = bytearray(input) trytes = bytearray() for c in input: second, first = divmod(c, len(self.alphabet)) trytes.append(self.alphabet[first]) trytes.append(self.alphabet[second]) return binary_type(trytes), len(input)
Decodes a tryte string into bytes.
def decode(self, input, errors='strict'): """ Decodes a tryte string into bytes. """ if isinstance(input, memoryview): input = input.tobytes() if not isinstance(input, (binary_type, bytearray)): raise with_context( exc=TypeError( "Can't decode {type}; byte string expected.".format( type=type(input).__name__, )), context={ 'input': input, }, ) # :bc: In Python 2, iterating over a byte string yields # characters instead of integers. if not isinstance(input, bytearray): input = bytearray(input) bytes_ = bytearray() for i in range(0, len(input), 2): try: first, second = input[i:i + 2] except ValueError: if errors == 'strict': raise with_context( exc=TrytesDecodeError( "'{name}' codec can't decode value; " "tryte sequence has odd length.".format( name=self.name, ), ), context={ 'input': input, }, ) elif errors == 'replace': bytes_ += b'?' continue try: bytes_.append( self.index[first] + (self.index[second] * len(self.index)) ) except ValueError: # This combination of trytes yields a value > 255 when # decoded. # Naturally, we can't represent this using ASCII. if errors == 'strict': raise with_context( exc=TrytesDecodeError( "'{name}' codec can't decode trytes {pair} " "at position {i}-{j}: " "ordinal not in range(255)".format( name=self.name, pair=chr(first) + chr(second), i=i, j=i + 1, ), ), context={ 'input': input, } ) elif errors == 'replace': bytes_ += b'?' return binary_type(bytes_), len(input)
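A round-trip sketch, assuming (as in PyOTA) that importing the codecs module registers the codec with Python's ``codecs`` machinery:

import codecs

from iota.codecs import AsciiTrytesCodec

# Each byte maps to two trytes: divmod(ord('H'), 27) == (2, 18), and
# alphabet[18] + alphabet[2] == 'RB'.
trytes = codecs.encode(b'Hello!', AsciiTrytesCodec.name)
assert trytes.startswith(b'RB')

assert codecs.decode(trytes, AsciiTrytesCodec.name) == b'Hello!'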
Find addresses matching the command parameters.
def _find_addresses(self, seed, index, count, security_level, checksum): # type: (Seed, int, Optional[int], int, bool) -> List[Address] """ Find addresses matching the command parameters. """ generator = AddressGenerator(seed, security_level, checksum) if count is None: # Connect to Tangle and find the first address without any # transactions. for addy in generator.create_iterator(start=index): # We use addy.address here because FindTransactions does # not work on an address with a checksum response = FindTransactionsCommand(self.adapter)( addresses=[addy.address], ) if not response.get('hashes'): return [addy] return generator.get_addresses(start=index, count=count)
Adds a route to the wrapper.
def add_route(self, command, adapter): # type: (Text, AdapterSpec) -> RoutingWrapper """ Adds a route to the wrapper. :param command: The name of the command to route (e.g., "attachToTangle"). :param adapter: The adapter object or URI to route requests to. """ if not isinstance(adapter, BaseAdapter): try: adapter = self.adapter_aliases[adapter] except KeyError: self.adapter_aliases[adapter] = adapter = resolve_adapter( adapter ) self.routes[command] = adapter return self
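A usage sketch (both URLs are placeholders): route PoW requests to a local node, while everything else goes to the wrapper's default adapter.

from iota import Iota
from iota.adapter.wrappers import RoutingWrapper

router = RoutingWrapper('http://remote-node.example:14265')
router.add_route('attachToTangle', 'http://localhost:14265')

api = Iota(router)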
Creates a Transaction object from a sequence of trytes.
def from_tryte_string(cls, trytes, hash_=None): # type: (TrytesCompatible, Optional[TransactionHash]) -> Transaction """ Creates a Transaction object from a sequence of trytes. :param trytes: Raw trytes. Should be exactly 2673 trytes long. :param hash_: The transaction hash, if available. If not provided, it will be computed from the transaction trytes. """ tryte_string = TransactionTrytes(trytes) if not hash_: hash_trits = [0] * HASH_LENGTH # type: MutableSequence[int] sponge = Curl() sponge.absorb(tryte_string.as_trits()) sponge.squeeze(hash_trits) hash_ = TransactionHash.from_trits(hash_trits) return cls( hash_=hash_, signature_message_fragment=Fragment(tryte_string[0:2187]), address=Address(tryte_string[2187:2268]), value=int_from_trits(tryte_string[2268:2295].as_trits()), legacy_tag=Tag(tryte_string[2295:2322]), timestamp=int_from_trits(tryte_string[2322:2331].as_trits()), current_index=int_from_trits(tryte_string[2331:2340].as_trits()), last_index=int_from_trits(tryte_string[2340:2349].as_trits()), bundle_hash=BundleHash(tryte_string[2349:2430]), trunk_transaction_hash=TransactionHash(tryte_string[2430:2511]), branch_transaction_hash=TransactionHash(tryte_string[2511:2592]), tag=Tag(tryte_string[2592:2619]), attachment_timestamp=int_from_trits( tryte_string[2619:2628].as_trits()), attachment_timestamp_lower_bound=int_from_trits( tryte_string[2628:2637].as_trits()), attachment_timestamp_upper_bound=int_from_trits( tryte_string[2637:2646].as_trits()), nonce=Nonce(tryte_string[2646:2673]), )
Returns a JSON-compatible representation of the object.
def as_json_compatible(self): # type: () -> dict """ Returns a JSON-compatible representation of the object. References: - :py:class:`iota.json.JsonEncoder`. """ return { 'hash_': self.hash, 'signature_message_fragment': self.signature_message_fragment, 'address': self.address, 'value': self.value, 'legacy_tag': self.legacy_tag, 'timestamp': self.timestamp, 'current_index': self.current_index, 'last_index': self.last_index, 'bundle_hash': self.bundle_hash, 'trunk_transaction_hash': self.trunk_transaction_hash, 'branch_transaction_hash': self.branch_transaction_hash, 'tag': self.tag, 'attachment_timestamp': self.attachment_timestamp, 'attachment_timestamp_lower_bound': self.attachment_timestamp_lower_bound, 'attachment_timestamp_upper_bound': self.attachment_timestamp_upper_bound, 'nonce': self.nonce, }
Returns a TryteString representation of the transaction.
def as_tryte_string(self): # type: () -> TransactionTrytes """ Returns a TryteString representation of the transaction. """ return TransactionTrytes( self.signature_message_fragment + self.address.address + self.value_as_trytes + self.legacy_tag + self.timestamp_as_trytes + self.current_index_as_trytes + self.last_index_as_trytes + self.bundle_hash + self.trunk_transaction_hash + self.branch_transaction_hash + self.tag + self.attachment_timestamp_as_trytes + self.attachment_timestamp_lower_bound_as_trytes + self.attachment_timestamp_upper_bound_as_trytes + self.nonce )
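A round-trip sketch that needs no node: 2673 ``9`` trytes form a syntactically valid, all-zero transaction, so parsing and re-serializing it should reproduce the input.

from iota import Transaction, TransactionTrytes

raw = TransactionTrytes(b'9' * 2673)

txn = Transaction.from_tryte_string(raw)
print(txn.hash)   # computed via Curl, since no hash_ was supplied
print(txn.value, txn.current_index, txn.last_index)

assert txn.as_tryte_string() == raw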