import json
import os
import sys
try:
from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request, build_opener, HTTPHandler
from urllib.error import HTTPError
except ImportError:
from urlparse import urlparse
from urllib import urlencode
from urllib2 import urlopen, Request, HTTPError, build_opener, HTTPHandler
EMAIL = os.environ['CLARIFAI_USER_EMAIL']
PASSWORD = os.environ['CLARIFAI_USER_PASSWORD']
BASE = 'https://api.clarifai.com/v2'
def _request(method, url, payload=None, headers=None):
    # Use None defaults to avoid sharing mutable default arguments between calls.
    payload = payload if payload is not None else {}
    headers = headers if headers is not None else {}
    opener = build_opener(HTTPHandler)
    full_url = '%s%s' % (BASE, url)
    request = Request(full_url, data=json.dumps(payload).encode())
    for key, value in headers.items():
        request.add_header(key, value)
    request.get_method = lambda: method
    return json.loads(opener.open(request).read().decode())
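# Illustrative call (a sketch; the values shown are placeholders):
#   _request('POST', '/login', payload={'email': EMAIL, 'password': PASSWORD})
# returns the parsed JSON body of the response as a dict.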
def create_app(env_name):
session_token, user_id = _login()
url = '/users/%s/apps' % user_id
payload = {'apps': [{'name': 'auto-created-in-%s-ci-test-run' % env_name}]}
response = _request(method='POST', url=url, payload=payload, headers=_auth_headers(session_token))
_raise_on_http_error(response)
data = response
app_id = data['apps'][0]['id']
# This print needs to be present so we can read the value in CI.
print(app_id)
def create_key(app_id):
session_token, user_id = _login()
url = '/users/%s/keys' % user_id
payload = {
'keys': [{
'description': 'Auto-created in a CI test run',
'scopes': ['All'],
'apps': [{'id': app_id, 'user_id': user_id}]
}]
}
response = _request(method='POST', url=url, payload=payload, headers=_auth_headers(session_token))
_raise_on_http_error(response)
data = response
key_id = data['keys'][0]['id']
# This print needs to be present so we can read the value in CI.
print(key_id)
def delete(app_id):
session_token, user_id = _login()
# All the related keys will be deleted automatically when the app is deleted
_delete_app(session_token, user_id, app_id)
def create_sample_workflow(api_key):
url = '/workflows'
payload = {
'workflows': [
{
'id': 'food-and-general',
'nodes': [
{
'id': 'food-workflow-node',
'model': {
'id': 'bd367be194cf45149e75f01d59f77ba7',
'model_version': {
'id': 'dfebc169854e429086aceb8368662641'
}
}
},
{
'id': 'general-workflow-node',
'model': {
'id': 'aaa03c23b3724a16a56b629203edc62c',
'model_version': {
'id': 'aa9ca48295b37401f8af92ad1af0d91d'
}
}
}
]
}
]
}
response = _request(method='POST', url=url, payload=payload, headers=_auth_headers_for_api_key_key(api_key))
_raise_on_http_error(response)
def _delete_app(session_token, user_id, app_id):
url = '/users/%s/apps/%s' % (user_id, app_id)
response = _request(method='DELETE', url=url, headers=_auth_headers(session_token))
_raise_on_http_error(response)
def _auth_headers(session_token):
headers = {'Content-Type': 'application/json', 'X-Clarifai-Session-Token': session_token}
return headers
def _auth_headers_for_api_key_key(api_key):
headers = {'Content-Type': 'application/json', 'Authorization': 'Key ' + api_key}
return headers
def _login():
url = '/login'
payload = {'email': EMAIL, 'password': PASSWORD}
response = _request(method='POST', url=url, payload=payload)
_raise_on_http_error(response)
data = response
user_id = data['v2_user_id']
session_token = data['session_token']
return session_token, user_id
def _raise_on_http_error(response):
# TODO: Make this work with urllib.
# if int(response.status_code) // 100 != 2:
# raise Exception('Unexpected response %s: %s' % (response.status_code, response.text))
pass
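# One possible way to implement the check above with urllib (a sketch, not the
# project's chosen fix): catch urllib's HTTPError where the request is opened
# in _request(), e.g.
#   try:
#       return json.loads(opener.open(request).read().decode())
#   except HTTPError as exc:
#       raise Exception('Unexpected response %s: %s' % (exc.code, exc.read()))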
def run(arguments):
command = arguments[0] if arguments else '--help'
if command == '--create-app':
if len(arguments) != 2:
raise Exception('--create-app takes one argument')
env_name = arguments[1]
create_app(env_name)
elif command == '--create-key':
if len(arguments) != 2:
raise Exception('--create-key takes one argument')
app_id = arguments[1]
create_key(app_id)
elif command == '--delete-app':
if len(arguments) != 2:
raise Exception('--delete-app takes one argument')
app_id = arguments[1]
delete(app_id)
elif command == '--create-workflow':
if len(arguments) != 2:
raise Exception('--create-workflow takes one argument')
api_key = arguments[1]
create_sample_workflow(api_key)
elif command == '--help':
        print('''DESCRIPTION: Creates and deletes applications and API keys
ARGUMENTS:
--create-app [env_name] ... Creates a new application.
--create-key [app_id] ... Creates a new API key.
--delete-app [app_id] ... Deletes an application (API keys that use it are deleted as well).
--create-workflow [api_key] ... Creates a sample workflow to be used in int. tests.
--help ... This text.''')
else:
print('Unknown argument. Please see --help')
exit(1)
if __name__ == '__main__':
run(arguments=sys.argv[1:])
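# Example invocations (illustrative; assumes CLARIFAI_USER_EMAIL and
# CLARIFAI_USER_PASSWORD are exported and the script is saved as, say, clarifai_apps.py):
#   python clarifai_apps.py --create-app staging
#   python clarifai_apps.py --create-key <app_id>
#   python clarifai_apps.py --create-workflow <api_key>
#   python clarifai_apps.py --delete-app <app_id>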
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test drag calibration experiment."""
from test.base import QiskitExperimentsTestCase
import unittest
import numpy as np
from qiskit.circuit import Parameter
from qiskit.exceptions import QiskitError
from qiskit.pulse import DriveChannel, Drag
import qiskit.pulse as pulse
from qiskit.qobj.utils import MeasLevel
from qiskit import transpile
from qiskit_experiments.exceptions import CalibrationError
from qiskit_experiments.library import RoughDrag, RoughDragCal
from qiskit_experiments.test.mock_iq_backend import DragBackend
from qiskit_experiments.calibration_management.basis_gate_library import FixedFrequencyTransmon
from qiskit_experiments.calibration_management import Calibrations
class TestDragEndToEnd(QiskitExperimentsTestCase):
"""Test the drag experiment."""
def setUp(self):
"""Setup some schedules."""
super().setUp()
beta = Parameter("β")
with pulse.build(name="xp") as xp:
pulse.play(Drag(duration=160, amp=0.208519, sigma=40, beta=beta), DriveChannel(0))
self.x_plus = xp
self.test_tol = 0.05
def test_reps(self):
"""Test that setting reps raises and error if reps is not of length three."""
drag = RoughDrag(0, self.x_plus)
with self.assertRaises(CalibrationError):
drag.set_experiment_options(reps=[1, 2, 3, 4])
def test_end_to_end(self):
"""Test the drag experiment end to end."""
backend = DragBackend(gate_name="Drag(xp)")
drag = RoughDrag(1, self.x_plus)
expdata = drag.run(backend)
self.assertExperimentDone(expdata)
result = expdata.analysis_results(1)
self.assertTrue(abs(result.value.n - backend.ideal_beta) < self.test_tol)
self.assertEqual(result.quality, "good")
# Small leakage will make the curves very flat, in this case one should
# rather increase beta.
backend = DragBackend(error=0.0051, gate_name="Drag(xp)")
drag = RoughDrag(0, self.x_plus)
drag.analysis.set_options(p0={"beta": 1.2})
exp_data = drag.run(backend)
self.assertExperimentDone(exp_data)
result = exp_data.analysis_results(1)
self.assertTrue(abs(result.value.n - backend.ideal_beta) < self.test_tol)
self.assertEqual(result.quality, "good")
# Large leakage will make the curves oscillate quickly.
backend = DragBackend(error=0.05, gate_name="Drag(xp)")
drag = RoughDrag(1, self.x_plus, betas=np.linspace(-4, 4, 31))
drag.set_run_options(shots=200)
drag.analysis.set_options(p0={"beta": 1.8, "freq0": 0.08, "freq1": 0.16, "freq2": 0.32})
exp_data = drag.run(backend)
self.assertExperimentDone(exp_data)
result = exp_data.analysis_results(1)
meas_level = exp_data.metadata["job_metadata"][-1]["run_options"]["meas_level"]
self.assertEqual(meas_level, MeasLevel.CLASSIFIED)
self.assertTrue(abs(result.value.n - backend.ideal_beta) < self.test_tol)
self.assertEqual(result.quality, "good")
class TestDragCircuits(QiskitExperimentsTestCase):
"""Test the circuits of the drag calibration."""
def setUp(self):
"""Setup some schedules."""
super().setUp()
beta = Parameter("β")
with pulse.build(name="xp") as xp:
pulse.play(Drag(duration=160, amp=0.208519, sigma=40, beta=beta), DriveChannel(0))
self.x_plus = xp
def test_default_circuits(self):
"""Test the default circuit."""
backend = DragBackend(error=0.005, gate_name="Drag(xp)")
drag = RoughDrag(0, self.x_plus)
drag.set_experiment_options(reps=[2, 4, 8])
drag.backend = DragBackend(gate_name="Drag(xp)")
circuits = drag.circuits()
for idx, expected in enumerate([4, 8, 16]):
ops = transpile(circuits[idx * 51], backend).count_ops()
self.assertEqual(ops["Drag(xp)"], expected)
def test_raise_multiple_parameter(self):
"""Check that the experiment raises with unassigned parameters."""
beta = Parameter("β")
amp = Parameter("amp")
with pulse.build(name="xp") as xp:
pulse.play(Drag(duration=160, amp=amp, sigma=40, beta=beta), DriveChannel(0))
with self.assertRaises(QiskitError):
RoughDrag(1, xp, betas=np.linspace(-3, 3, 21))
class TestRoughDragCalUpdate(QiskitExperimentsTestCase):
"""Test that a Drag calibration experiment properly updates the calibrations."""
def setUp(self):
"""Setup the tests"""
super().setUp()
library = FixedFrequencyTransmon()
self.backend = DragBackend(gate_name="Drag(x)")
self.cals = Calibrations.from_backend(self.backend, library)
self.test_tol = 0.05
def test_update(self):
"""Test that running RoughDragCal updates the calibrations."""
qubit = 0
prev_beta = self.cals.get_parameter_value("β", (0,), "x")
self.assertEqual(prev_beta, 0)
expdata = RoughDragCal(qubit, self.cals, backend=self.backend).run()
self.assertExperimentDone(expdata)
new_beta = self.cals.get_parameter_value("β", (0,), "x")
self.assertTrue(abs(new_beta - self.backend.ideal_beta) < self.test_tol)
self.assertTrue(abs(new_beta) > self.test_tol)
def test_dragcal_experiment_config(self):
"""Test RoughDragCal config can round trip"""
exp = RoughDragCal(0, self.cals, backend=self.backend)
loaded_exp = RoughDragCal.from_config(exp.config())
self.assertNotEqual(exp, loaded_exp)
self.assertTrue(self.json_equiv(exp, loaded_exp))
@unittest.skip("Calibration experiments are not yet JSON serializable")
def test_dragcal_roundtrip_serializable(self):
"""Test round trip JSON serialization"""
exp = RoughDragCal(0, self.cals)
self.assertRoundTripSerializable(exp, self.json_equiv)
def test_drag_experiment_config(self):
"""Test RoughDrag config can roundtrip"""
with pulse.build(name="xp") as sched:
pulse.play(pulse.Drag(160, 0.5, 40, Parameter("β")), pulse.DriveChannel(0))
exp = RoughDrag(0, backend=self.backend, schedule=sched)
loaded_exp = RoughDrag.from_config(exp.config())
self.assertNotEqual(exp, loaded_exp)
self.assertTrue(self.json_equiv(exp, loaded_exp))
@unittest.skip("Schedules are not yet JSON serializable")
def test_drag_roundtrip_serializable(self):
"""Test round trip JSON serialization"""
with pulse.build(name="xp") as sched:
pulse.play(pulse.Drag(160, 0.5, 40, Parameter("β")), pulse.DriveChannel(0))
exp = RoughDrag(0, backend=self.backend, schedule=sched)
self.assertRoundTripSerializable(exp, self.json_equiv)
|
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# ZTE.ZXDSL531.get_dot11_associations
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetdot11associations import IGetDot11Associations
from noc.core.text import strip_html_tags
rx_mac = re.compile(
"(?P<mac>[0-9A-F]{2}:[0-9A-F]{2}:[0-9A-F]{2}:[0-9A-F]{2}:[0-9A-F]{2}:[0-9A-F]{2})"
)
class Script(BaseScript):
name = "ZTE.ZXDSL531.get_dot11_associations"
interface = IGetDot11Associations
def execute(self):
if self.access_profile.scheme == self.TELNET:
v = self.cli("wlctl authe_sta_list")
elif self.access_profile.scheme == self.HTTP:
v = self.http.get("/wlclientview.cmd")
v = strip_html_tags(v)
else:
raise Exception("Unsupported access scheme")
r = []
        for line in v.split("\n"):
            m = rx_mac.search(line)
if m:
r.append({"mac": m.group("mac")})
return r
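# Illustrative return value (a sketch): for any line of the CLI or HTML-stripped
# HTTP output containing a MAC address such as A0:B1:C2:D3:E4:F5, execute() yields
#   [{"mac": "A0:B1:C2:D3:E4:F5"}]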
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('bulk', '0006_auto_20150302_1750'),
]
operations = [
migrations.AlterField(
model_name='sovereigntyholder',
name='last_refresh',
field=models.DateTimeField(default=datetime.datetime(2015, 3, 2, 19, 35, 37, 7218, tzinfo=utc)),
preserve_default=True,
),
]
|
import os
import dj_database_url
### Basic config
BASE = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
DEBUG = TEMPLATE_DEBUG = True
SITE_ID = 1
SECRET_KEY = 'its-a-secret-to-everybody'
# Until Sentry works on Py3, do errors the old-fashioned way.
ADMINS = []
# General project information
# These are available in the template as SITE_INFO.<title>
SITE_VARIABLES = {
'site_name': 'Python.org',
'site_descript': 'The official home of the Python Programming Language',
}
### Databases
DATABASES = {
'default': dj_database_url.config(default='postgres:///python.org')
}
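# dj_database_url.config() reads the DATABASE_URL environment variable and falls
# back to the default above, e.g. (placeholder credentials):
#   DATABASE_URL=postgres://user:password@localhost:5432/python.org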
### Locale settings
TIME_ZONE = 'UTC'
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
USE_TZ = True
DATE_FORMAT = 'Y-m-d'
### Files (media and static)
MEDIA_ROOT = os.path.join(BASE, 'media')
MEDIA_URL = '/m/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = os.path.join(BASE, 'static-root')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE, 'static'),
]
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
### Authentication
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
"allauth.account.auth_backends.AuthenticationBackend",
)
LOGIN_REDIRECT_URL = 'home'
ACCOUNT_LOGOUT_REDIRECT_URL = 'home'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
SOCIALACCOUNT_EMAIL_REQUIRED = True
SOCIALACCOUNT_EMAIL_VERIFICATION = True
SOCIALACCOUNT_QUERY_EMAIL = True
### Templates
TEMPLATE_DIRS = [
os.path.join(BASE, 'templates')
]
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
"pydotorg.context_processors.site_info",
"pydotorg.context_processors.url_name",
"pydotorg.context_processors.get_host_with_scheme",
]
### URLs, WSGI, middleware, etc.
ROOT_URLCONF = 'pydotorg.urls'
MIDDLEWARE_CLASSES = (
'pydotorg.middleware.AdminNoCaching',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'waffle.middleware.WaffleMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'pages.middleware.PageFallbackMiddleware',
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',
)
AUTH_USER_MODEL = 'users.User'
WSGI_APPLICATION = 'pydotorg.wsgi.application'
### Apps
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.redirects',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.comments',
'django.contrib.admin',
'django.contrib.admindocs',
'django_comments_xtd',
'jsonfield',
'pipeline',
'sitetree',
'timedelta',
'imagekit',
'haystack',
'honeypot',
'waffle',
'users',
'boxes',
'cms',
'companies',
'feedbacks',
'community',
'jobs',
'pages',
'sponsors',
'successstories',
'events',
'minutes',
'peps',
'blogs',
'downloads',
'codesamples',
'work_groups',
'allauth',
'allauth.account',
'allauth.socialaccount',
#'allauth.socialaccount.providers.facebook',
#'allauth.socialaccount.providers.github',
#'allauth.socialaccount.providers.openid',
#'allauth.socialaccount.providers.twitter',
# Tastypie needs the `users` app to be already loaded.
'tastypie',
'debug_toolbar',
]
# Fixtures
FIXTURE_DIRS = (
os.path.join(BASE, 'fixtures'),
)
### Testing
SKIP_NETWORK_TESTS = True
### Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
### Development
DEV_FIXTURE_URL = 'https://www.python.org/m/fixtures/dev-fixtures.json.gz'
### Comments
COMMENTS_APP = 'django_comments_xtd'
COMMENTS_XTD_MAX_THREAD_LEVEL = 0
COMMENTS_XTD_FORM_CLASS = "jobs.forms.JobCommentForm"
### Honeypot
HONEYPOT_FIELD_NAME = 'email_body_text'
HONEYPOT_VALUE = 'write your message'
### Blog Feed URL
PYTHON_BLOG_FEED_URL = "http://feeds.feedburner.com/PythonInsider"
PYTHON_BLOG_URL = "http://blog.python.org"
### Registration mailing lists
MAILING_LIST_PSF_MEMBERS = "psf-members-announce-request@python.org"
### PEP Repo Location
PEP_REPO_PATH = ''
### Fastly ###
FASTLY_API_KEY = False # Set to Fastly API key in production to allow pages to
# be purged on save
# Jobs
JOB_THRESHOLD_DAYS = 90
JOB_FROM_EMAIL = 'jobs@python.org'
### Pipeline
from .pipeline import (
PIPELINE_CSS, PIPELINE_JS,
PIPELINE_COMPILERS,
PIPELINE_SASS_BINARY, PIPELINE_SASS_ARGUMENTS,
PIPELINE_CSS_COMPRESSOR, PIPELINE_JS_COMPRESSOR,
)
### django-waffle settings
WAFFLE_OVERRIDE = True
|
from django.db import migrations
from api.user.models import CustomUser
class Migration(migrations.Migration):
def seed_data(apps, schema_editor):
user = CustomUser(
name = 'admin',
email = 'admin@admin.dev',
is_staff = True,
is_superuser = True,
phone = "9876554321",
gender = 'Male'
)
user.set_password('qwerty')
user.save()
dependencies = [ ]
operations = [
migrations.RunPython(seed_data),
]
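# Note: a common alternative (a sketch; the app label 'user' is an assumption) is to
# resolve the historical model inside the migration instead of importing it directly:
#   def seed_data(apps, schema_editor):
#       CustomUser = apps.get_model('user', 'CustomUser')
#       ...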
|
# Copyright 2019-2020 The Lux Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .context import lux
import pytest
import random
import pandas as pd
import warnings
# Suite of tests that checks whether data_type is inferred correctly by Lux
def test_check_cars():
lux.config.set_SQL_connection("")
df = pd.read_csv("lux/data/car.csv")
df.maintain_metadata()
assert df.data_type["Name"] == "nominal"
assert df.data_type["MilesPerGal"] == "quantitative"
assert df.data_type["Cylinders"] == "nominal"
assert df.data_type["Displacement"] == "quantitative"
assert df.data_type["Horsepower"] == "quantitative"
assert df.data_type["Weight"] == "quantitative"
assert df.data_type["Acceleration"] == "quantitative"
assert df.data_type["Year"] == "temporal"
assert df.data_type["Origin"] == "nominal"
def test_check_int_id():
df = pd.read_csv(
"https://github.com/lux-org/lux-datasets/blob/master/data/instacart_sample.csv?raw=true"
)
df._repr_html_()
inverted_data_type = lux.config.executor.invert_data_type(df.data_type)
assert len(inverted_data_type["id"]) == 3
assert (
"<code>order_id</code>, <code>product_id</code>, <code>user_id</code> is not visualized since it resembles an ID field."
in df._message.to_html()
)
def test_check_str_id():
df = pd.read_csv("https://github.com/lux-org/lux-datasets/blob/master/data/churn.csv?raw=true")
df._repr_html_()
assert (
"<code>customerID</code> is not visualized since it resembles an ID field.</li>"
in df._message.to_html()
)
def test_check_hpi():
df = pd.read_csv("https://github.com/lux-org/lux-datasets/blob/master/data/hpi.csv?raw=true")
df.maintain_metadata()
assert df.data_type == {
"HPIRank": "quantitative",
"Country": "geographical",
"SubRegion": "nominal",
"AverageLifeExpectancy": "quantitative",
"AverageWellBeing": "quantitative",
"HappyLifeYears": "quantitative",
"Footprint": "quantitative",
"InequalityOfOutcomes": "quantitative",
"InequalityAdjustedLifeExpectancy": "quantitative",
"InequalityAdjustedWellbeing": "quantitative",
"HappyPlanetIndex": "quantitative",
"GDPPerCapita": "quantitative",
"Population": "quantitative",
}
def test_check_airbnb():
df = pd.read_csv("https://github.com/lux-org/lux-datasets/blob/master/data/airbnb_nyc.csv?raw=true")
df.maintain_metadata()
assert df.data_type == {
"id": "id",
"name": "nominal",
"host_id": "id",
"host_name": "nominal",
"neighbourhood_group": "nominal",
"neighbourhood": "nominal",
"latitude": "quantitative",
"longitude": "quantitative",
"room_type": "nominal",
"price": "quantitative",
"minimum_nights": "quantitative",
"number_of_reviews": "quantitative",
"last_review": "temporal",
"reviews_per_month": "quantitative",
"calculated_host_listings_count": "quantitative",
"availability_365": "quantitative",
}
def test_check_airports():
df = pd.read_csv(
"https://raw.githubusercontent.com/altair-viz/vega_datasets/master/vega_datasets/_data/airports.csv"
)
df.maintain_metadata()
assert df.data_type == {
"iata": "id",
"name": "nominal",
"city": "nominal",
"state": "geographical",
"country": "geographical",
"latitude": "quantitative",
"longitude": "quantitative",
}
def test_check_datetime():
df = pd.DataFrame(
{
"a": ["2020-01-01"],
"b": ["20-01-01"],
"c": ["20-jan-01"],
"d": ["20-january-01"],
"e": ["2020 January 01"],
"f": ["2020 January 01 00:00:00 pm PT"],
"g": ["2020 January 01 13:00:00"],
"h": ["2020 January 01 23:59:59 GTC-6"],
}
)
df.maintain_metadata()
assert df.data_type == {
"a": "temporal",
"b": "temporal",
"c": "temporal",
"d": "temporal",
"e": "temporal",
"f": "temporal",
"g": "temporal",
"h": "temporal",
}
def test_check_datetime_numeric_values():
car_df = pd.read_csv("lux/data/car.csv")
car_df = car_df.rename(columns={"Year": "blah"})
car_df.maintain_metadata()
assert car_df.data_type["blah"] == "temporal"
spotify_df = pd.read_csv(
"https://raw.githubusercontent.com/lux-org/lux-datasets/master/data/spotify.csv"
)
spotify_df = spotify_df.rename(columns={"year": "blah"})
spotify_df.maintain_metadata()
assert spotify_df.data_type["blah"] == "temporal"
assert spotify_df.data_type["release_date"] == "temporal"
def test_check_stock():
df = pd.read_csv("https://github.com/lux-org/lux-datasets/blob/master/data/stocks.csv?raw=true")
df.maintain_metadata()
assert df.data_type == {
"symbol": "nominal",
"monthdate": "temporal",
"price": "quantitative",
}, "Stock dataset type detection error"
def test_check_college():
df = pd.read_csv("lux/data/college.csv")
df.maintain_metadata()
assert df.data_type == {
"Name": "nominal",
"PredominantDegree": "nominal",
"HighestDegree": "nominal",
"FundingModel": "nominal",
"Region": "nominal",
"Geography": "nominal",
"AdmissionRate": "quantitative",
"ACTMedian": "quantitative",
"SATAverage": "quantitative",
"AverageCost": "quantitative",
"Expenditure": "quantitative",
"AverageFacultySalary": "quantitative",
"MedianDebt": "quantitative",
"AverageAgeofEntry": "quantitative",
"MedianFamilyIncome": "quantitative",
"MedianEarnings": "quantitative",
}
def test_float_categorical():
values = [
{"A": 6.0, "B": 1.0, "C": 1.0, "D": 3.0, "E": 2.0, "F": 5.0},
{"A": 5.0, "B": 2.0, "C": 2.0, "D": 2.0, "E": 2.0, "F": 3.0},
{"A": 3.0, "B": 6.0, "C": 3.0, "D": 3.0, "E": 2.0, "F": 5.0},
{"A": 6.0, "B": 3.0, "C": 3.0, "D": 2.0, "E": 2.0, "F": 2.0},
{"A": 7.0, "B": 4.0, "C": 2.0, "D": 2.0, "E": 2.0, "F": 4.0},
{"A": 5.0, "B": 3.0, "C": 6.0, "D": 3.0, "E": 3.0, "F": 4.0},
{"A": 3.0, "B": 4.0, "C": 3.0, "D": 6.0, "E": 5.0, "F": 5.0},
{"A": 3.0, "B": 3.0, "C": 2.0, "D": 2.0, "E": 4.0, "F": 5.0},
{"A": 3.0, "B": 2.0, "C": 2.0, "D": 2.0, "E": 2.0, "F": 4.0},
{"A": 1.0, "B": 2.0, "C": 2.0, "D": 2.0, "E": 2.0, "F": 6.0},
{"A": 3.0, "B": 3.0, "C": 2.0, "D": 3.0, "E": 3.0, "F": 5.0},
{"A": 7.0, "B": 1.0, "C": 1.0, "D": 2.0, "E": 2.0, "F": 3.0},
{"A": 6.0, "B": 2.0, "C": 2.0, "D": 2.0, "E": 2.0, "F": 3.0},
{"A": 2.0, "B": 3.0, "C": 2.0, "D": 3.0, "E": 3.0, "F": 4.0},
{"A": 6.0, "B": 2.0, "C": 3.0, "D": 3.0, "E": 3.0, "F": 5.0},
]
df = pd.DataFrame(values)
df.maintain_metadata()
inverted_data_type = lux.config.executor.invert_data_type(df.data_type)
assert inverted_data_type["nominal"] == [
"A",
"B",
"C",
"D",
"E",
"F",
], "Float column should be detected as categorical"
for x in list(df.dtypes):
assert x == "float64", "Source dataframe preserved as float dtype"
def test_set_data_type():
df = pd.read_csv(
"https://github.com/lux-org/lux-datasets/blob/master/data/real_estate_tutorial.csv?raw=true"
)
with pytest.warns(UserWarning) as w:
df._repr_html_()
assert "starter template that you can use" in str(w[-1].message)
assert "df.set_data_type" in str(w[-1].message)
df.set_data_type({"Month": "nominal", "Year": "nominal"})
assert df.data_type["Month"] == "nominal"
assert df.data_type["Year"] == "nominal"
    with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
df._repr_html_()
assert not w
def test_set_data_type_invalid():
df = pd.read_csv(
"https://github.com/lux-org/lux-datasets/blob/master/data/real_estate_tutorial.csv?raw=true"
)
with pytest.raises(ValueError):
df.set_data_type({"Month": "nomnal", "Year": "nomnal"})
def test_set_wrong_data_type():
df = pd.read_csv(
"https://github.com/lux-org/lux-datasets/blob/master/data/real_estate_tutorial.csv?raw=true"
)
df.set_data_type({"Year": "quantitative"})
assert df.data_type["Year"] == "quantitative"
def test_id_with_label():
df = pd.read_csv(
"https://github.com/lux-org/lux-datasets/blob/master/data/state_timeseries.csv?raw=true"
)
df.maintain_metadata()
assert df.data_type == {"Date": "temporal", "State": "geographical", "Value": "quantitative"}
def test_ID_random():
"""Tests whether a ID column not satisfying other properties of an ID gets recognized."""
values = [
{"ID": random.randint(0, 1000), "A": 6.0, "B": 1.0, "C": 1.0, "D": 3.0, "E": 2.0, "F": 5.0}
for x in range(1000)
]
df = pd.DataFrame(values)
df.maintain_metadata()
assert df.data_type == {
"ID": "quantitative",
"A": "nominal",
"B": "nominal",
"C": "nominal",
"D": "nominal",
"E": "nominal",
"F": "nominal",
}
def test_ID():
"""Tests different ways of writing id"""
values = [{"ID": x, "A": 6.0, "B": 1.0, "C": 1.0, "D": 3.0, "E": 2.0, "F": 5.0} for x in range(1000)]
df = pd.DataFrame(values)
df.maintain_metadata()
assert df.data_type == {
"ID": "id",
"A": "nominal",
"B": "nominal",
"C": "nominal",
"D": "nominal",
"E": "nominal",
"F": "nominal",
}
def test_id_aug_test():
"""Tests in a different dataset
Reference: https://www.kaggle.com/arashnic/hr-analytics-job-change-of-data-scientists
"""
df = pd.read_csv("https://github.com/lux-org/lux-datasets/blob/master/data/aug_test.csv?raw=true")
df.maintain_metadata()
assert df.data_type == {
"enrollee_id": "id",
"city": "nominal",
"city_development_index": "quantitative",
"gender": "nominal",
"relevent_experience": "nominal",
"enrolled_university": "nominal",
"education_level": "nominal",
"major_discipline": "nominal",
"experience": "nominal",
"company_size": "nominal",
"company_type": "nominal",
"last_new_job": "nominal",
"training_hours": "quantitative",
}
def test_id_music_data():
"""Tests in a different dataset if a column not named as an ID is recognized as an identification.
Reference: https://www.kaggle.com/yamaerenay/spotify-dataset-19212020-160k-tracks
"""
df = pd.read_csv("https://github.com/lux-org/lux-datasets/blob/master/data/spotify.csv?raw=true")
df["unique_num"] = df["id"]
df.drop(columns=["id"])
df.maintain_metadata()
assert df.data_type == {
"valence": "quantitative",
"year": "temporal",
"acousticness": "quantitative",
"artists": "nominal",
"danceability": "quantitative",
"duration_ms": "quantitative",
"energy": "quantitative",
"explicit": "nominal",
"unique_num": "id",
"instrumentalness": "quantitative",
"key": "nominal",
"liveness": "quantitative",
"loudness": "quantitative",
"mode": "nominal",
"name": "nominal",
"popularity": "quantitative",
"release_date": "temporal",
"speechiness": "quantitative",
"tempo": "quantitative",
"id": "id",
}
def test_id_absenteeism_data():
""" Tests whether an id named column is not recognized because even though it is named an id, it is not with its nature. """
df = pd.read_csv("https://github.com/lux-org/lux-datasets/blob/master/data/absenteeism.csv?raw=true")
df.maintain_metadata()
assert df.data_type == {
"ID": "quantitative",
"Reason for absence": "quantitative",
"Month of absence": "nominal",
"Day of the week": "nominal",
"Seasons": "nominal",
"Transportation expense": "quantitative",
"Distance from Residence to Work": "quantitative",
"Service time": "nominal",
"Age": "quantitative",
"Work load Average/day ": "quantitative",
"Hit target": "nominal",
"Disciplinary failure": "nominal",
"Education": "nominal",
"Son": "nominal",
"Social drinker": "nominal",
"Social smoker": "nominal",
"Pet": "nominal",
"Weight": "quantitative",
"Height": "nominal",
"Body mass index": "nominal",
"Absenteeism time in hours": "nominal",
}
|
# INCOMPLETE / UNSUCCESSFUL
# find median of two sorted arrays
# import ipdb  # debugging helper; left disabled so the script runs without ipdb installed
class Solution(object):
def findMedianSortedArrays(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: float
"""
n1 = len(nums1)
n2 = len(nums2)
# special cases
if n1==0:
return self.median_sorted_array(nums2)
if n2==0:
return self.median_sorted_array(nums1)
N = n1 + n2
l1, r1 = 0, n1-1
l2, r2 = 0, n2-1
while True:
idx1 = (l1+r1)//2
# find index of largest element in nums2 smaller than v1
idx2 = self.find_largest_elem(nums2[l2:r2+1], nums1[idx1])
t2 = l2 + -1 if idx2 is None else idx2
# arr1[idx1] is at index 'n' in the joint array
n = idx1 + 1 + t2
if n < N//2 - 1:
# this should not be done if idx2 is None
next_l1 = (l1+r1)//2
next_l2 = (l2+r2)//2 if idx2 is not None else l2
next_r1, next_r2 = r1, r2
elif n == N//2 - 1:
next_val = self.next_num(nums1, nums2, idx1, t2)
if N%2==1:
return next_val
else:
return (nums1[idx1] + next_val)/2
elif n == N//2:
if N%2==1:
return nums1[idx1]
else:
prev_val = self.prev_num(nums1, nums2, idx1-1, t2)
return (prev_val + nums1[idx1])/2
else:
next_r1 = (l1+r1)//2
next_r2 = (l2+r2)//2 if idx2 is not None else r2
next_l1, next_l2 = l1, l2
l1, l2, r1, r2 = next_l1, next_l2, next_r1, next_r2
# if (l1,l2,r1,r2) == (next_l1,next_l2,next_r1,next_r2):
if r1-l1<=1 and r2-l2<=1:
# ipdb.set_trace()
# sort them until median index is reached
if n<=N//2-1:
while n!=N//2-1:
idx1, t2, val = self.next_indices_and_num(nums1, nums2, idx1, t2)
n += 1
next_val = self.next_num(nums1, nums2, idx1, t2)
if N%2==1:
return next_val
else:
return (val + next_val)/2
else:
while n!=N//2-1:
idx1, t2, next_val = self.prev_indices_and_num(nums1, nums2, idx1, t2)
n -= 1
if N%2==1:
return next_val
else:
val = self.prev_num(nums1, nums2, idx1, t2)
return (val + next_val)/2
        # ipdb.set_trace()  # debugging breakpoint, disabled
def median_sorted_array(self, arr):
# median of a sorted array
n = len(arr)
if n%2==1:
return arr[n//2]
return (arr[n//2-1] + arr[n//2])/2
def find_largest_elem(self, arr, val):
li = 0
ri = len(arr)
done = False
while not done:
if arr[(li+ri)//2] >= val:
ri = (li+ri)//2
else:
li = (li+ri)//2
done = li==ri or li+1==ri
if arr[li]<val:
return li
return None
def next_indices_and_num(self, arr1, arr2, idx1, idx2):
if idx1>=len(arr1)-1:
return idx1, idx2+1, arr2[idx2+1]
if idx2>=len(arr2)-1:
return idx1+1, idx2, arr1[idx1+1]
if arr1[idx1+1] < arr2[idx2+1]:
return idx1+1, idx2, arr1[idx1+1]
else:
return idx1, idx2+1, arr2[idx2+1]
def prev_indices_and_num(self, arr1, arr2, idx1, idx2):
if idx1<0:
return idx1, idx2-1, arr2[idx2]
if idx2<0:
return idx1-1, idx2, arr1[idx1]
if arr1[idx1] >= arr2[idx2]:
return idx1-1, idx2, arr1[idx1]
else:
return idx1, idx2-1, arr2[idx2]
def next_num(self, arr1, arr2, idx1, idx2):
return self.next_indices_and_num(arr1, arr2, idx1, idx2)[-1]
def prev_num(self, arr1, arr2, idx1, idx2):
return self.prev_indices_and_num(arr1, arr2, idx1, idx2)[-1]
nums1 = [1]
nums2 = [2,3,4,5,6]
sol = Solution()
median = sol.findMedianSortedArrays(nums1, nums2)
print(median)
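# NOTE: with the inputs above, the attempt in Solution appears not to terminate (the
# nums1 window [l1, r1] can never shrink below a single element), so the print above
# may never be reached. For cross-checking, a minimal O(n1 + n2) merge-based
# reference (a sketch; it does not meet the O(log(min(n1, n2))) goal of the exercise):
def median_by_merge(a, b):
    merged = sorted(a + b)  # both inputs are already sorted; a full sort is still correct
    n = len(merged)
    if n % 2 == 1:
        return float(merged[n // 2])
    return (merged[n // 2 - 1] + merged[n // 2]) / 2.0

print(median_by_merge(nums1, nums2))  # 3.5 for [1] and [2, 3, 4, 5, 6]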
|
import siteScripts.timeout.scraper as timeoutScraper
import logging
import logging.config
from webscraper.models.landmark import Landmark
from webscraper.services.csv import saveLandmarksCSV
def main():
# File to save landmarks
f = "landmarks.csv"
# Scrapers
timeOutLandmarks = timeoutScraper.scrape()
# Save Data
saveLandmarksCSV(timeOutLandmarks, f)
if __name__ == '__main__':
logging.config.fileConfig(fname="./logs/logging.conf",
disable_existing_loggers=False)
logger = logging.getLogger(__name__)
logger.info("Let's Begin")
main()
|
#!/usr/bin/env python
import sys
from typing import Sequence, Set
import argparse
import numpy
import pandas
_zero_svs_are_outliers = True
_outlier_std_threshold = 5.0
_column_order = ["CHROM", "SVTYPE", "Mean", "Median", "STD",
"Outlier_Sample", "Outlier_Number", "Outlier_Cate"]
def read_statfile(statfile: str) -> pandas.DataFrame:
"""
Special function needed to read in stats data table because
a) pandas doesn't understand that the '#' means header
b) there are multiple stats files concatenated together, resulting in headers being randomly mixed in
Args:
statfile: str
File name with concatenated tab-separated tables of variant stats
Returns:
stats_data: pandas.DataFrame
Table of variant stats
"""
with open(statfile, 'r') as f_in:
# get column header from first line, stripping '#'
columns = f_in.readline().lstrip('#').split()
# read rest of tsv file, using these columns as header and ignoring any future lines starting with '#'
return pandas.read_csv(statfile, sep='\t', comment='#', names=columns)
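# Illustrative input format for read_statfile (tab-separated; the column names are
# taken from the code below, the values are made up):
#   #CHROM  SVTYPE  SAMPLE   NUM
#   chr1    DEL     sample1  120
#   chr1    DEL     sample2  118
#   chr1    DUP     sample1  35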
def pick_outliers_by_group(
chrom: str,
sv_type: str,
check_stats: pandas.DataFrame,
all_samples: Set[str],
zero_svs_are_outliers: bool = _zero_svs_are_outliers,
outlier_std_threshold: float = _outlier_std_threshold
) -> pandas.DataFrame:
"""
For given combination of contig and SV type, find samples that have outlier number of SVs. Return table of outliers
along with statistics about SV count.
Args:
chrom: str
Contig for checking SV counts
sv_type: str
SV type for checking SV counts
check_stats: pandas.DataFrame
Table with SV counts on this contig with this sv_type
all_samples: Set[str]
Set of all sample IDs in cohort
zero_svs_are_outliers: bool
Whether to treat samples with no counts as automatic outliers, or explicitly code as zero counts
outlier_std_threshold: float
Threshold for outlier status as multiple of standard deviation of SV counts
Returns:
outliers: pandas.DataFrame
Table of outliers
"""
# find samples that are missing: they have 0 SVs of this type on this contig
missing_samples = pandas.DataFrame(
tuple(
{"CHROM": chrom, "SVTYPE": sv_type, "SAMPLE": sample_id, "NUM": 0}
for sample_id in all_samples.difference(check_stats["SAMPLE"])
)
)
if zero_svs_are_outliers:
# THIS IS THE ORIGINAL PIPELINE BEHAVIOR
# compute basic stats about observed nonzero SV counts
count_mean = check_stats["NUM"].mean()
count_median = check_stats["NUM"].median()
count_std = check_stats["NUM"].std()
# Amongst samples that have SVs, find counts deviating by more than set multiple of std from the median
is_outlier = numpy.abs(
check_stats["NUM"] - count_median) > outlier_std_threshold * count_std
# Treat missing samples as outliers.
outliers = pandas.concat(
(missing_samples, check_stats.loc[is_outlier]), axis=0)
else:
# THIS FINDS FEWER, MORE MEANINGFUL OUTLIERS
# Which samples are missing / included but have zero counts is unpredictable.
# 1) concatenate all samples together
check_stats = pandas.concat((check_stats, missing_samples), axis=0)
# 2) compute stats from non-zero SV counts
nonzero = check_stats["NUM"] > 0
count_mean = check_stats.loc[nonzero, "NUM"].mean()
count_median = check_stats.loc[nonzero, "NUM"].median()
count_std = check_stats.loc[nonzero, "NUM"].std()
# 3) check outliers by usual means from those stats
# Set threshold to be set multiple of greater of: std of counts, sqrt(median of counts)
# (i.e. greater of std or expected Poisson std)
        # Find counts deviating by more than the threshold from the median (including zeros)
is_outlier = (
numpy.abs(check_stats["NUM"] - count_median) >
outlier_std_threshold * numpy.maximum(count_std, numpy.sqrt(count_median))
)
outliers = check_stats.loc[is_outlier].copy()
if outliers.empty:
return pandas.DataFrame([], columns=_column_order)
# augment outlier table with some statistics
outliers["Mean"] = count_mean
outliers["Median"] = count_median
outliers["STD"] = count_std
outliers["Outlier_Cate"] = numpy.where(
outliers["NUM"] > count_median, "high", "low")
# rename and re-order columns
return outliers.rename({"NUM": "Outlier_Number", "SAMPLE": "Outlier_Sample"}, axis=1).reindex(_column_order, axis=1)
def pick_outliers(
stats_data: pandas.DataFrame,
zero_svs_are_outliers: bool = _zero_svs_are_outliers,
outlier_std_threshold: float = _outlier_std_threshold
) -> pandas.DataFrame:
"""
Find samples that have outlier number of SVs when broken down by contig and SV type. Return table of outliers
along with statistics about SV count.
Args:
stats_data: pandas.DataFrame
Table with SV counts
zero_svs_are_outliers: bool
Whether to treat samples with no counts as automatic outliers, or explicitly code as zero counts
outlier_std_threshold: float
Threshold for outlier status as multiple of standard deviation of SV counts
Returns:
outliers: pandas.DataFrame
Table of outliers
"""
# get set of all samples in stats data
all_samples = set(stats_data["SAMPLE"])
# loop over unique combinations of contig and sv type
# find outliers from each unique combination
# and concatenate those outliers into one table
outliers = pandas.concat(
tuple(
pick_outliers_by_group(
chrom=chrom, sv_type=sv_type, check_stats=check_stats, all_samples=all_samples,
zero_svs_are_outliers=zero_svs_are_outliers, outlier_std_threshold=outlier_std_threshold
)
for (chrom, sv_type), check_stats in stats_data.groupby(
["CHROM", "SVTYPE"], sort=False, as_index=False, group_keys=False
)
),
axis=0
)
return outliers
def write_outliers_file(
outliers: pandas.DataFrame,
outname: str,
outlier_type: str
):
"""
Write outliers of the appropriate type ("low" or "high") to TSV file.
Args:
outliers: pandas.DataFrame
Table of outlier data
outname: str
Base name of outlier TSV file. Final file name will have ".low" or ".high" appended to it.
outlier_type: str
"low" or "high".
"""
# write outliers to tsv. Add "#" in front of header
with open(outname + "." + outlier_type, 'w') as f_out:
f_out.write("#") # add '#' in front of header
outlier_wanted = outliers["Outlier_Cate"] == outlier_type
outliers.loc[outlier_wanted].to_csv(f_out, sep='\t', index=False)
def calc_num_svs_pick_outlier(
statfile: str,
outname: str,
zero_svs_are_outliers: bool = _zero_svs_are_outliers,
outlier_std_threshold: float = _outlier_std_threshold
):
"""
Find samples that have outlier number of SVs when broken down by contig and SV type.
Write two tables of outliers, along with statistics about SV count: one for those with above-median counts ("high")
and one for those at median or below ("low").
Args:
statfile: str
TSV file with table with SV counts
outname: str
Base name for saving outlier files. Low file will have ".low" appended to the name, and high file will have
".high"
zero_svs_are_outliers: bool
Whether to treat samples with no counts as automatic outliers, or explicitly code as zero counts
outlier_std_threshold: float
Threshold for outlier status as multiple of standard deviation of SV counts
"""
stats_data = read_statfile(statfile)
outliers = pick_outliers(stats_data, zero_svs_are_outliers=zero_svs_are_outliers,
outlier_std_threshold=outlier_std_threshold)
write_outliers_file(outliers, outname, "low")
write_outliers_file(outliers, outname, "high")
def _parse_arguments(argv: Sequence[str]) -> argparse.Namespace:
# noinspection PyTypeChecker
parser = argparse.ArgumentParser(
description="Find outliers in SV counts broken down by contig and SV type",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument("statfile", type=str,
help="name of stats concatinated from all samples")
parser.add_argument("outname", type=str, help="name of output file")
parser.add_argument("-z", "--zero-counts-are-not-outliers", action="store_true",
help="don't make zero SV counts an automatic outlier, check deviation from median as usual")
parser.add_argument("-t", "--outlier-std-threshold", type=float, default=_outlier_std_threshold,
help="threshold multiple of std of counts for outliers")
return parser.parse_args(argv[1:])
if __name__ == "__main__":
args = _parse_arguments(sys.argv)
calc_num_svs_pick_outlier(statfile=args.statfile, outname=args.outname,
zero_svs_are_outliers=not args.zero_counts_are_not_outliers,
outlier_std_threshold=args.outlier_std_threshold)
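# Example invocation (illustrative; the script name is an assumption):
#   python pick_sv_count_outliers.py concatenated_stats.tsv outlier_samples -t 5.0
# which writes outlier_samples.low and outlier_samples.high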
|
"""
Copyright (c) 2018-2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import numpy as np
from mo.front.caffe.extractors.inner_product import inner_product_ext
from mo.front.common.partial_infer.inner_product import caffe_inner_product
from mo.utils.unittest.extractors import FakeMultiParam, FakeModelLayer
class FakeProtoLayer:
def __init__(self, val):
self.inner_product_param = val
class TestInnerProduct(unittest.TestCase):
def test_inner_product_ext(self):
params = {
'num_output': 10,
'bias_term': True
}
mean_blob = np.array([1., 2.])
variance_blob = np.array([3., 4.])
blobs = [mean_blob, variance_blob]
res = inner_product_ext(FakeProtoLayer(FakeMultiParam(params)),
FakeModelLayer(blobs))
exp_res = {
'type': 'FullyConnected',
'out-size': 10,
'infer': caffe_inner_product,
'weights': mean_blob,
'biases': variance_blob,
'embedded_inputs': [
(1, 'weights', {
'bin': 'weights'
}),
(2, 'biases', {
'bin': 'biases'
})
]
}
for i in exp_res:
if i in ('weights', 'biases'):
np.testing.assert_array_equal(res[i], exp_res[i])
else:
self.assertEqual(res[i], exp_res[i])
|
import urllib
import concurrent.futures
import threading
|
"""
pysteps.io.exporter
===================
Methods for exporting forecasts of 2d precipitation fields into various file
formats.
Each exporter method in this module has its own initialization function that
implements the following interface::
initialize_forecast_exporter_xxx(filename, startdate, timestep,
num_timesteps, shape, num_ens_members,
metadata, incremental=None)
where xxx is the name (or abbreviation) of the file format.
This function creates the file and writes the metadata. The datasets are written
by calling :py:func:`pysteps.io.exporters.export_forecast_dataset`, and
the file is closed by calling :py:func:`pysteps.io.exporters.close_forecast_file`.
The arguments in the above are defined as follows:
.. tabularcolumns:: |p{2cm}|p{2cm}|L|
+---------------+-------------------+-----------------------------------------+
| Argument | Type/values | Description |
+===============+===================+=========================================+
| filename | str | name of the output file |
+---------------+-------------------+-----------------------------------------+
| startdate | datetime.datetime | start date of the forecast |
+---------------+-------------------+-----------------------------------------+
| timestep | int | time step of the forecast (minutes) |
+---------------+-------------------+-----------------------------------------+
| n_timesteps | int | number of time steps in the forecast |
| | | this argument is ignored if |
| | | incremental is set to 'timestep'. |
+---------------+-------------------+-----------------------------------------+
| shape | tuple | two-element tuple defining the shape |
| | | (height,width) of the forecast grids |
+---------------+-------------------+-----------------------------------------+
| n_ens_members | int | number of ensemble members in the |
| | | forecast. This argument is ignored if |
| | | incremental is set to 'member' |
+---------------+-------------------+-----------------------------------------+
| metadata | dict | metadata dictionary containing the |
| | | projection,x1,x2,y1,y2 and unit |
| | | attributes described in the |
| | | documentation of pysteps.io.importers |
+---------------+-------------------+-----------------------------------------+
| incremental | {None, 'timestep',| Allow incremental writing of datasets |
| | 'member'} | into the netCDF file |
| | | the available options are: |
| | | 'timestep' = write a forecast or a |
| | | forecast ensemble for a given |
| | | time step |
| | | 'member' = write a forecast sequence |
| | | for a given ensemble member |
+---------------+-------------------+-----------------------------------------+
The return value is a dictionary containing an exporter object. This can be
used with :py:func:`pysteps.io.exporters.export_forecast_dataset` to write
datasets into the given file format.
Available Exporters
-------------------
.. autosummary::
:toctree: ../generated/
initialize_forecast_exporter_kineros
initialize_forecast_exporter_netcdf
Generic functions
-----------------
.. autosummary::
:toctree: ../generated/
export_forecast_dataset
close_forecast_file
"""
from datetime import datetime
import numpy as np
import os
from pysteps.exceptions import MissingOptionalDependency
try:
import netCDF4
netcdf4_imported = True
except ImportError:
netcdf4_imported = False
try:
import pyproj
pyproj_imported = True
except ImportError:
pyproj_imported = False
# TODO(exporters): This is a draft version of the kineros exporter.
# Revise the variable names and
# the structure of the file if necessary.
def initialize_forecast_exporter_kineros(filename, startdate, timestep,
n_timesteps, shape, n_ens_members,
metadata, incremental=None):
"""Initialize a KINEROS2 Rainfall .pre file as specified
in https://www.tucson.ars.ag.gov/kineros/.
Grid points are treated as individual rain gauges and a separate file is
produced for each ensemble member.
Parameters
----------
filename : str
Name of the output file.
startdate : datetime.datetime
Start date of the forecast as datetime object.
timestep : int
Time step of the forecast (minutes).
n_timesteps : int
        Number of time steps in the forecast. This argument is ignored if
        incremental is set to 'timestep'.
shape : tuple of int
Two-element tuple defining the shape (height,width) of the forecast
grids.
n_ens_members : int
Number of ensemble members in the forecast. This argument is ignored if
incremental is set to 'member'.
metadata: dict
Metadata dictionary containing the projection,x1,x2,y1,y2 and unit
attributes described in the documentation of
:py:mod:`pysteps.io.importers`.
incremental : {None}, optional
Currently not implemented for this method.
Returns
-------
exporter : dict
        The return value is a dictionary containing an exporter object. This
        can be used with :py:func:`pysteps.io.exporters.export_forecast_dataset`
        to write datasets into the given file format.
"""
if incremental is not None:
raise ValueError("unknown option %s: incremental writing is not supported" % incremental)
exporter = {}
basefn, extfn = os.path.splitext(filename)
if extfn == "":
extfn = ".pre"
# one file for each member
n_ens_members = np.min((99, n_ens_members))
fns = []
for i in range(n_ens_members):
fn = "%s_N%02d%s" % (basefn, i, extfn)
with open(fn, "w") as fd:
# write header
fd.writelines("! pysteps-generated nowcast.\n")
fd.writelines("! created the %s.\n" % datetime.now().strftime("%c"))
# TODO(exporters): Add pySTEPS version here
fd.writelines("! Member = %02d.\n" % i)
fd.writelines("! Startdate = %s.\n" % startdate.strftime("%c"))
fns.append(fn)
fd.close()
h, w = shape
if metadata["unit"] == "mm/h":
var_name = "Intensity"
var_long_name = "Intensity in mm/hr"
var_unit = "mm/hr"
elif metadata["unit"] == "mm":
var_name = "Depth"
var_long_name = "Accumulated depth in mm"
var_unit = "mm"
else:
raise ValueError("unsupported unit %s" % metadata["unit"])
xr = np.linspace(metadata["x1"], metadata["x2"], w+1)[:-1]
xr += 0.5 * (xr[1] - xr[0])
yr = np.linspace(metadata["y1"], metadata["y2"], h+1)[:-1]
yr += 0.5 * (yr[1] - yr[0])
X, Y = np.meshgrid(xr, yr)
XY_coords = np.stack([X, Y])
exporter["method"] = "kineros"
exporter["ncfile"] = fns
exporter["XY_coords"] = XY_coords
exporter["var_name"] = var_name
exporter["var_long_name"] = var_long_name
exporter["var_unit"] = var_unit
exporter["startdate"] = startdate
exporter["timestep"] = timestep
exporter["metadata"] = metadata
exporter["incremental"] = incremental
exporter["num_timesteps"] = n_timesteps
exporter["num_ens_members"] = n_ens_members
exporter["shape"] = shape
return exporter
# TODO(exporters): This is a draft version of the netcdf exporter.
# Revise the variable names and
# the structure of the file if necessary.
def initialize_forecast_exporter_netcdf(filename, startdate, timestep,
n_timesteps, shape, n_ens_members,
metadata, product='precip_intensity',
incremental=None):
"""Initialize a netCDF forecast exporter.
Parameters
----------
filename : str
Name of the output file.
startdate : datetime.datetime
Start date of the forecast as datetime object.
timestep : int
Time step of the forecast (minutes).
n_timesteps : int
        Number of time steps in the forecast. This argument is ignored if
        incremental is set to 'timestep'.
shape : tuple of int
Two-element tuple defining the shape (height,width) of the forecast
grids.
n_ens_members : int
Number of ensemble members in the forecast. This argument is ignored if
incremental is set to 'member'.
metadata: dict
Metadata dictionary containing the projection,x1,x2,y1,y2 and unit
attributes described in the documentation of
:py:mod:`pysteps.io.importers`.
product: str
product name can be 'precip_intensity' for intensity export,
'precip_probability' for probability export.
incremental : {None,'timestep','member'}, optional
Allow incremental writing of datasets into the netCDF file.\n
The available options are: 'timestep' = write a forecast or a forecast
ensemble for a given time step; 'member' = write a forecast sequence
for a given ensemble member. If set to None, incremental writing is
disabled.
Returns
-------
exporter : dict
        The return value is a dictionary containing an exporter object. This
        can be used with :py:func:`pysteps.io.exporters.export_forecast_dataset`
        to write datasets into the given file format.
"""
if not netcdf4_imported:
raise MissingOptionalDependency(
"netCDF4 package is required for netcdf "
"exporters but it is not installed")
if not pyproj_imported:
raise MissingOptionalDependency(
"pyproj package is required for netcdf "
"exporters but it is not installed")
if incremental not in [None, "timestep", "member"]:
raise ValueError("unknown option %s: incremental must be 'timestep' or 'member'" % incremental)
if incremental == "timestep":
n_timesteps = None
elif incremental == "member":
n_ens_members = None
elif incremental is not None:
raise ValueError("unknown argument value incremental='%s': must be 'timestep' or 'member'" % str(incremental))
exporter = {}
filename = os.path.realpath(filename)
if not os.path.exists(os.path.dirname(filename)):
os.mkdir(os.path.dirname(filename))
ncf = netCDF4.Dataset(filename, 'w', format="NETCDF4")
ncf.Conventions = "CF-1.7"
ncf.title = "pysteps-generated nowcast"
ncf.institution = "the pySTEPS community (https://pysteps.github.io)"
ncf.source = "pysteps" # TODO(exporters): Add pySTEPS version here
ncf.history = ""
ncf.references = ""
ncf.comment = ""
h, w = shape
# if product != 'precip_probability':
# ncf.createDimension("ens_number", size=n_ens_members)
ncf.createDimension("time", size=n_timesteps)
ncf.createDimension("y", size=h)
ncf.createDimension("x", size=w)
# necessary settings for probability nowcasting
ncf.datetime = str(startdate)
if product == 'precip_probability':
#TODO: Add this metadata unit percent in the source
metadata["unit"] = "percent"
if metadata["unit"] == "mm/h":
var_name = "precip_intensity"
var_standard_name = None
var_long_name = "instantaneous precipitation rate"
var_unit = "mm h-1"
elif metadata["unit"] == "percent":
var_name = "precip_probability"
var_standard_name = None
var_long_name = "probablistic precipitation"
var_unit = "percent"
elif metadata["unit"] == "mm":
var_name = "precip_accum"
var_standard_name = None
var_long_name = "accumulated precipitation"
var_unit = "mm"
elif metadata["unit"] == "dBZ":
var_name = "reflectivity"
var_long_name = "equivalent reflectivity factor"
var_standard_name = "equivalent_reflectivity_factor"
var_unit = "dBZ"
else:
raise ValueError("unknown unit %s" % metadata["unit"])
xr = np.linspace(metadata["x1"], metadata["x2"], w+1)[:-1]
xr += 0.5 * (xr[1] - xr[0])
yr = np.linspace(metadata["y1"], metadata["y2"], h+1)[:-1]
yr += 0.5 * (yr[1] - yr[0])
var_xc = ncf.createVariable("xc", np.float32, dimensions=("x",))
var_xc[:] = xr
var_xc.axis = 'X'
var_xc.standard_name = "projection_x_coordinate"
var_xc.long_name = "x-coordinate in Cartesian system"
# TODO(exporters): Don't hard-code the unit.
var_xc.units = 'm'
var_yc = ncf.createVariable("yc", np.float32, dimensions=("y",))
var_yc[:] = yr
var_yc.axis = 'Y'
var_yc.standard_name = "projection_y_coordinate"
var_yc.long_name = "y-coordinate in Cartesian system"
# TODO(exporters): Don't hard-code the unit.
var_yc.units = 'm'
X, Y = np.meshgrid(xr, yr)
pr = pyproj.Proj(metadata["projection"])
    lon, lat = pr(X.flatten(), Y.flatten(), inverse=True)
    # np.float is a deprecated alias of the builtin float
    new_long, new_lat = np.zeros((h, w), dtype=float), np.zeros((h, w), dtype=float)
idx = 0
for row in range(h):
for col in range(w):
new_long[row][col] = lon[idx]
idx += 1
idx = 0
for row in range(h):
for col in range(w):
new_lat[row][col] = lat[idx]
idx += 1
var_lon = ncf.createVariable("lon", np.float32, dimensions=("y", "x"))
var_lon[:] = new_long
var_lon.standard_name = "longitude"
var_lon.long_name = "longitude coordinate"
# TODO(exporters): Don't hard-code the unit.
var_lon.units = "degrees_east"
var_lat = ncf.createVariable("lat", np.float, dimensions=("y", "x"))
var_lat[:] = new_lat
var_lat.standard_name = "latitude"
var_lat.long_name = "latitude coordinate"
# TODO(exporters): Don't hard-code the unit.
var_lat.units = "degrees_north"
ncf.projection = metadata["projection"]
grid_mapping_var_name, grid_mapping_name, grid_mapping_params = \
_convert_proj4_to_grid_mapping(metadata["projection"])
# skip writing the grid mapping if a matching name was not found
if grid_mapping_var_name is not None:
        var_gm = ncf.createVariable(grid_mapping_var_name, int,
dimensions=())
var_gm.grid_mapping_name = grid_mapping_name
for i in grid_mapping_params.items():
var_gm.setncattr(i[0], i[1])
# if product != 'precip_probability':
# var_ens_num = ncf.createVariable("ens_number", np.int,
# dimensions=("ens_number",))
# if incremental != "member":
# var_ens_num[:] = list(range(1, n_ens_members+1))
# var_ens_num.long_name = "ensemble member"
# var_ens_num.units = ""
var_time = ncf.createVariable("time", np.int, dimensions=("time",))
if incremental != "timestep":
if product == 'precip_probability':
var_time[:] = [i*timestep for i in range(1, n_timesteps+1)]
else:
var_time[:] = [i*timestep*60 for i in range(1, n_timesteps+1)]
var_time.long_name = "forecast time"
startdate_str = datetime.strftime(startdate, "%Y-%m-%d %H:%M:%S")
var_time.units = "minutes since %s" % startdate_str if product == 'precip_probability' \
else "seconds since %s" % startdate_str
dimensions = ("time", "y", "x")
var_F = ncf.createVariable(var_name, np.float32,
dimensions=dimensions,
zlib=True, complevel=9)
if var_standard_name is not None:
var_F.standard_name = var_standard_name
var_F.long_name = var_long_name
var_F.coordinates = "y x"
var_F.units = var_unit
exporter["method"] = "netcdf"
exporter["ncfile"] = ncf
exporter["var_F"] = var_F
# if product != 'precip_probability':
# exporter["var_ens_num"] = var_ens_num
exporter["var_time"] = var_time
exporter["var_name"] = var_name
exporter["startdate"] = startdate
exporter["timestep"] = timestep
exporter["metadata"] = metadata
exporter["incremental"] = incremental
exporter["num_timesteps"] = n_timesteps
exporter["num_ens_members"] = n_ens_members
exporter["shape"] = shape
return exporter
def export_forecast_dataset(F, exporter, mask=None):
"""Write a forecast array into a file.
The written dataset has dimensions
(num_ens_members,num_timesteps,shape[0],shape[1]), where shape refers to
the shape of the two-dimensional forecast grids. If the exporter was
initialized with incremental!=None, the array is appended to the existing
dataset either along the ensemble member or time axis.
Parameters
----------
exporter : dict
An exporter object created with any initialization method implemented
in :py:mod:`pysteps.io.exporters`.
mask : array_like, optional
Optional mask; currently not used by _export_netcdf.
F : array_like
The array to write. The required shape depends on the choice of the
'incremental' parameter the exporter was initialized with:
:TODO: Update this table incorporating 'precip_probability'
+-----------------+---------------------------------------------------+
| incremental | required shape |
+=================+===================================================+
| None | (num_ens_members,num_timesteps,shape[0],shape[1]) |
+-----------------+---------------------------------------------------+
| 'timestep' | (num_ens_members,shape[0],shape[1]) |
+-----------------+---------------------------------------------------+
| 'member' | (num_timesteps,shape[0],shape[1]) |
+-----------------+---------------------------------------------------+
"""
if exporter["method"] == "netcdf" and not netcdf4_imported:
raise MissingOptionalDependency(
"netCDF4 package is required for netcdf "
"exporters but it is not installed")
if exporter["incremental"] is None:
shp = (exporter["num_timesteps"], exporter["shape"][0], exporter["shape"][1])
if F.shape != shp:
raise ValueError("F has invalid shape: %s != %s" % (str(F.shape),str(shp)))
elif exporter["incremental"] == "timestep":
shp = (exporter["num_ens_members"], exporter["shape"][0],
exporter["shape"][1])
if F.shape != shp:
raise ValueError("F has invalid shape: %s != %s" % (str(F.shape),str(shp)))
elif exporter["incremental"] == "member":
shp = (exporter["num_timesteps"], exporter["shape"][0],
exporter["shape"][1])
if F.shape != shp:
raise ValueError("F has invalid shape: %s != %s" % (str(F.shape),str(shp)))
if exporter["method"] == "netcdf":
_export_netcdf(F, exporter, mask)
elif exporter["method"] == "kineros":
_export_kineros(F, exporter)
else:
raise ValueError("unknown exporter method %s" % exporter["method"])
def close_forecast_file(exporter):
"""Close the file associated with a forecast exporter.
Finish writing forecasts and close the file associated with a forecast
exporter.
Parameters
----------
exporter : dict
An exporter object created with any initialization method implemented
in :py:mod:`pysteps.io.exporters`.
"""
if exporter["method"] == "kineros":
pass # no need to close the file
else:
exporter["ncfile"].close()
def _export_kineros(F, exporter):
num_timesteps = exporter["num_timesteps"]
num_ens_members = exporter["num_ens_members"]
startdate = exporter["startdate"]
timestep = exporter["timestep"]
xgrid = exporter["XY_coords"][0, :, :].flatten()
ygrid = exporter["XY_coords"][1, :, :].flatten()
timemin = [(t + 1)*timestep for t in range(num_timesteps)]
for n in range(num_ens_members):
fn = exporter["ncfile"][n]
F_ = F[n, :, :, :].reshape((num_timesteps, -1))
if exporter["var_name"] == "Depth":
F_ = np.cumsum(F_, axis=0)
with open(fn, "a") as fd:
for m in range(F_.shape[1]):
fd.writelines("BEGIN RG%03d\n" % (m + 1))
fd.writelines(" X = %.2f, Y = %.2f\n" % (xgrid[m], ygrid[m]))
fd.writelines(" N = %i\n" % num_timesteps)
fd.writelines(" TIME %s\n" % exporter["var_name"].upper())
fd.writelines("! (min) (%s)\n" % exporter["var_unit"])
for t in range(num_timesteps):
line_new = "{:6.1f} {:11.2f}\n".format(timemin[t], F_[t, m])
fd.writelines(line_new)
fd.writelines("END\n\n")
def _export_netcdf(F, exporter, mask=None):
var_F = exporter["var_F"]
if exporter["incremental"] is None:
# The y axis is flipped when writing the non-incremental dataset.
var_F[:] = F[:, ::-1, :]
elif exporter["incremental"] == "timestep":
# NOTE: the incremental branches assume the four-dimensional
# (ens_number, time, y, x) layout of the full ensemble exporter,
# whereas the variable created above only has ("time", "y", "x").
var_F[:, var_F.shape[1], :, :] = F
var_time = exporter["var_time"]
var_time[len(var_time)-1] = len(var_time) * exporter["timestep"] * 60
else:
var_F[var_F.shape[0], :, :, :] = F
# Requires the ens_number variable, which is currently commented out
# in the initializer above.
var_ens_num = exporter["var_ens_num"]
var_ens_num[len(var_ens_num)-1] = len(var_ens_num)
# TODO(exporters): Write methods for converting Proj.4 projection definitions
# into CF grid mapping attributes. Currently this has been implemented for
# the stereographic projection.
# The conversions implemented here are taken from:
# https://github.com/cf-convention/cf-convention.github.io/blob/master/wkt-proj-4.md
def _convert_proj4_to_grid_mapping(proj4str):
"""Convert a Proj.4 projection string into CF grid mapping attributes."""
tokens = proj4str.split('+')
d = {}
for t in tokens[1:]:
t = t.split('=')
if len(t) > 1:
d[t[0]] = t[1].strip()
params = {}
# TODO(exporters): implement more projection types here
if d["proj"] == "stere":
grid_mapping_var_name = "polar_stereographic"
grid_mapping_name = "polar_stereographic"
v = d["lon_0"] if d["lon_0"][-1] not in ["E", "W"] else d["lon_0"][:-1]
params["straight_vertical_longitude_from_pole"] = float(v)
v = d["lat_0"] if d["lat_0"][-1] not in ["N", "S"] else d["lat_0"][:-1]
params["latitude_of_projection_origin"] = float(v)
if "lat_ts" in list(d.keys()):
params["standard_parallel"] = float(d["lat_ts"])
elif "k_0" in list(d.keys()):
params["scale_factor_at_projection_origin"] = float(d["k_0"])
params["false_easting"] = float(d["x_0"])
params["false_northing"] = float(d["y_0"])
elif d["proj"] == "sterea":
grid_mapping_var_name = "oblique_stereographic"
grid_mapping_name = "oblique_stereographic"
v = d["lon_0"] if d["lon_0"][-1] not in ["E", "W"] else d["lon_0"][:-1]
params["longitude_of_projection_origin"] = float(v)
v = d["lat_0"] if d["lat_0"][-1] not in ["N", "S"] else d["lat_0"][:-1]
params["latitude_of_projection_origin"] = float(v)
if "lat_ts" in list(d.keys()):
params["standard_parallel"] = float(d["lat_ts"])
elif "k_0" in list(d.keys()):
params["scale_factor_at_projection_origin"] = float(d["k_0"])
params["false_easting"] = float(d["x_0"])
params["false_northing"] = float(d["y_0"])
elif d["proj"] == "aea": # Albers Conical Equal Area
grid_mapping_var_name = "proj"
grid_mapping_name = "albers_conical_equal_area"
params["false_easting"] = float(d["x_0"]) if "x_0" in d else float(0)
params["false_northing"] = float(d["y_0"]) if "y_0" in d else float(0)
v = d["lon_0"] if "lon_0" in d else float(0)
params["longitude_of_central_meridian"] = float(v)
v = d["lat_0"] if "lat_0" in d else float(0)
params["latitude_of_projection_origin"] = float(v)
v1 = d["lat_1"] if "lat_1" in d else float(0)
v2 = d["lat_2"] if "lat_2" in d else float(0)
params["standard_parallel"] = (float(v1), float(v2))
else:
print('unknown projection', d["proj"])
return None, None, None
return grid_mapping_var_name, grid_mapping_name, params
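# Illustrative self-test for the converter above; the Proj.4 parameter values
# below are made up and only exercise the 'stere' branch.
if __name__ == "__main__":
    _proj4 = ("+proj=stere +lon_0=25 +lat_0=90 +lat_ts=60 "
              "+x_0=380886.310 +y_0=3395677.920 +no_defs")
    _gm_var, _gm_name, _gm_params = _convert_proj4_to_grid_mapping(_proj4)
    # Expected: both names are "polar_stereographic" and _gm_params contains
    # straight_vertical_longitude_from_pole, latitude_of_projection_origin,
    # standard_parallel, false_easting and false_northing.
    print(_gm_var, _gm_name, _gm_params)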
|
from bs4 import BeautifulSoup
import requests
import test
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import datetime
def update_inRange():
print("Today's Date : ",datetime.date.today())
yesterday = datetime.date.today() - datetime.timedelta(1)
yesterday_month = yesterday.strftime("%b")
yesterday_dayno = yesterday.strftime("%d")
yesterday_full_Date = yesterday.strftime("%d %B, %Y")
comparable_date = yesterday.strftime("%b/%d")
print(comparable_date)
print()
print("----- Start -----")
print()
# Test
# todays_month = 'Sep'
# todays_day = '15'
# Test
print("Yesterday was : ",yesterday_full_Date)
scope = ["https://spreadsheets.google.com/feeds", 'https://www.googleapis.com/auth/spreadsheets',
"https://www.googleapis.com/auth/drive.file", "https://www.googleapis.com/auth/drive"]
creds = ServiceAccountCredentials.from_json_keyfile_name("CodeforcesAutoTracker-b2030a7afa6c.json", scope);
client = gspread.authorize(creds)
sheet = client.open("Codeforces Auto Tracker - Akif Islam").worksheet('Sheet2')
data = sheet.get_all_records()
# pprint(data)
date_column = sheet.col_values(1)
no_of_total_submission_column = sheet.col_values(2)
no_of_total_accepted_column = sheet.col_values(3)
source_link = "https://codeforces.com/submissions/miss.progga"
source = requests.get(source_link).text
soup = BeautifulSoup(source, "lxml").find('table', class_="status-frame-datatable")
submission_time = []
# 1. Collecting all dates from 50 submission of First Page of Codeforces Submission
for data in soup.findAll('span', class_="format-time"):
submission_time.append(data.text[0:6])
print("Submission's Time : ", submission_time)
print("OK !")
print()
# Execution
submission_count = 0
total_accepted = []
accepted_count = 0
accepted_nonduplicate_list = []
# Total Accepted Count from the 50 submissions on the first page
for data in soup.findAll('span', class_="submissionVerdictWrapper"):
total_accepted.append(data.text)
print(total_accepted)
print(len(total_accepted))
print(len(submission_time))
# Total Submission Count
for i in range(len(submission_time)):
if submission_time[i][0:3] == yesterday_month and submission_time[i][4:6] == yesterday_dayno:
submission_count += 1
if total_accepted[i] == "Accepted":
accepted_entry = test.get_problemlist()[i] + " Accepted"
accepted_nonduplicate_list.append(accepted_entry)
# Deduplicate accepted problems so each counts once
accepted_nonduplicate_set = set(accepted_nonduplicate_list)
print("Accepted List : ", accepted_nonduplicate_set)
accepted_count = len(accepted_nonduplicate_set)
print("Total Submission : ", submission_count)
print("Total Accepted : ", accepted_count)
insert_list = [yesterday_full_Date, submission_count, accepted_count]
print(insert_list)
previous_date = sheet.cell(len(date_column), 1).value[0:2]
# if sheet.cell(len(date_column),1)[0:1] != todays_day :
if previous_date != yesterday_dayno:
sheet.insert_row(insert_list, (len(date_column) + 1))
else:
print("Duplicate Date Found ! ")
print()
print("----- Finished !-----")
print()
update_inRange()
|
print("Challenge 1:")
# A message for user
message = "This is goind to be tricky ;"
Message = "Very tricky!"
print(message) # show the message on the screen
# Perform mathematical operations
result = 2**3
print("2**3 =", result)
result = 5 - 3
print("5 - 3 =", result)
print("Challenge complete!")
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import models
from django.urls import reverse_lazy
from django.utils.translation import ugettext_lazy as _
class Blog(models.Model):
"""Blog of blogs"""
BLOG_STATUS = (
('1normal', _('status_published')),
('2temp', _('status_draft')),
('5hidden', _('status_pending')),
('6deleted', _('status_deleted')),
)
status = models.CharField(
max_length=10, choices=BLOG_STATUS, default='1normal')
user = models.ForeignKey(
settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
created_at = models.DateTimeField(auto_now_add=True)
modified_at = models.DateTimeField(auto_now=True)
ip = models.GenericIPAddressField()
category = models.CharField(max_length=23, blank=True)
title = models.CharField(max_length=41)
content = models.TextField()
view_count = models.IntegerField(default=0)
comment_count = models.IntegerField(default=0)
like_count = models.IntegerField(default=0)
like_users = models.TextField(default='', blank=True)
image = models.ImageField(
upload_to="featured_images/%Y-%m/", blank=True)
tags = models.TextField(default='', blank=True)
def get_absolute_url(self):
"""Back to list"""
return reverse_lazy('blogs:show_blogs', args=[1])
def get_post_url(self):
"""Back to post"""
return reverse_lazy('blogs:show_post', args=[self.id])
def get_edit_url(self):
"""Stay editing"""
return reverse_lazy('blogs:edit_post', args=[self.id])
def get_status_text(self):
"""Get status text"""
if self.status == '1normal':
return _('status_normal')
elif self.status == '2temp':
return _('status_draft')
elif self.status == '5hidden':
return _('status_pending')
elif self.status == '6deleted':
return _('status_deleted')
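# Note (illustrative): since ``status`` declares ``choices``, Django also
# auto-generates ``get_status_display()``; the manual mapping above is kept
# because its return labels differ from the BLOG_STATUS labels.
#
#   Blog(status='2temp').get_status_display()  # -> _('status_draft')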
class Comment(models.Model):
"""Comment of blogs"""
COMMENT_STATUS = (
('1normal', _('status_normal')),
('6deleted', _('status_deleted')),
('7spam', _('status_spam')),
)
post_id = models.IntegerField(default=0)
comment_id = models.IntegerField(default=0)
status = models.CharField(
max_length=10, choices=COMMENT_STATUS, default='1normal')
userid = models.CharField(max_length=settings.ID_MAX_LENGTH, blank=True)
username = models.CharField(max_length=settings.USERNAME_MAX, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
ip = models.GenericIPAddressField()
content = models.TextField(max_length=settings.COMMENT_TEXT_MAX)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutStringManipulation(Koan):
def test_use_format_to_interpolate_variables(self):
value1 = 'one'
value2 = 2
string = "The values are {0} and {1}".format(value1, value2)
self.assertEqual("The values are one and 2", string)
def test_formatted_values_can_be_shown_in_any_order_or_be_repeated(self):
value1 = 'doh'
value2 = 'DOH'
string = "The values are {1}, {0}, {0} and {1}!".format(value1, value2)
self.assertEqual("The values are DOH, doh, doh and DOH!", string)
def test_any_python_expression_may_be_interpolated(self):
import math # import a standard python module with math functions
decimal_places = 4
string = "The square root of 5 is {0:.{1}f}".format(math.sqrt(5),
decimal_places)
self.assertEqual("The square root of 5 is 2.2361", string)
def test_you_can_get_a_substring_from_a_string(self):
string = "Bacon, lettuce and tomato"
self.assertEqual("let", string[7:10])
def test_you_can_get_a_single_character_from_a_string(self):
string = "Bacon, lettuce and tomato"
self.assertEqual("a", string[1])
def test_single_characters_can_be_represented_by_integers(self):
self.assertEqual(97, ord('a'))
self.assertEqual(True, ord('b') == (ord('a') + 1))
def test_strings_can_be_split(self):
string = "Sausage Egg Cheese"
words = string.split()
self.assertListEqual(["Sausage", "Egg", "Cheese"], words)
def test_strings_can_be_split_with_different_patterns(self):
import re # import python regular expression library
string = "the,rain;in,spain"
pattern = re.compile(',|;')
words = pattern.split(string)
self.assertListEqual(["the", "rain", "in", "spain"], words)
# Pattern is a Python regular expression pattern which matches ',' or ';'
def test_raw_strings_do_not_interpret_escape_characters(self):
string = r'\n'
self.assertNotEqual('\n', string)
self.assertEqual('\\n', string)
self.assertEqual(2, len(string))
# Useful in regular expressions, file paths, URLs, etc.
def test_strings_can_be_joined(self):
words = ["Now", "is", "the", "time"]
self.assertEqual("Now is the time", ' '.join(words))
def test_strings_can_change_case(self):
self.assertEqual("Guido", 'guido'.capitalize())
self.assertEqual("GUIDO", 'guido'.upper())
self.assertEqual("timbot", 'TimBot'.lower())
self.assertEqual("Guido Van Rossum", 'guido van rossum'.title())
self.assertEqual("tOtAlLy AwEsOmE", 'ToTaLlY aWeSoMe'.swapcase())
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
It's used to accept requests from the device to spawn and kill instances of the
chrome test server on the host.
"""
import BaseHTTPServer
import json
import logging
import os
import select
import struct
import subprocess
import threading
import time
import urlparse
import constants
from forwarder import Forwarder
import ports
# Path that are needed to import necessary modules when launching a testserver.
os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
% (os.path.join(constants.CHROME_DIR, 'third_party'),
os.path.join(constants.CHROME_DIR, 'third_party', 'tlslite'),
os.path.join(constants.CHROME_DIR, 'third_party', 'pyftpdlib', 'src'),
os.path.join(constants.CHROME_DIR, 'net', 'tools', 'testserver'),
os.path.join(constants.CHROME_DIR, 'sync', 'tools', 'testserver')))
SERVER_TYPES = {
'http': '',
'ftp': '-f',
'sync': '', # Sync uses its own script, and doesn't take a server type arg.
'tcpecho': '--tcp-echo',
'udpecho': '--udp-echo',
}
# The timeout (in seconds) of starting up the Python test server.
TEST_SERVER_STARTUP_TIMEOUT = 10
def _CheckPortStatus(port, expected_status):
"""Returns True if port has expected_status.
Args:
port: the port number.
expected_status: boolean of expected status.
Returns:
Returns True if the status is expected. Otherwise returns False.
"""
for timeout in range(1, 5):
if ports.IsHostPortUsed(port) == expected_status:
return True
time.sleep(timeout)
return False
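# Example (illustrative): wait for a freshly started server to begin
# listening on a known host port before talking to it.
#
#   if not _CheckPortStatus(8000, True):
#     logging.error('Server did not start listening on port 8000.')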
def _GetServerTypeCommandLine(server_type):
"""Returns the command-line by the given server type.
Args:
server_type: the server type to be used (e.g. 'http').
Returns:
A string containing the command-line argument.
"""
if server_type not in SERVER_TYPES:
raise NotImplementedError('Unknown server type: %s' % server_type)
if server_type == 'udpecho':
raise Exception('Please do not run UDP echo tests because we do not have '
'a UDP forwarder tool.')
return SERVER_TYPES[server_type]
class TestServerThread(threading.Thread):
"""A thread to run the test server in a separate process."""
def __init__(self, ready_event, arguments, adb, tool, build_type):
"""Initialize TestServerThread with the following argument.
Args:
ready_event: event which will be set when the test server is ready.
arguments: dictionary of arguments to run the test server.
adb: instance of AndroidCommands.
tool: instance of runtime error detection tool.
build_type: 'Release' or 'Debug'.
"""
threading.Thread.__init__(self)
self.wait_event = threading.Event()
self.stop_flag = False
self.ready_event = ready_event
self.ready_event.clear()
self.arguments = arguments
self.adb = adb
self.tool = tool
self.process = None
self.is_ready = False
self.host_port = self.arguments['port']
assert isinstance(self.host_port, int)
self._test_server_forwarder = None
# The forwarder device port now is dynamically allocated.
self.forwarder_device_port = 0
# Anonymous pipe in order to get port info from test server.
self.pipe_in = None
self.pipe_out = None
self.command_line = []
self.build_type = build_type
def _WaitToStartAndGetPortFromTestServer(self):
"""Waits for the Python test server to start and gets the port it is using.
The port information is passed by the Python test server with a pipe given
by self.pipe_out. It is written as a result to |self.host_port|.
Returns:
Whether the port used by the test server was successfully fetched.
"""
assert self.host_port == 0 and self.pipe_out and self.pipe_in
(in_fds, _, _) = select.select([self.pipe_in, ], [], [],
TEST_SERVER_STARTUP_TIMEOUT)
if len(in_fds) == 0:
logging.error('Failed to wait to the Python test server to be started.')
return False
# First read the data length as an unsigned 4-byte value. This
# is _not_ using network byte ordering since the Python test server packs
# size as native byte order and all Chromium platforms so far are
# configured to use little-endian.
# TODO(jnd): Change the Python test server and local_test_server_*.cc to
# use a unified byte order (either big-endian or little-endian).
data_length = os.read(self.pipe_in, struct.calcsize('=L'))
if data_length:
(data_length,) = struct.unpack('=L', data_length)
assert data_length
if not data_length:
logging.error('Failed to get length of server data.')
return False
port_json = os.read(self.pipe_in, data_length)
if not port_json:
logging.error('Failed to get server data.')
return False
logging.info('Got port json data: %s', port_json)
port_json = json.loads(port_json)
if port_json.has_key('port') and isinstance(port_json['port'], int):
self.host_port = port_json['port']
return _CheckPortStatus(self.host_port, True)
logging.error('Failed to get port information from the server data.')
return False
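# For reference, a sketch of the framing this reader expects (the writer is
# the Python test server itself, not this file; names below are illustrative):
# a native-order 4-byte length prefix followed by a JSON payload containing
# the chosen port.
#
#   payload = json.dumps({'port': chosen_port})
#   os.write(startup_pipe_fd, struct.pack('=L', len(payload)) + payload)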
def _GenerateCommandLineArguments(self):
"""Generates the command line to run the test server.
Note that all options are processed by following the definitions in
testserver.py.
"""
if self.command_line:
return
# The following arguments must exist.
type_cmd = _GetServerTypeCommandLine(self.arguments['server-type'])
if type_cmd:
self.command_line.append(type_cmd)
self.command_line.append('--port=%d' % self.host_port)
# Use a pipe to get the port given by the instance of Python test server
# if the test does not specify the port.
if self.host_port == 0:
(self.pipe_in, self.pipe_out) = os.pipe()
self.command_line.append('--startup-pipe=%d' % self.pipe_out)
self.command_line.append('--host=%s' % self.arguments['host'])
data_dir = self.arguments['data-dir'] or 'chrome/test/data'
if not os.path.isabs(data_dir):
data_dir = os.path.join(constants.CHROME_DIR, data_dir)
self.command_line.append('--data-dir=%s' % data_dir)
# The following arguments are optional depending on the individual test.
if self.arguments.has_key('log-to-console'):
self.command_line.append('--log-to-console')
if self.arguments.has_key('auth-token'):
self.command_line.append('--auth-token=%s' % self.arguments['auth-token'])
if self.arguments.has_key('https'):
self.command_line.append('--https')
if self.arguments.has_key('cert-and-key-file'):
self.command_line.append('--cert-and-key-file=%s' % os.path.join(
constants.CHROME_DIR, self.arguments['cert-and-key-file']))
if self.arguments.has_key('ocsp'):
self.command_line.append('--ocsp=%s' % self.arguments['ocsp'])
if self.arguments.has_key('https-record-resume'):
self.command_line.append('--https-record-resume')
if self.arguments.has_key('ssl-client-auth'):
self.command_line.append('--ssl-client-auth')
if self.arguments.has_key('tls-intolerant'):
self.command_line.append('--tls-intolerant=%s' %
self.arguments['tls-intolerant'])
if self.arguments.has_key('ssl-client-ca'):
for ca in self.arguments['ssl-client-ca']:
self.command_line.append('--ssl-client-ca=%s' %
os.path.join(constants.CHROME_DIR, ca))
if self.arguments.has_key('ssl-bulk-cipher'):
for bulk_cipher in self.arguments['ssl-bulk-cipher']:
self.command_line.append('--ssl-bulk-cipher=%s' % bulk_cipher)
def run(self):
logging.info('Start running the thread!')
self.wait_event.clear()
self._GenerateCommandLineArguments()
command = constants.CHROME_DIR
if self.arguments['server-type'] == 'sync':
command = [os.path.join(command, 'sync', 'tools', 'testserver',
'sync_testserver.py')] + self.command_line
else:
command = [os.path.join(command, 'net', 'tools', 'testserver',
'testserver.py')] + self.command_line
logging.info('Running: %s', command)
self.process = subprocess.Popen(command)
if self.process:
if self.pipe_out:
self.is_ready = self._WaitToStartAndGetPortFromTestServer()
else:
self.is_ready = _CheckPortStatus(self.host_port, True)
if self.is_ready:
self._test_server_forwarder = Forwarder(self.adb, self.build_type)
self._test_server_forwarder.Run(
[(0, self.host_port)], self.tool, '127.0.0.1')
# Check whether the forwarder is ready on the device.
self.is_ready = False
device_port = self._test_server_forwarder.DevicePortForHostPort(
self.host_port)
if device_port:
for timeout in range(1, 5):
if ports.IsDevicePortUsed(self.adb, device_port, 'LISTEN'):
self.is_ready = True
self.forwarder_device_port = device_port
break
time.sleep(timeout)
# Wake up the request handler thread.
self.ready_event.set()
# Keep thread running until Stop() gets called.
while not self.stop_flag:
time.sleep(1)
if self.process.poll() is None:
self.process.kill()
if self._test_server_forwarder:
self._test_server_forwarder.Close()
self.process = None
self.is_ready = False
if self.pipe_out:
os.close(self.pipe_in)
os.close(self.pipe_out)
self.pipe_in = None
self.pipe_out = None
logging.info('Test-server has died.')
self.wait_event.set()
def Stop(self):
"""Blocks until the loop has finished.
Note that this must be called in another thread.
"""
if not self.process:
return
self.stop_flag = True
self.wait_event.wait()
class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""A handler used to process http GET/POST request."""
def _SendResponse(self, response_code, response_reason, additional_headers,
contents):
"""Generates a response sent to the client from the provided parameters.
Args:
response_code: number of the response status.
response_reason: string of reason description of the response.
additional_headers: dict of additional headers. Each key is the name of
the header, each value is the content of the header.
contents: string of the contents we want to send to client.
"""
self.send_response(response_code, response_reason)
self.send_header('Content-Type', 'text/html')
# Specify the content-length as without it the http(s) response will not
# be completed properly (and the browser keeps expecting data).
self.send_header('Content-Length', len(contents))
for header_name in additional_headers:
self.send_header(header_name, additional_headers[header_name])
self.end_headers()
self.wfile.write(contents)
self.wfile.flush()
def _StartTestServer(self):
"""Starts the test server thread."""
logging.info('Handling request to spawn a test server.')
content_type = self.headers.getheader('content-type')
if content_type != 'application/json':
raise Exception('Bad content-type for start request.')
content_length = self.headers.getheader('content-length')
if not content_length:
content_length = 0
try:
content_length = int(content_length)
except:
raise Exception('Bad content-length for start request.')
logging.info(content_length)
test_server_argument_json = self.rfile.read(content_length)
logging.info(test_server_argument_json)
assert not self.server.test_server_instance
ready_event = threading.Event()
self.server.test_server_instance = TestServerThread(
ready_event,
json.loads(test_server_argument_json),
self.server.adb,
self.server.tool,
self.server.build_type)
self.server.test_server_instance.setDaemon(True)
self.server.test_server_instance.start()
ready_event.wait()
if self.server.test_server_instance.is_ready:
self._SendResponse(200, 'OK', {}, json.dumps(
{'port': self.server.test_server_instance.forwarder_device_port,
'message': 'started'}))
logging.info('Test server is running on port: %d.',
self.server.test_server_instance.host_port)
else:
self.server.test_server_instance.Stop()
self.server.test_server_instance = None
self._SendResponse(500, 'Test Server Error.', {}, '')
logging.info('Encounter problem during starting a test server.')
def _KillTestServer(self):
"""Stops the test server instance."""
# There should only ever be one test server at a time. This may do the
# wrong thing if we try and start multiple test servers.
if not self.server.test_server_instance:
return
port = self.server.test_server_instance.host_port
logging.info('Handling request to kill a test server on port: %d.', port)
self.server.test_server_instance.Stop()
# Make sure the status of test server is correct before sending response.
if _CheckPortStatus(port, False):
self._SendResponse(200, 'OK', {}, 'killed')
logging.info('Test server on port %d is killed', port)
else:
self._SendResponse(500, 'Test Server Error.', {}, '')
logging.info('Encounter problem during killing a test server.')
self.server.test_server_instance = None
def do_POST(self):
parsed_path = urlparse.urlparse(self.path)
action = parsed_path.path
logging.info('Action for POST method is: %s.', action)
if action == '/start':
self._StartTestServer()
else:
self._SendResponse(400, 'Unknown request.', {}, '')
logging.info('Encounter unknown request: %s.', action)
def do_GET(self):
parsed_path = urlparse.urlparse(self.path)
action = parsed_path.path
params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
logging.info('Action for GET method is: %s.', action)
for param in params:
logging.info('%s=%s', param, params[param][0])
if action == '/kill':
self._KillTestServer()
elif action == '/ping':
# The ping handler is used to check whether the spawner server is ready
# to serve the requests. We don't need to test the status of the test
# server when handling ping request.
self._SendResponse(200, 'OK', {}, 'ready')
logging.info('Handled ping request and sent response.')
else:
self._SendResponse(400, 'Unknown request', {}, '')
logging.info('Encounter unknown request: %s.', action)
class SpawningServer(object):
"""The class used to start/stop a http server."""
def __init__(self, test_server_spawner_port, adb, tool, build_type):
logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
SpawningServerRequestHandler)
self.port = test_server_spawner_port
self.server.adb = adb
self.server.tool = tool
self.server.test_server_instance = None
self.server.build_type = build_type
def _Listen(self):
logging.info('Starting test server spawner')
self.server.serve_forever()
def Start(self):
listener_thread = threading.Thread(target=self._Listen)
listener_thread.setDaemon(True)
listener_thread.start()
time.sleep(1)
def Stop(self):
if self.server.test_server_instance:
self.server.test_server_instance.Stop()
self.server.shutdown()
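# Client-side sketch (illustrative; the real callers live in the Android test
# harness): the spawner accepts POST /start with a JSON body of test server
# arguments, plus GET /ping and GET /kill, as handled above. Port 8001 and
# the argument values are made up.
#
#   import urllib2
#   urllib2.urlopen('http://localhost:8001/ping').read()   # -> 'ready'
#   urllib2.urlopen(urllib2.Request(
#       'http://localhost:8001/start',
#       json.dumps({'server-type': 'http', 'port': 0, 'host': '127.0.0.1',
#                   'data-dir': ''}),
#       {'Content-Type': 'application/json'})).read()
#   urllib2.urlopen('http://localhost:8001/kill').read()    # -> 'killed'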
|
# coding: utf-8
"""
Cloudbreak API
Cloudbreak is a powerful left surf that breaks over a coral reef, a mile off southwest the island of Tavarua, Fiji. Cloudbreak is a cloud agnostic Hadoop as a Service API. Abstracts the provisioning and ease management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing to span up Hadoop clusters of arbitary sizes and cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers API (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.7.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ReinstallRequestV2(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'instance_groups': 'list[InstanceGroupsV2]',
'ambari_stack_details': 'AmbariStackDetails',
'blueprint_name': 'str',
'kerberos_password': 'str',
'kerberos_principal': 'str'
}
attribute_map = {
'instance_groups': 'instanceGroups',
'ambari_stack_details': 'ambariStackDetails',
'blueprint_name': 'blueprintName',
'kerberos_password': 'kerberosPassword',
'kerberos_principal': 'kerberosPrincipal'
}
def __init__(self, instance_groups=None, ambari_stack_details=None, blueprint_name=None, kerberos_password=None, kerberos_principal=None):
"""
ReinstallRequestV2 - a model defined in Swagger
"""
self._instance_groups = None
self._ambari_stack_details = None
self._blueprint_name = None
self._kerberos_password = None
self._kerberos_principal = None
if instance_groups is not None:
self.instance_groups = instance_groups
if ambari_stack_details is not None:
self.ambari_stack_details = ambari_stack_details
self.blueprint_name = blueprint_name
if kerberos_password is not None:
self.kerberos_password = kerberos_password
if kerberos_principal is not None:
self.kerberos_principal = kerberos_principal
@property
def instance_groups(self):
"""
Gets the instance_groups of this ReinstallRequestV2.
collection of instance groups
:return: The instance_groups of this ReinstallRequestV2.
:rtype: list[InstanceGroupsV2]
"""
return self._instance_groups
@instance_groups.setter
def instance_groups(self, instance_groups):
"""
Sets the instance_groups of this ReinstallRequestV2.
collection of instance groups
:param instance_groups: The instance_groups of this ReinstallRequestV2.
:type: list[InstanceGroupsV2]
"""
self._instance_groups = instance_groups
@property
def ambari_stack_details(self):
"""
Gets the ambari_stack_details of this ReinstallRequestV2.
details of the Ambari stack
:return: The ambari_stack_details of this ReinstallRequestV2.
:rtype: AmbariStackDetails
"""
return self._ambari_stack_details
@ambari_stack_details.setter
def ambari_stack_details(self, ambari_stack_details):
"""
Sets the ambari_stack_details of this ReinstallRequestV2.
details of the Ambari stack
:param ambari_stack_details: The ambari_stack_details of this ReinstallRequestV2.
:type: AmbariStackDetails
"""
self._ambari_stack_details = ambari_stack_details
@property
def blueprint_name(self):
"""
Gets the blueprint_name of this ReinstallRequestV2.
blueprint name for the cluster
:return: The blueprint_name of this ReinstallRequestV2.
:rtype: str
"""
return self._blueprint_name
@blueprint_name.setter
def blueprint_name(self, blueprint_name):
"""
Sets the blueprint_name of this ReinstallRequestV2.
blueprint name for the cluster
:param blueprint_name: The blueprint_name of this ReinstallRequestV2.
:type: str
"""
if blueprint_name is None:
raise ValueError("Invalid value for `blueprint_name`, must not be `None`")
self._blueprint_name = blueprint_name
@property
def kerberos_password(self):
"""
Gets the kerberos_password of this ReinstallRequestV2.
kerberos admin password
:return: The kerberos_password of this ReinstallRequestV2.
:rtype: str
"""
return self._kerberos_password
@kerberos_password.setter
def kerberos_password(self, kerberos_password):
"""
Sets the kerberos_password of this ReinstallRequestV2.
kerberos admin password
:param kerberos_password: The kerberos_password of this ReinstallRequestV2.
:type: str
"""
if kerberos_password is not None and len(kerberos_password) > 50:
raise ValueError("Invalid value for `kerberos_password`, length must be less than or equal to `50`")
if kerberos_password is not None and len(kerberos_password) < 5:
raise ValueError("Invalid value for `kerberos_password`, length must be greater than or equal to `5`")
self._kerberos_password = kerberos_password
@property
def kerberos_principal(self):
"""
Gets the kerberos_principal of this ReinstallRequestV2.
kerberos principal
:return: The kerberos_principal of this ReinstallRequestV2.
:rtype: str
"""
return self._kerberos_principal
@kerberos_principal.setter
def kerberos_principal(self, kerberos_principal):
"""
Sets the kerberos_principal of this ReinstallRequestV2.
kerberos principal
:param kerberos_principal: The kerberos_principal of this ReinstallRequestV2.
:type: str
"""
self._kerberos_principal = kerberos_principal
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, ReinstallRequestV2):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
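# Construction sketch (illustrative; all field values below are made up):
if __name__ == "__main__":
    req = ReinstallRequestV2(blueprint_name='hdp-small-default',
                             kerberos_password='secret12',
                             kerberos_principal='admin/admin')
    print(req.to_dict())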
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import functools
import os
import re
import signal
import sys
from six import unichr
import traceback
import matplotlib
from matplotlib._pylab_helpers import Gcf
from matplotlib.backend_bases import (
_Backend, FigureCanvasBase, FigureManagerBase, NavigationToolbar2,
TimerBase, cursors, ToolContainerBase, StatusbarBase)
import matplotlib.backends.qt_editor.figureoptions as figureoptions
from matplotlib.backends.qt_editor.formsubplottool import UiSubplotTool
from matplotlib.figure import Figure
from matplotlib.backend_managers import ToolManager
from matplotlib import backend_tools
from .qt_compat import (
QtCore, QtGui, QtWidgets, _getSaveFileName, is_pyqt5, __version__, QT_API)
backend_version = __version__
# SPECIAL_KEYS are keys that do *not* return their unicode name
# instead they have manually specified names
SPECIAL_KEYS = {QtCore.Qt.Key_Control: 'control',
QtCore.Qt.Key_Shift: 'shift',
QtCore.Qt.Key_Alt: 'alt',
QtCore.Qt.Key_Meta: 'super',
QtCore.Qt.Key_Return: 'enter',
QtCore.Qt.Key_Left: 'left',
QtCore.Qt.Key_Up: 'up',
QtCore.Qt.Key_Right: 'right',
QtCore.Qt.Key_Down: 'down',
QtCore.Qt.Key_Escape: 'escape',
QtCore.Qt.Key_F1: 'f1',
QtCore.Qt.Key_F2: 'f2',
QtCore.Qt.Key_F3: 'f3',
QtCore.Qt.Key_F4: 'f4',
QtCore.Qt.Key_F5: 'f5',
QtCore.Qt.Key_F6: 'f6',
QtCore.Qt.Key_F7: 'f7',
QtCore.Qt.Key_F8: 'f8',
QtCore.Qt.Key_F9: 'f9',
QtCore.Qt.Key_F10: 'f10',
QtCore.Qt.Key_F11: 'f11',
QtCore.Qt.Key_F12: 'f12',
QtCore.Qt.Key_Home: 'home',
QtCore.Qt.Key_End: 'end',
QtCore.Qt.Key_PageUp: 'pageup',
QtCore.Qt.Key_PageDown: 'pagedown',
QtCore.Qt.Key_Tab: 'tab',
QtCore.Qt.Key_Backspace: 'backspace',
QtCore.Qt.Key_Enter: 'enter',
QtCore.Qt.Key_Insert: 'insert',
QtCore.Qt.Key_Delete: 'delete',
QtCore.Qt.Key_Pause: 'pause',
QtCore.Qt.Key_SysReq: 'sysreq',
QtCore.Qt.Key_Clear: 'clear', }
# define which modifier keys are collected on keyboard events.
# elements are (mpl names, Modifier Flag, Qt Key) tuples
SUPER = 0
ALT = 1
CTRL = 2
SHIFT = 3
MODIFIER_KEYS = [('super', QtCore.Qt.MetaModifier, QtCore.Qt.Key_Meta),
('alt', QtCore.Qt.AltModifier, QtCore.Qt.Key_Alt),
('ctrl', QtCore.Qt.ControlModifier, QtCore.Qt.Key_Control),
('shift', QtCore.Qt.ShiftModifier, QtCore.Qt.Key_Shift),
]
if sys.platform == 'darwin':
# in OSX, the control and super (aka cmd/apple) keys are switched, so
# switch them back.
SPECIAL_KEYS.update({QtCore.Qt.Key_Control: 'cmd', # cmd/apple key
QtCore.Qt.Key_Meta: 'control',
})
MODIFIER_KEYS[0] = ('cmd', QtCore.Qt.ControlModifier,
QtCore.Qt.Key_Control)
MODIFIER_KEYS[2] = ('ctrl', QtCore.Qt.MetaModifier,
QtCore.Qt.Key_Meta)
cursord = {
cursors.MOVE: QtCore.Qt.SizeAllCursor,
cursors.HAND: QtCore.Qt.PointingHandCursor,
cursors.POINTER: QtCore.Qt.ArrowCursor,
cursors.SELECT_REGION: QtCore.Qt.CrossCursor,
cursors.WAIT: QtCore.Qt.WaitCursor,
}
# make place holder
qApp = None
def _create_qApp():
"""
Only one qApp can exist at a time, so check before creating one.
"""
global qApp
if qApp is None:
app = QtWidgets.QApplication.instance()
if app is None:
# check for DISPLAY env variable on X11 build of Qt
if is_pyqt5():
try:
from PyQt5 import QtX11Extras
is_x11_build = True
except ImportError:
is_x11_build = False
else:
is_x11_build = hasattr(QtGui, "QX11Info")
if is_x11_build:
display = os.environ.get('DISPLAY')
if display is None or not re.search(r':\d', display):
raise RuntimeError('Invalid DISPLAY variable')
qApp = QtWidgets.QApplication([b"matplotlib"])
qApp.lastWindowClosed.connect(qApp.quit)
else:
qApp = app
if is_pyqt5():
try:
qApp.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps)
qApp.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)
except AttributeError:
pass
def _allow_super_init(__init__):
"""
Decorator for ``__init__`` to allow ``super().__init__`` on PyQt4/PySide2.
"""
if QT_API == "PyQt5":
return __init__
else:
# To work around lack of cooperative inheritance in PyQt4, PySide,
# and PySide2, when calling FigureCanvasQT.__init__, we temporarily
# patch QWidget.__init__ by a cooperative version, that first calls
# QWidget.__init__ with no additional arguments, and then finds the
# next class in the MRO with an __init__ that does support cooperative
# inheritance (i.e., not defined by the PyQt4, PySide, PySide2, sip
# or Shiboken packages), and manually call its `__init__`, once again
# passing the additional arguments.
qwidget_init = QtWidgets.QWidget.__init__
def cooperative_qwidget_init(self, *args, **kwargs):
qwidget_init(self)
mro = type(self).__mro__
next_coop_init = next(
cls for cls in mro[mro.index(QtWidgets.QWidget) + 1:]
if cls.__module__.split(".")[0] not in [
"PyQt4", "sip", "PySide", "PySide2", "Shiboken"])
next_coop_init.__init__(self, *args, **kwargs)
@functools.wraps(__init__)
def wrapper(self, **kwargs):
try:
QtWidgets.QWidget.__init__ = cooperative_qwidget_init
__init__(self, **kwargs)
finally:
# Restore __init__
QtWidgets.QWidget.__init__ = qwidget_init
return wrapper
class TimerQT(TimerBase):
'''
Subclass of :class:`backend_bases.TimerBase` that uses Qt timer events.
Attributes
----------
interval : int
The time between timer events in milliseconds. Default is 1000 ms.
single_shot : bool
Boolean flag indicating whether this timer should
operate as single shot (run once and then stop). Defaults to False.
callbacks : list
Stores list of (func, args) tuples that will be called upon timer
events. This list can be manipulated directly, or the functions
`add_callback` and `remove_callback` can be used.
'''
def __init__(self, *args, **kwargs):
TimerBase.__init__(self, *args, **kwargs)
# Create a new timer and connect the timeout() signal to the
# _on_timer method.
self._timer = QtCore.QTimer()
self._timer.timeout.connect(self._on_timer)
self._timer_set_interval()
def _timer_set_single_shot(self):
self._timer.setSingleShot(self._single)
def _timer_set_interval(self):
self._timer.setInterval(self._interval)
def _timer_start(self):
self._timer.start()
def _timer_stop(self):
self._timer.stop()
class FigureCanvasQT(QtWidgets.QWidget, FigureCanvasBase):
# map Qt button codes to MouseEvent's ones:
buttond = {QtCore.Qt.LeftButton: 1,
QtCore.Qt.MidButton: 2,
QtCore.Qt.RightButton: 3,
# QtCore.Qt.XButton1: None,
# QtCore.Qt.XButton2: None,
}
@_allow_super_init
def __init__(self, figure):
_create_qApp()
super(FigureCanvasQT, self).__init__(figure=figure)
self.figure = figure
# We don't want to scale up the figure DPI more than once.
# Note, we don't handle a signal for changing DPI yet.
figure._original_dpi = figure.dpi
self._update_figure_dpi()
# In cases with mixed resolution displays, we need to be careful if the
# dpi_ratio changes - in this case we need to resize the canvas
# accordingly. We could watch for screenChanged events from Qt, but
# the issue is that we can't guarantee this will be emitted *before*
# the first paintEvent for the canvas, so instead we keep track of the
# dpi_ratio value here and in paintEvent we resize the canvas if
# needed.
self._dpi_ratio_prev = None
self._draw_pending = False
self._is_drawing = False
self._draw_rect_callback = lambda painter: None
self.setAttribute(QtCore.Qt.WA_OpaquePaintEvent)
self.setMouseTracking(True)
self.resize(*self.get_width_height())
# Key auto-repeat enabled by default
self._keyautorepeat = True
palette = QtGui.QPalette(QtCore.Qt.white)
self.setPalette(palette)
def _update_figure_dpi(self):
dpi = self._dpi_ratio * self.figure._original_dpi
self.figure._set_dpi(dpi, forward=False)
@property
def _dpi_ratio(self):
# Not available on Qt4 or some older Qt5.
try:
# self.devicePixelRatio() returns 0 in rare cases
return self.devicePixelRatio() or 1
except AttributeError:
return 1
def _update_dpi(self):
# As described in __init__ above, we need to be careful in cases with
# mixed resolution displays if dpi_ratio is changing between painting
# events.
# Return whether we triggered a resizeEvent (and thus a paintEvent)
# from within this function.
if self._dpi_ratio != self._dpi_ratio_prev:
# We need to update the figure DPI.
self._update_figure_dpi()
self._dpi_ratio_prev = self._dpi_ratio
# The easiest way to resize the canvas is to emit a resizeEvent
# since we implement all the logic for resizing the canvas for
# that event.
event = QtGui.QResizeEvent(self.size(), self.size())
self.resizeEvent(event)
# resizeEvent triggers a paintEvent itself, so we exit this one
# (after making sure that the event is immediately handled).
return True
return False
def get_width_height(self):
w, h = FigureCanvasBase.get_width_height(self)
return int(w / self._dpi_ratio), int(h / self._dpi_ratio)
def enterEvent(self, event):
FigureCanvasBase.enter_notify_event(self, guiEvent=event)
def leaveEvent(self, event):
QtWidgets.QApplication.restoreOverrideCursor()
FigureCanvasBase.leave_notify_event(self, guiEvent=event)
def mouseEventCoords(self, pos):
"""Calculate mouse coordinates in physical pixels
Qt5 use logical pixels, but the figure is scaled to physical
pixels for rendering. Transform to physical pixels so that
all of the down-stream transforms work as expected.
Also, the origin is different and needs to be corrected.
"""
dpi_ratio = self._dpi_ratio
x = pos.x()
# flip y so y=0 is bottom of canvas
y = self.figure.bbox.height / dpi_ratio - pos.y()
return x * dpi_ratio, y * dpi_ratio
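# Worked example (illustrative): with a canvas 480 logical pixels tall and
# _dpi_ratio == 2 (so self.figure.bbox.height == 960 physical pixels), a
# Qt position of (10, 20) maps to (10 * 2, (480 - 20) * 2) == (20, 920).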
def mousePressEvent(self, event):
x, y = self.mouseEventCoords(event.pos())
button = self.buttond.get(event.button())
if button is not None:
FigureCanvasBase.button_press_event(self, x, y, button,
guiEvent=event)
def mouseDoubleClickEvent(self, event):
x, y = self.mouseEventCoords(event.pos())
button = self.buttond.get(event.button())
if button is not None:
FigureCanvasBase.button_press_event(self, x, y,
button, dblclick=True,
guiEvent=event)
def mouseMoveEvent(self, event):
x, y = self.mouseEventCoords(event)
FigureCanvasBase.motion_notify_event(self, x, y, guiEvent=event)
def mouseReleaseEvent(self, event):
x, y = self.mouseEventCoords(event)
button = self.buttond.get(event.button())
if button is not None:
FigureCanvasBase.button_release_event(self, x, y, button,
guiEvent=event)
if is_pyqt5():
def wheelEvent(self, event):
x, y = self.mouseEventCoords(event)
# from QWheelEvent::delta doc
if event.pixelDelta().x() == 0 and event.pixelDelta().y() == 0:
steps = event.angleDelta().y() / 120
else:
steps = event.pixelDelta().y()
if steps:
FigureCanvasBase.scroll_event(
self, x, y, steps, guiEvent=event)
else:
def wheelEvent(self, event):
x = event.x()
# flipy so y=0 is bottom of canvas
y = self.figure.bbox.height - event.y()
# from QWheelEvent::delta doc
steps = event.delta() / 120
if event.orientation() == QtCore.Qt.Vertical:
FigureCanvasBase.scroll_event(
self, x, y, steps, guiEvent=event)
def keyPressEvent(self, event):
key = self._get_key(event)
if key is not None:
FigureCanvasBase.key_press_event(self, key, guiEvent=event)
def keyReleaseEvent(self, event):
key = self._get_key(event)
if key is not None:
FigureCanvasBase.key_release_event(self, key, guiEvent=event)
@property
def keyAutoRepeat(self):
"""
If True, enable auto-repeat for key events.
"""
return self._keyautorepeat
@keyAutoRepeat.setter
def keyAutoRepeat(self, val):
self._keyautorepeat = bool(val)
def resizeEvent(self, event):
# _dpi_ratio_prev will be set the first time the canvas is painted, and
# the rendered buffer is useless before anyways.
if self._dpi_ratio_prev is None:
return
w = event.size().width() * self._dpi_ratio
h = event.size().height() * self._dpi_ratio
dpival = self.figure.dpi
winch = w / dpival
hinch = h / dpival
self.figure.set_size_inches(winch, hinch, forward=False)
# pass back into Qt to let it finish
QtWidgets.QWidget.resizeEvent(self, event)
# emit our resize events
FigureCanvasBase.resize_event(self)
def sizeHint(self):
w, h = self.get_width_height()
return QtCore.QSize(w, h)
def minimumSizeHint(self):
return QtCore.QSize(10, 10)
def _get_key(self, event):
if not self._keyautorepeat and event.isAutoRepeat():
return None
event_key = event.key()
event_mods = int(event.modifiers()) # actually a bitmask
# get names of the pressed modifier keys
# bit twiddling to pick out modifier keys from event_mods bitmask,
# if event_key is a MODIFIER, it should not be duplicated in mods
mods = [name for name, mod_key, qt_key in MODIFIER_KEYS
if event_key != qt_key and (event_mods & mod_key) == mod_key]
try:
# for certain keys (enter, left, backspace, etc) use a word for the
# key, rather than unicode
key = SPECIAL_KEYS[event_key]
except KeyError:
# unicode defines code points up to 0x0010ffff
# QT will use Key_Codes larger than that for keyboard keys that are
# are not unicode characters (like multimedia keys)
# skip these
# if you really want them, you should add them to SPECIAL_KEYS
MAX_UNICODE = 0x10ffff
if event_key > MAX_UNICODE:
return None
key = unichr(event_key)
# qt delivers capitalized letters. fix capitalization
# note that capslock is ignored
if 'shift' in mods:
mods.remove('shift')
else:
key = key.lower()
mods.reverse()
return '+'.join(mods + [key])
def new_timer(self, *args, **kwargs):
"""
Creates a new backend-specific subclass of
:class:`backend_bases.Timer`. This is useful for getting
periodic events through the backend's native event
loop. Implemented only for backends with GUIs.
Other Parameters
----------------
interval : scalar
Timer interval in milliseconds
callbacks : list
Sequence of (func, args, kwargs) where ``func(*args, **kwargs)``
will be executed by the timer every *interval*.
"""
return TimerQT(*args, **kwargs)
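# Usage sketch (illustrative): a 500 ms repeating timer driven by the Qt
# event loop; add_callback comes from backend_bases.TimerBase.
#
#   timer = canvas.new_timer(interval=500)
#   timer.add_callback(lambda: print('tick'))
#   timer.start()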
def flush_events(self):
qApp.processEvents()
def start_event_loop(self, timeout=0):
if hasattr(self, "_event_loop") and self._event_loop.isRunning():
raise RuntimeError("Event loop already running")
self._event_loop = event_loop = QtCore.QEventLoop()
if timeout:
QtCore.QTimer.singleShot(int(timeout * 1000), event_loop.quit)
event_loop.exec_()
def stop_event_loop(self, event=None):
if hasattr(self, "_event_loop"):
self._event_loop.quit()
def draw(self):
"""Render the figure, and queue a request for a Qt draw.
"""
# The renderer draw is done here; delaying causes problems with code
# that uses the result of the draw() to update plot elements.
if self._is_drawing:
return
self._is_drawing = True
try:
super(FigureCanvasQT, self).draw()
finally:
self._is_drawing = False
self.update()
def draw_idle(self):
"""Queue redraw of the Agg buffer and request Qt paintEvent.
"""
# The Agg draw needs to be handled by the same thread matplotlib
# modifies the scene graph from. Post Agg draw request to the
# current event loop in order to ensure thread affinity and to
# accumulate multiple draw requests from event handling.
# TODO: queued signal connection might be safer than singleShot
if not (self._draw_pending or self._is_drawing):
self._draw_pending = True
QtCore.QTimer.singleShot(0, self._draw_idle)
def _draw_idle(self):
if self.height() < 0 or self.width() < 0:
self._draw_pending = False
if not self._draw_pending:
return
try:
self.draw()
except Exception:
# Uncaught exceptions are fatal for PyQt5, so catch them instead.
traceback.print_exc()
finally:
self._draw_pending = False
def drawRectangle(self, rect):
# Draw the zoom rectangle to the QPainter. _draw_rect_callback needs
# to be called at the end of paintEvent.
if rect is not None:
def _draw_rect_callback(painter):
pen = QtGui.QPen(QtCore.Qt.black, 1 / self._dpi_ratio,
QtCore.Qt.DotLine)
painter.setPen(pen)
painter.drawRect(*(pt / self._dpi_ratio for pt in rect))
else:
def _draw_rect_callback(painter):
return
self._draw_rect_callback = _draw_rect_callback
self.update()
class MainWindow(QtWidgets.QMainWindow):
closing = QtCore.Signal()
def closeEvent(self, event):
self.closing.emit()
QtWidgets.QMainWindow.closeEvent(self, event)
class FigureManagerQT(FigureManagerBase):
"""
Attributes
----------
canvas : `FigureCanvas`
The FigureCanvas instance
num : int or str
The Figure number
toolbar : qt.QToolBar
The qt.QToolBar
window : qt.QMainWindow
The qt.QMainWindow
"""
def __init__(self, canvas, num):
FigureManagerBase.__init__(self, canvas, num)
self.canvas = canvas
self.window = MainWindow()
self.window.closing.connect(canvas.close_event)
self.window.closing.connect(self._widgetclosed)
self.window.setWindowTitle("Figure %d" % num)
image = os.path.join(matplotlib.rcParams['datapath'],
'images', 'matplotlib.svg')
self.window.setWindowIcon(QtGui.QIcon(image))
# Give the keyboard focus to the figure instead of the
# manager; StrongFocus accepts both tab and click to focus and
# will enable the canvas to process event w/o clicking.
# ClickFocus only takes the focus is the window has been
# clicked
# on. http://qt-project.org/doc/qt-4.8/qt.html#FocusPolicy-enum or
# http://doc.qt.digia.com/qt/qt.html#FocusPolicy-enum
self.canvas.setFocusPolicy(QtCore.Qt.StrongFocus)
self.canvas.setFocus()
self.window._destroying = False
self.toolmanager = self._get_toolmanager()
self.toolbar = self._get_toolbar(self.canvas, self.window)
self.statusbar = None
if self.toolmanager:
backend_tools.add_tools_to_manager(self.toolmanager)
if self.toolbar:
backend_tools.add_tools_to_container(self.toolbar)
self.statusbar = StatusbarQt(self.window, self.toolmanager)
if self.toolbar is not None:
self.window.addToolBar(self.toolbar)
if not self.toolmanager:
# add text label to status bar
statusbar_label = QtWidgets.QLabel()
self.window.statusBar().addWidget(statusbar_label)
self.toolbar.message.connect(statusbar_label.setText)
tbs_height = self.toolbar.sizeHint().height()
else:
tbs_height = 0
# resize the main window so it will display the canvas with the
# requested size:
cs = canvas.sizeHint()
sbs = self.window.statusBar().sizeHint()
self._status_and_tool_height = tbs_height + sbs.height()
height = cs.height() + self._status_and_tool_height
self.window.resize(cs.width(), height)
self.window.setCentralWidget(self.canvas)
if matplotlib.is_interactive():
self.window.show()
self.canvas.draw_idle()
def notify_axes_change(fig):
# This will be called whenever the current axes is changed
if self.toolbar is not None:
self.toolbar.update()
self.canvas.figure.add_axobserver(notify_axes_change)
self.window.raise_()
def full_screen_toggle(self):
if self.window.isFullScreen():
self.window.showNormal()
else:
self.window.showFullScreen()
def _widgetclosed(self):
if self.window._destroying:
return
self.window._destroying = True
try:
Gcf.destroy(self.num)
except AttributeError:
pass
# It seems that when the python session is killed,
# Gcf can get destroyed before the Gcf.destroy
# line is run, leading to a useless AttributeError.
def _get_toolbar(self, canvas, parent):
# must be inited after the window, drawingArea and figure
# attrs are set
if matplotlib.rcParams['toolbar'] == 'toolbar2':
toolbar = NavigationToolbar2QT(canvas, parent, False)
elif matplotlib.rcParams['toolbar'] == 'toolmanager':
toolbar = ToolbarQt(self.toolmanager, self.window)
else:
toolbar = None
return toolbar
def _get_toolmanager(self):
if matplotlib.rcParams['toolbar'] == 'toolmanager':
toolmanager = ToolManager(self.canvas.figure)
else:
toolmanager = None
return toolmanager
def resize(self, width, height):
'set the canvas size in pixels'
self.window.resize(width, height + self._status_and_tool_height)
def show(self):
self.window.show()
self.window.activateWindow()
self.window.raise_()
def destroy(self, *args):
# check for qApp first, as PySide deletes it in its atexit handler
if QtWidgets.QApplication.instance() is None:
return
if self.window._destroying:
return
self.window._destroying = True
if self.toolbar:
self.toolbar.destroy()
self.window.close()
def get_window_title(self):
return six.text_type(self.window.windowTitle())
def set_window_title(self, title):
self.window.setWindowTitle(title)
class NavigationToolbar2QT(NavigationToolbar2, QtWidgets.QToolBar):
message = QtCore.Signal(str)
def __init__(self, canvas, parent, coordinates=True):
""" coordinates: should we show the coordinates on the right? """
self.canvas = canvas
self.parent = parent
self.coordinates = coordinates
self._actions = {}
"""A mapping of toolitem method names to their QActions"""
QtWidgets.QToolBar.__init__(self, parent)
NavigationToolbar2.__init__(self, canvas)
def _icon(self, name):
if is_pyqt5():
name = name.replace('.png', '_large.png')
pm = QtGui.QPixmap(os.path.join(self.basedir, name))
if hasattr(pm, 'setDevicePixelRatio'):
pm.setDevicePixelRatio(self.canvas._dpi_ratio)
return QtGui.QIcon(pm)
def _init_toolbar(self):
self.basedir = os.path.join(matplotlib.rcParams['datapath'], 'images')
for text, tooltip_text, image_file, callback in self.toolitems:
if text is None:
self.addSeparator()
else:
a = self.addAction(self._icon(image_file + '.png'),
text, getattr(self, callback))
self._actions[callback] = a
if callback in ['zoom', 'pan']:
a.setCheckable(True)
if tooltip_text is not None:
a.setToolTip(tooltip_text)
if text == 'Subplots':
a = self.addAction(self._icon("qt4_editor_options.png"),
'Customize', self.edit_parameters)
a.setToolTip('Edit axis, curve and image parameters')
self.buttons = {}
# Add the x,y location widget at the right side of the toolbar
# The stretch factor is 1 which means any resizing of the toolbar
# will resize this label instead of the buttons.
if self.coordinates:
self.locLabel = QtWidgets.QLabel("", self)
self.locLabel.setAlignment(
QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
self.locLabel.setSizePolicy(
QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Ignored))
labelAction = self.addWidget(self.locLabel)
labelAction.setVisible(True)
# reference holder for subplots_adjust window
self.adj_window = None
# Esthetic adjustments - we need to set these explicitly in PyQt5
# otherwise the layout looks different - but we don't want to set it if
# not using HiDPI icons otherwise they look worse than before.
if is_pyqt5():
self.setIconSize(QtCore.QSize(24, 24))
self.layout().setSpacing(12)
if is_pyqt5():
# For some reason, self.setMinimumHeight doesn't seem to carry over to
# the actual sizeHint, so override it instead in order to make the
# aesthetic adjustments noted above.
def sizeHint(self):
size = super(NavigationToolbar2QT, self).sizeHint()
size.setHeight(max(48, size.height()))
return size
def edit_parameters(self):
allaxes = self.canvas.figure.get_axes()
if not allaxes:
QtWidgets.QMessageBox.warning(
self.parent, "Error", "There are no axes to edit.")
return
elif len(allaxes) == 1:
axes, = allaxes
else:
titles = []
for axes in allaxes:
name = (axes.get_title() or
" - ".join(filter(None, [axes.get_xlabel(),
axes.get_ylabel()])) or
"<anonymous {} (id: {:#x})>".format(
type(axes).__name__, id(axes)))
titles.append(name)
item, ok = QtWidgets.QInputDialog.getItem(
self.parent, 'Customize', 'Select axes:', titles, 0, False)
if ok:
axes = allaxes[titles.index(six.text_type(item))]
else:
return
figureoptions.figure_edit(axes, self)
def _update_buttons_checked(self):
# sync button checkstates to match active mode
self._actions['pan'].setChecked(self._active == 'PAN')
self._actions['zoom'].setChecked(self._active == 'ZOOM')
def pan(self, *args):
super(NavigationToolbar2QT, self).pan(*args)
self._update_buttons_checked()
def zoom(self, *args):
super(NavigationToolbar2QT, self).zoom(*args)
self._update_buttons_checked()
def set_message(self, s):
self.message.emit(s)
if self.coordinates:
self.locLabel.setText(s)
def set_cursor(self, cursor):
self.canvas.setCursor(cursord[cursor])
def draw_rubberband(self, event, x0, y0, x1, y1):
height = self.canvas.figure.bbox.height
y1 = height - y1
y0 = height - y0
rect = [int(val) for val in (x0, y0, x1 - x0, y1 - y0)]
self.canvas.drawRectangle(rect)
def remove_rubberband(self):
self.canvas.drawRectangle(None)
def configure_subplots(self):
image = os.path.join(matplotlib.rcParams['datapath'],
'images', 'matplotlib.png')
dia = SubplotToolQt(self.canvas.figure, self.parent)
dia.setWindowIcon(QtGui.QIcon(image))
dia.exec_()
def save_figure(self, *args):
filetypes = self.canvas.get_supported_filetypes_grouped()
sorted_filetypes = sorted(six.iteritems(filetypes))
default_filetype = self.canvas.get_default_filetype()
startpath = os.path.expanduser(
matplotlib.rcParams['savefig.directory'])
start = os.path.join(startpath, self.canvas.get_default_filename())
filters = []
selectedFilter = None
for name, exts in sorted_filetypes:
exts_list = " ".join(['*.%s' % ext for ext in exts])
filter = '%s (%s)' % (name, exts_list)
if default_filetype in exts:
selectedFilter = filter
filters.append(filter)
filters = ';;'.join(filters)
fname, filter = _getSaveFileName(self.parent,
"Choose a filename to save to",
start, filters, selectedFilter)
if fname:
# Save dir for next time, unless empty str (i.e., use cwd).
if startpath != "":
matplotlib.rcParams['savefig.directory'] = (
os.path.dirname(six.text_type(fname)))
try:
self.canvas.figure.savefig(six.text_type(fname))
except Exception as e:
QtWidgets.QMessageBox.critical(
self, "Error saving file", six.text_type(e),
QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.NoButton)
class SubplotToolQt(UiSubplotTool):
def __init__(self, targetfig, parent):
UiSubplotTool.__init__(self, None)
self._figure = targetfig
for lower, higher in [("bottom", "top"), ("left", "right")]:
self._widgets[lower].valueChanged.connect(
lambda val: self._widgets[higher].setMinimum(val + .001))
self._widgets[higher].valueChanged.connect(
lambda val: self._widgets[lower].setMaximum(val - .001))
self._attrs = ["top", "bottom", "left", "right", "hspace", "wspace"]
self._defaults = {attr: vars(self._figure.subplotpars)[attr]
for attr in self._attrs}
# Set values after setting the range callbacks, but before setting up
# the redraw callbacks.
self._reset()
for attr in self._attrs:
self._widgets[attr].valueChanged.connect(self._on_value_changed)
for action, method in [("Export values", self._export_values),
("Tight layout", self._tight_layout),
("Reset", self._reset),
("Close", self.close)]:
self._widgets[action].clicked.connect(method)
def _export_values(self):
# Explicitly round to 3 decimals (which is also the spinbox precision)
# to avoid numbers of the form 0.100...001.
dialog = QtWidgets.QDialog()
layout = QtWidgets.QVBoxLayout()
dialog.setLayout(layout)
text = QtWidgets.QPlainTextEdit()
text.setReadOnly(True)
layout.addWidget(text)
text.setPlainText(
",\n".join("{}={:.3}".format(attr, self._widgets[attr].value())
for attr in self._attrs))
# Adjust the height of the text widget to fit the whole text, plus
# some padding.
size = text.maximumSize()
size.setHeight(
QtGui.QFontMetrics(text.document().defaultFont())
.size(0, text.toPlainText()).height() + 20)
text.setMaximumSize(size)
dialog.exec_()
def _on_value_changed(self):
self._figure.subplots_adjust(**{attr: self._widgets[attr].value()
for attr in self._attrs})
self._figure.canvas.draw_idle()
def _tight_layout(self):
self._figure.tight_layout()
for attr in self._attrs:
widget = self._widgets[attr]
widget.blockSignals(True)
widget.setValue(vars(self._figure.subplotpars)[attr])
widget.blockSignals(False)
self._figure.canvas.draw_idle()
def _reset(self):
for attr, value in self._defaults.items():
self._widgets[attr].setValue(value)
class ToolbarQt(ToolContainerBase, QtWidgets.QToolBar):
def __init__(self, toolmanager, parent):
ToolContainerBase.__init__(self, toolmanager)
QtWidgets.QToolBar.__init__(self, parent)
self._toolitems = {}
self._groups = {}
self._last = None
@property
def _icon_extension(self):
if is_pyqt5():
return '_large.png'
return '.png'
def add_toolitem(
self, name, group, position, image_file, description, toggle):
button = QtWidgets.QToolButton(self)
button.setIcon(self._icon(image_file))
button.setText(name)
if description:
button.setToolTip(description)
def handler():
self.trigger_tool(name)
if toggle:
button.setCheckable(True)
button.toggled.connect(handler)
else:
button.clicked.connect(handler)
self._last = button
self._toolitems.setdefault(name, [])
self._add_to_group(group, name, button, position)
self._toolitems[name].append((button, handler))
def _add_to_group(self, group, name, button, position):
gr = self._groups.get(group, [])
if not gr:
sep = self.addSeparator()
gr.append(sep)
before = gr[position]
widget = self.insertWidget(before, button)
gr.insert(position, widget)
self._groups[group] = gr
def _icon(self, name):
pm = QtGui.QPixmap(name)
if hasattr(pm, 'setDevicePixelRatio'):
pm.setDevicePixelRatio(self.toolmanager.canvas._dpi_ratio)
return QtGui.QIcon(pm)
def toggle_toolitem(self, name, toggled):
if name not in self._toolitems:
return
for button, handler in self._toolitems[name]:
button.toggled.disconnect(handler)
button.setChecked(toggled)
button.toggled.connect(handler)
def remove_toolitem(self, name):
for button, handler in self._toolitems[name]:
button.setParent(None)
del self._toolitems[name]
class StatusbarQt(StatusbarBase, QtWidgets.QLabel):
def __init__(self, window, *args, **kwargs):
StatusbarBase.__init__(self, *args, **kwargs)
QtWidgets.QLabel.__init__(self)
window.statusBar().addWidget(self)
def set_message(self, s):
self.setText(s)
class ConfigureSubplotsQt(backend_tools.ConfigureSubplotsBase):
def trigger(self, *args):
image = os.path.join(matplotlib.rcParams['datapath'],
'images', 'matplotlib.png')
parent = self.canvas.manager.window
dia = SubplotToolQt(self.figure, parent)
dia.setWindowIcon(QtGui.QIcon(image))
dia.exec_()
class SaveFigureQt(backend_tools.SaveFigureBase):
def trigger(self, *args):
filetypes = self.canvas.get_supported_filetypes_grouped()
sorted_filetypes = sorted(six.iteritems(filetypes))
default_filetype = self.canvas.get_default_filetype()
startpath = os.path.expanduser(
matplotlib.rcParams['savefig.directory'])
start = os.path.join(startpath, self.canvas.get_default_filename())
filters = []
selectedFilter = None
for name, exts in sorted_filetypes:
exts_list = " ".join(['*.%s' % ext for ext in exts])
filter = '%s (%s)' % (name, exts_list)
if default_filetype in exts:
selectedFilter = filter
filters.append(filter)
filters = ';;'.join(filters)
parent = self.canvas.manager.window
fname, filter = _getSaveFileName(parent,
"Choose a filename to save to",
start, filters, selectedFilter)
if fname:
# Save dir for next time, unless empty str (i.e., use cwd).
if startpath != "":
matplotlib.rcParams['savefig.directory'] = (
os.path.dirname(six.text_type(fname)))
try:
self.canvas.figure.savefig(six.text_type(fname))
except Exception as e:
QtWidgets.QMessageBox.critical(
self, "Error saving file", six.text_type(e),
QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.NoButton)
class SetCursorQt(backend_tools.SetCursorBase):
def set_cursor(self, cursor):
self.canvas.setCursor(cursord[cursor])
class RubberbandQt(backend_tools.RubberbandBase):
def draw_rubberband(self, x0, y0, x1, y1):
height = self.canvas.figure.bbox.height
y1 = height - y1
y0 = height - y0
rect = [int(val) for val in (x0, y0, x1 - x0, y1 - y0)]
self.canvas.drawRectangle(rect)
def remove_rubberband(self):
self.canvas.drawRectangle(None)
backend_tools.ToolSaveFigure = SaveFigureQt
backend_tools.ToolConfigureSubplots = ConfigureSubplotsQt
backend_tools.ToolSetCursor = SetCursorQt
backend_tools.ToolRubberband = RubberbandQt
def error_msg_qt(msg, parent=None):
if not isinstance(msg, six.string_types):
msg = ','.join(map(str, msg))
    QtWidgets.QMessageBox.warning(parent, "Matplotlib",
                                  msg, QtWidgets.QMessageBox.Ok)
def exception_handler(type, value, tb):
"""Handle uncaught exceptions
It does not catch SystemExit
"""
msg = ''
# get the filename attribute if available (for IOError)
if hasattr(value, 'filename') and value.filename is not None:
msg = value.filename + ': '
if hasattr(value, 'strerror') and value.strerror is not None:
msg += value.strerror
else:
msg += six.text_type(value)
if len(msg):
error_msg_qt(msg)
@_Backend.export
class _BackendQT5(_Backend):
FigureCanvas = FigureCanvasQT
FigureManager = FigureManagerQT
@staticmethod
def trigger_manager_draw(manager):
manager.canvas.draw_idle()
@staticmethod
def mainloop():
# allow KeyboardInterrupt exceptions to close the plot window.
signal.signal(signal.SIGINT, signal.SIG_DFL)
qApp.exec_()
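# Hedged usage sketch (not part of the backend module itself): how a script
# would typically select this Qt backend, assuming a Qt binding such as
# PyQt5 is installed.
#
#   import matplotlib
#   matplotlib.use("Qt5Agg")
#   import matplotlib.pyplot as plt
#   plt.plot([0, 1, 2], [0, 1, 4])
#   plt.show()  # blocks in _BackendQT5.mainloop() until the window is closed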
|
class ClientStorage:
def getAllClients(self):
pass
def postClient(self, name, surname, email):
pass
def delClient(self, id_client):
pass
def patchClient(self, id_client, new_name, new_surname, new_email):
pass
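# A minimal in-memory sketch of how this storage interface might be
# implemented (illustrative assumption only; the real backing store used by
# the application is not shown in this snippet).
class InMemoryClientStorage(ClientStorage):
    def __init__(self):
        self._clients = {}
        self._next_id = 1

    def getAllClients(self):
        # Return the stored records as a list of (id, data) pairs.
        return list(self._clients.items())

    def postClient(self, name, surname, email):
        id_client = self._next_id
        self._next_id += 1
        self._clients[id_client] = {'name': name, 'surname': surname, 'email': email}
        return id_client

    def delClient(self, id_client):
        self._clients.pop(id_client, None)

    def patchClient(self, id_client, new_name, new_surname, new_email):
        if id_client in self._clients:
            self._clients[id_client] = {'name': new_name, 'surname': new_surname, 'email': new_email}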
|
#!/usr/bin/env python3
# coding:utf-8
from zipfile import ZipFile
comments = []
filename = "90052"
channel = ZipFile("channel.zip", 'r')
while filename.isdigit():
filename += ".txt"
f = channel.open(filename, 'r')
line = f.readline()
f.close()
t = channel.getinfo(filename).comment
comments.append(str(t, encoding="utf-8")) # bytes -> str
filename = bytes.decode(line.split()[-1]) # bytes -> str
print(''.join(comments))
|
""" Nom recursive version of fibo. """
# pythran export fibo(int)
# runas fibo(7)
def fibo(n):
""" fibonaccie compuation. """
a, b = 1, 1
for _ in range(n):
a, b = a + b, a
return a
|
import json
import uuid
from enum import Enum
import web3
from eth_account.messages import defunct_hash_message
from web3 import Web3
from common.logger import get_logger
logger = get_logger(__name__)
class ContractType(Enum):
REGISTRY = "REGISTRY"
MPE = "MPE"
RFAI = "RFAI"
class BlockChainUtil(object):
def __init__(self, provider_type, provider):
if provider_type == "HTTP_PROVIDER":
self.provider = Web3.HTTPProvider(provider)
elif provider_type == "WS_PROVIDER":
self.provider = web3.providers.WebsocketProvider(provider)
else:
raise Exception("Only HTTP_PROVIDER and WS_PROVIDER provider type are supported.")
self.web3_object = Web3(self.provider)
def load_contract(self, path):
with open(path) as f:
contract = json.load(f)
return contract
def read_contract_address(self, net_id, path, key):
contract = self.load_contract(path)
return Web3.toChecksumAddress(contract[str(net_id)][key])
def contract_instance(self, contract_abi, address):
return self.web3_object.eth.contract(abi=contract_abi, address=address)
def get_contract_instance(self, base_path, contract_name, net_id):
contract_network_path, contract_abi_path = self.get_contract_file_paths(base_path, contract_name)
contract_address = self.read_contract_address(net_id=net_id, path=contract_network_path,
key='address')
contract_abi = self.load_contract(contract_abi_path)
logger.debug(f"contract address is {contract_address}")
contract_instance = self.contract_instance(contract_abi=contract_abi, address=contract_address)
return contract_instance
def generate_signature(self, data_types, values, signer_key):
signer_key = "0x" + signer_key if not signer_key.startswith("0x") else signer_key
message = web3.Web3.soliditySha3(data_types, values)
signature = self.web3_object.eth.account.signHash(defunct_hash_message(message), signer_key)
return signature.signature.hex()
def generate_signature_bytes(self, data_types, values, signer_key):
signer_key = "0x" + signer_key if not signer_key.startswith("0x") else signer_key
message = web3.Web3.soliditySha3(data_types, values)
signature = self.web3_object.eth.account.signHash(defunct_hash_message(message), signer_key)
return bytes(signature.signature)
def get_nonce(self, address):
""" transaction count includes pending transaction also. """
nonce = self.web3_object.eth.getTransactionCount(address)
return nonce
def sign_transaction_with_private_key(self, private_key, transaction_object):
return self.web3_object.eth.account.signTransaction(transaction_object, private_key).rawTransaction
def create_transaction_object(self, *positional_inputs, method_name, address, contract_path, contract_address_path,
net_id):
nonce = self.get_nonce(address=address)
self.contract = self.load_contract(path=contract_path)
self.contract_address = self.read_contract_address(net_id=net_id, path=contract_address_path, key='address')
        # Use a local variable so the contract_instance() method is not
        # shadowed by its own return value on subsequent calls.
        contract_instance = self.contract_instance(contract_abi=self.contract,
                                                   address=self.contract_address)
        logger.debug("gas_price == %s", self.web3_object.eth.gasPrice)
        logger.debug("nonce == %s", nonce)
        gas_price = 3 * (self.web3_object.eth.gasPrice)
        transaction_object = getattr(contract_instance.functions, method_name)(
            *positional_inputs).buildTransaction({
"from": address,
"nonce": nonce,
"gasPrice": gas_price,
"chainId": net_id
})
return transaction_object
def process_raw_transaction(self, raw_transaction):
return self.web3_object.eth.sendRawTransaction(raw_transaction).hex()
def create_account(self):
account = self.web3_object.eth.account.create(uuid.uuid4().hex)
return account.address, account.privateKey.hex()
def get_current_block_no(self):
return self.web3_object.eth.blockNumber
def get_transaction_receipt_from_blockchain(self, transaction_hash):
return self.web3_object.eth.getTransactionReceipt(transaction_hash)
def get_contract_file_paths(self, base_path, contract_name):
if contract_name == ContractType.REGISTRY.value:
json_file = "Registry.json"
elif contract_name == ContractType.MPE.value:
json_file = "MultiPartyEscrow.json"
elif contract_name == ContractType.RFAI.value:
json_file = "ServiceRequest.json"
else:
raise Exception("Invalid contract Type {}".format(contract_name))
contract_network_path = base_path + "/{}/{}".format("networks", json_file)
contract_abi_path = base_path + "/{}/{}".format("abi", json_file)
return contract_network_path, contract_abi_path
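# Hedged usage sketch (illustrative only; the provider URL, contract base
# path, and network id below are placeholders, not values from this module):
#
#   util = BlockChainUtil(provider_type="HTTP_PROVIDER",
#                         provider="https://ropsten.infura.io/v3/<project-id>")
#   registry = util.get_contract_instance(base_path="/path/to/contracts",
#                                         contract_name=ContractType.REGISTRY.value,
#                                         net_id=3)
#   print(util.get_current_block_no())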
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Nicira Networks, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Routines for configuring Quantum
"""
import os
from paste import deploy
from quantum.api.v2 import attributes
from quantum.common import utils
from quantum.openstack.common import cfg
from quantum.openstack.common import log as logging
from quantum.openstack.common import rpc
from quantum.version import version_info as quantum_version
LOG = logging.getLogger(__name__)
core_opts = [
cfg.StrOpt('bind_host', default='0.0.0.0',
help=_("The host IP to bind to")),
cfg.IntOpt('bind_port', default=9696,
help=_("The port to bind to")),
cfg.StrOpt('api_paste_config', default="api-paste.ini",
help=_("The API paste config file to use")),
cfg.StrOpt('api_extensions_path', default="",
help=_("The path for API extensions")),
cfg.StrOpt('policy_file', default="policy.json",
help=_("The policy file to use")),
cfg.StrOpt('auth_strategy', default='keystone',
help=_("The type of authentication to use")),
cfg.StrOpt('core_plugin',
help=_("The core plugin Quantum will use")),
cfg.ListOpt('service_plugins', default=[],
help=_("The service plugins Quantum will use")),
cfg.StrOpt('base_mac', default="fa:16:3e:00:00:00",
help=_("The base MAC address Quantum will use for VIFs")),
cfg.IntOpt('mac_generation_retries', default=16,
help=_("How many times Quantum will retry MAC generation")),
cfg.BoolOpt('allow_bulk', default=True,
help=_("Allow the usage of the bulk API")),
cfg.IntOpt('max_dns_nameservers', default=5,
help=_("Maximum number of DNS nameservers")),
cfg.IntOpt('max_subnet_host_routes', default=20,
help=_("Maximum number of host routes per subnet")),
cfg.IntOpt('dhcp_lease_duration', default=120,
help=_("DHCP lease duration")),
cfg.BoolOpt('allow_overlapping_ips', default=False,
help=_("Allow overlapping IP support in Quantum")),
cfg.StrOpt('host', default=utils.get_hostname(),
help=_("The hostname Quantum is running on")),
cfg.BoolOpt('force_gateway_on_subnet', default=False,
help=_("Ensure that configured gateway is on subnet")),
]
core_cli_opts = [
cfg.StrOpt('state_path', default='/var/lib/quantum'),
]
# Register the configuration options
cfg.CONF.register_opts(core_opts)
cfg.CONF.register_cli_opts(core_cli_opts)
# Ensure that the control exchange is set correctly
rpc.set_defaults(control_exchange='quantum')
def parse(args):
cfg.CONF(args=args, project='quantum',
version='%%prog %s' % quantum_version.version_string_with_vcs())
# Validate that the base_mac is of the correct format
msg = attributes._validate_regex(cfg.CONF.base_mac,
attributes.MAC_PATTERN)
if msg:
msg = _("Base MAC: %s") % msg
raise Exception(msg)
def setup_logging(conf):
"""
Sets up the logging options for a log with supplied name
:param conf: a cfg.ConfOpts object
"""
product_name = "quantum"
logging.setup(product_name)
log_root = logging.getLogger(product_name).logger
log_root.propagate = 0
LOG.info(_("Logging enabled!"))
def load_paste_app(app_name):
"""
Builds and returns a WSGI app from a paste config file.
:param app_name: Name of the application to load
:raises RuntimeError when config file cannot be located or application
cannot be loaded from config file
"""
config_path = os.path.abspath(cfg.CONF.find_file(
cfg.CONF.api_paste_config))
LOG.info(_("Config paste file: %s"), config_path)
try:
app = deploy.loadapp("config:%s" % config_path, name=app_name)
except (LookupError, ImportError):
msg = _("Unable to load %(app_name)s from "
"configuration file %(config_path)s.") % locals()
LOG.exception(msg)
raise RuntimeError(msg)
return app
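# Illustrative bootstrap sketch (an assumption about how these helpers are
# usually wired together by a service entry point; not part of this module):
#
#   def main(argv):
#       parse(argv[1:])                   # load CLI args and config files
#       setup_logging(cfg.CONF)           # configure the "quantum" logger
#       return load_paste_app('quantum')  # build the WSGI app from api-paste.ini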
|
import RPi.GPIO as G
import time as t
G.setmode(G.BCM)
G.setup(19, G.OUT)
G.setup(26, G.IN)  # pull_up_down=G.PUD_UP
G.setup(21, G.OUT)
G.setup(20, G.IN, pull_up_down=G.PUD_UP)
print("setup done")
G.output(21, True)
print("output on")
while True:
input_sensor = G.input(26)
if input_sensor == False:
print("sensor triggered")
G.output(19, True)
t.sleep(.5)
G.output(19, False)
input_taster = G.input(20)
if input_taster == False:
print("break")
break
G.cleanup()
|
# coding: utf-8
from __future__ import absolute_import
import six
from postfinancecheckout.api_client import ApiClient
class BankAccountServiceApi:
def __init__(self, configuration):
self.api_client = ApiClient(configuration=configuration)
def count(self, space_id, **kwargs):
"""Count
Counts the number of items in the database as restricted by the given filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.count(space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param EntityQueryFilter filter: The filter which restricts the entities which are used to calculate the count.
:return: int
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.count_with_http_info(space_id, **kwargs)
else:
(data) = self.count_with_http_info(space_id, **kwargs)
return data
def count_with_http_info(self, space_id, **kwargs):
"""Count
Counts the number of items in the database as restricted by the given filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.count_with_http_info(space_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param EntityQueryFilter filter: The filter which restricts the entities which are used to calculate the count.
:return: int
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['space_id', 'filter']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method count" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'space_id' is set
if ('space_id' not in params or
params['space_id'] is None):
raise ValueError("Missing the required parameter `space_id` when calling `count`")
collection_formats = {}
path_params = {}
query_params = []
if 'space_id' in params:
query_params.append(('spaceId', params['space_id']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'filter' in params:
body_params = params['filter']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=utf-8'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json;charset=utf-8'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/bank-account/count', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='int',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read(self, space_id, id, **kwargs):
"""Read
Reads the entity with the given 'id' and returns it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read(space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param int id: The ID of the bank account which should be returned. (required)
:return: BankAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_with_http_info(space_id, id, **kwargs)
else:
(data) = self.read_with_http_info(space_id, id, **kwargs)
return data
def read_with_http_info(self, space_id, id, **kwargs):
"""Read
Reads the entity with the given 'id' and returns it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_with_http_info(space_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param int id: The ID of the bank account which should be returned. (required)
:return: BankAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['space_id', 'id']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'space_id' is set
if ('space_id' not in params or
params['space_id'] is None):
raise ValueError("Missing the required parameter `space_id` when calling `read`")
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `read`")
collection_formats = {}
path_params = {}
query_params = []
if 'space_id' in params:
query_params.append(('spaceId', params['space_id']))
if 'id' in params:
query_params.append(('id', params['id']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=utf-8'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/bank-account/read', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='BankAccount',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search(self, space_id, query, **kwargs):
"""Search
Searches for the entities as specified by the given query.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search(space_id, query, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param EntityQuery query: The query restricts the bank accounts which are returned by the search. (required)
:return: list[BankAccount]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_with_http_info(space_id, query, **kwargs)
else:
(data) = self.search_with_http_info(space_id, query, **kwargs)
return data
def search_with_http_info(self, space_id, query, **kwargs):
"""Search
Searches for the entities as specified by the given query.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_with_http_info(space_id, query, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int space_id: (required)
:param EntityQuery query: The query restricts the bank accounts which are returned by the search. (required)
:return: list[BankAccount]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['space_id', 'query']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'space_id' is set
if ('space_id' not in params or
params['space_id'] is None):
raise ValueError("Missing the required parameter `space_id` when calling `search`")
# verify the required parameter 'query' is set
if ('query' not in params or
params['query'] is None):
raise ValueError("Missing the required parameter `query` when calling `search`")
collection_formats = {}
path_params = {}
query_params = []
if 'space_id' in params:
query_params.append(('spaceId', params['space_id']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'query' in params:
body_params = params['query']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=utf-8'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json;charset=utf-8'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/bank-account/search', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[BankAccount]',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
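# Hedged usage sketch (illustrative; the configuration object, space id, and
# entity ids below are placeholders, not values defined in this snippet):
#
#   service = BankAccountServiceApi(configuration=my_configuration)
#   total = service.count(space_id=405)
#   account = service.read(space_id=405, id=12)
#   accounts = service.search(space_id=405, query=my_entity_query)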
|
import unittest
"""
Leetcode(https://leetcode.com/problems/container-with-most-water/solution/)
"""
def maxArea(height):
ans = 0
left, right = 0, len(height) - 1
while left < right:
ans = max(ans, min(height[left], height[right]) * (right - left))
if height[left] < height[right]:
left += 1
else:
right -= 1
return ans
class MyTestCase(unittest.TestCase):
def test1(self):
height = [4, 3, 2, 1, 4]
self.assertEqual(maxArea(height), 16)
def test2(self):
height = [1, 1]
self.assertEqual(maxArea(height), 1)
def test3(self):
height = [1, 2, 1]
self.assertEqual(maxArea(height), 2)
if __name__ == '__main__':
unittest.main()
|
# Copyright 2016-2018 Dirk Thomas
# Licensed under the Apache License, Version 2.0
import logging
from pathlib import Path
import sys
from flake8 import LOG
from flake8.api.legacy import get_style_guide
# avoid debug and info messages from flake8 internals
LOG.setLevel(logging.WARN)
def test_flake8():
style_guide = get_style_guide(
ignore=['D100', 'D104'],
show_source=True,
)
style_guide_tests = get_style_guide(
ignore=['D100', 'D101', 'D102', 'D103', 'D104', 'D105', 'D107'],
show_source=True,
)
stdout = sys.stdout
sys.stdout = sys.stderr
# implicitly calls report_errors()
report = style_guide.check_files([
str(Path(__file__).parents[1] / 'colcon_powershell'),
])
report_tests = style_guide_tests.check_files([
str(Path(__file__).parents[1] / 'test'),
])
sys.stdout = stdout
total_errors = report.total_errors + report_tests.total_errors
if total_errors: # pragma: no cover
# output summary with per-category counts
print()
report._application.formatter.show_statistics(report._stats)
print(
'flake8 reported {total_errors} errors'
.format_map(locals()), file=sys.stderr)
assert not report.total_errors, \
'flake8 reported {total_errors} errors'.format_map(locals())
|
from sim.signal import Signal, SIG_UNDEF
from sim.sequencer import DEFAULT_SEQUENCER as SEQ
from sim.tests import okeq, okin, setsig, fails
from sim.device.arith import CounterOnebit
counter = CounterOnebit(
'c1b',
t_toggle_0_to_1=3.,
t_toggle_1_to_0=3.,
t_out_2_carry=1.,
t_clear_2_carry=2.,
t_clear_onoff=4.,
t_eor_onoff=2.)
din = Signal('d_in')
clr = Signal('clr')
ore = Signal('ore')
counter.connect('input', din)
counter.connect('clear', clr)
counter.connect('enable_or', ore)
din.trace()
clr.trace()
counter.output.trace()
counter.x_carry_out.trace()
COUNTER_CARRYOUTS_COUNT = 0
def carry_out_callback(time, signal):
global COUNTER_CARRYOUTS_COUNT
COUNTER_CARRYOUTS_COUNT += 1
counter.x_carry_out.add_connection(carry_out_callback)
SEQ.addall([
setsig(100.0, din, 1),
setsig(110.0, din, 1),
setsig(120.0, din, 1),
setsig(150.0, clr, 1),
setsig(160.0, clr, 0),
setsig(180.0, clr, 1),
setsig(181.0, ore, 1),
setsig(190.0, clr, 0),
setsig(200.0, din, 1),
setsig(210.0, din, 1),
setsig(220.0, din, 1),
setsig(223.0, clr, 0),
])
okeq(counter.output.state, 0)
okeq(COUNTER_CARRYOUTS_COUNT, 0)
SEQ.run(101.)
okeq(counter.output.state, SIG_UNDEF)
okeq(COUNTER_CARRYOUTS_COUNT, 0)
SEQ.run(109.)
okeq(counter.output.state, 1)
okeq(COUNTER_CARRYOUTS_COUNT, 0)
SEQ.run(111.)
okeq(counter.output.state, SIG_UNDEF)
okeq(COUNTER_CARRYOUTS_COUNT, 0)
SEQ.run(119.)
okeq(counter.output.state, 0)
okeq(COUNTER_CARRYOUTS_COUNT, 1)
SEQ.run()
# Check we get an error when clear active period is too short.
SEQ.addall([
setsig(250.0, clr, 1),
setsig(251.0, clr, 0)
])
with fails(ValueError):
SEQ.run()
|
from primitive_object import PrimitiveObject
from null import Null
from rpython.rlib.debug import make_sure_not_resized
class ComplexObject(PrimitiveObject):
__slots__ = ("fields", "size")
_immutable_fields_ = ("fields", "size")
def __init__(self, initial_size):
self.size = initial_size
self.fields = [Null()] * initial_size
make_sure_not_resized(self.fields)
def get_value_at(self, index):
assert index >= 0 and index < self.size
return self.fields[index]
def set_value_at(self, index, value):
assert isinstance(value, PrimitiveObject)
assert index >= 0 and index < self.size
self.fields[index] = value
def get_string(self):
result = "{" + ",".join([item.get_string() if item is not None else "None"
for item in self.fields]) + "}"
return result
def pprint(self):
print self.get_string()
def eq(self, rhs):
assert isinstance(rhs, PrimitiveObject)
if isinstance(rhs, Null):
return False
else:
assert isinstance(rhs, ComplexObject)
result = self.fields == rhs.fields
return result
def neq(self, rhs):
assert isinstance(rhs, PrimitiveObject)
if isinstance(rhs, Null):
return True
else:
assert isinstance(rhs, ComplexObject)
result = self.fields != rhs.fields
return result
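# Minimal usage sketch (illustrative; relies only on the Null objects the
# constructor already uses to fill unset fields):
#
#   obj = ComplexObject(2)          # two fields, both initialised to Null()
#   obj.set_value_at(0, Null())
#   print obj.get_string()          # renders the fields between "{" and "}"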
|
#!/usr/bin/env python
from __future__ import unicode_literals
import configargparse
import sys
from config.config import statusCode,benchmark_types, language_supported, file_location
import config.bleu_results as bleu_results
import tools.sp_enc_dec as sp
import ancillary_functions_anuvaad.ancillary_functions as ancillary_functions
import ancillary_functions_anuvaad.sc_preface_handler as sc_preface_handler
import ancillary_functions_anuvaad.handle_date_url as date_url_util
from flask import Flask, jsonify, request,send_file,abort,send_from_directory
from flask_cors import CORS
from onmt.translate import TranslationServer, ServerModelError
from itertools import repeat
from onmt.utils.logging import init_logger,logger,entry_exit_log,LOG_TAGS
from onmt.utils.misc import split_corpus
from onmt.translate.translator import build_translator
import os
import onmt.opts as opts
from onmt.utils.parse import ArgumentParser
from config.mongo_model import db,Benchmarks
import datetime
from kafka_utils.document_translator import doc_translator
import threading
import translation_util.translate_util as translate_util
import translation_util.interactive_translate as interactive_translation
from config.kafka_topics import consumer_topics,producer_topics,kafka_topic
STATUS_OK = "ok"
STATUS_ERROR = "error"
mongo_config_dir = "config/mongo_config.py"
IS_RUN_KAFKA = 'IS_RUN_KAFKA'
IS_RUN_KAFKA_DEFAULT_VALUE = False
bootstrap_server_boolean = os.environ.get(IS_RUN_KAFKA, IS_RUN_KAFKA_DEFAULT_VALUE)
def start(config_file,
url_root="/translator",
host="0.0.0.0",
port=3003,
debug=True):
def prefix_route(route_function, prefix='', mask='{0}{1}'):
def newroute(route, *args, **kwargs):
return route_function(mask.format(prefix, route), *args, **kwargs)
return newroute
app = Flask(__name__)
CORS(app)
app.config.from_pyfile(mongo_config_dir)
db.init_app(app)
app.route = prefix_route(app.route, url_root)
translation_server = TranslationServer()
translation_server.start(config_file)
def kafka_function():
logger.info('starting kafka from nmt-server on thread-1')
doc_translator(translation_server,[kafka_topic[0]['consumer'],kafka_topic[1]['consumer'],kafka_topic[2]['consumer']])
if bootstrap_server_boolean:
t1 = threading.Thread(target=kafka_function)
# t1.start()
@app.route('/models', methods=['GET'])
def get_models():
out = {}
try:
out['status'] = statusCode["SUCCESS"]
out['response_body'] = translation_server.list_models()
        except Exception:
            out['status'] = statusCode["SYSTEM_ERR"]
            logger.info("Unexpected error: %s" % sys.exc_info()[0])
return jsonify(out)
@app.route('/clone_model/<int:model_id>', methods=['POST'])
def clone_model(model_id):
out = {}
data = request.get_json(force=True)
timeout = -1
if 'timeout' in data:
timeout = data['timeout']
del data['timeout']
opt = data.get('opt', None)
try:
model_id, load_time = translation_server.clone_model(
model_id, opt, timeout)
except ServerModelError as e:
out['status'] = STATUS_ERROR
out['error'] = str(e)
else:
out['status'] = STATUS_OK
out['model_id'] = model_id
out['load_time'] = load_time
return jsonify(out)
@app.route('/unload_model/<int:model_id>', methods=['GET'])
def unload_model(model_id):
out = {"model_id": model_id}
try:
translation_server.unload_model(model_id)
out['status'] = STATUS_OK
except Exception as e:
out['status'] = STATUS_ERROR
out['error'] = str(e)
return jsonify(out)
@app.route('/translate-anuvaad', methods=['POST'])
def translate():
inputs = request.get_json(force=True)
if len(inputs)>0:
logger.info("Making translate-anuvaad API call")
logger.info(entry_exit_log(LOG_TAGS["input"],inputs))
out = translate_util.translate_func(inputs, translation_server)
logger.info("out from translate_func-trans_util done{}".format(out))
logger.info(entry_exit_log(LOG_TAGS["output"],out))
return jsonify(out)
else:
logger.info("null inputs in request in translate-anuvaad API")
return jsonify({'status':statusCode["INVALID_API_REQUEST"]})
@app.route('/to_cpu/<int:model_id>', methods=['GET'])
def to_cpu(model_id):
out = {'model_id': model_id}
translation_server.models[model_id].to_cpu()
out['status'] = STATUS_OK
return jsonify(out)
@app.route('/to_gpu/<int:model_id>', methods=['GET'])
def to_gpu(model_id):
out = {'model_id': model_id}
translation_server.models[model_id].to_gpu()
out['status'] = STATUS_OK
return jsonify(out)
app.run(debug=debug, host=host, port=port, use_reloader=False,
threaded=True)
def _get_parser():
parser = configargparse.ArgumentParser(
config_file_parser_class=configargparse.YAMLConfigFileParser,
description="OpenNMT-py REST Server")
parser.add_argument("--ip", type=str, default="0.0.0.0")
parser.add_argument("--port", type=int, default="3003")
parser.add_argument("--url_root", type=str, default="/translator")
parser.add_argument("--debug", "-d", action="store_true")
parser.add_argument("--config", "-c", type=str,
default="./available_models/conf.json")
return parser
if __name__ == '__main__':
parser = _get_parser()
args = parser.parse_args()
start(args.config, url_root=args.url_root, host=args.ip, port=args.port,
debug=args.debug)
|
class CellState:
# Don't show these attributes in gui (not used any more?)
    excludeAttr = ['divideFlag', 'deathFlag']
def __init__(self, cid):
self.id = cid
self.growthRate = 1.0
self.color = [0.5,0.5,0.5]
self.divideFlag = False
self.deathFlag = False
|
import sys
import argparse
import pandas as pd
from PySide2.QtCore import QDateTime, QTimeZone
from PySide2.QtWidgets import QApplication
from lesson_08_main_window import MainWindow
from lesson_08_mainWidget import Widget
def transform_date(utc, timezone=None):
utc_fmt = "yyyy-MM-ddTHH:mm:ss.zzzZ"
new_date = QDateTime().fromString(utc, utc_fmt)
if timezone:
new_date.setTimeZone(timezone)
return new_date
def read_data(fname):
# Read the CSV content
df = pd.read_csv(fname)
# Remove wrong magnitudes
df = df.drop(df[df.mag < 0].index)
magnitudes = df["mag"]
# My local timezone
timezone = QTimeZone(b"Aisa/ShangHai")
# Get timestamp transformed to our timezone
times = df["time"].apply(lambda x: transform_date(x, timezone))
return times, magnitudes
if __name__ == "__main__":
options = argparse.ArgumentParser()
options.add_argument("-f", "--file", type=str, required=True)
args = options.parse_args()
data = read_data(args.file)
# Qt Application
app = QApplication(sys.argv)
widget = Widget(data)
window = MainWindow(widget)
window.show()
sys.exit(app.exec_())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from ... import opcodes as OperandDef
from ..datasource import tensor as astensor
from .core import TensorComplexFFTNMixin, validate_fftn, TensorStandardFFTN
class TensorIFFTN(TensorStandardFFTN, TensorComplexFFTNMixin):
_op_type_ = OperandDef.IFFTN
def __init__(self, shape=None, axes=None, norm=None, **kw):
super().__init__(_shape=shape, _axes=axes, _norm=norm, **kw)
def ifftn(a, s=None, axes=None, norm=None):
"""
Compute the N-dimensional inverse discrete Fourier Transform.
This function computes the inverse of the N-dimensional discrete
Fourier Transform over any number of axes in an M-dimensional tensor by
means of the Fast Fourier Transform (FFT). In other words,
``ifftn(fftn(a)) == a`` to within numerical accuracy.
For a description of the definitions and conventions used, see `mt.fft`.
The input, analogously to `ifft`, should be ordered in the same way as is
returned by `fftn`, i.e. it should have the term for zero frequency
in all axes in the low-order corner, the positive frequency terms in the
first half of all axes, the term for the Nyquist frequency in the middle
of all axes and the negative frequency terms in the second half of all
axes, in order of decreasingly negative frequency.
Parameters
----------
a : array_like
Input tensor, can be complex.
s : sequence of ints, optional
Shape (length of each transformed axis) of the output
(``s[0]`` refers to axis 0, ``s[1]`` to axis 1, etc.).
This corresponds to ``n`` for ``ifft(x, n)``.
Along any axis, if the given shape is smaller than that of the input,
the input is cropped. If it is larger, the input is padded with zeros.
if `s` is not given, the shape of the input along the axes specified
by `axes` is used. See notes for issue on `ifft` zero padding.
axes : sequence of ints, optional
Axes over which to compute the IFFT. If not given, the last ``len(s)``
axes are used, or all axes if `s` is also not specified.
Repeated indices in `axes` means that the inverse transform over that
axis is performed multiple times.
norm : {None, "ortho"}, optional
Normalization mode (see `mt.fft`). Default is None.
Returns
-------
out : complex Tensor
The truncated or zero-padded input, transformed along the axes
indicated by `axes`, or by a combination of `s` or `a`,
as explained in the parameters section above.
Raises
------
ValueError
If `s` and `axes` have different length.
IndexError
        If an element of `axes` is larger than the number of axes of `a`.
See Also
--------
mt.fft : Overall view of discrete Fourier transforms, with definitions
and conventions used.
fftn : The forward *n*-dimensional FFT, of which `ifftn` is the inverse.
ifft : The one-dimensional inverse FFT.
ifft2 : The two-dimensional inverse FFT.
ifftshift : Undoes `fftshift`, shifts zero-frequency terms to beginning
of tensor.
Notes
-----
See `mt.fft` for definitions and conventions used.
Zero-padding, analogously with `ifft`, is performed by appending zeros to
the input along the specified dimension. Although this is the common
approach, it might lead to surprising results. If another form of zero
padding is desired, it must be performed before `ifftn` is called.
Examples
--------
>>> import mars.tensor as mt
>>> a = mt.eye(4)
>>> mt.fft.ifftn(mt.fft.fftn(a, axes=(0,)), axes=(1,)).execute()
array([[ 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j]])
Create and plot an image with band-limited frequency content:
>>> import matplotlib.pyplot as plt
>>> n = mt.zeros((200,200), dtype=complex)
>>> n[60:80, 20:40] = mt.exp(1j*mt.random.uniform(0, 2*mt.pi, (20, 20)))
>>> im = mt.fft.ifftn(n).real
>>> plt.imshow(im.execute())
<matplotlib.image.AxesImage object at 0x...>
>>> plt.show()
"""
a = astensor(a)
axes = validate_fftn(a, s=s, axes=axes, norm=norm)
op = TensorIFFTN(shape=s, axes=axes, norm=norm, dtype=np.dtype(np.complex_))
return op(a)
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
'''dataloader'''
import os
import glob
import numpy as np
import PIL.Image as Image
import mindspore.dataset as ds
import mindspore.dataset.vision.c_transforms as CV
class DataLoader_Imagenet_val:
'''DataLoader_Imagenet_val'''
def __init__(self, data_dir, patch=256, noise_style="gauss25", batch_size=4):
super(DataLoader_Imagenet_val, self).__init__()
self.data_dir = data_dir
self.patch = patch
self.train_fns = glob.glob(os.path.join(self.data_dir, "*"))
self.train_fns.sort()
print('fetch {} samples for training'.format(len(self.train_fns)))
self.noise_generator = AugmentNoise(noise_style)
self.batch_size = batch_size
self.test = 1
def __getitem__(self, index):
# fetch image
fn = self.train_fns[index]
im = Image.open(fn)
im = np.array(im, dtype=np.float32)
# random crop
H = im.shape[0]
W = im.shape[1]
if H - self.patch > 0:
xx = np.random.randint(0, H - self.patch)
im = im[xx:xx + self.patch, :, :]
if W - self.patch > 0:
yy = np.random.randint(0, W - self.patch)
im = im[:, yy:yy + self.patch, :]
im /= 255.0 #clean image
noisy = self.noise_generator.add_noise(im)
return im, noisy
def __len__(self):
return len(self.train_fns)
class AugmentNoise():
'''AugmentNoise'''
def __init__(self, style):
if style.startswith('gauss'):
self.params = [
float(p) / 255.0 for p in style.replace('gauss', '').split('_')
]
if len(self.params) == 1:
self.style = "gauss_fix"
elif len(self.params) == 2:
self.style = "gauss_range"
elif style.startswith('poisson'):
self.params = [
float(p) for p in style.replace('poisson', '').split('_')
]
if len(self.params) == 1:
self.style = "poisson_fix"
elif len(self.params) == 2:
self.style = "poisson_range"
def add_noise(self, x):
'''add_noise'''
shape = x.shape
if self.style == "gauss_fix":
std = self.params[0]
return np.array(x + np.random.normal(size=shape) * std,
dtype=np.float32)
if self.style == "gauss_range":
min_std, max_std = self.params
std = np.random.uniform(low=min_std, high=max_std, size=(1, 1, 1))
return np.array(x + np.random.normal(size=shape) * std,
dtype=np.float32)
if self.style == "poisson_fix":
lam = self.params[0]
return np.array(np.random.poisson(lam * x) / lam, dtype=np.float32)
assert self.style == "poisson_range"
min_lam, max_lam = self.params
lam = np.random.uniform(low=min_lam, high=max_lam, size=(1, 1, 1))
return np.array(np.random.poisson(lam * x) / lam, dtype=np.float32)
def create_Dataset(data_dir, patch, noise_style, batch_size, device_num, rank, shuffle):
dataset = DataLoader_Imagenet_val(data_dir, patch, noise_style, batch_size)
hwc_to_chw = CV.HWC2CHW()
data_set = ds.GeneratorDataset(dataset, column_names=["image", "noisy"], \
num_parallel_workers=8, shuffle=shuffle, num_shards=device_num, shard_id=rank)
data_set = data_set.map(input_columns=["image"], operations=hwc_to_chw, num_parallel_workers=8)
data_set = data_set.map(input_columns=["noisy"], operations=hwc_to_chw, num_parallel_workers=8)
data_set = data_set.batch(batch_size, drop_remainder=True)
return data_set, data_set.get_dataset_size()
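# Hedged usage sketch (illustrative; the data directory and sharding values
# are placeholders, not configuration shipped with this file):
#
#   train_set, steps = create_Dataset(data_dir="/path/to/images", patch=256,
#                                     noise_style="gauss25", batch_size=4,
#                                     device_num=1, rank=0, shuffle=True)
#   for batch in train_set.create_dict_iterator():
#       clean, noisy = batch["image"], batch["noisy"]
#       break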
|
# coding=utf-8
from app.api.base import base_name as names
from app.api.src.sales import *
from app.api.base.base_router import BaseRouter
class Sales(BaseRouter):
def __init__(self):
super().__init__()
self.args = [names.LOGIN, names.PASSWORD]
def get(self, id_user):
args = {
names.ID_USER: id_user
}
return get_sales_user(args) or {}
|
# -*- coding: utf-8 -*-
'''
Using states instead of maps to deploy clouds
=============================================
.. versionadded:: 2014.1.0 (Hydrogen)
Use this minion to spin up a cloud instance:
.. code-block:: yaml
my-ec2-instance:
cloud.profile:
my-ec2-config
'''
import pprint
from salt._compat import string_types
import salt.utils.cloud as suc
def __virtual__():
'''
Only load if the cloud module is available in __salt__
'''
return 'cloud.profile' in __salt__
def _check_name(name):
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
if suc.check_name(name, 'a-zA-Z0-9._-'):
ret['comment'] = 'Invalid characters in name.'
ret['result'] = False
return ret
else:
ret['result'] = True
return ret
def _valid(name, comment='', changes=None):
if not changes:
changes = {}
return {'name': name,
'result': True,
'changes': changes,
'comment': comment}
def present(name, cloud_provider, onlyif=None, unless=None, **kwargs):
'''
Spin up a single instance on a cloud provider, using salt-cloud. This state
does not take a profile argument; rather, it takes the arguments that would
normally be configured as part of the state.
Note that while this function does take any configuration argument that
would normally be used to create an instance, it will not verify the state
of any of those arguments on an existing instance. Stateful properties of
an instance should be configured using their own individual state (i.e.,
cloud.tagged, cloud.untagged, etc).
name
The name of the instance to create
cloud_provider
The name of the cloud provider to use
    onlyif
        Only run the state if this command returns 0 (or this value is truthy)
    unless
        Do not run the state if this command returns 0 (or this value is truthy)
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
instance = __salt__['cloud.action'](
fun='show_instance', names=[name])
retcode = __salt__['cmd.retcode']
prov = str([a for a in instance][0])
if onlyif is not None:
if not isinstance(onlyif, string_types):
if not onlyif:
return _valid(name, comment='onlyif execution failed')
elif isinstance(onlyif, string_types):
if retcode(onlyif) != 0:
return _valid(name, comment='onlyif execution failed')
if unless is not None:
if not isinstance(unless, string_types):
if unless:
return _valid(name, comment='unless execution succeeded')
elif isinstance(unless, string_types):
if retcode(unless) == 0:
return _valid(name, comment='unless execution succeeded')
if instance and 'Not Actioned' not in prov:
ret['result'] = True
ret['comment'] = 'Instance {0} already exists in {1}'.format(name,
prov)
return ret
if __opts__['test']:
ret['comment'] = 'Instance {0} needs to be created'.format(name)
return ret
info = __salt__['cloud.create'](cloud_provider, name, **kwargs)
    if info and 'Error' not in info:
ret['changes'] = info
ret['result'] = True
ret['comment'] = ('Created instance {0} using provider {1}'
' and the following options: {2}').format(
name,
cloud_provider,
pprint.pformat(kwargs)
)
    elif info and 'Error' in info:
        ret['result'] = False
        ret['comment'] = ('Failed to create instance {0}'
                          ' using provider {1}: {2}').format(
                              name,
                              cloud_provider,
                              info['Error'],
                          )
    else:
        ret['result'] = False
        ret['comment'] = ('Failed to create instance {0}'
                          ' using provider {1},'
                          ' please check your configuration').format(
                              name, cloud_provider)
return ret
def absent(name, onlyif=None, unless=None):
'''
Ensure that no instances with the specified names exist.
CAUTION: This is a destructive state, which will search all
configured cloud providers for the named instance,
and destroy it.
name
The name of the instance to destroy
    onlyif
        Only run the state if this command returns 0 (or this value is truthy)
    unless
        Do not run the state if this command returns 0 (or this value is truthy)
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
retcode = __salt__['cmd.retcode']
instance = __salt__['cloud.action'](fun='show_instance', names=[name])
    if not instance or \
        ('Not Actioned/Not Running' in instance
         and name in instance['Not Actioned/Not Running']):
ret['result'] = True
ret['comment'] = 'Instance {0} already absent'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'Instance {0} needs to be destroyed'.format(name)
return ret
if onlyif is not None:
if not isinstance(onlyif, string_types):
if not onlyif:
return _valid(name, comment='onlyif execution failed')
elif isinstance(onlyif, string_types):
if retcode(onlyif) != 0:
return _valid(name, comment='onlyif execution failed')
if unless is not None:
if not isinstance(unless, string_types):
if unless:
return _valid(name, comment='unless execution succeeded')
elif isinstance(unless, string_types):
if retcode(unless) == 0:
return _valid(name, comment='unless execution succeeded')
info = __salt__['cloud.destroy'](name)
    if info and 'Error' not in info:
ret['changes'] = info
ret['result'] = True
ret['comment'] = ('Destroyed instance {0}').format(
name,
)
elif 'Error' in info:
ret['result'] = False
ret['comment'] = ('Failed to destroy instance {0}: {1}').format(
name,
info['Error'],
)
else:
ret['result'] = False
ret['comment'] = 'Failed to destroy instance {0}'.format(name)
return ret
def profile(name, profile, onlyif=None, unless=None, **kwargs):
'''
Create a single instance on a cloud provider, using a salt-cloud profile.
Note that while profiles used this function do take any configuration
argument that would normally be used to create an instance using a profile,
this state will not verify the state of any of those arguments on an
existing instance. Stateful properties of an instance should be configured
using their own individual state (i.e., cloud.tagged, cloud.untagged, etc).
name
The name of the instance to create
profile
The name of the cloud profile to use
    onlyif
        Only run the state if this command returns 0 (or this value is truthy)
    unless
        Do not run the state if this command returns 0 (or this value is truthy)
kwargs
Any profile override or addition
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
retcode = __salt__['cmd.retcode']
if onlyif is not None:
if not isinstance(onlyif, string_types):
if not onlyif:
return _valid(name, comment='onlyif execution failed')
elif isinstance(onlyif, string_types):
if retcode(onlyif) != 0:
return _valid(name, comment='onlyif execution failed')
if unless is not None:
if not isinstance(unless, string_types):
if unless:
return _valid(name, comment='unless execution succeeded')
elif isinstance(unless, string_types):
if retcode(unless) == 0:
return _valid(name, comment='unless execution succeeded')
instance = __salt__['cloud.action'](fun='show_instance', names=[name])
prov = str(instance.keys()[0])
if instance and 'Not Actioned' not in prov:
ret['result'] = True
ret['comment'] = 'Instance {0} already exists in {1}'.format(
name, prov)
return ret
if __opts__['test']:
ret['comment'] = 'Instance {0} needs to be created'.format(name)
return ret
info = __salt__['cloud.profile'](profile, name, vm_overrides=kwargs)
# get either {Error: ''} or {namestring: {Error: ''}}
# which is what we can get from providers returns
main_error = info.get('Error', '')
name_error = ''
if isinstance(info, dict):
subinfo = info.get(name, {})
if isinstance(subinfo, dict):
name_error = subinfo.get('Error', None)
error = main_error or name_error
if info and not error:
node_info = info.get(name)
ret['result'] = True
default_msg = 'Created instance {0} using profile {1}'.format(
name, profile,)
# some providers support changes
if 'changes' in node_info:
ret['changes'] = node_info['changes']
ret['comment'] = node_info.get('comment', default_msg)
else:
ret['changes'] = info
ret['comment'] = default_msg
elif error:
ret['result'] = False
ret['comment'] = ('Failed to create instance {0}'
' using profile {1}: {2}').format(
name,
profile,
'{0}\n{1}\n'.format(main_error, name_error).strip(),
)
else:
ret['result'] = False
        ret['comment'] = ('Failed to create instance {0}'
                          ' using profile {1}').format(
name,
profile,
)
return ret
def volume_present(name, provider=None, **kwargs):
'''
Check that a block volume exists.
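    A minimal usage sketch (the volume name and provider below are
    illustrative):
    .. code-block:: yaml
        my-data-volume:
          cloud.volume_present:
            - provider: my-cloud-config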
'''
ret = _check_name(name)
if not ret['result']:
return ret
volumes = __salt__['cloud.volume_list'](provider=provider)
if name in volumes.keys():
ret['comment'] = 'Volume exists: {0}'.format(name)
ret['result'] = True
return ret
elif __opts__['test']:
ret['comment'] = 'Volume {0} will be created.'.format(name)
ret['result'] = None
return ret
response = __salt__['cloud.volume_create'](
names=name,
provider=provider,
**kwargs
)
if response:
ret['result'] = True
ret['comment'] = 'Volume {0} was created'.format(name)
ret['changes'] = {'old': None, 'new': response}
else:
ret['result'] = False
ret['comment'] = 'Volume {0} failed to create.'.format(name)
return ret
def volume_absent(name, provider=None, **kwargs):
'''
    Check that a block volume does not exist.
'''
ret = _check_name(name)
if not ret['result']:
return ret
volumes = __salt__['cloud.volume_list'](provider=provider)
    if name not in volumes.keys():
ret['comment'] = 'Volume is absent.'
ret['result'] = True
return ret
elif __opts__['test']:
ret['comment'] = 'Volume {0} will be deleted.'.format(name)
ret['result'] = None
return ret
response = __salt__['cloud.volume_delete'](
names=name,
provider=provider,
**kwargs
)
if response:
ret['result'] = True
ret['comment'] = 'Volume {0} was deleted'.format(name)
ret['changes'] = {'old': volumes[name], 'new': response}
else:
ret['result'] = False
ret['comment'] = 'Volume {0} failed to delete.'.format(name)
return ret
def volume_attached(name, server_name, provider=None, **kwargs):
'''
Check if a block volume is attached.
'''
ret = _check_name(name)
if not ret['result']:
return ret
ret = _check_name(server_name)
if not ret['result']:
return ret
volumes = __salt__['cloud.volume_list'](provider=provider)
instance = __salt__['cloud.action'](
fun='show_instance',
names=server_name
)
if name in volumes.keys() and volumes[name]['attachments']:
volume = volumes[name]
        ret['comment'] = ('Volume {name} is already '
                          'attached: {attachments}').format(**volumes[name])
ret['result'] = True
return ret
    elif name not in volumes.keys():
ret['comment'] = 'Volume {0} does not exist'.format(name)
ret['result'] = False
return ret
elif not instance:
ret['comment'] = 'Server {0} does not exist'.format(server_name)
ret['result'] = False
return ret
elif __opts__['test']:
        ret['comment'] = 'Volume {0} will be attached.'.format(
name
)
ret['result'] = None
return ret
response = __salt__['cloud.volume_attach'](
provider=provider,
names=name,
server_name=server_name,
**kwargs
)
if response:
ret['result'] = True
        ret['comment'] = 'Volume {0} was attached'.format(name)
ret['changes'] = {'old': volumes[name], 'new': response}
else:
ret['result'] = False
ret['comment'] = 'Volume {0} failed to attach.'.format(name)
return ret
def volume_detached(name, server_name=None, provider=None, **kwargs):
'''
    Check that a block volume is detached.
    Returns True if the server or volume does not exist.
'''
ret = _check_name(name)
if not ret['result']:
return ret
    if server_name is not None:
ret = _check_name(server_name)
if not ret['result']:
return ret
volumes = __salt__['cloud.volume_list'](provider=provider)
if server_name:
        instance = __salt__['cloud.action'](fun='show_instance', names=[server_name])
else:
instance = None
if name in volumes.keys() and not volumes[name]['attachments']:
volume = volumes[name]
ret['comment'] = (
'Volume {name} is not currently attached to anything.'
).format(**volumes[name])
ret['result'] = True
return ret
    elif name not in volumes.keys():
ret['comment'] = 'Volume {0} does not exist'.format(name)
ret['result'] = True
return ret
    elif not instance and server_name is not None:
ret['comment'] = 'Server {0} does not exist'.format(server_name)
ret['result'] = True
return ret
elif __opts__['test']:
        ret['comment'] = 'Volume {0} will be detached.'.format(
name
)
ret['result'] = None
return ret
response = __salt__['cloud.volume_detach'](
provider=provider,
names=name,
server_name=server_name,
**kwargs
)
if response:
ret['result'] = True
        ret['comment'] = 'Volume {0} was detached'.format(name)
ret['changes'] = {'old': volumes[name], 'new': response}
else:
ret['result'] = False
ret['comment'] = 'Volume {0} failed to detach.'.format(name)
return ret
|
'make cuda-convnet batches from images in the input dir; start numbering batches from 7'
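# Usage sketch (the script filename and paths below are illustrative, not part
# of the original source):
#   python make_batches.py /path/to/input_pngs /path/to/output_batches 7
# argv[1] is the input dir, argv[2] the output dir, and the optional argv[3]
# is the first batch number (defaults to 7 in main() below).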
import os
import sys
import numpy as np
import cPickle as pickle
from natsort import natsorted
from PIL import Image
from PIL import ImageOps
def process( image ):
image = np.array( image ) # 32 x 32 x 3
image = np.rollaxis( image, 2 ) # 3 x 32 x 32
image = image.reshape( -1 ) # 3072
return image
def get_batch_path( output_dir, number ):
filename = "data_batch_{}".format( number )
return os.path.join( output_dir, filename )
def get_empty_batch():
return np.zeros(( 3072, 0 ), dtype = np.uint8 )
def write_batch( path, batch ):
print "writing {}...\n".format( path )
labels = [ 0 for x in range( batch.shape[1] ) ]
d = { 'labels': labels, 'data': batch }
pickle.dump( d, open( path, "wb" ))
def main():
input_dir = sys.argv[1]
output_dir = sys.argv[2]
try:
batch_counter = int( sys.argv[3] )
except IndexError:
batch_counter = 7
batch_size = 10000
print "reading file names..."
names = [ d for d in os.listdir( input_dir ) if d.endswith( '.png') ]
names = natsorted( names )
if batch_counter > 7:
omit_batches = batch_counter - 7
omit_images = omit_batches * batch_size
names = names[omit_images:]
print "omiting {} images".format( omit_images )
current_batch = get_empty_batch()
counter = 0
for n in names:
image = Image.open( os.path.join( input_dir, n ))
try:
image = process( image )
except ValueError:
print "problem with image {}".format( n )
sys.exit( 1 )
image = image.reshape( -1, 1 )
current_batch = np.hstack(( current_batch, image ))
if current_batch.shape[1] == batch_size:
batch_path = get_batch_path( output_dir, batch_counter )
write_batch( batch_path, current_batch )
batch_counter += 1
current_batch = get_empty_batch()
counter += 1
if counter % 1000 == 0:
print n
if __name__ == '__main__':
main()
|
# Copyright 2017 MDSLAB - University of Messina
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__author__ = "Nicola Peditto <npeditto@unime.it"
import asyncio
import inspect
import pkg_resources
from six import moves
from stevedore import extension
import sys
from iotronic_lightningrod.config import entry_points_name
from iotronic_lightningrod.lightningrod import SESSION
from iotronic_lightningrod.modules import Module
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def getFuncName():
return inspect.stack()[1][3]
def refresh_stevedore(namespace=None):
"""Trigger reload of entry points.
Useful to have dynamic loading/unloading of stevedore modules.
"""
# NOTE(sheeprine): pkg_resources doesn't support reload on python3 due to
# defining basestring which is still there on reload hence executing
# python2 related code.
try:
del sys.modules['pkg_resources'].basestring
except AttributeError:
# python2, do nothing
pass
# Force working_set reload
moves.reload_module(sys.modules['pkg_resources'])
# Clear stevedore cache
cache = extension.ExtensionManager.ENTRY_POINT_CACHE
if namespace:
if namespace in cache:
del cache[namespace]
else:
cache.clear()
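# Illustrative usage of refresh_stevedore(): plug_and_play() below refreshes
# only the 's4t.modules' namespace after appending a new entry point, e.g.
#     refresh_stevedore('s4t.modules')
# whereas calling it with no argument clears the whole stevedore cache.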
class Utility(Module.Module):
def __init__(self, board, session):
super(Utility, self).__init__("Utility", board)
def finalize(self):
pass
def restore(self):
pass
async def hello(self, client_name, message):
import random
s = random.uniform(0.5, 3.0)
await asyncio.sleep(s)
result = "Hello by board to Conductor " + client_name + \
" that said me " + message + " - Time: " + '%.2f' % s
LOG.info("DEVICE hello result: " + str(result))
return result
async def plug_and_play(self, new_module, new_class):
LOG.info("LR modules loaded:\n\t" + new_module)
# Updating entry_points
with open(entry_points_name, 'a') as entry_points:
entry_points.write(
new_module +
'= iotronic_lightningrod.modules.' + new_module + ':'
+ new_class
)
# Reload entry_points
refresh_stevedore('s4t.modules')
LOG.info("New entry_points loaded!")
# Reading updated entry_points
named_objects = {}
for ep in pkg_resources.iter_entry_points(group='s4t.modules'):
            named_objects.update({ep.name: ep.load()})
SESSION.disconnect()
return str(named_objects)
async def changeConf(self, conf):
await self.board.getConf(conf)
self.board.setUpdateTime()
result = "Board configuration changed!"
LOG.info("PROVISIONING RESULT: " + str(result))
return result
async def destroyNode(self, conf):
await self.board.setConf(conf)
result = "Board configuration cleaned!"
LOG.info("DESTROY RESULT: " + str(result))
return result
|
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
from itertools import count
import sys
from networks import policy_nn
from utils import *
from env import Env
from BFS.KB import KB
from BFS.BFS import BFS
import time
relation = sys.argv[1]
# episodes = int(sys.argv[2])
graphpath = dataPath + 'tasks/' + relation + '/' + 'graph.txt'
relationPath = dataPath + 'tasks/' + relation + '/' + 'train_pos'
class SupervisedPolicy(object):
"""docstring for SupervisedPolicy"""
def __init__(self, learning_rate = 0.001):
self.initializer = tf.contrib.layers.xavier_initializer()
with tf.variable_scope('supervised_policy'):
self.state = tf.placeholder(tf.float32, [None, state_dim], name = 'state')
self.action = tf.placeholder(tf.int32, [None], name = 'action')
self.action_prob = policy_nn(self.state, state_dim, action_space, self.initializer)
action_mask = tf.cast(tf.one_hot(self.action, depth = action_space), tf.bool)
self.picked_action_prob = tf.boolean_mask(self.action_prob, action_mask)
self.loss = tf.reduce_sum(-tf.log(self.picked_action_prob)) + sum(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES, scope = 'supervised_policy'))
self.optimizer = tf.train.AdamOptimizer(learning_rate = learning_rate)
self.train_op = self.optimizer.minimize(self.loss)
def predict(self, state, sess = None):
sess = sess or tf.get_default_session()
return sess.run(self.action_prob, {self.state: state})
def update(self, state, action, sess = None):
sess = sess or tf.get_default_session()
_, loss = sess.run([self.train_op, self.loss], {self.state: state, self.action: action})
return loss
def train():
tf.reset_default_graph()
policy_nn = SupervisedPolicy()
f = open(relationPath)
train_data = f.readlines()
f.close()
num_samples = len(train_data)
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
        # Cap training at 500 samples/episodes.
        if num_samples > 500:
            num_samples = 500
for episode in range(num_samples):
print("Episode %d" % episode)
print('Training Sample:', train_data[episode%num_samples][:-1])
env = Env(dataPath, train_data[episode%num_samples])
sample = train_data[episode%num_samples].split()
try:
good_episodes = teacher(sample[0], sample[1], 5, env, graphpath)
except Exception as e:
print('Cannot find a path')
continue
for item in good_episodes:
state_batch = []
action_batch = []
for t, transition in enumerate(item):
state_batch.append(transition.state)
action_batch.append(transition.action)
state_batch = np.squeeze(state_batch)
state_batch = np.reshape(state_batch, [-1, state_dim])
policy_nn.update(state_batch, action_batch)
saver.save(sess, 'models/policy_supervised_' + relation)
print('Model saved')
def test(test_episodes):
tf.reset_default_graph()
policy_nn = SupervisedPolicy()
f = open(relationPath)
test_data = f.readlines()
f.close()
test_num = len(test_data)
test_data = test_data[-test_episodes:]
print(len(test_data))
success = 0
saver = tf.train.Saver()
with tf.Session() as sess:
saver.restore(sess, 'models/policy_supervised_'+ relation)
print('Model reloaded')
for episode in range(len(test_data)):
print('Test sample %d: %s' % (episode,test_data[episode][:-1]))
env = Env(dataPath, test_data[episode])
sample = test_data[episode].split()
state_idx = [env.entity2id_[sample[0]], env.entity2id_[sample[1]], 0]
for t in count():
state_vec = env.idx_state(state_idx)
action_probs = policy_nn.predict(state_vec)
action_chosen = np.random.choice(np.arange(action_space), p = np.squeeze(action_probs))
reward, new_state, done = env.interact(state_idx, action_chosen)
if done or t == max_steps_test:
if done:
print('Success')
success += 1
print('Episode ends\n')
break
state_idx = new_state
    print('Success percentage:', success/test_episodes)
if __name__ == "__main__":
train()
# test(50)
|
from django.contrib import admin
from .models import *
admin.site.register(Client)
admin.site.register(ServicesPL)
admin.site.register(MaterialsPL)
admin.site.register(Request)
admin.site.register(ChosenServices)
admin.site.register(ChosenMaterials)
admin.site.register(WorkGroup)
admin.site.register(Executor)
admin.site.register(Invoice)
admin.site.register(PaymentOrder)
admin.site.register(User)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
from hypothesis import assume, given, settings, HealthCheck
import hypothesis.strategies as st
import numpy as np
import unittest
@st.composite
def _glu_old_input(draw):
dims = draw(st.lists(st.integers(min_value=1, max_value=5), min_size=1, max_size=3))
axis = draw(st.integers(min_value=0, max_value=len(dims)))
# The axis dimension must be divisible by two
axis_dim = 2 * draw(st.integers(min_value=1, max_value=2))
dims.insert(axis, axis_dim)
X = draw(hu.arrays(dims, np.float32, None))
return (X, axis)
class TestGlu(serial.SerializedTestCase):
@given(
X_axis=_glu_old_input(),
**hu.gcs
)
@settings(deadline=10000)
def test_glu_old(self, X_axis, gc, dc):
X, axis = X_axis
def glu_ref(X):
x1, x2 = np.split(X, [X.shape[axis] // 2], axis=axis)
Y = x1 * (1. / (1. + np.exp(-x2)))
return [Y]
op = core.CreateOperator("Glu", ["X"], ["Y"], dim=axis)
self.assertReferenceChecks(gc, op, [X], glu_ref)
if __name__ == "__main__":
unittest.main()
|
import urllib
import json
summ = 0
count = 0
address = 'http://python-data.dr-chuck.net/comments_319811.json'
#raw_input('Enter location: ')
uh = urllib.urlopen(address)
data = uh.read()
#print data
tree = json.loads(data)
#print json.dumps(tree, indent= 4)
for item in tree['comments']:
count = count +1
#print item['count']
summ = summ + int(item['count'])
#print item.find('count').text
print summ
print count
|
import pytest
from diofant import (I, Matrix, MutableDenseMatrix, MutableSparseMatrix,
PurePoly, Rational, ShapeError, SparseMatrix, eye, ones,
zeros)
from diofant.abc import x, y, z
__all__ = ()
def test_sparse_matrix():
def sparse_eye(n):
return SparseMatrix.eye(n)
def sparse_zeros(n):
return SparseMatrix.zeros(n)
# creation args
pytest.raises(TypeError, lambda: SparseMatrix(1, 2))
pytest.raises(ValueError, lambda: SparseMatrix(2, 2, (1, 3, 4, 5, 6)))
a = SparseMatrix((
(1, 0),
(0, 1)
))
assert SparseMatrix(a) == a
a = MutableSparseMatrix([])
b = MutableDenseMatrix([1, 2])
assert a.row_join(b) == b
assert a.col_join(b) == b
assert type(a.row_join(b)) == type(a)
assert type(a.col_join(b)) == type(a)
# test element assignment
a = SparseMatrix((
(1, 0),
(0, 1)
))
a[3] = 4
assert a[1, 1] == 4
a[3] = 1
a[0, 0] = 2
assert a == SparseMatrix((
(2, 0),
(0, 1)
))
a[1, 0] = 5
assert a == SparseMatrix((
(2, 0),
(5, 1)
))
a[1, 1] = 0
assert a == SparseMatrix((
(2, 0),
(5, 0)
))
assert a._smat == {(0, 0): 2, (1, 0): 5}
# test_multiplication
a = SparseMatrix((
(1, 2),
(3, 1),
(0, 6),
))
b = SparseMatrix((
(1, 2),
(3, 0),
))
c = a*b
assert c[0, 0] == 7
assert c[0, 1] == 2
assert c[1, 0] == 6
assert c[1, 1] == 6
assert c[2, 0] == 18
assert c[2, 1] == 0
c = b * x
assert isinstance(c, SparseMatrix)
assert c[0, 0] == x
assert c[0, 1] == 2*x
assert c[1, 0] == 3*x
assert c[1, 1] == 0
c = 5 * b
assert isinstance(c, SparseMatrix)
assert c[0, 0] == 5
assert c[0, 1] == 2*5
assert c[1, 0] == 3*5
assert c[1, 1] == 0
# test_power
A = SparseMatrix([[2, 3], [4, 5]])
assert (A**5)[:] == [6140, 8097, 10796, 14237]
A = SparseMatrix([[2, 1, 3], [4, 2, 4], [6, 12, 1]])
assert (A**3)[:] == [290, 262, 251, 448, 440, 368, 702, 954, 433]
# test_creation
a = SparseMatrix([[x, 0], [0, 0]])
m = a
assert m.cols == m.rows
assert m.cols == 2
assert m[:] == [x, 0, 0, 0]
b = SparseMatrix(2, 2, [x, 0, 0, 0])
m = b
assert m.cols == m.rows
assert m.cols == 2
assert m[:] == [x, 0, 0, 0]
assert a == b
S = sparse_eye(3)
del S[1, :]
assert S == SparseMatrix([
[1, 0, 0],
[0, 0, 1]])
S = sparse_eye(3)
del S[:, 1]
assert S == SparseMatrix([
[1, 0],
[0, 0],
[0, 1]])
S = SparseMatrix.eye(3)
S[2, 1] = 2
S.col_swap(1, 0)
assert S == SparseMatrix([[0, 1, 0],
[1, 0, 0],
[2, 0, 1]])
S.row_swap(0, 1)
assert S == SparseMatrix([[1, 0, 0],
[0, 1, 0],
[2, 0, 1]])
S.col_swap(0, 1)
assert S == SparseMatrix([[0, 1, 0],
[1, 0, 0],
[0, 2, 1]])
S.row_swap(0, 2)
assert S == SparseMatrix([[0, 2, 1],
[1, 0, 0],
[0, 1, 0]])
S.col_swap(0, 2)
assert S == SparseMatrix([[1, 2, 0],
[0, 0, 1],
[0, 1, 0]])
a = SparseMatrix(1, 2, [1, 2])
b = a.copy()
c = a.copy()
assert a[0] == 1
del a[0, :]
assert a == SparseMatrix(0, 2, [])
del b[:, 1]
assert b == SparseMatrix(1, 1, [1])
# test_determinant
assert SparseMatrix(1, 1, [0]).det() == 0
assert SparseMatrix([[1]]).det() == 1
assert SparseMatrix(((-3, 2), (8, -5))).det() == -1
assert SparseMatrix(((x, 1), (y, 2*y))).det() == 2*x*y - y
assert SparseMatrix(( (1, 1, 1),
(1, 2, 3),
(1, 3, 6) )).det() == 1
assert SparseMatrix(( ( 3, -2, 0, 5),
(-2, 1, -2, 2),
( 0, -2, 5, 0),
( 5, 0, 3, 4) )).det() == -289
assert SparseMatrix(( ( 1, 2, 3, 4),
( 5, 6, 7, 8),
( 9, 10, 11, 12),
(13, 14, 15, 16) )).det() == 0
assert SparseMatrix(( (3, 2, 0, 0, 0),
(0, 3, 2, 0, 0),
(0, 0, 3, 2, 0),
(0, 0, 0, 3, 2),
(2, 0, 0, 0, 3) )).det() == 275
assert SparseMatrix(( (1, 0, 1, 2, 12),
(2, 0, 1, 1, 4),
(2, 1, 1, -1, 3),
(3, 2, -1, 1, 8),
(1, 1, 1, 0, 6) )).det() == -55
assert SparseMatrix(( (-5, 2, 3, 4, 5),
( 1, -4, 3, 4, 5),
( 1, 2, -3, 4, 5),
( 1, 2, 3, -2, 5),
( 1, 2, 3, 4, -1) )).det() == 11664
assert SparseMatrix(( ( 2, 7, -1, 3, 2),
( 0, 0, 1, 0, 1),
(-2, 0, 7, 0, 2),
(-3, -2, 4, 5, 3),
( 1, 0, 0, 0, 1) )).det() == 123
# test_slicing
m0 = sparse_eye(4)
assert m0[:3, :3] == sparse_eye(3)
assert m0[2:4, 0:2] == sparse_zeros(2)
m1 = SparseMatrix(3, 3, lambda i, j: i + j)
assert m1[0, :] == SparseMatrix(1, 3, (0, 1, 2))
assert m1[1:3, 1] == SparseMatrix(2, 1, (2, 3))
m2 = SparseMatrix(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]])
assert m2[:, -1] == SparseMatrix(4, 1, [3, 7, 11, 15])
assert m2[-2:, :] == SparseMatrix([[8, 9, 10, 11], [12, 13, 14, 15]])
assert SparseMatrix([[1, 2], [3, 4]])[[1], [1]] == Matrix([[4]])
# test_submatrix_assignment
m = sparse_zeros(4)
m[2:4, 2:4] = sparse_eye(2)
assert m == SparseMatrix([(0, 0, 0, 0),
(0, 0, 0, 0),
(0, 0, 1, 0),
(0, 0, 0, 1)])
assert len(m._smat) == 2
m[:2, :2] = sparse_eye(2)
assert m == sparse_eye(4)
m[:, 0] = SparseMatrix(4, 1, (1, 2, 3, 4))
assert m == SparseMatrix([(1, 0, 0, 0),
(2, 1, 0, 0),
(3, 0, 1, 0),
(4, 0, 0, 1)])
m[:, :] = sparse_zeros(4)
assert m == sparse_zeros(4)
m[:, :] = ((1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12), (13, 14, 15, 16))
assert m == SparseMatrix((( 1, 2, 3, 4),
( 5, 6, 7, 8),
( 9, 10, 11, 12),
(13, 14, 15, 16)))
m[:2, 0] = [0, 0]
assert m == SparseMatrix((( 0, 2, 3, 4),
( 0, 6, 7, 8),
( 9, 10, 11, 12),
(13, 14, 15, 16)))
# test_reshape
m0 = sparse_eye(3)
assert m0.reshape(1, 9) == SparseMatrix(1, 9, (1, 0, 0, 0, 1, 0, 0, 0, 1))
m1 = SparseMatrix(3, 4, lambda i, j: i + j)
assert m1.reshape(4, 3) == \
SparseMatrix([(0, 1, 2), (3, 1, 2), (3, 4, 2), (3, 4, 5)])
assert m1.reshape(2, 6) == \
SparseMatrix([(0, 1, 2, 3, 1, 2), (3, 4, 2, 3, 4, 5)])
# test_applyfunc
m0 = sparse_eye(3)
assert m0.applyfunc(lambda x: 2*x) == sparse_eye(3)*2
assert m0.applyfunc(lambda x: 0 ) == sparse_zeros(3)
# test_LUdecomp
testmat = SparseMatrix([[ 0, 2, 5, 3],
[ 3, 3, 7, 4],
[ 8, 4, 0, 2],
[-2, 6, 3, 4]])
L, U, p = testmat.LUdecomposition()
assert L.is_lower
assert U.is_upper
assert (L*U).permuteBkwd(p) - testmat == sparse_zeros(4)
testmat = SparseMatrix([[ 6, -2, 7, 4],
[ 0, 3, 6, 7],
[ 1, -2, 7, 4],
[-9, 2, 6, 3]])
L, U, p = testmat.LUdecomposition()
assert L.is_lower
assert U.is_upper
assert (L*U).permuteBkwd(p) - testmat == sparse_zeros(4)
M = Matrix(((1, x, 1), (2, y, 0), (y, 0, z)))
L, U, p = M.LUdecomposition()
assert L.is_lower
assert U.is_upper
assert (L*U).permuteBkwd(p) - M == sparse_zeros(3)
# test_LUsolve
A = SparseMatrix([[2, 3, 5],
[3, 6, 2],
[8, 3, 6]])
B = SparseMatrix(3, 1, [3, 7, 5])
b = A*B
soln = A.LUsolve(b)
assert soln == B
A = SparseMatrix([[0, -1, 2],
[5, 10, 7],
[8, 3, 4]])
B = SparseMatrix(3, 1, [-1, 2, 5])
b = A*B
soln = A.LUsolve(b)
assert soln == B
# test_inverse
A = sparse_eye(4)
assert A.inv() == sparse_eye(4)
assert A.inv(method='CH') == sparse_eye(4)
assert A.inv(method='LDL') == sparse_eye(4)
A = SparseMatrix([[2, 3, 5],
[3, 6, 2],
[7, 2, 6]])
Ainv = SparseMatrix(Matrix(A).inv())
assert A*Ainv == sparse_eye(3)
assert A.inv(method='CH') == Ainv
assert A.inv(method='LDL') == Ainv
A = SparseMatrix([[2, 3, 5],
[3, 6, 2],
[5, 2, 6]])
Ainv = SparseMatrix(Matrix(A).inv())
assert A*Ainv == sparse_eye(3)
assert A.inv(method='CH') == Ainv
assert A.inv(method='LDL') == Ainv
# test_cross
v1 = Matrix(1, 3, [1, 2, 3])
v2 = Matrix(1, 3, [3, 4, 5])
assert v1.cross(v2) == Matrix(1, 3, [-2, 4, -2])
assert v1.norm(2)**2 == 14
# conjugate
a = SparseMatrix(((1, 2 + I), (3, 4)))
assert a.C == SparseMatrix([
[1, 2 - I],
[3, 4]
])
# mul
assert a*Matrix(2, 2, [1, 0, 0, 1]) == a
assert a + Matrix(2, 2, [1, 1, 1, 1]) == SparseMatrix([
[2, 3 + I],
[4, 5]
])
assert a*0 == Matrix([[0, 0], [0, 0]])
# col join
assert a.col_join(sparse_eye(2)) == SparseMatrix([
[1, 2 + I],
[3, 4],
[1, 0],
[0, 1]
])
A = SparseMatrix(ones(3))
B = eye(3)
assert A.col_join(B) == Matrix([[1, 1, 1], [1, 1, 1], [1, 1, 1],
[1, 0, 0], [0, 1, 0], [0, 0, 1]])
# row join
A = SparseMatrix(((1, 0, 1), (0, 1, 0), (1, 1, 0)))
B = Matrix(((1, 0, 0), (0, 1, 0), (0, 0, 1)))
assert A.row_join(B) == Matrix([[1, 0, 1, 1, 0, 0],
[0, 1, 0, 0, 1, 0],
[1, 1, 0, 0, 0, 1]])
# symmetric
assert not a.is_symmetric(simplify=False)
assert sparse_eye(3).is_symmetric(simplify=False)
# test_cofactor
assert sparse_eye(3) == sparse_eye(3).cofactorMatrix()
test = SparseMatrix([[1, 3, 2], [2, 6, 3], [2, 3, 6]])
assert test.cofactorMatrix() == \
SparseMatrix([[27, -6, -6], [-12, 2, 3], [-3, 1, 0]])
test = SparseMatrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
assert test.cofactorMatrix() == \
SparseMatrix([[-3, 6, -3], [6, -12, 6], [-3, 6, -3]])
# test_jacobian
L = SparseMatrix(1, 2, [x**2*y, 2*y**2 + x*y])
syms = [x, y]
assert L.jacobian(syms) == Matrix([[2*x*y, x**2], [y, 4*y + x]])
L = SparseMatrix(1, 2, [x, x**2*y**3])
assert L.jacobian(syms) == SparseMatrix([[1, 0], [2*x*y**3, x**2*3*y**2]])
# test_QR
A = Matrix([[1, 2], [2, 3]])
Q, S = A.QRdecomposition()
R = Rational
assert Q == Matrix([
[ 5**R(-1, 2), (R(2)/5)*(R(1)/5)**R(-1, 2)],
[2*5**R(-1, 2), (-R(1)/5)*(R(1)/5)**R(-1, 2)]])
assert S == Matrix([
[5**R(1, 2), 8*5**R(-1, 2)],
[ 0, (R(1)/5)**R(1, 2)]])
assert Q*S == A
assert Q.T * Q == sparse_eye(2)
R = Rational
# test nullspace
# first test reduced row-ech form
M = SparseMatrix([[5, 7, 2, 1],
[1, 6, 2, -1]])
out, tmp = M.rref()
assert out == Matrix([[1, 0, -R(2)/23, R(13)/23],
[0, 1, R(8)/23, R(-6)/23]])
M = SparseMatrix([[ 1, 3, 0, 2, 6, 3, 1],
[-2, -6, 0, -2, -8, 3, 1],
[ 3, 9, 0, 0, 6, 6, 2],
[-1, -3, 0, 1, 0, 9, 3]])
out, tmp = M.rref()
assert out == Matrix([[1, 3, 0, 0, 2, 0, 0],
[0, 0, 0, 1, 2, 0, 0],
[0, 0, 0, 0, 0, 1, R(1)/3],
[0, 0, 0, 0, 0, 0, 0]])
# now check the vectors
basis = M.nullspace()
assert basis[0] == Matrix([-3, 1, 0, 0, 0, 0, 0])
assert basis[1] == Matrix([0, 0, 1, 0, 0, 0, 0])
assert basis[2] == Matrix([-2, 0, 0, -2, 1, 0, 0])
assert basis[3] == Matrix([0, 0, 0, 0, 0, R(-1)/3, 1])
# test eigen
sparse_eye3 = sparse_eye(3)
assert sparse_eye3.charpoly(x) == PurePoly(((x - 1)**3))
assert sparse_eye3.charpoly(y) == PurePoly(((y - 1)**3))
# test values
M = Matrix([( 0, 1, -1),
( 1, 1, 0),
(-1, 0, 1)])
vals = M.eigenvals()
assert sorted(vals) == [-1, 1, 2]
R = Rational
M = Matrix([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
assert M.eigenvects() == [(1, 3, [
Matrix([1, 0, 0]),
Matrix([0, 1, 0]),
Matrix([0, 0, 1])])]
M = Matrix([[5, 0, 2],
[3, 2, 0],
[0, 0, 1]])
assert M.eigenvects() == [(1, 1, [Matrix([R(-1)/2, R(3)/2, 1])]),
(2, 1, [Matrix([0, 1, 0])]),
(5, 1, [Matrix([1, 1, 0])])]
assert M.zeros(3, 5) == SparseMatrix(3, 5, {})
A = SparseMatrix(10, 10, {(0, 0): 18, (0, 9): 12, (1, 4): 18, (2, 7): 16, (3, 9): 12, (4, 2): 19, (5, 7): 16, (6, 2): 12, (9, 7): 18})
assert A.row_list() == [(0, 0, 18), (0, 9, 12), (1, 4, 18), (2, 7, 16), (3, 9, 12), (4, 2, 19), (5, 7, 16), (6, 2, 12), (9, 7, 18)]
assert A.col_list() == [(0, 0, 18), (4, 2, 19), (6, 2, 12), (1, 4, 18), (2, 7, 16), (5, 7, 16), (9, 7, 18), (0, 9, 12), (3, 9, 12)]
assert SparseMatrix.eye(2).nnz() == 2
M = SparseMatrix.eye(3)*2
M[1, 0] = -1
M.col_op(1, lambda v, i: v + 2*M[i, 0])
assert M == Matrix([[ 2, 4, 0], [-1, 0, 0], [ 0, 0, 2]])
M = SparseMatrix.zeros(3)
M.fill(1)
assert M == ones(3)
assert SparseMatrix(ones(0, 3)).tolist() == []
def test_eq():
A = SparseMatrix(((1, 2), (3, 4)))
assert A != 1
assert A != zeros(2, 1)
def test_transpose():
assert SparseMatrix(((1, 2), (3, 4))).transpose() == \
SparseMatrix(((1, 3), (2, 4)))
def test_trace():
assert SparseMatrix(((1, 2), (3, 4))).trace() == 5
assert SparseMatrix(((0, 0), (0, 4))).trace() == 4
def test_CL_RL():
assert SparseMatrix(((1, 2), (3, 4))).row_list() == \
[(0, 0, 1), (0, 1, 2), (1, 0, 3), (1, 1, 4)]
assert SparseMatrix(((1, 2), (3, 4))).col_list() == \
[(0, 0, 1), (1, 0, 3), (0, 1, 2), (1, 1, 4)]
def test_add():
assert SparseMatrix(((1, 0), (0, 1))) + SparseMatrix(((0, 1), (1, 0))) == \
SparseMatrix(((1, 1), (1, 1)))
a = SparseMatrix(100, 100, lambda i, j: int(j != 0 and i % j == 0))
b = SparseMatrix(100, 100, lambda i, j: int(i != 0 and j % i == 0))
assert (len(a._smat) + len(b._smat) - len((a + b)._smat) > 0)
def test_errors():
pytest.raises(ValueError, lambda: SparseMatrix(1.4, 2, lambda i, j: 0))
pytest.raises(ValueError, lambda: SparseMatrix(2, 2, 1))
pytest.raises(TypeError, lambda: SparseMatrix([1, 2, 3], [1, 2]))
pytest.raises(ValueError, lambda: SparseMatrix([[1, 2], [3, 4]])[(1, 2, 3)])
pytest.raises(IndexError, lambda: SparseMatrix([[1, 2], [3, 4]])[5])
pytest.raises(ValueError, lambda: SparseMatrix([[1, 2], [3, 4]])[1, 2, 3])
pytest.raises(TypeError,
lambda: SparseMatrix([[1, 2],
[3, 4]]).copyin_list([0, 1], set()))
pytest.raises(IndexError, lambda: SparseMatrix([[1, 2], [3, 4]])[1, 2])
pytest.raises(TypeError, lambda: SparseMatrix([1, 2, 3]).cross(1))
pytest.raises(IndexError, lambda: SparseMatrix(1, 2, [1, 2])[3])
pytest.raises(ShapeError,
lambda: SparseMatrix(1, 2,
[1, 2]) + SparseMatrix(2, 1, [2, 1]))
pytest.raises(IndexError, lambda: SparseMatrix([1, 2, 3])[3, 0])
pytest.raises(TypeError, lambda: SparseMatrix([1, 2, 3]).applyfunc(1))
pytest.raises(ValueError, lambda: SparseMatrix([1, 2, 3]).reshape(2, 2))
pytest.raises(ValueError,
lambda: SparseMatrix([[2, 3], [4, 1]]).cholesky())
pytest.raises(ValueError,
lambda: SparseMatrix([[2, 3], [4, 1]]).LDLdecomposition())
pytest.raises(ValueError, lambda: SparseMatrix([[2, 3], [4, 1]]).add(1))
pytest.raises(ShapeError,
lambda: SparseMatrix([[1, 2],
[3, 4]]).row_join(Matrix([[1, 2]])))
pytest.raises(ShapeError,
lambda: SparseMatrix([[1, 2],
[3, 4]]).col_join(Matrix([1, 2])))
pytest.raises(ShapeError,
lambda: SparseMatrix([[1, 2],
[3, 4]]).copyin_matrix([1, 0],
Matrix([1, 2])))
def test_len():
assert not SparseMatrix()
assert SparseMatrix() == SparseMatrix([])
assert SparseMatrix() == SparseMatrix([[]])
def test_sparse_zeros_sparse_eye():
assert SparseMatrix.eye(3) == eye(3, cls=SparseMatrix)
assert len(SparseMatrix.eye(3)._smat) == 3
assert SparseMatrix.zeros(3) == zeros(3, cls=SparseMatrix)
assert len(SparseMatrix.zeros(3)._smat) == 0
def test_copyin():
s = SparseMatrix(3, 3, {})
s[1, 0] = 1
assert s[:, 0] == SparseMatrix(Matrix([0, 1, 0]))
assert s[3] == 1
assert s[3: 4] == [1]
s[1, 1] = 42
assert s[1, 1] == 42
assert s[1, 1:] == SparseMatrix([[42, 0]])
s[1, 1:] = Matrix([[5, 6]])
assert s[1, :] == SparseMatrix([[1, 5, 6]])
s[1, 1:] = [[42, 43]]
assert s[1, :] == SparseMatrix([[1, 42, 43]])
s[0, 0] = 17
assert s[:, :1] == SparseMatrix([17, 1, 0])
s[0, 0] = [1, 1, 1]
assert s[:, 0] == SparseMatrix([1, 1, 1])
s[0, 0] = Matrix([1, 1, 1])
assert s[:, 0] == SparseMatrix([1, 1, 1])
s[0, 0] = SparseMatrix([1, 1, 1])
assert s[:, 0] == SparseMatrix([1, 1, 1])
def test_sparse_solve():
A = SparseMatrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11)))
assert A.cholesky() == Matrix([
[ 5, 0, 0],
[ 3, 3, 0],
[-1, 1, 3]])
assert A.cholesky() * A.cholesky().T == Matrix([
[25, 15, -5],
[15, 18, 0],
[-5, 0, 11]])
A = SparseMatrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11)))
L, D = A.LDLdecomposition()
assert 15*L == Matrix([
[15, 0, 0],
[ 9, 15, 0],
[-3, 5, 15]])
assert D == Matrix([
[25, 0, 0],
[ 0, 9, 0],
[ 0, 0, 9]])
assert L * D * L.T == A
A = SparseMatrix(((3, 0, 2), (0, 0, 1), (1, 2, 0)))
assert A.inv() * A == SparseMatrix(eye(3))
A = SparseMatrix([
[ 2, -1, 0],
[-1, 2, -1],
[ 0, 0, 2]])
ans = SparseMatrix([
[Rational(2, 3), Rational(1, 3), Rational(1, 6)],
[Rational(1, 3), Rational(2, 3), Rational(1, 3)],
[ 0, 0, Rational(1, 2)]])
assert A.inv(method='CH') == ans
assert A.inv(method='LDL') == ans
assert A * ans == SparseMatrix(eye(3))
s = A.solve(A[:, 0], 'LDL')
assert A*s == A[:, 0]
s = A.solve(A[:, 0], 'CH')
assert A*s == A[:, 0]
A = A.col_join(A)
s = A.solve_least_squares(A[:, 0], 'CH')
assert A*s == A[:, 0]
s = A.solve_least_squares(A[:, 0], 'LDL')
assert A*s == A[:, 0]
pytest.raises(ValueError, lambda: SparseMatrix([[1, 0, 1],
[0, 0, 1]]).solve([1, 1]))
pytest.raises(ValueError, lambda: SparseMatrix([[1, 0], [0, 0],
[2, 1]]).solve([1, 1, 1]))
def test_hermitian():
a = SparseMatrix([[0, I], [-I, 0]])
assert a.is_hermitian
a = SparseMatrix([[1, I], [-I, 1]])
assert a.is_hermitian
a[0, 0] = 2*I
assert a.is_hermitian is False
a[0, 0] = x
assert a.is_hermitian is None
a[0, 1] = a[1, 0]*I
assert a.is_hermitian is False
def test_fill():
a = SparseMatrix([[0, I], [-I, 0]])
a.fill(0)
assert a == Matrix([[0, 0], [0, 0]])
|
# -*- coding: utf-8 -*-
"""
Tool tests meant to be run with pytest.
Testing whether issue #596 has been repaired.
Note: Platform dependent test. Will only fail on Windows > NT. """
import time
from os import remove
from os.path import join
from moviepy.video.compositing.CompositeVideoClip import clips_array
from moviepy.video.io.VideoFileClip import VideoFileClip
from moviepy.video.VideoClip import ColorClip
from tests.test_helper import TMP_DIR
def test_release_of_file_via_close():
# Create a random video file.
red = ColorClip((256, 200), color=(255, 0, 0))
green = ColorClip((256, 200), color=(0, 255, 0))
blue = ColorClip((256, 200), color=(0, 0, 255))
red.fps = green.fps = blue.fps = 10
# Repeat this so we can see no conflicts.
for i in range(3):
# Get the name of a temporary file we can use.
local_video_filename = join(
TMP_DIR, "test_release_of_file_via_close_%s.mp4" % int(time.time())
)
clip = clips_array([[red, green, blue]]).with_duration(0.5)
clip.write_videofile(local_video_filename)
# Open it up with VideoFileClip.
video = VideoFileClip(local_video_filename)
video.close()
clip.close()
# Now remove the temporary file.
# This would fail on Windows if the file is still locked.
# This should succeed without exceptions.
remove(local_video_filename)
red.close()
green.close()
blue.close()
|
# Generated by Django 2.1 on 2019-02-14 14:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('report_builder', '0006_auto_20180413_0747'),
]
operations = [
migrations.AlterField(
model_name='filterfield',
name='filter_value',
field=models.CharField(blank=True, max_length=2000),
),
]
|
from pyautogui import *
import pyautogui
import time
import keyboard
import random
import win32api, win32con
px = 0
py = 0
class Tile():
def __init__(self,px,py):
self.x = px
self.y = py
def click(x,y):
win32api.SetCursorPos((x,y))
win32api.mouse_event(win32con.MOUSEEVENTF_LEFTDOWN,0,0)
time.sleep(0.1)
win32api.mouse_event(win32con.MOUSEEVENTF_LEFTUP,0,0)
tiles = int(input("How many tiles?\n"))
tilesp = []
for tile in range(tiles):
print("Press Q on tile " + str(tile))
while True:
if keyboard.read_key() == "q":
px = position().x
py = position().y
tilesp.append(Tile(px,py))
time.sleep(0.1)
break
print("Press Q on the color we want to snipe")
while True:
if keyboard.read_key() == "q":
px = position().x
py = position().y
color = pyautogui.pixel(px, py)
time.sleep(0.1)
break
print(color)
print("Hold K to snipe or J to reverse snipe! - Press P to stop the bot")
toggle = False
while True:
#if keyboard.read_key() == "l":
# toggle = not toggle
# print(toggle)
# time.sleep(0.1)
#if toggle:
# for tile in tilesp:
# if pyautogui.pixel(tile.x,tile.y) == color:
# click(tile.x,tile.y)
#else:
if keyboard.is_pressed("k") == True:
for tile in tilesp:
if pyautogui.pixel(tile.x,tile.y) == color:
click(tile.x,tile.y)
if keyboard.is_pressed("j") == True:
for tile in tilesp:
if pyautogui.pixel(tile.x,tile.y) != color:
click(tile.x,tile.y)
if keyboard.is_pressed("p") == True:
print("Exiting!")
break
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 10
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_9_0_0
from isi_sdk_9_0_0.models.cluster_node_hardware import ClusterNodeHardware # noqa: E501
from isi_sdk_9_0_0.rest import ApiException
class TestClusterNodeHardware(unittest.TestCase):
"""ClusterNodeHardware unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testClusterNodeHardware(self):
"""Test ClusterNodeHardware"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_9_0_0.models.cluster_node_hardware.ClusterNodeHardware() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Copyright (c) 2019-2021 Xenios SEZC
# https://www.veriblock.org
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC help output."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
import os
class HelpRpcTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.supports_cli = False
def run_test(self):
self.test_categories()
self.dump_help()
def test_categories(self):
node = self.nodes[0]
# wrong argument count
assert_raises_rpc_error(-1, 'help', node.help, 'foo', 'bar')
# invalid argument
assert_raises_rpc_error(-1, 'JSON value is not a string as expected', node.help, 0)
# help of unknown command
assert_equal(node.help('foo'), 'help: unknown command: foo')
# command titles
titles = [line[3:-3] for line in node.help().splitlines() if line.startswith('==')]
components = ['Blockchain', 'Control', 'Generating', 'Mining', 'Network', 'Pop_mining', 'Rawtransactions', 'Util']
if self.is_wallet_compiled():
components.append('Wallet')
if self.is_zmq_compiled():
components.append('Zmq')
assert_equal(titles, components)
def dump_help(self):
dump_dir = os.path.join(self.options.tmpdir, 'rpc_help_dump')
os.mkdir(dump_dir)
calls = [line.split(' ', 1)[0] for line in self.nodes[0].help().splitlines() if line and not line.startswith('==')]
for call in calls:
with open(os.path.join(dump_dir, call), 'w', encoding='utf-8') as f:
# Make sure the node can generate the help at runtime without crashing
f.write(self.nodes[0].help(call))
if __name__ == '__main__':
HelpRpcTest().main()
|
from importlib import import_module
import re
from copy import deepcopy
from ..utils.data_info import MixinInfo
from .column import Column
from .table import Table, QTable, has_info_class
from ..units.quantity import QuantityInfo
__construct_mixin_classes = ('astropy.time.core.Time',
'astropy.time.core.TimeDelta',
'astropy.units.quantity.Quantity',
'astropy.coordinates.angles.Latitude',
'astropy.coordinates.angles.Longitude',
'astropy.coordinates.angles.Angle',
'astropy.coordinates.distances.Distance',
'astropy.coordinates.earth.EarthLocation',
'astropy.coordinates.sky_coordinate.SkyCoord',
'astropy.table.table.NdarrayMixin')
class SerializedColumn(dict):
"""
    Subclass of dict that is used in the representation to contain the name
(and possible other info) for a mixin attribute (either primary data or an
array-like attribute) that is serialized as a column in the table.
Normally contains the single key ``name`` with the name of the column in the
table.
"""
pass
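# Illustrative example (the column name is hypothetical): a SkyCoord 'ra'
# attribute serialized under the table column 'skycoord.ra' would be recorded
# as SerializedColumn({'name': 'skycoord.ra'}).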
def _represent_mixin_as_column(col, name, new_cols, mixin_cols,
exclude_classes=()):
"""Convert a mixin column to a plain columns or a set of mixin columns."""
# If not a mixin, or if class in ``exclude_classes`` tuple then
# treat as a normal column. Excluded sub-classes must be explicitly
# specified.
if not has_info_class(col, MixinInfo) or col.__class__ in exclude_classes:
new_cols.append(col)
return
# Subtlety here is handling mixin info attributes. The basic list of such
# attributes is: 'name', 'unit', 'dtype', 'format', 'description', 'meta'.
# - name: handled directly [DON'T store]
# - unit: DON'T store if this is a parent attribute
# - dtype: captured in plain Column if relevant [DON'T store]
# - format: possibly irrelevant but settable post-object creation [DO store]
# - description: DO store
# - meta: DO store
info = {}
for attr, nontrivial, xform in (('unit', lambda x: x not in (None, ''), str),
('format', lambda x: x is not None, None),
('description', lambda x: x is not None, None),
('meta', lambda x: x, None)):
col_attr = getattr(col.info, attr)
if nontrivial(col_attr):
info[attr] = xform(col_attr) if xform else col_attr
obj_attrs = col.info._represent_as_dict()
ordered_keys = col.info._represent_as_dict_attrs
data_attrs = [key for key in ordered_keys if key in obj_attrs and
getattr(obj_attrs[key], 'shape', ())[:1] == col.shape[:1]]
for data_attr in data_attrs:
data = obj_attrs[data_attr]
if len(data_attrs) == 1 and not has_info_class(data, MixinInfo):
# For one non-mixin attribute, we need only one serialized column.
# We can store info there, and keep the column name as is.
new_cols.append(Column(data, name=name, **info))
obj_attrs[data_attr] = SerializedColumn({'name': name})
# Remove attributes that are already on the serialized column.
for attr in info:
if attr in obj_attrs:
del obj_attrs[attr]
else:
# New column name combines the old name and attribute
# (e.g. skycoord.ra, skycoord.dec).
new_name = name + '.' + data_attr
# TODO masking, MaskedColumn
if not has_info_class(data, MixinInfo):
new_cols.append(Column(data, name=new_name))
obj_attrs[data_attr] = SerializedColumn({'name': new_name})
else:
# recurse. This will define obj_attrs[new_name].
_represent_mixin_as_column(data, new_name, new_cols, obj_attrs)
obj_attrs[data_attr] = SerializedColumn(obj_attrs.pop(new_name))
# Strip out from info any attributes defined by the parent
for attr in col.info.attrs_from_parent:
if attr in info:
del info[attr]
if info:
obj_attrs['__info__'] = info
# Store the fully qualified class name
obj_attrs['__class__'] = col.__module__ + '.' + col.__class__.__name__
mixin_cols[name] = obj_attrs
def _represent_mixins_as_columns(tbl, exclude_classes=()):
"""
Convert any mixin columns to plain Column or MaskedColumn and
return a new table. Exclude any mixin columns in ``exclude_classes``,
which must be a tuple of classes.
"""
if not tbl.has_mixin_columns:
return tbl
mixin_cols = {}
new_cols = []
for col in tbl.itercols():
_represent_mixin_as_column(col, col.info.name, new_cols, mixin_cols,
exclude_classes=exclude_classes)
meta = deepcopy(tbl.meta)
meta['__serialized_columns__'] = mixin_cols
out = Table(new_cols, meta=meta, copy=False)
return out
def _construct_mixin_from_obj_attrs_and_info(obj_attrs, info):
cls_full_name = obj_attrs.pop('__class__')
# If this is a supported class then import the class and run
# the _construct_from_col method. Prevent accidentally running
# untrusted code by only importing known astropy classes.
if cls_full_name not in __construct_mixin_classes:
raise ValueError('unsupported class for construct {}'.format(cls_full_name))
mod_name, cls_name = re.match(r'(.+)\.(\w+)', cls_full_name).groups()
module = import_module(mod_name)
cls = getattr(module, cls_name)
for attr, value in info.items():
if attr in cls.info.attrs_from_parent:
obj_attrs[attr] = value
mixin = cls.info._construct_from_dict(obj_attrs)
for attr, value in info.items():
if attr not in obj_attrs:
setattr(mixin.info, attr, value)
return mixin
def _construct_mixin_from_columns(new_name, obj_attrs, out):
data_attrs_map = {}
for name, val in obj_attrs.items():
if isinstance(val, SerializedColumn):
if 'name' in val:
data_attrs_map[val['name']] = name
else:
_construct_mixin_from_columns(name, val, out)
data_attrs_map[name] = name
for name in data_attrs_map.values():
del obj_attrs[name]
    # Get the index at which to add the new column
idx = min(out.colnames.index(name) for name in data_attrs_map)
# Name is the column name in the table (e.g. "coord.ra") and
# data_attr is the object attribute name (e.g. "ra"). A different
# example would be a formatted time object that would have (e.g.)
# "time_col" and "value", respectively.
for name, data_attr in data_attrs_map.items():
col = out[name]
obj_attrs[data_attr] = col
del out[name]
info = obj_attrs.pop('__info__', {})
if len(data_attrs_map) == 1:
# col is the first and only serialized column; in that case, use info
# stored on the column.
for attr, nontrivial in (('unit', lambda x: x not in (None, '')),
('format', lambda x: x is not None),
('description', lambda x: x is not None),
('meta', lambda x: x)):
col_attr = getattr(col.info, attr)
if nontrivial(col_attr):
info[attr] = col_attr
info['name'] = new_name
col = _construct_mixin_from_obj_attrs_and_info(obj_attrs, info)
out.add_column(col, index=idx)
def _construct_mixins_from_columns(tbl):
if '__serialized_columns__' not in tbl.meta:
return tbl
# Don't know final output class but assume QTable so no columns get
# downgraded.
out = QTable(tbl, copy=False)
mixin_cols = out.meta.pop('__serialized_columns__')
for new_name, obj_attrs in mixin_cols.items():
_construct_mixin_from_columns(new_name, obj_attrs, out)
# If no quantity subclasses are in the output then output as Table.
# For instance ascii.read(file, format='ecsv') doesn't specify an
# output class and should return the minimal table class that
# represents the table file.
has_quantities = any(isinstance(col.info, QuantityInfo)
for col in out.itercols())
if not has_quantities:
out = Table(out, copy=False)
return out
|
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('posts.urls')),
path('accounts/', include('accounts.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 - 2019 Karlsruhe Institute of Technology - Steinbuch Centre for Computing
# This code is distributed under the MIT License
# Please, see the LICENSE file
#
"""
Created on Sat Aug 10 08:47:51 2019
@author: vykozlov
"""
import unittest
import semseg_vaihingen.models.deepaas_api as deepaas_api
class TestModelMethods(unittest.TestCase):
def setUp(self):
self.meta = deepaas_api.get_metadata()
def test_model_metadata_type(self):
"""
Test that get_metadata() returns dict
"""
self.assertTrue(type(self.meta) is dict)
def test_model_metadata_values(self):
"""
Test that get_metadata() returns right values (subset)
"""
self.assertEqual(self.meta['Name'].replace('-','_'),
'semseg_vaihingen'.replace('-','_'))
self.assertEqual(self.meta['Author'], 'G.Cavallaro (FZJ), M.Goetz (KIT), V.Kozlov (KIT), A.Grupp (KIT)')
self.assertEqual(self.meta['Author-email'], 'valentin.kozlov@kit.edu')
if __name__ == '__main__':
unittest.main()
|
"""
Copyright 2013 Steven Diamond, 2017 Akshay Agrawal
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cvxpy import settings as s
from cvxpy import error
from cvxpy.problems.objective import Minimize, Maximize
from cvxpy.reductions.chain import Chain
from cvxpy.reductions.dgp2dcp.dgp2dcp import Dgp2Dcp
from cvxpy.reductions.dqcp2dcp import dqcp2dcp
from cvxpy.reductions.eval_params import EvalParams
from cvxpy.reductions.flip_objective import FlipObjective
from cvxpy.reductions.solvers.solving_chain import construct_solving_chain
from cvxpy.interface.matrix_utilities import scalar_value
from cvxpy.reductions.solvers import bisection
from cvxpy.reductions.solvers import defines as slv_def
from cvxpy.utilities.deterministic import unique_list
import cvxpy.utilities.performance_utils as perf
from cvxpy.constraints import Equality, Inequality, NonPos, Zero, NonNeg
import cvxpy.utilities as u
from collections import namedtuple
import numpy as np
import time
SolveResult = namedtuple(
'SolveResult',
['opt_value', 'status', 'primal_values', 'dual_values'])
class Cache(object):
def __init__(self):
self.key = None
self.solving_chain = None
self.param_prog = None
self.inverse_data = None
def invalidate(self):
self.key = None
self.solving_chain = None
self.param_prog = None
self.inverse_data = None
def make_key(self, solver, gp):
return (solver, gp)
def gp(self):
return self.key is not None and self.key[1]
class Problem(u.Canonical):
"""A convex optimization problem.
Problems are immutable, save for modification through the specification
of :class:`~cvxpy.expressions.constants.parameters.Parameter`
Arguments
---------
objective : Minimize or Maximize
The problem's objective.
constraints : list
The constraints on the problem variables.
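    Example
    -------
    A minimal sketch (the data, variable, and constraint below are
    illustrative)::
        import cvxpy as cp
        import numpy as np
        x = cp.Variable(3)
        objective = cp.Minimize(cp.sum_squares(np.eye(3) @ x - np.ones(3)))
        problem = cp.Problem(objective, [x >= 0])
        problem.solve()  # populates problem.value and problem.status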
"""
# The solve methods available.
REGISTERED_SOLVE_METHODS = {}
def __init__(self, objective, constraints=None):
if constraints is None:
constraints = []
# Check that objective is Minimize or Maximize.
if not isinstance(objective, (Minimize, Maximize)):
raise error.DCPError("Problem objective must be Minimize or Maximize.")
# Constraints and objective are immutable.
self._objective = objective
self._constraints = [c for c in constraints]
self._value = None
self._status = None
self._solution = None
self._cache = Cache()
self._solver_cache = {}
# Information about the shape of the problem and its constituent parts
self._size_metrics = None
# Benchmarks reported by the solver:
self._solver_stats = None
self.args = [self._objective, self._constraints]
@property
def value(self):
"""float : The value from the last time the problem was solved
(or None if not solved).
"""
if self._value is None:
return None
else:
return scalar_value(self._value)
@property
def status(self):
"""str : The status from the last time the problem was solved; one
of optimal, infeasible, or unbounded (with or without
suffix inaccurate).
"""
return self._status
@property
def solution(self):
"""Solution : The solution from the last time the problem was solved.
"""
return self._solution
@property
def objective(self):
"""Minimize or Maximize : The problem's objective.
Note that the objective cannot be reassigned after creation,
and modifying the objective after creation will result in
undefined behavior.
"""
return self._objective
@property
def constraints(self):
"""A shallow copy of the problem's constraints.
Note that constraints cannot be reassigned, appended to, or otherwise
modified after creation, except through parameters.
"""
return self._constraints[:]
@perf.compute_once
def is_dcp(self, dpp=False):
"""Does the problem satisfy DCP rules?
Arguments
---------
dpp : bool, optional
If True, enforce the disciplined parametrized programming (DPP)
ruleset; only relevant when the problem involves Parameters.
DPP is a mild restriction of DCP. When a problem involving
Parameters is DPP, subsequent solves can be much faster than
the first one. For more information, consult the documentation at
https://www.cvxpy.org/tutorial/advanced/index.html#disciplined-parametrized-programming
Returns
-------
bool
True if the Expression is DCP, False otherwise.
"""
return all(
expr.is_dcp(dpp) for expr in self.constraints + [self.objective])
@perf.compute_once
def is_dgp(self, dpp=False):
"""Does the problem satisfy DGP rules?
Arguments
---------
dpp : bool, optional
If True, enforce the disciplined parametrized programming (DPP)
ruleset; only relevant when the problem involves Parameters.
DPP is a mild restriction of DGP. When a problem involving
Parameters is DPP, subsequent solves can be much faster than
the first one. For more information, consult the documentation at
https://www.cvxpy.org/tutorial/advanced/index.html#disciplined-parametrized-programming
Returns
-------
bool
True if the Expression is DGP, False otherwise.
"""
return all(
expr.is_dgp(dpp) for expr in self.constraints + [self.objective])
@perf.compute_once
def is_dqcp(self):
"""Does the problem satisfy the DQCP rules?
"""
return all(
expr.is_dqcp() for expr in self.constraints + [self.objective])
@perf.compute_once
def is_dpp(self, context='dcp'):
"""Does the problem satisfy DPP rules?
        DPP is a mild restriction of DCP and DGP. When a problem involving
Parameters is DPP, subsequent solves can be much faster than
the first one. For more information, consult the documentation at
https://www.cvxpy.org/tutorial/advanced/index.html#disciplined-parametrized-programming
Arguments
---------
context : str
Whether to check DPP-compliance for DCP or DGP; ``context`` should
be either ``'dcp'`` or ``'dgp'``. Calling ``problem.is_dpp('dcp')``
is equivalent to ``problem.is_dcp(dpp=True)``, and
            ``problem.is_dpp('dgp')`` is equivalent to
            ``problem.is_dgp(dpp=True)``.
Returns
-------
bool
Whether the problem satisfies the DPP rules.
"""
if context.lower() == 'dcp':
return self.is_dcp(dpp=True)
elif context.lower() == 'dgp':
return self.is_dgp(dpp=True)
else:
raise ValueError("Unsupported context ", context)
@perf.compute_once
def is_qp(self):
"""Is problem a quadratic program?
"""
for c in self.constraints:
if not (isinstance(c, (Equality, Zero)) or c.args[0].is_pwl()):
return False
for var in self.variables():
if var.is_psd() or var.is_nsd():
return False
return (self.is_dcp() and self.objective.args[0].is_qpwa())
@perf.compute_once
def is_mixed_integer(self):
return any(v.attributes['boolean'] or v.attributes['integer']
for v in self.variables())
@perf.compute_once
def variables(self):
"""Accessor method for variables.
Returns
-------
list of :class:`~cvxpy.expressions.variable.Variable`
A list of the variables in the problem.
"""
vars_ = self.objective.variables()
for constr in self.constraints:
vars_ += constr.variables()
return unique_list(vars_)
@perf.compute_once
def parameters(self):
"""Accessor method for parameters.
Returns
-------
list of :class:`~cvxpy.expressions.constants.parameter.Parameter`
A list of the parameters in the problem.
"""
params = self.objective.parameters()
for constr in self.constraints:
params += constr.parameters()
return unique_list(params)
@perf.compute_once
def constants(self):
"""Accessor method for constants.
Returns
-------
list of :class:`~cvxpy.expressions.constants.constant.Constant`
A list of the constants in the problem.
"""
const_dict = {}
constants_ = self.objective.constants()
for constr in self.constraints:
constants_ += constr.constants()
# Note that numpy matrices are not hashable, so we use the built-in
# function "id"
const_dict = {id(constant): constant for constant in constants_}
return list(const_dict.values())
def atoms(self):
"""Accessor method for atoms.
Returns
-------
list of :class:`~cvxpy.atoms.Atom`
A list of the atom types in the problem; note that this list
contains classes, not instances.
"""
atoms = self.objective.atoms()
for constr in self.constraints:
atoms += constr.atoms()
return unique_list(atoms)
@property
def size_metrics(self):
""":class:`~cvxpy.problems.problem.SizeMetrics` : Information about the problem's size.
"""
if self._size_metrics is None:
self._size_metrics = SizeMetrics(self)
return self._size_metrics
@property
def solver_stats(self):
""":class:`~cvxpy.problems.problem.SolverStats` : Information returned by the solver.
"""
return self._solver_stats
def solve(self, *args, **kwargs):
"""Solves the problem using the specified method.
Populates the :code:`status` and :code:`value` attributes on the
problem object as a side-effect.
Arguments
---------
solver : str, optional
The solver to use. For example, 'ECOS', 'SCS', or 'OSQP'.
verbose : bool, optional
Overrides the default of hiding solver output.
gp : bool, optional
If True, parses the problem as a disciplined geometric program
instead of a disciplined convex program.
qcp : bool, optional
If True, parses the problem as a disciplined quasiconvex program
instead of a disciplined convex program.
requires_grad : bool, optional
Makes it possible to compute gradients of a solution with respect to
Parameters by calling ``problem.backward()`` after solving, or to
compute perturbations to the variables given perturbations to Parameters by
calling ``problem.derivative()``.
Gradients are only supported for DCP and DGP problems, not
quasiconvex problems. When computing gradients (i.e., when
this argument is True), the problem must satisfy the DPP rules.
enforce_dpp : bool, optional
When True, a DPPError will be thrown when trying to solve a non-DPP
problem (instead of just a warning). Only relevant for problems
involving Parameters. Defaults to False.
method : function, optional
A custom solve method to use.
kwargs : keywords, optional
Additional solver specific arguments. See Notes below.
Notes
------
CVXPY interfaces with a wide range of solvers; the algorithms used by these solvers
have arguments relating to stopping criteria, and strategies to improve solution quality.
There is no one choice of arguments which is perfect for every problem. If you are not
getting satisfactory results from a solver, you can try changing its arguments. The
exact way this is done depends on the specific solver. Here are some examples:
prob.solve(solver='ECOS', abstol=1e-6)
prob.solve(solver='OSQP', max_iter=10000).
mydict = {"MSK_DPAR_INTPNT_CO_TOL_NEAR_REL": 10}
prob.solve(solver='MOSEK', mosek_params=mydict).
You should refer to CVXPY's web documentation for details on how to pass solver
solver arguments, available at
https://www.cvxpy.org/tutorial/advanced/index.html#setting-solver-options
Returns
-------
float
The optimal value for the problem, or a string indicating
why the problem could not be solved.
Raises
------
cvxpy.error.DCPError
Raised if the problem is not DCP and `gp` is False.
cvxpy.error.DGPError
Raised if the problem is not DGP and `gp` is True.
cvxpy.error.SolverError
Raised if no suitable solver exists among the installed solvers,
or if an unanticipated error is encountered.
"""
func_name = kwargs.pop("method", None)
if func_name is not None:
solve_func = Problem.REGISTERED_SOLVE_METHODS[func_name]
else:
solve_func = Problem._solve
return solve_func(self, *args, **kwargs)
@classmethod
def register_solve(cls, name, func):
"""Adds a solve method to the Problem class.
Arguments
---------
name : str
The keyword for the method.
func : function
The function that executes the solve method. This function must
take as its first argument the problem instance to solve.
"""
cls.REGISTERED_SOLVE_METHODS[name] = func
def get_problem_data(self, solver, gp=False, enforce_dpp=False):
"""Returns the problem data used in the call to the solver.
When a problem is solved, CVXPY creates a chain of reductions enclosed
in a :class:`~cvxpy.reductions.solvers.solving_chain.SolvingChain`,
and compiles it to some low-level representation that is
compatible with the targeted solver. This method returns that low-level
representation.
For some solving chains, this low-level representation is a dictionary
that contains exactly those arguments that were supplied to the solver;
however, for other solving chains, the data is an intermediate
representation that is compiled even further by the solver interfaces.
A solution to the equivalent low-level problem can be obtained via the
data by invoking the `solve_via_data` method of the returned solving
chain, a thin wrapper around the code external to CVXPY that further
processes and solves the problem. Invoke the unpack_results method
to recover a solution to the original problem.
For example:
::
objective = ...
constraints = ...
problem = cp.Problem(objective, constraints)
data, chain, inverse_data = problem.get_problem_data(cp.SCS)
# calls SCS using `data`
soln = chain.solve_via_data(problem, data)
# unpacks the solution returned by SCS into `problem`
problem.unpack_results(soln, chain, inverse_data)
Alternatively, the `data` dictionary returned by this method
contains enough information to bypass CVXPY and call the solver
directly.
For example:
::
problem = cp.Problem(objective, constraints)
data, _, _ = problem.get_problem_data(cp.SCS)
import scs
probdata = {
'A': data['A'],
'b': data['b'],
'c': data['c'],
}
cone_dims = data['dims']
cones = {
"f": cone_dims.zero,
"l": cone_dims.nonpos,
"q": cone_dims.soc,
"ep": cone_dims.exp,
"s": cone_dims.psd,
}
soln = scs.solve(probdata, cones)
The structure of the data dict that CVXPY returns depends on the
solver. For details, consult the solver interfaces in
`cvxpy/reductions/solvers`.
Arguments
---------
solver : str
The solver the problem data is for.
gp : bool, optional
If True, then parses the problem as a disciplined geometric program
instead of a disciplined convex program.
enforce_dpp : bool, optional
When True, a DPPError will be thrown when trying to parse a non-DPP
problem (instead of just a warning). Defaults to False.
Returns
-------
dict or object
The lowest-level representation of the problem.
SolvingChain
The solving chain that created the data.
list
The inverse data generated by the chain.
"""
key = self._cache.make_key(solver, gp)
if key != self._cache.key:
self._cache.invalidate()
solving_chain = self._construct_chain(
solver=solver, gp=gp, enforce_dpp=enforce_dpp)
self._cache.key = key
self._cache.solving_chain = solving_chain
self._solver_cache = {}
else:
solving_chain = self._cache.solving_chain
if self._cache.param_prog is not None:
# fast path, bypasses application of reductions
if gp:
dgp2dcp = self._cache.solving_chain.get(Dgp2Dcp)
# Parameters in the param cone prog are the logs
# of parameters in the original problem (with one exception:
# parameters appearing as exponents (in power and gmatmul
# atoms) are unchanged).
old_params_to_new_params = dgp2dcp.canon_methods._parameters
for param in self.parameters():
if param in old_params_to_new_params:
old_params_to_new_params[param].value = np.log(
param.value)
data, solver_inverse_data = solving_chain.solver.apply(
self._cache.param_prog)
inverse_data = self._cache.inverse_data + [solver_inverse_data]
else:
data, inverse_data = solving_chain.apply(self)
safe_to_cache = (
isinstance(data, dict)
and s.PARAM_PROB in data
and not any(isinstance(reduction, EvalParams)
for reduction in solving_chain.reductions)
)
if safe_to_cache:
self._cache.param_prog = data[s.PARAM_PROB]
# the last datum in inverse_data corresponds to the solver,
# so we shouldn't cache it
self._cache.inverse_data = inverse_data[:-1]
return data, solving_chain, inverse_data
def _find_candidate_solvers(self,
solver=None,
gp=False):
"""
Find candidate solvers for the current problem. If solver
is not None, checks whether the specified solver is compatible
with the problem.
Arguments
---------
solver : string
The name of the solver with which to solve the problem. If no
solver is supplied (i.e., if solver is None), then the targeted
solver may be any of those that are installed. If the problem
is variable-free, then this parameter is ignored.
gp : bool
If True, the problem is parsed as a Disciplined Geometric Program
instead of as a Disciplined Convex Program.
Returns
-------
dict
A dictionary of compatible solvers divided into `qp_solvers`
and `conic_solvers`.
Raises
------
cvxpy.error.SolverError
Raised if the problem is not DCP and `gp` is False.
cvxpy.error.DGPError
Raised if the problem is not DGP and `gp` is True.
"""
candidates = {'qp_solvers': [],
'conic_solvers': []}
if solver is not None:
if solver not in slv_def.INSTALLED_SOLVERS:
raise error.SolverError("The solver %s is not installed." % solver)
if solver in slv_def.CONIC_SOLVERS:
candidates['conic_solvers'] += [solver]
if solver in slv_def.QP_SOLVERS:
candidates['qp_solvers'] += [solver]
else:
candidates['qp_solvers'] = [s for s in slv_def.INSTALLED_SOLVERS
if s in slv_def.QP_SOLVERS]
candidates['conic_solvers'] = [s for s in slv_def.INSTALLED_SOLVERS
if s in slv_def.CONIC_SOLVERS]
# If gp=True, only conic solvers may be used.
if gp:
if solver is not None and solver not in slv_def.CONIC_SOLVERS:
raise error.SolverError(
"When `gp=True`, `solver` must be a conic solver "
"(received '%s'); try calling " % solver +
" `solve()` with `solver=cvxpy.ECOS`."
)
elif solver is None:
candidates['qp_solvers'] = [] # No QP solvers allowed
if self.is_mixed_integer():
if len(slv_def.INSTALLED_MI_SOLVERS) == 0:
msg = """
CVXPY needs additional software (a `mixed-integer solver`) to handle this model.
The web documentation
https://www.cvxpy.org/tutorial/advanced/index.html#mixed-integer-programs
reviews open-source and commercial options for mixed-integer solvers.
Quick fix: if you install the python package CVXOPT (pip install cvxopt),
then CVXPY can use the open-source mixed-integer solver `GLPK`.
"""
raise error.SolverError(msg)
candidates['qp_solvers'] = [
s for s in candidates['qp_solvers']
if slv_def.SOLVER_MAP_QP[s].MIP_CAPABLE]
candidates['conic_solvers'] = [
s for s in candidates['conic_solvers']
if slv_def.SOLVER_MAP_CONIC[s].MIP_CAPABLE]
if not candidates['conic_solvers'] and \
not candidates['qp_solvers']:
raise error.SolverError(
"Problem is mixed-integer, but candidate "
"QP/Conic solvers (%s) are not MIP-capable." %
(candidates['qp_solvers'] +
candidates['conic_solvers']))
return candidates
def _construct_chain(self, solver=None, gp=False, enforce_dpp=False):
"""
Construct the chains required to reformulate and solve the problem.
In particular, this function
1. finds the candidate solvers, and
2. constructs the solving chain that performs the numeric
   reductions and solves the problem.
Arguments
---------
solver : str, optional
The solver to use. Defaults to ECOS.
gp : bool, optional
If True, the problem is parsed as a Disciplined Geometric Program
instead of as a Disciplined Convex Program.
enforce_dpp : bool, optional
Whether to error on DPP violations.
Returns
-------
A solving chain
"""
candidate_solvers = self._find_candidate_solvers(solver=solver, gp=gp)
return construct_solving_chain(self, candidate_solvers, gp=gp,
enforce_dpp=enforce_dpp)
def _invalidate_cache(self):
self._cache_key = None
self._solving_chain = None
self._param_prog = None
self._inverse_data = None
def _solve(self,
solver=None,
warm_start=True,
verbose=False,
gp=False, qcp=False, requires_grad=False, enforce_dpp=False, **kwargs):
"""Solves a DCP compliant optimization problem.
Saves the values of primal and dual variables in the variable
and constraint objects, respectively.
Arguments
---------
solver : str, optional
The solver to use. Defaults to ECOS.
warm_start : bool, optional
Should the previous solver result be used to warm start?
verbose : bool, optional
Overrides the default of hiding solver output.
gp : bool, optional
If True, parses the problem as a disciplined geometric program.
qcp : bool, optional
If True, parses the problem as a disciplined quasiconvex program.
requires_grad : bool, optional
Makes it possible to compute gradients with respect to
parameters by calling `backward()` after solving, or to compute
perturbations to the variables by calling `derivative()`. When
True, the solver must be SCS, and dqcp must be False.
A DPPError is thrown when the problem is not DPP.
enforce_dpp : bool, optional
When True, a DPPError will be thrown when trying to solve a non-DPP
problem (instead of just a warning). Defaults to False.
kwargs : dict, optional
A dict of options that will be passed to the specific solver.
In general, these options will override any default settings
imposed by cvxpy.
Returns
-------
float
The optimal value for the problem, or a string indicating
why the problem could not be solved.
"""
for parameter in self.parameters():
if parameter.value is None:
raise error.ParameterError(
"A Parameter (whose name is '%s') does not have a value "
"associated with it; all Parameter objects must have "
"values before solving a problem." % parameter.name())
if requires_grad:
dpp_context = 'dgp' if gp else 'dcp'
if qcp:
raise ValueError("Cannot compute gradients of DQCP problems.")
elif not self.is_dpp(dpp_context):
raise error.DPPError("Problem is not DPP (when requires_grad "
"is True, problem must be DPP).")
elif solver is not None and solver not in [s.SCS, s.DIFFCP]:
raise ValueError("When requires_grad is True, the only "
"supported solver is SCS "
"(received %s)." % solver)
elif s.DIFFCP not in slv_def.INSTALLED_SOLVERS:
raise ImportError(
"The Python package diffcp must be installed to "
"differentiate through problems. Please follow the "
"installation instructions at "
"https://github.com/cvxgrp/diffcp")
else:
solver = s.DIFFCP
else:
if gp and qcp:
raise ValueError("At most one of `gp` and `qcp` can be True.")
if qcp and not self.is_dcp():
if not self.is_dqcp():
raise error.DQCPError("The problem is not DQCP.")
reductions = [dqcp2dcp.Dqcp2Dcp()]
if type(self.objective) == Maximize:
reductions = [FlipObjective()] + reductions
chain = Chain(problem=self, reductions=reductions)
soln = bisection.bisect(
chain.reduce(), solver=solver, verbose=verbose, **kwargs)
self.unpack(chain.retrieve(soln))
return self.value
data, solving_chain, inverse_data = self.get_problem_data(
solver, gp, enforce_dpp)
solution = solving_chain.solve_via_data(
self, data, warm_start, verbose, kwargs)
self.unpack_results(solution, solving_chain, inverse_data)
return self.value
def backward(self):
"""Compute the gradient of a solution with respect to Parameters.
This method differentiates through the solution map of the problem,
obtaining the gradient of a solution with respect to the Parameters.
In other words, it calculates the sensitivities of the Parameters
with respect to perturbations in the optimal Variable values. This
can be useful for integrating CVXPY into automatic differentiation
toolkits.
``backward()`` populates the ``gradient`` attribute of each Parameter
in the problem as a side-effect. It can only be called after calling
``solve()`` with ``requires_grad=True``.
Below is a simple example:
::
import cvxpy as cp
import numpy as np
p = cp.Parameter()
x = cp.Variable()
quadratic = cp.square(x - 2 * p)
problem = cp.Problem(cp.Minimize(quadratic), [x >= 0])
p.value = 3.0
problem.solve(requires_grad=True, eps=1e-10)
# backward() populates the gradient attribute of the parameters
problem.backward()
# Because x* = 2 * p, dx*/dp = 2
np.testing.assert_allclose(p.gradient, 2.0)
In the above example, the gradient could easily be computed by hand.
``backward()`` is useful because, for almost all problems, the
gradient cannot be computed analytically.
This method can be used to differentiate through any DCP or DGP
problem, as long as the problem is DPP compliant (i.e.,
``problem.is_dcp(dpp=True)`` or ``problem.is_dgp(dpp=True)`` evaluates to
``True``).
This method uses the chain rule to evaluate the gradients of a
scalar-valued function of the Variables with respect to the Parameters.
For example, let x be a variable and p a Parameter; x and p might be
scalars, vectors, or matrices. Let f be a scalar-valued function, with
z = f(x). Then this method computes dz/dp = (dz/dx) (dx/dp). dz/dx
is chosen as the all-ones vector by default, corresponding to
choosing f to be the sum function. You can specify a custom value for
dz/dx by setting the ``gradient`` attribute on your variables. For example,
::
import cvxpy as cp
import numpy as np
b = cp.Parameter()
x = cp.Variable()
quadratic = cp.square(x - 2 * b)
problem = cp.Problem(cp.Minimize(quadratic), [x >= 0])
b.value = 3.
problem.solve(requires_grad=True, eps=1e-10)
x.gradient = 4.
problem.backward()
# dz/dp = dz/dx dx/dp = 4. * 2. == 8.
np.testing.assert_allclose(b.gradient, 8.)
The ``gradient`` attribute on a variable can also be interpreted as a
perturbation to its optimal value.
Raises
------
ValueError
if solve was not called with ``requires_grad=True``
SolverError
if the problem is infeasible or unbounded
"""
if s.DIFFCP not in self._solver_cache:
raise ValueError("backward can only be called after calling "
"solve with `requires_grad=True`")
elif self.status not in s.SOLUTION_PRESENT:
raise error.SolverError("Backpropagating through "
"infeasible/unbounded problems is not "
"yet supported. Please file an issue on "
"Github if you need this feature.")
# TODO(akshayka): Backpropagate through dual variables as well.
backward_cache = self._solver_cache[s.DIFFCP]
DT = backward_cache["DT"]
zeros = np.zeros(backward_cache["s"].shape)
del_vars = {}
gp = self._cache.gp()
for variable in self.variables():
if variable.gradient is None:
del_vars[variable.id] = np.ones(variable.shape)
else:
del_vars[variable.id] = np.asarray(variable.gradient,
dtype=np.float64)
if gp:
# x_gp = exp(x_cone_program),
# dx_gp/d x_cone_program = exp(x_cone_program) = x_gp
del_vars[variable.id] *= variable.value
dx = self._cache.param_prog.split_adjoint(del_vars)
start = time.time()
dA, db, dc = DT(dx, zeros, zeros)
end = time.time()
backward_cache['DT_TIME'] = end - start
dparams = self._cache.param_prog.apply_param_jac(dc, -dA, db)
if not gp:
for param in self.parameters():
param.gradient = dparams[param.id]
else:
dgp2dcp = self._cache.solving_chain.get(Dgp2Dcp)
old_params_to_new_params = dgp2dcp.canon_methods._parameters
for param in self.parameters():
# Note: if param is an exponent in a power or gmatmul atom,
# then the parameter passes through unchanged to the DCP
# program; if the param is also used elsewhere (not as an
# exponent), then param will also be in
# old_params_to_new_params. Therefore, param.gradient =
# dparams[param.id] (or 0) + 1/param*dparams[new_param.id]
#
# Note that param.id is in dparams if and only if
# param was used as an exponent (because this means that
# the parameter entered the DCP problem unchanged.)
grad = 0.0 if param.id not in dparams else dparams[param.id]
if param in old_params_to_new_params:
new_param = old_params_to_new_params[param]
# new_param.value == log(param), apply chain rule
grad += (1.0 / param.value) * dparams[new_param.id]
param.gradient = grad
def derivative(self):
"""Apply the derivative of the solution map to perturbations in the Parameters
This method applies the derivative of the solution map to perturbations
in the Parameters to obtain perturbations in the optimal values of the
Variables. In other words, it tells you how the optimal values of the
Variables would be changed by small changes to the Parameters.
You can specify perturbations in a Parameter by setting its ``delta``
attribute (if unspecified, the perturbation defaults to 0).
This method populates the ``delta`` attribute of the Variables as a
side-effect.
This method can only be called after calling ``solve()`` with
``requires_grad=True``. It is compatible with both DCP and DGP
problems (that are also DPP-compliant).
Below is a simple example:
::
import cvxpy as cp
import numpy as np
p = cp.Parameter()
x = cp.Variable()
quadratic = cp.square(x - 2 * p)
problem = cp.Problem(cp.Minimize(quadratic), [x >= 0])
p.value = 3.0
problem.solve(requires_grad=True, eps=1e-10)
# derivative() populates the delta attribute of the variables
problem.derivative()
p.delta = 1e-3
# Because x* = 2 * p, dx*/dp = 2, so (dx*/dp)(p.delta) == 2e-3
np.testing.assert_allclose(x.delta, 2e-3)
Raises
------
ValueError
if solve was not called with ``requires_grad=True``
SolverError
if the problem is infeasible or unbounded
"""
if s.DIFFCP not in self._solver_cache:
raise ValueError("derivative can only be called after calling "
"solve with `requires_grad=True`")
elif self.status not in s.SOLUTION_PRESENT:
raise ValueError("Differentiating through infeasible/unbounded "
"problems is not yet supported. Please file an "
"issue on Github if you need this feature.")
# TODO(akshayka): Forward differentiate dual variables as well
backward_cache = self._solver_cache[s.DIFFCP]
param_prog = self._cache.param_prog
D = backward_cache["D"]
param_deltas = {}
gp = self._cache.gp()
if gp:
dgp2dcp = self._cache.solving_chain.get(Dgp2Dcp)
if not self.parameters():
for variable in self.variables():
variable.delta = np.zeros(variable.shape)
return
for param in self.parameters():
delta = param.delta if param.delta is not None else np.zeros(param.shape)
if gp:
if param in dgp2dcp.canon_methods._parameters:
new_param_id = dgp2dcp.canon_methods._parameters[param].id
else:
new_param_id = param.id
param_deltas[new_param_id] = (
1.0/param.value * np.asarray(delta, dtype=np.float64))
if param.id in param_prog.param_id_to_col:
# here, param generated a new parameter and also
# passed through to the param cone prog unchanged
# (because it was an exponent of a power)
param_deltas[param.id] = np.asarray(delta,
dtype=np.float64)
else:
param_deltas[param.id] = np.asarray(delta, dtype=np.float64)
dc, _, dA, db = param_prog.apply_parameters(param_deltas,
zero_offset=True)
start = time.time()
dx, _, _ = D(-dA, db, dc)
end = time.time()
backward_cache['D_TIME'] = end - start
dvars = param_prog.split_solution(
dx, [v.id for v in self.variables()])
for variable in self.variables():
variable.delta = dvars[variable.id]
if gp:
# x_gp = exp(x_cone_program),
# dx_gp/d x_cone_program = exp(x_cone_program) = x_gp
variable.delta *= variable.value
def _clear_solution(self):
for v in self.variables():
v.save_value(None)
for c in self.constraints:
for dv in c.dual_variables:
dv.save_value(None)
self._value = None
self._status = None
self._solution = None
def unpack(self, solution):
"""Updates the problem state given a Solution.
Updates problem.status, problem.value and value of primal and dual
variables. If solution.status is in cvxpy.settings.ERROR, this method
is a no-op.
Arguments
---------
solution : cvxpy.Solution
A Solution object.
Raises
------
ValueError
If the solution object has an invalid status
"""
if solution.status in s.SOLUTION_PRESENT:
for v in self.variables():
v.save_value(solution.primal_vars[v.id])
for c in self.constraints:
if c.id in solution.dual_vars:
c.save_dual_value(solution.dual_vars[c.id])
elif solution.status in s.INF_OR_UNB:
for v in self.variables():
v.save_value(None)
for constr in self.constraints:
for dv in constr.dual_variables:
dv.save_value(None)
else:
raise ValueError("Cannot unpack invalid solution: %s" % solution)
self._value = solution.opt_val
self._status = solution.status
self._solution = solution
def unpack_results(self, solution, chain, inverse_data):
"""Updates the problem state given the solver results.
Updates problem.status, problem.value and value of
primal and dual variables.
Arguments
---------
solution : object
The solution returned by applying the chain to the problem
and invoking the solver on the resulting data.
chain : SolvingChain
A solving chain that was used to solve the problem.
inverse_data : list
The inverse data returned by applying the chain to the problem.
Raises
------
cvxpy.error.SolverError
If the solver failed
"""
solution = chain.invert(solution, inverse_data)
if solution.status in s.ERROR:
raise error.SolverError(
"Solver '%s' failed. " % chain.solver.name() +
"Try another solver, or solve with verbose=True for more "
"information.")
self.unpack(solution)
self._solver_stats = SolverStats(self._solution.attr,
chain.solver.name())
def __str__(self):
if len(self.constraints) == 0:
return str(self.objective)
else:
subject_to = "subject to "
lines = [str(self.objective),
subject_to + str(self.constraints[0])]
for constr in self.constraints[1:]:
lines += [len(subject_to) * " " + str(constr)]
return '\n'.join(lines)
def __repr__(self):
return "Problem(%s, %s)" % (repr(self.objective),
repr(self.constraints))
def __neg__(self):
return Problem(-self.objective, self.constraints)
def __add__(self, other):
if other == 0:
return self
elif not isinstance(other, Problem):
return NotImplemented
return Problem(self.objective + other.objective,
unique_list(self.constraints + other.constraints))
def __radd__(self, other):
if other == 0:
return self
else:
return NotImplemented
def __sub__(self, other):
if not isinstance(other, Problem):
return NotImplemented
return Problem(self.objective - other.objective,
unique_list(self.constraints + other.constraints))
def __rsub__(self, other):
if other == 0:
return -self
else:
return NotImplemented
def __mul__(self, other):
if not isinstance(other, (int, float)):
return NotImplemented
return Problem(self.objective * other, self.constraints)
__rmul__ = __mul__
def __div__(self, other):
if not isinstance(other, (int, float)):
return NotImplemented
return Problem(self.objective * (1.0 / other), self.constraints)
def is_constant(self):
return False
__truediv__ = __div__
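# Editorial usage sketch (not part of the original module): illustrates the
# Problem.register_solve API documented above. The method name "my_method" and
# the delegation to Problem._solve are illustrative assumptions, not CVXPY code.
def _example_register_solve():
    def my_solve(problem, *args, **kwargs):
        # A registered solve method always receives the problem instance first;
        # here it simply delegates to the standard solve path.
        return problem._solve(*args, **kwargs)
    Problem.register_solve("my_method", my_solve)
    # After registration, problem.solve(method="my_method") dispatches to my_solve.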
class SolverStats(object):
"""Reports some of the miscellaneous information that is returned
by the solver after solving but that is not captured directly by
the Problem instance.
Attributes
----------
solve_time : double
The time (in seconds) it took for the solver to solve the problem.
setup_time : double
The time (in seconds) it took for the solver to set up the problem.
num_iters : int
The number of iterations the solver had to go through to find a solution.
"""
def __init__(self, results_dict, solver_name):
self.solver_name = solver_name
self.solve_time = None
self.setup_time = None
self.num_iters = None
if s.SOLVE_TIME in results_dict:
self.solve_time = results_dict[s.SOLVE_TIME]
if s.SETUP_TIME in results_dict:
self.setup_time = results_dict[s.SETUP_TIME]
if s.NUM_ITERS in results_dict:
self.num_iters = results_dict[s.NUM_ITERS]
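# Editorial usage sketch (not part of the original module): shows how the
# SolverStats fields documented above are typically read after Problem.solve();
# the report string is an illustrative assumption.
def _example_read_solver_stats(problem):
    stats = problem.solver_stats
    if stats is None or stats.solve_time is None:
        return "no solver statistics available"
    return "%s solved the problem in %s seconds (%s iterations)" % (
        stats.solver_name, stats.solve_time, stats.num_iters)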
class SizeMetrics(object):
"""Reports various metrics regarding the problem.
Attributes
----------
num_scalar_variables : integer
The number of scalar variables in the problem.
num_scalar_data : integer
The number of scalar constants and parameters in the problem, counting
every entry of each constant matrix and vector used in the problem.
Some constants are not apparent when the problem is constructed: for example,
the sum_squares expression is a wrapper around a quad_over_lin expression with a
constant 1 in the denominator.
num_scalar_eq_constr : integer
The number of scalar equality constraints in the problem.
num_scalar_leq_constr : integer
The number of scalar inequality constraints in the problem.
max_data_dimension : integer
The longest dimension of any data block (constant or parameter).
max_big_small_squared : integer
The maximum value of (big)(small)^2 over all data blocks of the problem, where
(big) is the larger dimension and (small) is the smaller dimension
for each data block.
"""
def __init__(self, problem):
# num_scalar_variables
self.num_scalar_variables = 0
for var in problem.variables():
self.num_scalar_variables += var.size
# num_scalar_data, max_data_dimension, and max_big_small_squared
self.max_data_dimension = 0
self.num_scalar_data = 0
self.max_big_small_squared = 0
for const in problem.constants()+problem.parameters():
big = 0
# Compute number of data
self.num_scalar_data += const.size
big = 1 if len(const.shape) == 0 else max(const.shape)
small = 1 if len(const.shape) == 0 else min(const.shape)
# Get max data dimension:
if self.max_data_dimension < big:
self.max_data_dimension = big
max_big_small_squared = float(big)*(float(small)**2)
if self.max_big_small_squared < max_big_small_squared:
self.max_big_small_squared = max_big_small_squared
# num_scalar_eq_constr
self.num_scalar_eq_constr = 0
for constraint in problem.constraints:
if isinstance(constraint, (Equality, Zero)):
self.num_scalar_eq_constr += constraint.expr.size
# num_scalar_leq_constr
self.num_scalar_leq_constr = 0
for constraint in problem.constraints:
if isinstance(constraint, (Inequality, NonPos, NonNeg)):
self.num_scalar_leq_constr += constraint.expr.size
|
import torch.nn as nn
import torch
import torch.nn.functional as F
from models.pointnet_util import PointNetSetAbstractionMsg, PointNetSetAbstraction, PointNetFeaturePropagation
class get_model(nn.Module):
def __init__(self, num_classes, normal_channel=False):
super(get_model, self).__init__()
if normal_channel:
additional_channel = 3
else:
additional_channel = 0
self.normal_channel = normal_channel
self.sa1 = PointNetSetAbstractionMsg(512, [0.1, 0.2, 0.4], [32, 64, 128], 3+additional_channel, [[32, 32, 64], [64, 64, 128], [64, 96, 128]])
self.sa2 = PointNetSetAbstractionMsg(128, [0.4,0.8], [64, 128], 128+128+64, [[128, 128, 256], [128, 196, 256]])
self.sa3 = PointNetSetAbstraction(npoint=None, radius=None, nsample=None, in_channel=512 + 3, mlp=[256, 512, 1024], group_all=True)
self.fp3 = PointNetFeaturePropagation(in_channel=1536, mlp=[256, 256])
self.fp2 = PointNetFeaturePropagation(in_channel=576, mlp=[256, 128])
self.fp1 = PointNetFeaturePropagation(in_channel=150+additional_channel, mlp=[128, 128])
self.conv1 = nn.Conv1d(128, 128, 1)
self.bn1 = nn.BatchNorm1d(128)
self.drop1 = nn.Dropout(0.5)
self.conv2 = nn.Conv1d(128, num_classes, 1)
def forward(self, xyz, cls_label):
# Set Abstraction layers
B,C,N = xyz.shape
if self.normal_channel:
l0_points = xyz
l0_xyz = xyz[:,:3,:]
else:
l0_points = xyz
l0_xyz = xyz
l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
# Feature Propagation layers
l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
cls_label_one_hot = cls_label.view(B,16,1).repeat(1,1,N)
# print(cls_label_one_hot)
l0_points = self.fp1(l0_xyz, l1_xyz, torch.cat([cls_label_one_hot,l0_xyz,l0_points],1), l1_points)
# FC layers
feat = F.relu(self.bn1(self.conv1(l0_points)))
x = self.drop1(feat)
x = self.conv2(x)
x = F.log_softmax(x, dim=1)
x = x.permute(0, 2, 1)
return x, l3_points
class get_loss(nn.Module):
def __init__(self):
super(get_loss, self).__init__()
def forward(self, pred, target, trans_feat):
total_loss = F.nll_loss(pred, target)
return total_loss
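# Editorial usage sketch (not part of the original file): runs a forward pass of
# get_model on random data. Shapes follow forward(): xyz is (B, C, N) with C = 3
# (or 6 when normal_channel=True) and cls_label is a (B, 16) one-hot object label.
# num_classes=50 follows the ShapeNet part-segmentation convention and is an
# assumption here, not something fixed by this file.
def _example_forward_pass(num_classes=50, batch_size=2, num_points=1024):
    model = get_model(num_classes, normal_channel=False)
    xyz = torch.randn(batch_size, 3, num_points)
    cls_label = torch.zeros(batch_size, 16)
    cls_label[:, 0] = 1.0  # pretend every sample belongs to object category 0
    seg_log_probs, global_feat = model(xyz, cls_label)
    return seg_log_probs.shape  # expected: (batch_size, num_points, num_classes)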
|
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
"""Main training loop."""
import os
import pickle
import time
import PIL.Image
import numpy as np
import tensorflow as tf
import dnnlib
import dnnlib.tflib as tflib
from dnnlib.tflib.autosummary import autosummary
from training import dataset
#----------------------------------------------------------------------------
# Select size and contents of the image snapshot grids that are exported
# periodically during training.
def setup_snapshot_image_grid(training_set):
gw = np.clip(7680 // training_set.shape[2], 7, 32)
gh = np.clip(4320 // training_set.shape[1], 4, 32)
# Unconditional.
if training_set.label_size == 0:
reals, labels = training_set.get_minibatch_np(gw * gh)
return (gw, gh), reals, labels
# Row per class.
cw, ch = (gw, 1)
nw = (gw - 1) // cw + 1
nh = (gh - 1) // ch + 1
# Collect images.
blocks = [[] for _i in range(nw * nh)]
for _iter in range(1000000):
real, label = training_set.get_minibatch_np(1)
idx = np.argmax(label[0])
while idx < len(blocks) and len(blocks[idx]) >= cw * ch:
idx += training_set.label_size
if idx < len(blocks):
blocks[idx].append((real, label))
if all(len(block) >= cw * ch for block in blocks):
break
# Layout grid.
reals = np.zeros([gw * gh] + training_set.shape, dtype=training_set.dtype)
labels = np.zeros([gw * gh, training_set.label_size], dtype=training_set.label_dtype)
for i, block in enumerate(blocks):
for j, (real, label) in enumerate(block):
x = (i % nw) * cw + j % cw
y = (i // nw) * ch + j // cw
if x < gw and y < gh:
reals[x + y * gw] = real[0]
labels[x + y * gw] = label[0]
return (gw, gh), reals, labels
#----------------------------------------------------------------------------
def save_image_grid(images, filename, drange, grid_size):
lo, hi = drange
gw, gh = grid_size
images = np.asarray(images, dtype=np.float32)
images = (images - lo) * (255 / (hi - lo))
images = np.rint(images).clip(0, 255).astype(np.uint8)
_N, C, H, W = images.shape
images = images.reshape(gh, gw, C, H, W)
images = images.transpose(0, 3, 1, 4, 2)
images = images.reshape(gh * H, gw * W, C)
PIL.Image.fromarray(images, {3: 'RGB', 1: 'L'}[C]).save(filename)
#----------------------------------------------------------------------------
# Main training script.
def training_loop(
run_dir = '.', # Output directory.
G_args = {}, # Options for generator network.
D_args = {}, # Options for discriminator network.
G_opt_args = {}, # Options for generator optimizer.
D_opt_args = {}, # Options for discriminator optimizer.
loss_args = {}, # Options for loss function.
train_dataset_args = {}, # Options for dataset to train with.
metric_dataset_args = {}, # Options for dataset to evaluate metrics against.
augment_args = {}, # Options for adaptive augmentations.
metric_arg_list = [], # Metrics to evaluate during training.
num_gpus = 1, # Number of GPUs to use.
minibatch_size = 32, # Global minibatch size.
minibatch_gpu = 4, # Number of samples processed at a time by one GPU.
G_smoothing_kimg = 10, # Half-life of the exponential moving average (EMA) of generator weights.
G_smoothing_rampup = None, # EMA ramp-up coefficient.
minibatch_repeats = 4, # Number of minibatches to run in the inner loop.
lazy_regularization = True, # Perform regularization as a separate training step?
G_reg_interval = 4, # How often to perform regularization for G? Ignored if lazy_regularization=False.
D_reg_interval = 16, # How often to perform regularization for D? Ignored if lazy_regularization=False.
total_kimg = 25000, # Total length of the training, measured in thousands of real images.
kimg_per_tick = 4, # Progress snapshot interval.
image_snapshot_ticks = 50, # How often to save image snapshots? None = only save 'reals.png' and 'fakes-init.png'.
network_snapshot_ticks = 50, # How often to save network snapshots? None = only save 'networks-final.pkl'.
resume_pkl = None, # Network pickle to resume training from.
abort_fn = None, # Callback function for determining whether to abort training.
progress_fn = None, # Callback function for updating training progress.
):
assert minibatch_size % (num_gpus * minibatch_gpu) == 0
start_time = time.time()
print('Loading training set...')
training_set = dataset.load_dataset(**train_dataset_args)
print('Image shape:', np.int32(training_set.shape).tolist())
print('Label shape:', [training_set.label_size])
print()
print('Constructing networks...')
with tf.device('/gpu:0'):
G = tflib.Network('G', num_channels=training_set.shape[0], resolution=training_set.shape[1], label_size=training_set.label_size, **G_args)
D = tflib.Network('D', num_channels=training_set.shape[0], resolution=training_set.shape[1], label_size=training_set.label_size, **D_args)
Gs = G.clone('Gs')
if resume_pkl is not None:
print(f'Resuming from "{resume_pkl}"')
with dnnlib.util.open_url(resume_pkl) as f:
rG, rD, rGs = pickle.load(f)
G.copy_vars_from(rG)
D.copy_vars_from(rD)
Gs.copy_vars_from(rGs)
G.print_layers()
D.print_layers()
print('Exporting sample images...')
grid_size, grid_reals, grid_labels = setup_snapshot_image_grid(training_set)
save_image_grid(grid_reals, os.path.join(run_dir, 'reals.png'), drange=[0,255], grid_size=grid_size)
grid_latents = np.random.randn(np.prod(grid_size), *G.input_shape[1:])
grid_fakes = Gs.run(grid_latents, grid_labels, is_validation=True, minibatch_size=minibatch_gpu)
save_image_grid(grid_fakes, os.path.join(run_dir, 'fakes_init.png'), drange=[-1,1], grid_size=grid_size)
print(f'Replicating networks across {num_gpus} GPUs...')
G_gpus = [G]
D_gpus = [D]
for gpu in range(1, num_gpus):
with tf.device(f'/gpu:{gpu}'):
G_gpus.append(G.clone(f'{G.name}_gpu{gpu}'))
D_gpus.append(D.clone(f'{D.name}_gpu{gpu}'))
print('Initializing augmentations...')
aug = None
if augment_args.get('class_name', None) is not None:
aug = dnnlib.util.construct_class_by_name(**augment_args)
aug.init_validation_set(D_gpus=D_gpus, training_set=training_set)
print('Setting up optimizers...')
G_opt_args = dict(G_opt_args)
D_opt_args = dict(D_opt_args)
for args, reg_interval in [(G_opt_args, G_reg_interval), (D_opt_args, D_reg_interval)]:
args['minibatch_multiplier'] = minibatch_size // num_gpus // minibatch_gpu
if lazy_regularization:
mb_ratio = reg_interval / (reg_interval + 1)
args['learning_rate'] *= mb_ratio
if 'beta1' in args: args['beta1'] **= mb_ratio
if 'beta2' in args: args['beta2'] **= mb_ratio
G_opt = tflib.Optimizer(name='TrainG', **G_opt_args)
D_opt = tflib.Optimizer(name='TrainD', **D_opt_args)
G_reg_opt = tflib.Optimizer(name='RegG', share=G_opt, **G_opt_args)
D_reg_opt = tflib.Optimizer(name='RegD', share=D_opt, **D_opt_args)
print('Constructing training graph...')
data_fetch_ops = []
training_set.configure(minibatch_gpu)
for gpu, (G_gpu, D_gpu) in enumerate(zip(G_gpus, D_gpus)):
with tf.name_scope(f'Train_gpu{gpu}'), tf.device(f'/gpu:{gpu}'):
# Fetch training data via temporary variables.
with tf.name_scope('DataFetch'):
real_images_var = tf.Variable(name='images', trainable=False, initial_value=tf.zeros([minibatch_gpu] + training_set.shape))
real_labels_var = tf.Variable(name='labels', trainable=False, initial_value=tf.zeros([minibatch_gpu, training_set.label_size]))
real_images_write, real_labels_write = training_set.get_minibatch_tf()
real_images_write = tflib.convert_images_from_uint8(real_images_write)
data_fetch_ops += [tf.assign(real_images_var, real_images_write)]
data_fetch_ops += [tf.assign(real_labels_var, real_labels_write)]
# Evaluate loss function and register gradients.
fake_labels = training_set.get_random_labels_tf(minibatch_gpu)
terms = dnnlib.util.call_func_by_name(G=G_gpu, D=D_gpu, aug=aug, fake_labels=fake_labels, real_images=real_images_var, real_labels=real_labels_var, **loss_args)
if lazy_regularization:
if terms.G_reg is not None: G_reg_opt.register_gradients(tf.reduce_mean(terms.G_reg * G_reg_interval), G_gpu.trainables)
if terms.D_reg is not None: D_reg_opt.register_gradients(tf.reduce_mean(terms.D_reg * D_reg_interval), D_gpu.trainables)
else:
if terms.G_reg is not None: terms.G_loss += terms.G_reg
if terms.D_reg is not None: terms.D_loss += terms.D_reg
G_opt.register_gradients(tf.reduce_mean(terms.G_loss), G_gpu.trainables)
D_opt.register_gradients(tf.reduce_mean(terms.D_loss), D_gpu.trainables)
print('Finalizing training ops...')
data_fetch_op = tf.group(*data_fetch_ops)
G_train_op = G_opt.apply_updates()
D_train_op = D_opt.apply_updates()
G_reg_op = G_reg_opt.apply_updates(allow_no_op=True)
D_reg_op = D_reg_opt.apply_updates(allow_no_op=True)
Gs_beta_in = tf.placeholder(tf.float32, name='Gs_beta_in', shape=[])
Gs_update_op = Gs.setup_as_moving_average_of(G, beta=Gs_beta_in)
tflib.init_uninitialized_vars()
with tf.device('/gpu:0'):
peak_gpu_mem_op = tf.contrib.memory_stats.MaxBytesInUse()
print('Initializing metrics...')
summary_log = tf.summary.FileWriter(run_dir)
metrics = []
for args in metric_arg_list:
metric = dnnlib.util.construct_class_by_name(**args)
metric.configure(dataset_args=metric_dataset_args, run_dir=run_dir)
metrics.append(metric)
print(f'Training for {total_kimg} kimg...')
print()
if progress_fn is not None:
progress_fn(0, total_kimg)
tick_start_time = time.time()
maintenance_time = tick_start_time - start_time
cur_nimg = 0
cur_tick = -1
tick_start_nimg = cur_nimg
running_mb_counter = 0
done = False
while not done:
# Compute EMA decay parameter.
Gs_nimg = G_smoothing_kimg * 1000.0
if G_smoothing_rampup is not None:
Gs_nimg = min(Gs_nimg, cur_nimg * G_smoothing_rampup)
Gs_beta = 0.5 ** (minibatch_size / max(Gs_nimg, 1e-8))
# Run training ops.
for _repeat_idx in range(minibatch_repeats):
rounds = range(0, minibatch_size, minibatch_gpu * num_gpus)
run_G_reg = (lazy_regularization and running_mb_counter % G_reg_interval == 0)
run_D_reg = (lazy_regularization and running_mb_counter % D_reg_interval == 0)
cur_nimg += minibatch_size
running_mb_counter += 1
# Fast path without gradient accumulation.
if len(rounds) == 1:
tflib.run([G_train_op, data_fetch_op])
if run_G_reg:
tflib.run(G_reg_op)
tflib.run([D_train_op, Gs_update_op], {Gs_beta_in: Gs_beta})
if run_D_reg:
tflib.run(D_reg_op)
# Slow path with gradient accumulation.
else:
for _round in rounds:
tflib.run(G_train_op)
if run_G_reg:
tflib.run(G_reg_op)
tflib.run(Gs_update_op, {Gs_beta_in: Gs_beta})
for _round in rounds:
tflib.run(data_fetch_op)
tflib.run(D_train_op)
if run_D_reg:
tflib.run(D_reg_op)
# Run validation.
if aug is not None:
aug.run_validation(minibatch_size=minibatch_size)
# Tune augmentation parameters.
if aug is not None:
aug.tune(minibatch_size * minibatch_repeats)
# Perform maintenance tasks once per tick.
done = (cur_nimg >= total_kimg * 1000) or (abort_fn is not None and abort_fn())
if done or cur_tick < 0 or cur_nimg >= tick_start_nimg + kimg_per_tick * 1000:
cur_tick += 1
tick_kimg = (cur_nimg - tick_start_nimg) / 1000.0
tick_start_nimg = cur_nimg
tick_end_time = time.time()
total_time = tick_end_time - start_time
tick_time = tick_end_time - tick_start_time
# Report progress.
print(' '.join([
f"tick {autosummary('Progress/tick', cur_tick):<5d}",
f"kimg {autosummary('Progress/kimg', cur_nimg / 1000.0):<8.1f}",
f"time {dnnlib.util.format_time(autosummary('Timing/total_sec', total_time)):<12s}",
f"sec/tick {autosummary('Timing/sec_per_tick', tick_time):<7.1f}",
f"sec/kimg {autosummary('Timing/sec_per_kimg', tick_time / tick_kimg):<7.2f}",
f"maintenance {autosummary('Timing/maintenance_sec', maintenance_time):<6.1f}",
f"gpumem {autosummary('Resources/peak_gpu_mem_gb', peak_gpu_mem_op.eval() / 2**30):<5.1f}",
f"augment {autosummary('Progress/augment', aug.strength if aug is not None else 0):.3f}",
]))
autosummary('Timing/total_hours', total_time / (60.0 * 60.0))
autosummary('Timing/total_days', total_time / (24.0 * 60.0 * 60.0))
if progress_fn is not None:
progress_fn(cur_nimg // 1000, total_kimg)
# Save snapshots.
if image_snapshot_ticks is not None and (done or cur_tick % image_snapshot_ticks == 0):
grid_fakes = Gs.run(grid_latents, grid_labels, is_validation=True, minibatch_size=minibatch_gpu)
save_image_grid(grid_fakes, os.path.join(run_dir, f'fakes{cur_nimg // 1000:06d}.png'), drange=[-1,1], grid_size=grid_size)
if network_snapshot_ticks is not None and (done or cur_tick % network_snapshot_ticks == 0):
pkl = os.path.join(run_dir, f'network-snapshot-{cur_nimg // 1000:06d}.pkl')
with open(pkl, 'wb') as f:
pickle.dump((G, D, Gs), f)
if len(metrics):
print('Evaluating metrics...')
for metric in metrics:
metric.run(pkl, num_gpus=num_gpus)
# Update summaries.
for metric in metrics:
metric.update_autosummaries()
tflib.autosummary.save_summaries(summary_log, cur_nimg)
tick_start_time = time.time()
maintenance_time = tick_start_time - tick_end_time
print()
print('Exiting...')
summary_log.close()
training_set.close()
#----------------------------------------------------------------------------
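# Editorial sketch (not part of the original file): reproduces the Gs_beta
# computation from the training loop above so the EMA half-life logic can be
# inspected in isolation; the default argument values mirror training_loop()'s
# own defaults.
def _example_ema_beta(minibatch_size=32, G_smoothing_kimg=10, cur_nimg=0, G_smoothing_rampup=None):
    Gs_nimg = G_smoothing_kimg * 1000.0
    if G_smoothing_rampup is not None:
        Gs_nimg = min(Gs_nimg, cur_nimg * G_smoothing_rampup)
    # With the defaults this gives 0.5 ** (32 / 10000), i.e. a decay very close to 1.
    return 0.5 ** (minibatch_size / max(Gs_nimg, 1e-8))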
|
from __future__ import print_function
import argparse
import json
import datetime
import pyjq
import yaml
import sys
from netaddr import IPNetwork
from shared.nodes import Account, Region
from shared.query import query_aws, get_parameter_file
class Severity:
# For logging
DEBUG = 0
INFO = 1
WARN = 2
ERROR = 3
@classmethod
def str_to_int(cls, level):
if level == "DEBUG":
return cls.DEBUG
elif level == "INFO":
return cls.INFO
elif level == "WARN":
return cls.WARN
elif level == "ERROR":
return cls.ERROR
else:
raise Exception("Unknown log level {}".format(level))
@staticmethod
def string(severity_level):
if severity_level == Severity.DEBUG:
return "DEBUG"
elif severity_level == Severity.INFO:
return "INFO"
elif severity_level == Severity.WARN:
return "WARN"
elif severity_level == Severity.ERROR:
return "ERROR"
else:
raise Exception("Unknown severity level")
LOG_LEVEL = Severity.INFO
def log_debug(msg, location=None, reasons=[]):
log_issue(Severity.DEBUG, msg, location, reasons)
def log_info(msg, location=None, reasons=[]):
log_issue(Severity.INFO, msg, location, reasons)
def log_warning(msg, location=None, reasons=[]):
log_issue(Severity.WARN, msg, location, reasons)
def log_error(msg, location=None, reasons=[]):
log_issue(Severity.ERROR, msg, location, reasons)
def log_issue(severity, msg, location=None, reasons=[]):
if severity >= LOG_LEVEL:
json_issue = {
"Severity": Severity.string(severity),
"Issue": msg,
"Location": location,
"Reasons": reasons,
}
print(json.dumps(json_issue, sort_keys=True), file=sys.stderr)
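# Editorial usage sketch (not part of the original file): the helpers above emit
# one JSON object per issue on stderr, with keys sorted alphabetically. The
# location dict and reason below are illustrative only.
def _example_log_warning():
    # Prints something like:
    # {"Issue": "Example issue", "Location": {"region": "us-east-1"}, "Reasons": ["demo"], "Severity": "WARN"}
    log_warning("Example issue", location={"region": "us-east-1"}, reasons=["demo"])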
class Finding(object):
"""Used for auditing"""
region = None
issue_id = None
resource_id = None
resource_details = None
def __init__(self, region, issue_id, resource_id, resource_details=None):
self.region = region
self.issue_id = issue_id
self.resource_id = resource_id
self.resource_details = resource_details
def __str__(self):
return json.dumps(
{
"account_id": self.region.account.local_id,
"account_name": self.region.account.name,
"region": self.region.name,
"issue": self.issue_id,
"resource": self.resource_id,
"details": self.resource_details,
}
)
@property
def account_name(self):
return self.region.account.name
def custom_serializer(x):
if isinstance(x, datetime.datetime):
return x.isoformat()
elif isinstance(x, bytes):
return x.decode()
raise TypeError("Unknown type")
def make_list(v):
if not isinstance(v, list):
return [v]
return v
def is_external_cidr(cidr):
ipnetwork = IPNetwork(cidr)
if (
ipnetwork in IPNetwork("10.0.0.0/8")
or ipnetwork in IPNetwork("172.16.0.0/12")
or ipnetwork in IPNetwork("192.168.0.0/16")
):
return False
return True
def is_unblockable_cidr(cidr):
ipnetwork = IPNetwork(cidr)
if (
ipnetwork in IPNetwork("169.254.0.0/16")
or ipnetwork in IPNetwork("127.0.0.0/8") # link local
or ipnetwork in IPNetwork("192.0.2.0/24") # loopback
or ipnetwork in IPNetwork("198.51.100.0/24") # Test network from RFC 5737
or ipnetwork in IPNetwork("203.0.113.0/24") # Test network
or ipnetwork in IPNetwork("224.0.0.0/4") # Test network
or ipnetwork in IPNetwork("240.0.0.0/5") # class D multicast
or ipnetwork in IPNetwork("248.0.0.0/5") # class E reserved
or ipnetwork in IPNetwork("255.255.255.255/32") # reserved # broadcast
):
return True
return False
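# Editorial usage sketch (not part of the original file): a few sample CIDRs run
# through the helpers above; the addresses are illustrative only.
def _example_cidr_checks():
    assert is_external_cidr("8.8.8.0/24")             # public range
    assert not is_external_cidr("10.1.2.0/24")        # RFC 1918 private range
    assert is_unblockable_cidr("169.254.10.0/24")     # inside the link-local block
    assert not is_unblockable_cidr("203.0.114.0/24")  # ordinary public range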
def get_regions(account, outputfilter={}):
# aws ec2 describe-regions
region_data = query_aws(account, "describe-regions")
region_filter = ""
if "regions" in outputfilter:
region_filter = "| select(.RegionName | contains({}))".format(
outputfilter["regions"]
)
regions = pyjq.all(".Regions[]{}".format(region_filter), region_data)
return regions
def get_account(account_name, config=None, config_filename="config.json.demo"):
if config is None:
config = json.load(open(config_filename))
for account in config["accounts"]:
if account["name"] == account_name:
return account
if account_name is None and account.get("default", False):
return account
# Else could not find account
if account_name is None:
exit(
"ERROR: Must specify an account, or set one in {} as a default".format(
config_filename
)
)
exit(
'ERROR: Account named "{}" not found in {}'.format(
account_name, config_filename
)
)
def parse_arguments(arguments, parser=None):
"""Returns (args, accounts, config)"""
if parser is None:
parser = argparse.ArgumentParser()
parser.add_argument(
"--config", help="Config file name", default="config.json", type=str
)
parser.add_argument(
"--accounts", help="Accounts to collect from", required=True, type=str
)
parser.add_argument(
"--log_level",
help="Log level to record (DEBUG, INFO, WARN, ERROR)",
default="INFO",
required=False,
type=str,
)
args = parser.parse_args(arguments)
global LOG_LEVEL
LOG_LEVEL = Severity.str_to_int(args.log_level)
# Read accounts file
try:
config = json.load(open(args.config))
except IOError:
exit('ERROR: Unable to load config file "{}"'.format(args.config))
except ValueError as e:
exit(
'ERROR: Config file "{}" could not be loaded ({}), see config.json.demo for an example'.format(
args.config, e
)
)
# Get accounts
account_names = args.accounts.split(",")
accounts = []
# TODO Need to be able to tag accounts into sets (ex. Prod, or by business unit) so the tag can be referenced
# as opposed to the individual account names.
for account_name in account_names:
if account_name == "all":
for account in config["accounts"]:
accounts.append(account)
break
accounts.append(get_account(account_name, config, args.config))
return (args, accounts, config)
def get_account_stats(account, all_resources=False):
"""Returns stats for an account"""
with open("stats_config.yaml", "r") as f:
resources = yaml.safe_load(f)
account = Account(None, account)
log_debug(
"Collecting stats in account {} ({})".format(account.name, account.local_id)
)
stats = {}
stats["keys"] = []
for resource in resources:
# If the resource is marked as verbose, and we're not showing all resources, skip it.
if resource.get("verbose", False) and not all_resources:
continue
stats["keys"].append(resource["name"])
stats[resource["name"]] = {}
for region_json in get_regions(account):
region = Region(account, region_json)
for resource in resources:
if resource.get("verbose", False) and not all_resources:
continue
# Skip global services (just CloudFront)
if ("region" in resource) and (resource["region"] != region.name):
continue
# S3 buckets require special code to identify their location
if resource["name"] == "S3 buckets":
if region.name == "us-east-1":
buckets = pyjq.all(
".Buckets[].Name",
query_aws(region.account, "s3-list-buckets", region),
)
for bucket in buckets:
# Get the bucket's location
bucket_region = get_parameter_file(
region, "s3", "get-bucket-location", bucket
)["LocationConstraint"]
# Convert the value to a name.
# See https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
if bucket_region is None:
bucket_region = "us-east-1"
elif bucket_region == "EU":
bucket_region = "eu-west-1"
# Increment the count
tmp = stats[resource["name"]].get(bucket_region, 0)
stats[resource["name"]][bucket_region] = tmp + 1
else:
if region.name != 'ap-east-1':
# Normal path
stats[resource["name"]][region.name] = sum(
pyjq.all(
resource["query"],
query_aws(region.account, resource["source"], region),
)
)
return stats
def get_us_east_1(account):
for region_json in get_regions(account):
region = Region(account, region_json)
if region.name == "us-east-1":
return region
raise Exception("us-east-1 not found")
def iso_date(d):
""" Convert ISO format date string such as 2018-04-08T23:33:20+00:00"""
time_format = "%Y-%m-%dT%H:%M:%S"
return datetime.datetime.strptime(d.split("+")[0], time_format)
def days_between(s1, s2):
"""s1 and s2 are date strings"""
d1 = iso_date(s1)
d2 = iso_date(s2)
return abs((d1 - d2).days)
def get_collection_date(account):
if type(account) is not Account:
account = Account(None, account)
account_struct = account
json_blob = query_aws(
account_struct, "iam-get-credential-report", get_us_east_1(account_struct)
)
if not json_blob:
raise Exception(
"File iam-get-credential-report.json does not exist or is not well-formed. Likely cause is you did not run the collect command for this account."
)
# GeneratedTime looks like "2019-01-30T15:43:24+00:00"
return json_blob["GeneratedTime"]
def get_access_advisor_active_counts(account, max_age=90):
region = get_us_east_1(account)
json_account_auth_details = query_aws(
region.account, "iam-get-account-authorization-details", region
)
account_stats = {
"users": {"active": 0, "inactive": 0},
"roles": {"active": 0, "inactive": 0},
}
for principal_auth in [
*json_account_auth_details["UserDetailList"],
*json_account_auth_details["RoleDetailList"],
]:
stats = {}
stats["auth"] = principal_auth
principal_type = "roles"
if "UserName" in principal_auth:
principal_type = "users"
job_id = get_parameter_file(
region,
"iam",
"generate-service-last-accessed-details",
principal_auth["Arn"],
)["JobId"]
json_last_access_details = get_parameter_file(
region, "iam", "get-service-last-accessed-details", job_id
)
stats["last_access"] = json_last_access_details
stats["is_inactive"] = True
job_completion_date = datetime.datetime.strptime(
json_last_access_details["JobCompletionDate"][0:10], "%Y-%m-%d"
)
for service in json_last_access_details["ServicesLastAccessed"]:
if "LastAuthenticated" in service:
last_access_date = datetime.datetime.strptime(
service["LastAuthenticated"][0:10], "%Y-%m-%d"
)
if (job_completion_date - last_access_date).days < max_age:
stats["is_inactive"] = False
break
if stats["is_inactive"]:
account_stats[principal_type]["inactive"] += 1
else:
account_stats[principal_type]["active"] += 1
return account_stats
def get_current_policy_doc(policy):
for doc in policy["PolicyVersionList"]:
if doc["IsDefaultVersion"]:
return doc["Document"]
raise Exception("No default document version in policy {}".format(policy["Arn"]))
|
# searchAgents.py
# ---------------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
"""
This file contains all of the agents that can be selected to
control Pacman. To select an agent, use the '-p' option
when running pacman.py. Arguments can be passed to your agent
using '-a'. For example, to load a SearchAgent that uses
depth first search (dfs), run the following command:
> python pacman.py -p SearchAgent -a searchFunction=depthFirstSearch
Commands to invoke other search strategies can be found in the
project description.
Please only change the parts of the file you are asked to.
Look for the lines that say
"*** YOUR CODE HERE ***"
The parts you fill in start about 3/4 of the way down. Follow the
project description for details.
Good luck and happy searching!
"""
from game import Directions
from game import Agent
from game import Actions
import util
import time
import search
import searchAgents
class GoWestAgent(Agent):
"An agent that goes West until it can't."
def getAction(self, state):
"The agent receives a GameState (defined in pacman.py)."
if Directions.WEST in state.getLegalPacmanActions():
return Directions.WEST
else:
return Directions.STOP
#######################################################
# This portion is written for you, but will only work #
# after you fill in parts of search.py #
#######################################################
class SearchAgent(Agent):
"""
This very general search agent finds a path using a supplied search algorithm for a
supplied search problem, then returns actions to follow that path.
As a default, this agent runs DFS on a PositionSearchProblem to find location (1,1)
Options for fn include:
depthFirstSearch or dfs
breadthFirstSearch or bfs
Note: You should NOT change any code in SearchAgent
"""
def __init__(self, fn='depthFirstSearch', prob='PositionSearchProblem', heuristic='nullHeuristic'):
# Warning: some advanced Python magic is employed below to find the right functions and problems
# Get the search function from the name and heuristic
if fn not in dir(search):
raise AttributeError(fn + ' is not a search function in search.py.')
func = getattr(search, fn)
if 'heuristic' not in func.__code__.co_varnames:
print('[SearchAgent] using function ' + fn)
self.searchFunction = func
else:
if heuristic in dir(searchAgents):
heur = getattr(searchAgents, heuristic)
elif heuristic in dir(search):
heur = getattr(search, heuristic)
else:
raise AttributeError(heuristic + ' is not a function in searchAgents.py or search.py.')
print('[SearchAgent] using function %s and heuristic %s' % (fn, heuristic))
# Note: this bit of Python trickery combines the search algorithm and the heuristic
self.searchFunction = lambda x: func(x, heuristic=heur)
# Get the search problem type from the name
if prob not in dir(searchAgents) or not prob.endswith('Problem'):
raise AttributeError(prob + ' is not a search problem type in SearchAgents.py.')
self.searchType = getattr(searchAgents, prob)
print('[SearchAgent] using problem type ' + prob)
def registerInitialState(self, state):
"""
This is the first time that the agent sees the layout of the game board. Here, we
choose a path to the goal. In this phase, the agent should compute the path to the
goal and store it in a local variable. All of the work is done in this method!
state: a GameState object (pacman.py)
"""
if self.searchFunction is None: raise Exception("No search function provided for SearchAgent")
starttime = time.time()
problem = self.searchType(state) # Makes a new search problem
self.actions = self.searchFunction(problem) # Find a path
totalCost = problem.getCostOfActions(self.actions)
print('Path found with total cost of %d in %.1f seconds' % (totalCost, time.time() - starttime))
if '_expanded' in dir(problem): print('Search nodes expanded: %d' % problem._expanded)
def getAction(self, state):
"""
Returns the next action in the path chosen earlier (in registerInitialState). Return
Directions.STOP if there is no further action to take.
state: a GameState object (pacman.py)
"""
if 'actionIndex' not in dir(self): self.actionIndex = 0
i = self.actionIndex
self.actionIndex += 1
if i < len(self.actions):
return self.actions[i]
else:
return Directions.STOP
class PositionSearchProblem(search.SearchProblem):
"""
A search problem defines the state space, start state, goal test,
successor function and cost function. This search problem can be
used to find paths to a particular point on the pacman board.
The state space consists of (x,y) positions in a pacman game.
Note: this search problem is fully specified; you should NOT change it.
"""
def __init__(self, gameState, costFn = lambda x: 1, goal=(1,1), start=None, warn=True):
"""
Stores the start and goal.
gameState: A GameState object (pacman.py)
costFn: A function from a search state (tuple) to a non-negative number
goal: A position in the gameState
"""
self.walls = gameState.getWalls()
self.startState = gameState.getPacmanPosition()
if start is not None: self.startState = start
self.goal = goal
self.costFn = costFn
if warn and (gameState.getNumFood() != 1 or not gameState.hasFood(*goal)):
print('Warning: this does not look like a regular search maze')
# For display purposes
self._visited, self._visitedlist, self._expanded = {}, [], 0
def getStartState(self):
return self.startState
def isGoalState(self, state):
isGoal = state == self.goal
# For display purposes only
if isGoal:
self._visitedlist.append(state)
import __main__
if '_display' in dir(__main__):
if 'drawExpandedCells' in dir(__main__._display): #@UndefinedVariable
__main__._display.drawExpandedCells(self._visitedlist) #@UndefinedVariable
return isGoal
def getSuccessors(self, state):
"""
Returns successor states, the actions they require, and a cost of 1.
As noted in search.py:
For a given state, this should return a list of triples,
(successor, action, stepCost), where 'successor' is a
successor to the current state, 'action' is the action
required to get there, and 'stepCost' is the incremental
cost of expanding to that successor
"""
successors = []
for action in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]:
x,y = state
dx, dy = Actions.directionToVector(action)
nextx, nexty = int(x + dx), int(y + dy)
if not self.walls[nextx][nexty]:
nextState = (nextx, nexty)
cost = self.costFn(nextState)
successors.append( ( nextState, action, cost) )
# Bookkeeping for display purposes
self._expanded += 1
if state not in self._visited:
self._visited[state] = True
self._visitedlist.append(state)
return successors
def getCostOfActions(self, actions):
"""
Returns the cost of a particular sequence of actions. If those actions
include an illegal move, return 999999
"""
if actions is None: return 999999
x,y= self.getStartState()
cost = 0
for action in actions:
# Figure out the next state and see whether it's legal
dx, dy = Actions.directionToVector(action)
x, y = int(x + dx), int(y + dy)
if self.walls[x][y]: return 999999
cost += self.costFn((x,y))
return cost
class StayEastSearchAgent(SearchAgent):
"""
An agent for position search with a cost function that penalizes being in
positions on the West side of the board.
The cost function for stepping into a position (x,y) is 1/2^x.
"""
def __init__(self):
self.searchFunction = search.uniformCostSearch
costFn = lambda pos: .5 ** pos[0]
self.searchType = lambda state: PositionSearchProblem(state, costFn)
class StayWestSearchAgent(SearchAgent):
"""
An agent for position search with a cost function that penalizes being in
positions on the East side of the board.
The cost function for stepping into a position (x,y) is 2^x.
"""
def __init__(self):
self.searchFunction = search.uniformCostSearch
costFn = lambda pos: 2 ** pos[0]
self.searchType = lambda state: PositionSearchProblem(state, costFn)
def manhattanHeuristic(position, problem, info={}):
"The Manhattan distance heuristic for a PositionSearchProblem"
xy1 = position
xy2 = problem.goal
return abs(xy1[0] - xy2[0]) + abs(xy1[1] - xy2[1])
def euclideanHeuristic(position, problem, info={}):
"The Euclidean distance heuristic for a PositionSearchProblem"
xy1 = position
xy2 = problem.goal
return ( (xy1[0] - xy2[0]) ** 2 + (xy1[1] - xy2[1]) ** 2 ) ** 0.5
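# Illustrative sketch only (not part of the assignment skeleton): comparing the two
# heuristics above on plain (x, y) tuples. A tiny stand-in object provides the 'goal'
# attribute so no GameState is needed.
def _demoPositionHeuristics():
    class _FakeProblem:
        goal = (5, 5)
    for pos in [(1, 1), (5, 2), (0, 5)]:
        m = manhattanHeuristic(pos, _FakeProblem)
        e = euclideanHeuristic(pos, _FakeProblem)
        # On a 4-connected grid the Manhattan estimate is never smaller than the
        # Euclidean one, so it is the tighter of the two admissible bounds.
        print('%s manhattan=%d euclidean=%.2f' % (str(pos), m, e))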
#####################################################
# This portion is incomplete. Time to write code! #
#####################################################
class CornersProblem(search.SearchProblem):
"""
This search problem finds paths through all four corners of a layout.
You must select a suitable state space and successor function
"""
def __init__(self, startingGameState):
"""
Stores the walls, pacman's starting position and corners.
"""
self.walls = startingGameState.getWalls()
self.startingPosition = startingGameState.getPacmanPosition()
top, right = self.walls.height-2, self.walls.width-2
self.corners = ((1,1), (1,top), (right, 1), (right, top))
for corner in self.corners:
if not startingGameState.hasFood(*corner):
print('Warning: no food in corner ' + str(corner))
self._expanded = 0 # Number of search nodes expanded
"*** YOUR CODE HERE ***"
self.start = (startingGameState.getPacmanPosition(), self.corners)
def getStartState(self):
"Returns the start state (in your state space, not the full Pacman state space)"
"*** YOUR CODE HERE ***"
return self.start
def isGoalState(self, state):
"Returns whether this search state is a goal state of the problem"
"*** YOUR CODE HERE ***"
return len(state[1]) == 0
def getSuccessors(self, state):
"""
Returns successor states, the actions they require, and a cost of 1.
As noted in search.py:
For a given state, this should return a list of triples,
(successor, action, stepCost), where 'successor' is a
successor to the current state, 'action' is the action
required to get there, and 'stepCost' is the incremental
cost of expanding to that successor
"""
successors = []
for action in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]:
# Add a successor state to the successor list if the action is legal
# Here's a code snippet for figuring out whether a new position hits a wall:
"*** YOUR CODE HERE ***"
x, y = state[0]
dx, dy = Actions.directionToVector(action)
nextx, nexty = int(x + dx), int(y + dy)
hitsWall = self.walls[nextx][nexty]
if not hitsWall:
nextState = (nextx, nexty)
if nextState in state[1]:
temp = list(state[1])
temp.remove(nextState)
successors.append(((nextState,tuple(temp)), action, 1))
else:
successors.append(((nextState,state[1]), action, 1))
self._expanded += 1
return successors
def getCostOfActions(self, actions):
"""
Returns the cost of a particular sequence of actions. If those actions
include an illegal move, return 999999. This is implemented for you.
"""
if actions is None: return 999999
x,y= self.startingPosition
for action in actions:
dx, dy = Actions.directionToVector(action)
x, y = int(x + dx), int(y + dy)
if self.walls[x][y]: return 999999
return len(actions)
def cornersHeuristic(state, problem):
"""
A heuristic for the CornersProblem that you defined.
state: The current search state
(a data structure you chose in your search problem)
problem: The CornersProblem instance for this layout.
This function should always return a number that is a lower bound
on the shortest path from the state to a goal of the problem; i.e.
it should be admissible (as well as consistent).
"""
corners = problem.corners # These are the corner coordinates
walls = problem.walls # These are the walls of the maze, as a Grid (game.py)
"*** YOUR CODE HERE ***"
corners = list(state[1])
distances = []
if len(corners) <= 0:
return 0
else:
xy1 = state[0]
for i in range(len(corners)):
xy2 = corners[i]
distances.append(abs(xy1[0] - xy2[0]) + abs(xy1[1] - xy2[1]))
initialDistance = min(distances)
closestCorner = corners[distances.index(initialDistance)]
corners.remove(closestCorner)
cornersDistance = 0
while len(corners) > 0:
distances = []
xy1 = closestCorner
for i in range(len(corners)):
xy2 = corners[i]
distances.append(abs(xy1[0] - xy2[0]) + abs(xy1[1] - xy2[1]))
additionalDistance = min(distances)
closestCorner = corners[distances.index(additionalDistance)]
corners.remove(closestCorner)
cornersDistance = cornersDistance + additionalDistance
return initialDistance + cornersDistance
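# Illustrative sketch only: the greedy "nearest remaining corner" chain that
# cornersHeuristic computes above, extracted into a hypothetical helper so it can be
# checked by hand on fixed coordinates, independent of any CornersProblem instance.
def _demoCornerChain(position, corners):
    manhattan = lambda a, b: abs(a[0] - b[0]) + abs(a[1] - b[1])
    remaining, total, current = list(corners), 0, position
    while remaining:
        nearest = min(remaining, key=lambda c: manhattan(current, c))
        total += manhattan(current, nearest)
        remaining.remove(nearest)
        current = nearest
    return total
# Example: _demoCornerChain((3, 3), [(1, 1), (1, 6), (6, 1), (6, 6)]) == 4 + 5 + 5 + 5 == 19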
class AStarCornersAgent(SearchAgent):
"A SearchAgent for FoodSearchProblem using A* and your foodHeuristic"
def __init__(self):
self.searchFunction = lambda prob: search.aStarSearch(prob, cornersHeuristic)
self.searchType = CornersProblem
class FoodSearchProblem:
"""
A search problem associated with finding a path that collects all of the
food (dots) in a Pacman game.
A search state in this problem is a tuple ( pacmanPosition, foodGrid ) where
pacmanPosition: a tuple (x,y) of integers specifying Pacman's position
foodGrid: a Grid (see game.py) of either True or False, specifying remaining food
"""
def __init__(self, startingGameState):
self.start = (startingGameState.getPacmanPosition(), startingGameState.getFood())
self.walls = startingGameState.getWalls()
self.startingGameState = startingGameState
self._expanded = 0
self.heuristicInfo = {} # A dictionary for the heuristic to store information
def getStartState(self):
return self.start
def isGoalState(self, state):
return state[1].count() == 0
def getSuccessors(self, state):
"Returns successor states, the actions they require, and a cost of 1."
successors = []
self._expanded += 1
for direction in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]:
x,y = state[0]
dx, dy = Actions.directionToVector(direction)
nextx, nexty = int(x + dx), int(y + dy)
if not self.walls[nextx][nexty]:
nextFood = state[1].copy()
nextFood[nextx][nexty] = False
successors.append( ( ((nextx, nexty), nextFood), direction, 1) )
return successors
def getCostOfActions(self, actions):
"""Returns the cost of a particular sequence of actions. If those actions
include an illegal move, return 999999"""
x,y= self.getStartState()[0]
cost = 0
for action in actions:
# figure out the next state and see whether it's legal
dx, dy = Actions.directionToVector(action)
x, y = int(x + dx), int(y + dy)
if self.walls[x][y]:
return 999999
cost += 1
return cost
class AStarFoodSearchAgent(SearchAgent):
"A SearchAgent for FoodSearchProblem using A* and your foodHeuristic"
def __init__(self):
self.searchFunction = lambda prob: search.aStarSearch(prob, foodHeuristic)
self.searchType = FoodSearchProblem
def foodHeuristic(state, problem):
"""
Your heuristic for the FoodSearchProblem goes here.
This heuristic must be consistent to ensure correctness. First, try to come up
with an admissible heuristic; almost all admissible heuristics will be consistent
as well.
If using A* ever finds a solution that is worse than what uniform cost search finds,
your heuristic is *not* consistent, and probably not admissible! On the other hand,
inadmissible or inconsistent heuristics may find optimal solutions, so be careful.
The state is a tuple ( pacmanPosition, foodGrid ) where foodGrid is a
Grid (see game.py) of either True or False. You can call foodGrid.asList()
to get a list of food coordinates instead.
If you want access to info like walls, capsules, etc., you can query the problem.
For example, problem.walls gives you a Grid of where the walls are.
If you want to *store* information to be reused in other calls to the heuristic,
there is a dictionary called problem.heuristicInfo that you can use. For example,
if you only want to count the walls once and store that value, try:
problem.heuristicInfo['wallCount'] = problem.walls.count()
Subsequent calls to this heuristic can access problem.heuristicInfo['wallCount']
"""
position, foodGrid = state
"*** YOUR CODE HERE ***"
# Use all the food dots plus Pacman's position to form an MST
foodList = foodGrid.asList()
if len(foodList) == 0:
return 0
#manhattan distance between each node
distances = []
nodes = foodList[:]
nodes.insert(0,position)
for x in nodes:
distanceDict = {}
for y in nodes:
distanceDict[y] = abs(x[0]-y[0]) + abs(x[1]-y[1])
distances.append(distanceDict)
#shortest distance from the node to the MST
leastCost = []
#which node is the nearest to that node
closest = []
for node in nodes:
leastCost.append(distances[0][node])
closest.append(position)
for i in range (0,len(nodes)):
tempLeastCost = leastCost[:]
tempLeastCost.sort()
indexLeastCostNode = leastCost.index(tempLeastCost[i:][0])
leastCost[indexLeastCostNode] = 0
for node in nodes:
if distances[indexLeastCostNode][node] < leastCost[nodes.index(node)]:
leastCost[nodes.index(node)] = distances[indexLeastCostNode][node]
closest[nodes.index(node)] = nodes[indexLeastCostNode]
distance = 0
for i in range (0,len(nodes)):
distance += distances[i][closest[i]]
return distance
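# Illustrative sketch only (hypothetical helper, not referenced elsewhere): a compact
# Prim's-algorithm version of the Manhattan-distance spanning-tree weight that the
# heuristic above aims to compute, written for plain coordinate tuples so it can be
# verified in isolation.
def _demoMstWeight(points):
    if len(points) < 2:
        return 0
    manhattan = lambda a, b: abs(a[0] - b[0]) + abs(a[1] - b[1])
    inTree, outside, total = {points[0]}, set(points[1:]), 0
    while outside:
        # Grow the tree by the cheapest edge crossing the cut.
        cost, nxt = min((manhattan(a, b), b) for a in inTree for b in outside)
        total += cost
        inTree.add(nxt)
        outside.remove(nxt)
    return total
# Example: _demoMstWeight([(0, 0), (0, 3), (4, 0)]) == 3 + 4 == 7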
class ClosestDotSearchAgent(SearchAgent):
"Search for all food using a sequence of searches"
def registerInitialState(self, state):
self.actions = []
currentState = state
while(currentState.getFood().count() > 0):
nextPathSegment = self.findPathToClosestDot(currentState) # The missing piece
self.actions += nextPathSegment
for action in nextPathSegment:
legal = currentState.getLegalActions()
if action not in legal:
t = (str(action), str(currentState))
raise Exception('findPathToClosestDot returned an illegal move: %s!\n%s' % t)
currentState = currentState.generateSuccessor(0, action)
self.actionIndex = 0
print('Path found with cost %d.' % len(self.actions))
def findPathToClosestDot(self, gameState):
"Returns a path (a list of actions) to the closest dot, starting from gameState"
# Here are some useful elements of the startState
startPosition = gameState.getPacmanPosition()
food = gameState.getFood()
walls = gameState.getWalls()
problem = AnyFoodSearchProblem(gameState)
"*** YOUR CODE HERE ***"
#BFS from search.py
from game import Directions
directions = {'South': Directions.SOUTH, 'North': Directions.NORTH,
'West': Directions.WEST, 'East': Directions.EAST}
fringe = util.Queue()
fringe.push((startPosition, [], 0))
visited = []
while not fringe.isEmpty():
current = fringe.pop()
if problem.isGoalState(current[0]):
return current[1]
if visited.count(current[0])<1:
visited.append(current[0])
for s in problem.getSuccessors(current[0]):
if visited.count(s[0])<1:
fringe.push((s[0], current[1]+[directions[s[1]]], s[2]))
return []
class AnyFoodSearchProblem(PositionSearchProblem):
"""
A search problem for finding a path to any food.
This search problem is just like the PositionSearchProblem, but
has a different goal test, which you need to fill in below. The
state space and successor function do not need to be changed.
The class definition above, AnyFoodSearchProblem(PositionSearchProblem),
inherits the methods of the PositionSearchProblem.
You can use this search problem to help you fill in
the findPathToClosestDot method.
"""
def __init__(self, gameState):
"Stores information from the gameState. You don't need to change this."
# Store the food for later reference
self.food = gameState.getFood()
# Store info for the PositionSearchProblem (no need to change this)
self.walls = gameState.getWalls()
self.startState = gameState.getPacmanPosition()
self.costFn = lambda x: 1
self._visited, self._visitedlist, self._expanded = {}, [], 0
def isGoalState(self, state):
"""
The state is Pacman's position. Fill this in with a goal test
that will complete the problem definition.
"""
x,y = state
"*** YOUR CODE HERE ***"
return (x,y) in self.food.asList()
##################
# Mini-contest 1 #
##################
class ApproximateSearchAgent(Agent):
"Implement your contest entry here. Change anything but the class name."
def registerInitialState(self, state):
"This method is called before any moves are made."
"*** YOUR CODE HERE ***"
def getAction(self, state):
"""
From game.py:
The Agent will receive a GameState and must return an action from
Directions.{North, South, East, West, Stop}
"""
"*** YOUR CODE HERE ***"
util.raiseNotDefined()
def mazeDistance(point1, point2, gameState):
"""
Returns the maze distance between any two points, using the search functions
you have already built. The gameState can be any game state -- Pacman's position
in that state is ignored.
Example usage: mazeDistance( (2,4), (5,6), gameState)
This might be a useful helper function for your ApproximateSearchAgent.
"""
x1, y1 = point1
x2, y2 = point2
walls = gameState.getWalls()
assert not walls[x1][y1], 'point1 is a wall: ' + str(point1)
assert not walls[x2][y2], 'point2 is a wall: ' + str(point2)
prob = PositionSearchProblem(gameState, start=point1, goal=point2, warn=False)
return len(search.bfs(prob))
|
import six
import transmissionrpc
from pytz import reference, utc
from sqlalchemy import Column, Integer, String
from monitorrent.db import Base, DBSession
from monitorrent.plugin_managers import register_plugin
import base64
class TransmissionCredentials(Base):
__tablename__ = "transmission_credentials"
id = Column(Integer, primary_key=True)
host = Column(String, nullable=False)
port = Column(Integer, nullable=False)
username = Column(String, nullable=True)
password = Column(String, nullable=True)
class TransmissionClientPlugin(object):
name = "transmission"
form = [{
'type': 'row',
'content': [{
'type': 'text',
'label': 'Host',
'model': 'host',
'flex': 80
}, {
'type': 'text',
'label': 'Port',
'model': 'port',
'flex': 20
}]
}, {
'type': 'row',
'content': [{
'type': 'text',
'label': 'Username',
'model': 'username',
'flex': 50
}, {
'type': 'password',
'label': 'Password',
'model': 'password',
'flex': 50
}]
}]
DEFAULT_PORT = 9091
SUPPORTED_FIELDS = ['download_dir']
def get_settings(self):
with DBSession() as db:
cred = db.query(TransmissionCredentials).first()
if not cred:
return None
return {'host': cred.host, 'port': cred.port, 'username': cred.username}
def set_settings(self, settings):
with DBSession() as db:
cred = db.query(TransmissionCredentials).first()
if not cred:
cred = TransmissionCredentials()
db.add(cred)
cred.host = settings['host']
cred.port = settings.get('port', self.DEFAULT_PORT)
cred.username = settings.get('username', None)
cred.password = settings.get('password', None)
def check_connection(self):
with DBSession() as db:
cred = db.query(TransmissionCredentials).first()
if not cred:
return False
client = transmissionrpc.Client(address=cred.host, port=cred.port,
user=cred.username, password=cred.password)
return client
def find_torrent(self, torrent_hash):
client = self.check_connection()
if not client:
return False
try:
torrent = client.get_torrent(torrent_hash.lower(), ['id', 'hashString', 'addedDate', 'name'])
return {
"name": torrent.name,
"date_added": torrent.date_added.replace(tzinfo=reference.LocalTimezone()).astimezone(utc)
}
except KeyError:
return False
def get_download_dir(self):
client = self.check_connection()
if not client:
return None
session = client.get_session()
return six.text_type(session.download_dir)
def add_torrent(self, torrent, torrent_settings):
"""
:type torrent: str
:type torrent_settings: clients.TopicSettings | None
"""
client = self.check_connection()
if not client:
return False
torrent_settings_dict = {}
if torrent_settings is not None:
if torrent_settings.download_dir is not None:
torrent_settings_dict['download_dir'] = torrent_settings.download_dir
client.add_torrent(base64.b64encode(torrent).decode('utf-8'), **torrent_settings_dict)
return True
def remove_torrent(self, torrent_hash):
client = self.check_connection()
if not client:
return False
client.remove_torrent(torrent_hash.lower(), delete_data=False)
return True
register_plugin('client', 'transmission', TransmissionClientPlugin())
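# Minimal usage sketch, not exercised by monitorrent itself: it assumes an initialised
# monitorrent database, a Transmission daemon reachable on localhost:9091, and an
# 'example.torrent' file that exists purely for illustration.
if __name__ == '__main__':
    plugin = TransmissionClientPlugin()
    plugin.set_settings({'host': 'localhost', 'port': 9091})
    if plugin.check_connection():
        with open('example.torrent', 'rb') as f:
            # add_torrent() takes the raw .torrent bytes and base64-encodes them itself.
            plugin.add_torrent(f.read(), None)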
|
'''
Created on Dec 19, 2018
@author: gsnyder
Generate notices report for a given project-version
'''
from blackduck.HubRestApi import HubInstance
import argparse
import json
import logging
import sys
import time
import zipfile
parser = argparse.ArgumentParser(description="A program to generate the notices file for a given project-version")
parser.add_argument("project_name")
parser.add_argument("version_name")
# TODO: Add the copyright checkbox option
parser.add_argument('-f', "--file_name_base", default="notices_report", help="Base file name to write the report data into. If the report format is TEXT a .zip file will be created, otherwise a .json file")
parser.add_argument('-r', '--report_format', default='TEXT', choices=["JSON", "TEXT"], help="Report format - choices are TEXT or JSON")
parser.add_argument('-c', '--include_copyright_info', action='store_true', help="Set this option to have additional copyright information from the Black Duck KB included in the notices file report.")
args = parser.parse_args()
hub = HubInstance()
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', stream=sys.stderr, level=logging.DEBUG)
class FailedReportDownload(Exception):
pass
def download_report(location, file_name_base, retries=10):
report_id = location.split("/")[-1]
if retries:
logging.debug("Retrieving generated report from {}".format(location))
# response = hub.download_report(report_id)
response, report_format = hub.download_notification_report(location)
if response.status_code == 200:
if report_format == "TEXT":
filename = file_name_base + ".zip"
with open(filename, "wb") as f:
f.write(response.content)
else:
# JSON format
filename = file_name_base + ".json"
with open(filename, "w") as f:
json.dump(response.json(), f, indent=3)
logging.info("Successfully downloaded json file to {} for report {}".format(
filename, report_id))
else:
logging.warning("Failed to retrieve report {}".format(report_id))
logging.warning("Probably not ready yet, waiting 5 seconds then retrying (remaining retries={}".format(retries))
time.sleep(5)
retries -= 1
download_report(location, file_name_base, retries)
else:
raise FailedReportDownload("Failed to retrieve report {} after multiple retries".format(report_id))
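# A sketch of an equivalent, loop-based variant of download_report above, shown only for
# comparison with the recursive version; it assumes the same
# hub.download_notification_report() interface and the same TEXT/JSON handling.
def download_report_iterative(location, file_name_base, retries=10):
    report_id = location.split("/")[-1]
    for attempt in range(retries):
        response, report_format = hub.download_notification_report(location)
        if response.status_code == 200:
            if report_format == "TEXT":
                with open(file_name_base + ".zip", "wb") as f:
                    f.write(response.content)
            else:
                with open(file_name_base + ".json", "w") as f:
                    json.dump(response.json(), f, indent=3)
            logging.info("Successfully downloaded report {} to disk".format(report_id))
            return
        logging.warning("Report {} not ready yet, retrying in 5 seconds "
                        "({} retries left)".format(report_id, retries - attempt - 1))
        time.sleep(5)
    raise FailedReportDownload(
        "Failed to retrieve report {} after {} retries".format(report_id, retries))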
project = hub.get_project_by_name(args.project_name)
if project:
version = hub.get_version_by_name(project, args.version_name)
response = hub.create_version_notices_report(version, args.report_format, include_copyright_info=args.include_copyright_info)
if response.status_code == 201:
logging.info("Successfully created notices report in {} format for project {} and version {}".format(
args.report_format, args.project_name, args.version_name))
location = response.headers['Location']
download_report(location, args.file_name_base)
# Showing how you can interact with the downloaded zip and where to find the
# output content. Uncomment the lines below to see how it works.
# with zipfile.ZipFile(args.file_name_base + ".zip", 'r') as zipf:
# with zipf.open("{}/{}/version-license.txt".format(args.project_name, args.version_name), "r") as license_file:
# print(license_file.read())
else:
logging.error("Failed to create reports for project {} version {}, status code returned {}".format(
args.project_name, args.version_name, response.status_code))
else:
logging.warning("Did not find project with name {}".format(args.project_name))
|
from qtpy.QtCore import QSize
from qtpy.QtGui import QIcon
from qtpy.QtWidgets import QListWidget, QListWidgetItem
from pathlib import Path
ICON_ROOT = Path(__file__).parent / "icons"
STYLES = r"""
QListWidget{
min-width: 294;
background: none;
font-size: 8pt;
color: #eee;
}
QListWidget::item {
width: 68;
height: 85;
border-radius: 0;
margin: 1;
padding: 4;
background: #414851;
}
QListWidget::item::hover {
background: #5A626C;
}
"""
def _get_icon(name):
path = ICON_ROOT / f'{name.lower().replace(" ", "_")}.png'
if not path.exists():
return ""
return str(path)
class ButtonGrid(QListWidget):
def __init__(self, parent=None):
super().__init__(parent=parent)
self.setMovement(self.Static) # The items cannot be moved by the user.
self.setViewMode(self.IconMode) # make items icons
self.setResizeMode(self.Adjust) # relayout when view is resized.
self.setUniformItemSizes(True) # better performance
self.setIconSize(QSize(64, 44))
self.setWordWrap(True)
self.setStyleSheet(STYLES)
def addItem(self, label : str, tool_tip : str = None):
if isinstance(label, QListWidgetItem):
super().addItem(label)
return  # a ready-made QListWidgetItem was passed in; don't wrap it again below
item = QListWidgetItem(QIcon(_get_icon(label)), label)
if tool_tip is not None:
item.setToolTip(tool_tip)
super().addItem(item)
def addItems(self, labels) -> None:
for label in labels:
if hasattr(labels[label], "tool_tip"):
self.addItem(label, labels[label].tool_tip)
else:
self.addItem(label)
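# Minimal usage sketch (needs a Qt binding that qtpy can locate; the labels below are
# illustrative and only get icons if matching .png files exist under ICON_ROOT):
if __name__ == "__main__":
    from types import SimpleNamespace
    from qtpy.QtWidgets import QApplication
    app = QApplication([])
    grid = ButtonGrid()
    grid.addItems({"Open": SimpleNamespace(tool_tip="Open a file"),
                   "Save": SimpleNamespace()})
    grid.show()
    app.exec_()  # start the event loop (use app.exec() with Qt 6 bindings)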
|
from collections import defaultdict
from copy import copy
from math import ceil, floor
def parse_item(item):
[num, name] = item.strip().split(' ')
return {'name': name, 'num': int(num)}
def filter_zeroes(d):
ret = defaultdict(lambda: 0)
for k, v in d.items():
if v != 0:
ret[k] = v
return ret
output_to_formula = {}
def parse_input():
lines = open('input').readlines()
for line in lines:
[input_string, output_string] = line.split('=>')
[output_number, output_chemical] = output_string.strip().split(' ')
formula = {'num': int(output_number)}
input_formula = defaultdict(lambda: 0)
for inp in input_string.strip().split(','):
[num, name] = inp.strip().split(' ')
input_formula[name] = int(num)
formula['inputs'] = input_formula
output_to_formula[output_chemical] = formula
def subtract_from_extras(extras, chem, num):
ret = num
if chem in extras:
from_extras = min(num, extras[chem])
ret -= from_extras
extras[chem] -= from_extras
return ret
def expand_one(chem, needed, extras):
if chem == 'ORE':
return {chem: needed}
formula = output_to_formula[chem]
fnum = formula['num']
scaling = ceil(needed / fnum)
extra = fnum * scaling - needed
if extra != 0:
extras[chem] += extra
ins = copy(formula['inputs'])
for key in ins.keys():
ins[key] *= scaling
ins[key] = subtract_from_extras(extras, key, ins[key])
return ins
def expand(chemicals):
extras = defaultdict(lambda: 0)
while list(chemicals.keys()) != ['ORE']:
new = defaultdict(lambda: 0)
for chem, num in chemicals.items():
num = subtract_from_extras(extras, chem, num)
expanded = expand_one(chem, num, extras)
for key in expanded.keys():
new[key] += expanded[key]
print('Round! ', chemicals, '->', new)
chemicals = new
ret = defaultdict(lambda: 0)
for key, value in extras.items():
if value != 0:
ret[key] = value
for key, value in chemicals.items():
if value != 0:
ret[key] = value
return chemicals
def part1():
parse_input()
chemicals = defaultdict(lambda: 0)
chemicals['FUEL'] = 1
while list(chemicals.keys()) != ['ORE']:
chemicals = expand(chemicals)
print('Expanded: ', chemicals)
# part1() # 892207
ONE_TRILLION = 1_000_000_000_000
START_FUELS = floor(ONE_TRILLION / 892207)
START_STEP = floor(START_FUELS / 2)
def part2():
parse_input()
fuels = START_FUELS
step = START_STEP
while True:
chemicals = defaultdict(lambda: 0)
chemicals['FUEL'] = fuels + step
while list(chemicals.keys()) != ['ORE']:
chemicals = expand(chemicals)
ores = chemicals['ORE']
if ores == ONE_TRILLION or step == 0:
print('FUELS = ', fuels)
break
elif ores < ONE_TRILLION:
fuels += step
elif ores > ONE_TRILLION:
step = floor(step / 2)
print(ores - ONE_TRILLION, step)
part2() # 1935265
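# Illustrative sketch only: the step-halving loop in part2() behaves like the
# bisection-style helper below, written against an arbitrary cost function so the idea
# can be checked without re-running the full reaction expansion (the helper name and the
# toy example are made up).
def _demo_max_fuel(ore_for_fuel, budget=ONE_TRILLION, start=START_FUELS):
    fuels = start
    step = max(start // 2, 1)
    while step > 0:
        if ore_for_fuel(fuels + step) <= budget:
            fuels += step
        else:
            step //= 2
    return fuels
# Toy check with a linear cost of 3 ore per fuel:
# _demo_max_fuel(lambda f: 3 * f, budget=10, start=1) == 3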
|
from django.db import migrations
from ufdl.core_app.migrations import DataMigration
from ufdl.core_app.migrations.job_templates import get_python_job_template_migration
from .job_templates import iterate_job_templates
class Migration(migrations.Migration):
"""
Migration inserting the Python job templates into the database.
"""
dependencies = [
('ufdl-image-segmentation', '0005_pretrained_models')
]
operations = [
DataMigration(get_python_job_template_migration(iterate_job_templates()))
]
|
# Copyright 2019 PIQuIL - All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
import torch
from torch.nn import functional as F
from qucumber import _warn_on_missing_gpu
from qucumber.utils import cplx, unitaries
from qucumber.rbm import PurificationRBM
from .neural_state import NeuralStateBase
class DensityMatrix(NeuralStateBase):
r"""
:param num_visible: The number of visible units, i.e. the size of the system
:type num_visible: int
:param num_hidden: The number of units in the hidden layer
:type num_hidden: int
:param num_aux: The number of units in the purification layer
:type num_aux: int
:param unitary_dict: A dictionary associating bases with their unitary rotations
:type unitary_dict: dict[str, torch.Tensor]
:param gpu: Whether to perform computations on the default gpu.
:type gpu: bool
"""
_rbm_am = None
_rbm_ph = None
_device = None
def __init__(
self,
num_visible,
num_hidden=None,
num_aux=None,
unitary_dict=None,
gpu=False,
module=None,
):
if gpu and torch.cuda.is_available():
warnings.warn(
"Using DensityMatrix on GPU is not recommended due to poor performance compared to CPU.",
ResourceWarning,
2,
)
self.device = torch.device("cuda")
else:
self.device = torch.device("cpu")
if module is None:
self.rbm_am = PurificationRBM(num_visible, num_hidden, num_aux, gpu=gpu)
self.rbm_ph = PurificationRBM(num_visible, num_hidden, num_aux, gpu=gpu)
else:
_warn_on_missing_gpu(gpu)
self.rbm_am = module.to(self.device)
self.rbm_am.device = self.device
self.rbm_ph = module.to(self.device).clone()
self.rbm_ph.device = self.device
self.num_visible = self.rbm_am.num_visible
self.num_hidden = self.rbm_am.num_hidden
self.num_aux = self.rbm_am.num_aux
self.device = self.rbm_am.device
self.unitary_dict = unitary_dict if unitary_dict else unitaries.create_dict()
self.unitary_dict = {
k: v.to(device=self.device) for k, v in self.unitary_dict.items()
}
@property
def networks(self):
return ["rbm_am", "rbm_ph"]
@property
def rbm_am(self):
return self._rbm_am
@rbm_am.setter
def rbm_am(self, new_val):
self._rbm_am = new_val
@property
def rbm_ph(self):
"""RBM used to learn the wavefunction phase."""
return self._rbm_ph
@rbm_ph.setter
def rbm_ph(self, new_val):
self._rbm_ph = new_val
@property
def device(self):
return self._device
@device.setter
def device(self, new_val):
self._device = new_val
def pi(self, v, vp, expand=True):
r"""Calculates elements of the :math:`\Pi` matrix.
If `expand` is `True`, will return a complex matrix
:math:`A_{ij} = \langle\sigma_i|\Pi|\sigma'_j\rangle`.
Otherwise will return a complex vector
:math:`A_{i} = \langle\sigma_i|\Pi|\sigma'_i\rangle`.
:param v: A batch of visible states, :math:`\sigma`.
:type v: torch.Tensor
:param vp: The other batch of visible states, :math:`\sigma'`.
:type vp: torch.Tensor
:param expand: Whether to return a matrix (`True`) or a vector (`False`).
:type expand: bool
:returns: The matrix elements given by :math:`\langle\sigma|\Pi|\sigma'\rangle`
:rtype: torch.Tensor
"""
m_am = F.linear(v, self.rbm_am.weights_U, self.rbm_am.aux_bias)
mp_am = F.linear(vp, self.rbm_am.weights_U, self.rbm_am.aux_bias)
m_ph = F.linear(v, self.rbm_ph.weights_U)
mp_ph = F.linear(vp, self.rbm_ph.weights_U)
if expand and v.dim() >= 2:
m_am = m_am.unsqueeze_(1)
m_ph = m_ph.unsqueeze_(1)
if expand and vp.dim() >= 2:
mp_am = mp_am.unsqueeze_(0)
mp_ph = mp_ph.unsqueeze_(0)
exp_arg = (m_am + mp_am) / 2
phase = (m_ph - mp_ph) / 2
real = (
(1 + 2 * exp_arg.exp() * phase.cos() + (2 * exp_arg).exp())
.sqrt()
.log()
.sum(-1)
)
imag = torch.atan2(
(exp_arg.exp() * phase.sin()), (1 + exp_arg.exp() * phase.cos())
).sum(-1)
return cplx.make_complex(real, imag)
def pi_grad(self, v, vp, phase=False, expand=False):
r"""Calculates the gradient of the :math:`\Pi` matrix with
respect to the amplitude RBM parameters for two input states
:param v: One of the visible states, :math:`\sigma`
:type v: torch.Tensor
:param vp: The other visible state, :math:`\sigma'`
:type vp: torch.Tensor
:param phase: Whether to compute the gradients for the phase RBM (`True`)
or the amplitude RBM (`False`)
:type phase: bool
:returns: The matrix element of the gradient given by
:math:`\langle\sigma|\nabla_\lambda\Pi|\sigma'\rangle`
:rtype: torch.Tensor
"""
unsqueezed = v.dim() < 2 or vp.dim() < 2
v = (v.unsqueeze(0) if v.dim() < 2 else v).to(self.rbm_am.weights_W)
vp = (vp.unsqueeze(0) if vp.dim() < 2 else vp).to(self.rbm_am.weights_W)
if expand:
arg_real = 0.5 * (
F.linear(v, self.rbm_am.weights_U, self.rbm_am.aux_bias).unsqueeze_(1)
+ F.linear(vp, self.rbm_am.weights_U, self.rbm_am.aux_bias).unsqueeze_(
0
)
)
arg_imag = 0.5 * (
F.linear(v, self.rbm_ph.weights_U).unsqueeze_(1)
- F.linear(vp, self.rbm_ph.weights_U).unsqueeze_(0)
)
else:
arg_real = self.rbm_am.mixing_term(v + vp)
arg_imag = self.rbm_ph.mixing_term(v - vp)
sig = cplx.sigmoid(arg_real, arg_imag)
batch_sizes = (
(v.shape[0], vp.shape[0], *v.shape[1:-1]) if expand else (*v.shape[:-1],)
)
W_grad = torch.zeros_like(self.rbm_am.weights_W).expand(*batch_sizes, -1, -1)
vb_grad = torch.zeros_like(self.rbm_am.visible_bias).expand(*batch_sizes, -1)
hb_grad = torch.zeros_like(self.rbm_am.hidden_bias).expand(*batch_sizes, -1)
if phase:
temp = (v.unsqueeze(1) - vp.unsqueeze(0)) if expand else (v - vp)
sig = cplx.scalar_mult(sig, cplx.I)
ab_grad_real = torch.zeros_like(self.rbm_ph.aux_bias).expand(
*batch_sizes, -1
)
ab_grad_imag = ab_grad_real.clone()
else:
temp = (v.unsqueeze(1) + vp.unsqueeze(0)) if expand else (v + vp)
ab_grad_real = cplx.real(sig)
ab_grad_imag = cplx.imag(sig)
U_grad = 0.5 * torch.einsum("c...j,...k->c...jk", sig, temp)
U_grad_real = cplx.real(U_grad)
U_grad_imag = cplx.imag(U_grad)
vec_real = [
W_grad.view(*batch_sizes, -1),
U_grad_real.view(*batch_sizes, -1),
vb_grad,
hb_grad,
ab_grad_real,
]
vec_imag = [
W_grad.view(*batch_sizes, -1).clone(),
U_grad_imag.view(*batch_sizes, -1),
vb_grad.clone(),
hb_grad.clone(),
ab_grad_imag,
]
if unsqueezed and not expand:
vec_real = [grad.squeeze_(0) for grad in vec_real]
vec_imag = [grad.squeeze_(0) for grad in vec_imag]
return cplx.make_complex(
torch.cat(vec_real, dim=-1), torch.cat(vec_imag, dim=-1)
)
def rho(self, v, vp=None, expand=True):
r"""Computes the matrix elements of the (unnormalized) density matrix.
If `expand` is `True`, will return a complex matrix
:math:`A_{ij} = \langle\sigma_i|\widetilde{\rho}|\sigma'_j\rangle`.
Otherwise will return a complex vector
:math:`A_{i} = \langle\sigma_i|\widetilde{\rho}|\sigma'_i\rangle`.
:param v: One of the visible states, :math:`\sigma`.
:type v: torch.Tensor
:param vp: The other visible state, :math:`\sigma'`.
If `None`, will be set to `v`.
:type vp: torch.Tensor
:param expand: Whether to return a matrix (`True`) or a vector (`False`).
:type expand: bool
:returns: The elements of the current density matrix
:math:`\langle\sigma|\widetilde{\rho}|\sigma'\rangle`
:rtype: torch.Tensor
"""
if expand is False and vp is None:
return cplx.make_complex(self.probability(v))
elif vp is None:
vp = v
pi_ = self.pi(v, vp, expand=expand)
amp = (self.rbm_am.gamma(v, vp, eta=+1, expand=expand) + cplx.real(pi_)).exp()
phase = self.rbm_ph.gamma(v, vp, eta=-1, expand=expand) + cplx.imag(pi_)
return cplx.make_complex(amp * phase.cos(), amp * phase.sin())
def importance_sampling_numerator(self, vp, v):
return self.rho(vp, v, expand=False)
def importance_sampling_denominator(self, v):
return cplx.make_complex(self.probability(v))
def rotated_gradient(self, basis, sample):
r"""Computes the gradients rotated into the measurement basis
:param basis: The bases in which the measurement is made
:type basis: numpy.ndarray
:param sample: The measurement (either 0 or 1)
:type sample: torch.Tensor
:returns: A list of two tensors, representing the rotated gradients
of the amplitude and phase RBMs
:rtype: list[torch.Tensor, torch.Tensor]
"""
UrhoU, UrhoU_v, v = unitaries.rotate_rho_probs(
self, basis, sample, include_extras=True
)
inv_UrhoU = 1 / (UrhoU + 1e-8) # avoid dividing by zero
raw_grads = [self.am_grads(v), self.ph_grads(v)]
rotated_grad = [
-cplx.einsum("ijb,ijbg->bg", UrhoU_v, g, imag_part=False) for g in raw_grads
]
return [torch.einsum("b,bg->g", inv_UrhoU, g) for g in rotated_grad]
def am_grads(self, v):
r"""Computes the gradients of the amplitude RBM for given input states
:param v: The first input state, :math:`\sigma`
:type v: torch.Tensor
:returns: The gradients of all amplitude RBM parameters
:rtype: torch.Tensor
"""
return self.rbm_am.gamma_grad(v, v, eta=+1, expand=True) + self.pi_grad(
v, v, phase=False, expand=True
)
def ph_grads(self, v):
r"""Computes the gradients of the phase RBM for given input states
:param v: The first input state, :math:`\sigma`
:type v: torch.Tensor
:returns: The gradients of all phase RBM parameters
:rtype: torch.Tensor
"""
return cplx.scalar_mult( # need to multiply Gamma- by i
self.rbm_ph.gamma_grad(v, v, eta=-1, expand=True), cplx.I
) + self.pi_grad(v, v, phase=True, expand=True)
def fit(
self,
data,
epochs=100,
pos_batch_size=100,
neg_batch_size=None,
k=1,
lr=1,
input_bases=None,
progbar=False,
starting_epoch=1,
time=False,
callbacks=None,
optimizer=torch.optim.SGD,
optimizer_args=None,
scheduler=None,
scheduler_args=None,
**kwargs,
):
if input_bases is None:
raise ValueError("input_bases must be provided to train a DensityMatrix!")
else:
super().fit(
data=data,
epochs=epochs,
pos_batch_size=pos_batch_size,
neg_batch_size=neg_batch_size,
k=k,
lr=lr,
input_bases=input_bases,
progbar=progbar,
starting_epoch=starting_epoch,
time=time,
callbacks=callbacks,
optimizer=optimizer,
optimizer_args=optimizer_args,
scheduler=scheduler,
scheduler_args=scheduler_args,
**kwargs,
)
@staticmethod
def autoload(location, gpu=False):
state_dict = torch.load(location)
nn_state = DensityMatrix(
unitary_dict=state_dict["unitary_dict"],
num_visible=len(state_dict["rbm_am"]["visible_bias"]),
num_hidden=len(state_dict["rbm_am"]["hidden_bias"]),
num_aux=len(state_dict["rbm_am"]["aux_bias"]),
gpu=gpu,
)
nn_state.load(location)
return nn_state
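# Minimal usage sketch (assumes qucumber is installed; the 2-unit system size and the
# random visible configurations below are illustrative only):
if __name__ == "__main__":
    nn_state = DensityMatrix(num_visible=2, num_hidden=2, num_aux=2, gpu=False)
    v = torch.bernoulli(torch.full((4, 2), 0.5))
    # Unnormalized diagonal elements <v|rho|v> for a small batch of visible states.
    diag = nn_state.rho(v, v, expand=False)
    print(cplx.real(diag))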
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class TaskGroupDefinition(Model):
"""TaskGroupDefinition.
:param display_name:
:type display_name: str
:param is_expanded:
:type is_expanded: bool
:param name:
:type name: str
:param tags:
:type tags: list of str
:param visible_rule:
:type visible_rule: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'is_expanded': {'key': 'isExpanded', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'visible_rule': {'key': 'visibleRule', 'type': 'str'}
}
def __init__(self, display_name=None, is_expanded=None, name=None, tags=None, visible_rule=None):
super(TaskGroupDefinition, self).__init__()
self.display_name = display_name
self.is_expanded = is_expanded
self.name = name
self.tags = tags
self.visible_rule = visible_rule
|
#!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test -reindex with CheckBlockIndex
#
from test_framework import NewcoinTestFramework
from newcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
import os.path
class ReindexTest(NewcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self):
self.nodes = []
self.is_network_split = False
self.nodes.append(start_node(0, self.options.tmpdir))
def run_test(self):
self.nodes[0].generate(3)
stop_node(self.nodes[0], 0)
wait_newcoinds()
self.nodes[0]=start_node(0, self.options.tmpdir, ["-debug", "-reindex", "-checkblockindex=1"])
assert_equal(self.nodes[0].getblockcount(), 3)
print "Success"
if __name__ == '__main__':
ReindexTest().main()
|
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from rest_framework import generics
from rest_framework import permissions
from rest_framework.views import status
from rest_framework.response import Response
from rest_framework_jwt.settings import api_settings
from .serializers import TokenSerializer, UserSerializer
# Get the JWT settings
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
# Create your views here.
class LoginView(generics.CreateAPIView):
"""
POST auth/login/
"""
# This permission class will over ride the global permission
# class setting
permission_classes = (permissions.AllowAny,)
queryset = User.objects.all()
def post(self, request, *args, **kwargs):
username = request.data.get("username", "")
password = request.data.get("password", "")
user = authenticate(request, username=username, password=password)
if user is not None:
# login saves the user’s ID in the session,
# using Django’s session framework.
login(request, user)
serializer = TokenSerializer(data={
# using drf jwt utility functions to generate a token
"token": jwt_encode_handler(
jwt_payload_handler(user)
)})
serializer.is_valid()
return Response(data=serializer.data, status=status.HTTP_200_OK)
return Response(status=status.HTTP_401_UNAUTHORIZED)
class RegisterUsers(generics.CreateAPIView):
"""
POST auth/register/
"""
permission_classes = (permissions.AllowAny,)
def post(self, request, *args, **kwargs):
username = request.data.get("username", "")
password = request.data.get("password", "")
email = request.data.get("email", "")
if not username or not password or not email:
return Response(
data={
"message": "username, password and email is required to register a user"
},
status=status.HTTP_400_BAD_REQUEST
)
new_user = User.objects.create_user(
username=username, password=password, email=email
)
return Response(
data=UserSerializer(new_user).data,
status=status.HTTP_201_CREATED
)
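# Illustrative sketch only, assuming these views are routed at 'auth/login/' and
# 'auth/register/' as the docstrings suggest; intended for a test or shell session and
# never executed from this module.
def _example_auth_flow():
    from rest_framework.test import APIClient
    client = APIClient()
    client.post("/auth/register/", {"username": "alice", "password": "s3cret",
                                    "email": "alice@example.com"}, format="json")
    response = client.post("/auth/login/", {"username": "alice", "password": "s3cret"},
                           format="json")
    # On success the TokenSerializer payload carries the JWT under the 'token' key.
    return response.data["token"]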
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'action remplir."""
from primaires.scripting.action import Action
from primaires.scripting.instruction import ErreurExecution
class ClasseAction(Action):
"""Remplit un conteneur de nourriture ou de potion."""
@classmethod
def init_types(cls):
cls.ajouter_types(cls.remplir_objet, "Objet", "Objet")
cls.ajouter_types(cls.remplir_proto_nb, "Objet", "str",
"Fraction")
@staticmethod
def remplir_objet(conteneur, objet):
"""Met l'objet dans le conteneur de nourriture.
Attention, l'objet conteneur ne peut en aucun cas être "flottant" mais
doit lui-même être contenu quelque part (sol d'une salle, inventaire
d'un personnage, autre conteneur...).
"""
if not conteneur.contenu:
raise ErreurExecution("{} n'est contenu nul part".format(
conteneur.get_nom()))
if conteneur.est_de_type("conteneur de potion"):
if conteneur.potion:
raise ErreurExecution("{} est plein".format(
conteneur.get_nom()))
if objet.contenu:
objet.contenu.retirer(objet)
conteneur.potion = objet
conteneur.onces = conteneur.onces_max
return
if not conteneur.est_de_type("conteneur de nourriture"):
raise ErreurExecution("{} n'est pas un conteneur".format(
conteneur.get_nom()))
if objet.poids_unitaire > conteneur.poids_max:
raise ErreurExecution("{} est plein".format(conteneur.get_nom()))
if objet.contenu:
objet.contenu.retirer(objet)
conteneur.nourriture.append(objet)
@staticmethod
def remplir_proto_nb(conteneur, prototype, nb):
"""Pose dans le conteneur nb objets du prototype précisé.
Attention, l'objet conteneur ne peut en aucun cas être "flottant" mais
doit lui-même être contenu quelque part (sol d'une salle, inventaire
d'un personnage, autre conteneur...).
"""
nb = int(nb)
if not prototype in importeur.objet.prototypes:
raise ErreurExecution("prototype {} introuvable".format(prototype))
prototype = importeur.objet.prototypes[prototype]
if not conteneur.contenu:
raise ErreurExecution("{} n'est contenu nul part".format(
conteneur.get_nom()))
if conteneur.est_de_type("conteneur de potion"):
if conteneur.potion:
raise ErreurExecution("{} est plein".format(
conteneur.get_nom()))
objet = importeur.objet.creer_objet(prototype)
conteneur.potion = objet
conteneur.onces = conteneur.onces_max
return
if not conteneur.est_de_type("conteneur de nourriture"):
raise ErreurExecution("{} n'est pas un conteneur".format(
conteneur.get_nom()))
poids_total = 0
for i in range(nb):
poids_total += prototype.poids
if poids_total > conteneur.poids_max:
raise ErreurExecution("{} est plein".format(
conteneur.get_nom()))
objet = importeur.objet.creer_objet(prototype)
conteneur.nourriture.append(objet)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import objects
from nova.tests.unit.objects import test_objects
fake_obj_numa = objects.NUMATopology(
cells=[
objects.NUMACell(
id=0, cpuset=set([1, 2]), memory=512,
cpu_usage=2, memory_usage=256),
objects.NUMACell(
id=1, cpuset=set([3, 4]), memory=512,
cpu_usage=1, memory_usage=128)])
class _TestNUMA(object):
def test_convert_wipe(self):
d1 = fake_obj_numa._to_dict()
d2 = objects.NUMATopology.obj_from_primitive(d1)._to_dict()
self.assertEqual(d1, d2)
def test_pinning_logic(self):
obj = objects.NUMATopology(cells=[
objects.NUMACell(
id=0, cpuset=set([1, 2]), memory=512,
cpu_usage=2, memory_usage=256,
pinned_cpus=set([1])),
objects.NUMACell(
id=1, cpuset=set([3, 4]), memory=512,
cpu_usage=1, memory_usage=128,
pinned_cpus=set([]))
]
)
self.assertEqual(set([2]), obj.cells[0].free_cpus)
self.assertEqual(set([3, 4]), obj.cells[1].free_cpus)
class TestNUMA(test_objects._LocalTest,
_TestNUMA):
pass
class TestNUMARemote(test_objects._RemoteTest,
_TestNUMA):
pass
|
#!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
Tests the ConfigurationAdmin shell commands
:author: Thomas Calmant
"""
# Pelix
import pelix.framework
import pelix.services
import pelix.shell
import pelix.shell.beans as beans
# Standard library
import os
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
import unittest2 as unittest
except ImportError:
import unittest
# ------------------------------------------------------------------------------
__version_info__ = (1, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# ------------------------------------------------------------------------------
class ConfigAdminShellTest(unittest.TestCase):
"""
Tests the EventAdmin shell commands
"""
def setUp(self):
"""
Prepares a framework and a registers a service to export
"""
# Use a local configuration folder
conf_folder = os.path.join(os.path.dirname(__file__), "conf")
# Create the framework
self.framework = pelix.framework.create_framework(
('pelix.ipopo.core', 'pelix.shell.core',
'pelix.services.configadmin', 'pelix.shell.configadmin'),
{'configuration.folder': conf_folder})
self.framework.start()
# Get the Shell service
context = self.framework.get_bundle_context()
svc_ref = context.get_service_reference(pelix.shell.SERVICE_SHELL)
self.shell = context.get_service(svc_ref)
# Instantiate the EventAdmin component
context = self.framework.get_bundle_context()
# Get the service
self.config_ref = context.get_service_reference(
pelix.services.SERVICE_CONFIGURATION_ADMIN)
self.config = context.get_service(self.config_ref)
# Remove existing configurations
for config in self.config.list_configurations():
config.delete()
def _run_command(self, command, *args):
"""
Runs the given shell command
"""
# String output
str_output = StringIO()
# Format command
if args:
command = command.format(*args)
# Add the namespace prefix
command = 'config.{0}'.format(command)
# Run command
session = beans.ShellSession(beans.IOHandler(None, str_output))
self.shell.execute(command, session)
return str_output.getvalue()
def tearDown(self):
"""
Cleans up for next test
"""
# Remove existing configurations
for config in self.config.list_configurations():
config.delete()
# Stop the framework
pelix.framework.FrameworkFactory.delete_framework()
self.framework = None
def testLifeCycle(self):
"""
Tests a configuration life cycle
"""
# Create a factory configuration
key = "testConfig"
first_value = "first"
factory_name = "testFactory"
output = self._run_command("create {0} {1}={2}", factory_name,
key, first_value)
# Get the generated configuration
config = next(iter(self.config.list_configurations()))
# Check validity
self.assertIn(config.get_pid(), output)
self.assertEqual(factory_name, config.get_factory_pid())
self.assertDictContainsSubset({key: first_value},
config.get_properties())
# Update it
second_value = "second"
self._run_command("update {0} {1}={2}", config.get_pid(),
key, second_value)
self.assertDictContainsSubset({key: second_value},
config.get_properties())
# Reload it
self._run_command("reload {0}", config.get_pid())
# List it
output = self._run_command('list')
self.assertIn(config.get_pid(), output)
output = self._run_command('list {0}', config.get_pid())
self.assertIn(config.get_pid(), output)
# Delete it
self._run_command("delete {0}", config.get_pid())
self.assertEqual(self.config.list_configurations(), set())
def testInvalidPid(self):
"""
Tests commands with invalid PIDs
"""
self._run_command("delete <invalid>")
self._run_command("list <invalid>")
self._run_command("reload <invalid>")
def testUpdate(self):
"""
Tests the update command
"""
pid = "testPid"
key = "testConfig"
value = "testValue"
# Create the configuration, with no property
self._run_command("update {0}", pid)
# Get the generated configuration
config = next(iter(self.config.list_configurations()))
self.assertEqual(config.get_pid(), pid)
self.assertIsNone(config.get_properties())
# Set a key
self._run_command("update {0} {1}={2}", pid, key, value)
self.assertDictContainsSubset({key: value}, config.get_properties())
# Remove a key
self._run_command("update {0} {1}=None", pid, key)
self.assertNotIn(key, config.get_properties())
def testList(self):
"""
Other tests for the list command
"""
pid = "testPid"
pid2 = "testPidBis"
key = "testConfig"
value = "testValue"
# Nothing at first
output = self._run_command("list")
self.assertIn("No configuration", output)
# List inexistent PID
output = self._run_command("list {0}", pid)
self.assertIn("No configuration", output)
# Create a configuration without properties
config = self.config.get_configuration(pid)
# List it
output = self._run_command("list {0}", pid)
self.assertIn("Not yet updated", output)
# Update it
config.update({key: value})
output = self._run_command("list {0}", pid)
self.assertIn(pid, output)
self.assertIn(key, output)
self.assertIn(value, output)
# Create a second one
config2 = self.config.get_configuration(pid2)
# Delete the first one
config.delete()
self.assertNotIn(config, self.config.list_configurations())
self.assertIn(config2, self.config.list_configurations())
# List it
output = self._run_command("list {0}", pid)
self.assertIn("No configuration", output)
self.assertIn(pid, output)
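# Not part of the original test module: a conventional entry point so the test case above
# can also be run directly with "python <this file>" instead of a dedicated runner.
if __name__ == "__main__":
    unittest.main()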
|
"""
Vanilla DenseNet implementation
Paper: https://arxiv.org/abs/1608.06993
Implementation taken from: https://github.com/pytorch/vision/blob/main/torchvision/models/densenet.py
"""
import re
from collections import OrderedDict
from functools import partial
from typing import Any, List, Optional, Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint as cp
from torch import Tensor
class _DenseLayer(nn.Module):
def __init__(
self, num_input_features: int, growth_rate: int, bn_size: int, drop_rate: float, memory_efficient: bool = False
) -> None:
super().__init__()
self.norm1: nn.BatchNorm2d
self.add_module("norm1", nn.BatchNorm2d(num_input_features))
self.relu1: nn.ReLU
self.add_module("relu1", nn.ReLU(inplace=True))
self.conv1: nn.Conv2d
self.add_module(
"conv1", nn.Conv2d(num_input_features, bn_size * growth_rate, kernel_size=1, stride=1, bias=False)
)
self.norm2: nn.BatchNorm2d
self.add_module("norm2", nn.BatchNorm2d(bn_size * growth_rate))
self.relu2: nn.ReLU
self.add_module("relu2", nn.ReLU(inplace=True))
self.conv2: nn.Conv2d
self.add_module(
"conv2", nn.Conv2d(bn_size * growth_rate, growth_rate, kernel_size=3, stride=1, padding=1, bias=False)
)
self.drop_rate = float(drop_rate)
self.memory_efficient = memory_efficient
def bn_function(self, inputs: List[Tensor]) -> Tensor:
concated_features = torch.cat(inputs, 1)
bottleneck_output = self.conv1(self.relu1(self.norm1(concated_features))) # noqa: T484
return bottleneck_output
# todo: rewrite when torchscript supports any
def any_requires_grad(self, input: List[Tensor]) -> bool:
for tensor in input:
if tensor.requires_grad:
return True
return False
@torch.jit.unused # noqa: T484
def call_checkpoint_bottleneck(self, input: List[Tensor]) -> Tensor:
def closure(*inputs):
return self.bn_function(inputs)
return cp.checkpoint(closure, *input)
@torch.jit._overload_method # noqa: F811
def forward(self, input: List[Tensor]) -> Tensor: # noqa: F811
pass
@torch.jit._overload_method # noqa: F811
def forward(self, input: Tensor) -> Tensor: # noqa: F811
pass
# torchscript does not yet support *args, so we overload method
# allowing it to take either a List[Tensor] or single Tensor
def forward(self, input: Tensor) -> Tensor: # noqa: F811
if isinstance(input, Tensor):
prev_features = [input]
else:
prev_features = input
if self.memory_efficient and self.any_requires_grad(prev_features):
if torch.jit.is_scripting():
raise Exception("Memory Efficient not supported in JIT")
bottleneck_output = self.call_checkpoint_bottleneck(prev_features)
else:
bottleneck_output = self.bn_function(prev_features)
new_features = self.conv2(self.relu2(self.norm2(bottleneck_output)))
if self.drop_rate > 0:
new_features = F.dropout(new_features, p=self.drop_rate, training=self.training)
return new_features
class _DenseBlock(nn.ModuleDict):
_version = 2
def __init__(
self,
num_layers: int,
num_input_features: int,
bn_size: int,
growth_rate: int,
drop_rate: float,
memory_efficient: bool = False,
) -> None:
super().__init__()
for i in range(num_layers):
layer = _DenseLayer(
num_input_features + i * growth_rate,
growth_rate=growth_rate,
bn_size=bn_size,
drop_rate=drop_rate,
memory_efficient=memory_efficient,
)
self.add_module("denselayer%d" % (i + 1), layer)
def forward(self, init_features: Tensor) -> Tensor:
features = [init_features]
for name, layer in self.items():
new_features = layer(features)
features.append(new_features)
return torch.cat(features, 1)
class _Transition(nn.Sequential):
def __init__(self, num_input_features: int, num_output_features: int) -> None:
super().__init__()
self.add_module("norm", nn.BatchNorm2d(num_input_features))
self.add_module("relu", nn.ReLU(inplace=True))
self.add_module("conv", nn.Conv2d(num_input_features, num_output_features, kernel_size=1, stride=1, bias=False))
self.add_module("pool", nn.AvgPool2d(kernel_size=2, stride=2))
class DenseNet(nn.Module):
r"""Densenet-BC model class, based on
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_.
Args:
growth_rate (int) - how many filters to add each layer (`k` in paper)
block_config (list of 4 ints) - how many layers in each pooling block
num_init_features (int) - the number of filters to learn in the first convolution layer
bn_size (int) - multiplicative factor for number of bottle neck layers
(i.e. bn_size * k features in the bottleneck layer)
drop_rate (float) - dropout rate after each dense layer
num_classes (int) - number of classification classes
memory_efficient (bool) - If True, uses checkpointing. Much more memory efficient,
but slower. Default: *False*. See `"paper" <https://arxiv.org/pdf/1707.06990.pdf>`_.
"""
def __init__(
self,
growth_rate: int = 32,
block_config: Tuple[int, int, int, int] = (6, 12, 24, 16),
num_init_features: int = 64,
bn_size: int = 4,
drop_rate: float = 0,
num_classes: int = 1000,
memory_efficient: bool = False,
) -> None:
super().__init__()
# First convolution
self.features = nn.Sequential(
OrderedDict(
[
("conv0", nn.Conv2d(3, num_init_features, kernel_size=7, stride=2, padding=3, bias=False)),
("norm0", nn.BatchNorm2d(num_init_features)),
("relu0", nn.ReLU(inplace=True)),
("pool0", nn.MaxPool2d(kernel_size=3, stride=2, padding=1)),
]
)
)
# Each denseblock
num_features = num_init_features
for i, num_layers in enumerate(block_config):
block = _DenseBlock(
num_layers=num_layers,
num_input_features=num_features,
bn_size=bn_size,
growth_rate=growth_rate,
drop_rate=drop_rate,
memory_efficient=memory_efficient,
)
self.features.add_module("denseblock%d" % (i + 1), block)
num_features = num_features + num_layers * growth_rate
if i != len(block_config) - 1:
trans = _Transition(num_input_features=num_features, num_output_features=num_features // 2)
self.features.add_module("transition%d" % (i + 1), trans)
num_features = num_features // 2
# Final batch norm
self.features.add_module("norm5", nn.BatchNorm2d(num_features))
# Linear layer
self.classifier = nn.Linear(num_features, num_classes)
# Official init from torch repo.
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.constant_(m.bias, 0)
def forward(self, x: Tensor) -> Tensor:
features = self.features(x)
out = F.relu(features, inplace=True)
out = F.adaptive_avg_pool2d(out, (1, 1))
out = torch.flatten(out, 1)
out = self.classifier(out)
return out
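# Minimal usage sketch (illustrative; the defaults below correspond to a DenseNet-121 configuration):
# model = DenseNet(growth_rate=32, block_config=(6, 12, 24, 16), num_init_features=64)
# logits = model(torch.randn(1, 3, 224, 224))  # expected output shape: (1, 1000)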
|
# -*- coding: utf-8 -*-
"""module for android forensics."""
import os
import io
import subprocess
import sqlite3
from datetime import datetime
from modules import logger
from modules import manager
from modules import interface
class AndForensicsConnector(interface.ModuleConnector):
NAME = 'andforensics_connector'
DESCRIPTION = 'Module for android'
TABLE_NAME = 'lv1_os_android_andforensics'
_plugin_classes = {}
def __init__(self):
super(AndForensicsConnector, self).__init__()
def Connect(self, par_id, configuration, source_path_spec, knowledge_base):
"""Connector to connect to AndForensics.
Args:
par_id: partition id.
configuration: configuration values.
source_path_spec (dfvfs.PathSpec): path specification of the source file.
knowledge_base (KnowledgeBase): knowledge base.
"""
        # The target image needs to be copied over before running andForensics
if os.path.exists(configuration.source_path):
cmd = 'python3.6 /home/byeongchan/modules/andForensics/andForensics.py -i \'{0:s}\' -o \'{1:s}\' ' \
'-proc {2:d}'.format(os.path.dirname(configuration.source_path),
configuration.tmp_path + os.sep + 'andForensics', 10)
proc = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
ret_code = proc.stdout.read()
f = io.StringIO(str(ret_code))
result_msg = f.readline()
print(result_msg)
f.close()
if result_msg[-14:-3] == 'Process End':
base_name = os.path.basename(configuration.source_path)
output_path = configuration.tmp_path + os.sep + 'andForensics' + os.sep \
+ os.path.basename(configuration.source_path)
analysis_db_path = output_path + os.sep + 'analysis_' + base_name + '.db'
load_db_path = output_path + os.sep + 'loaddb_' + base_name + '.db'
preprocess_db_path = output_path + os.sep + 'preprocess_' + base_name + '.db'
this_file_path = os.path.dirname(
os.path.abspath(__file__)) + os.sep + 'schema' + os.sep + 'android' + os.sep
yaml_list = [this_file_path + 'lv1_os_and_app_list.yaml',
this_file_path + 'lv1_os_and_call_history.yaml',
this_file_path + 'lv1_os_and_emb_file.yaml',
this_file_path + 'lv1_os_and_file_history.yaml',
this_file_path + 'lv1_os_and_geodata.yaml',
this_file_path + 'lv1_os_and_id_pw_hash.yaml',
this_file_path + 'lv1_os_and_web_browser_history.yaml']
old_table_list = ['application_list', 'call_history', 'embedded_file', 'file_history',
'geodata', 'id_password_hash', 'web_browser_history']
new_table_list = ['lv1_os_and_app_list', 'lv1_os_and_call_history', 'lv1_os_and_emb_file',
'lv1_os_and_file_history', 'lv1_os_and_geodata', 'lv1_os_and_id_pw_hash',
'lv1_os_and_web_browser_history']
if not self.check_table_from_yaml(configuration, yaml_list, new_table_list):
return False
info = tuple([par_id, configuration.case_id, configuration.evidence_id])
try:
conn = sqlite3.connect(analysis_db_path)
cursor = conn.cursor()
for idx, table in enumerate(old_table_list):
cursor.execute(f'select * from {table}')
rows = cursor.fetchall()
rows_list = []
for row in rows:
                            if table == 'application_list':
row = row[:5] + _convert_timestamp(row[5:13]) + row[13:]
rows_list.append(info + row)
print(rows_list)
query = ""
                        if table == 'application_list':
query = f"Insert into {new_table_list[idx]} values (%s, %s, %s, %s, %s, %s, %s, %s, %s, " \
f"%s, %s, %s, %s, %s, %s, %s, %s, %s);"
                        elif table == 'call_history':
query = f"Insert into {new_table_list[idx]} values (%s, %s, %s, %s, %s, %s, %s, %s, %s, " \
f"%s, %s, %s)"
                        elif table == 'embedded_file':
query = f"Insert into {new_table_list[idx]} values (%s, %s, %s, %s, %s, %s, %s, %s, %s, " \
f"%s, %s, %s, %s, %s)"
                        elif table == 'file_history' or table == 'id_password_hash':
query = f"Insert into {new_table_list[idx]} values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
                        elif table == 'geodata':
query = f"Insert into {new_table_list[idx]} values (%s, %s, %s, %s, %s, %s, %s, %s, %s)"
                        elif table == 'web_browser_history':
query = f"Insert into {new_table_list[idx]} values (%s, %s, %s, %s, %s, %s, %s, %s, " \
f"%s, %s, %s)"
configuration.cursor.bulk_execute(query, rows_list)
self.mask_table(configuration, 'call_history')
except Exception as exception:
logger.error('Database error : {0!s}'.format(exception))
finally:
conn.close()
else:
                logger.info('andForensics processing did not complete successfully: ' + result_msg)
def mask_table(self, configuration, table_name):
        if table_name == 'call_history':
query = "update lv1_os_and_call_history set timestamp = regexp_replace(timestamp, " \
"'(\\\\d{2,3}-)\\\\d{1,2}(\\\\d{2}-)\\\\d{2}(\\\\d{2})', " \
"'\\\\1**\\\\2**\\\\3');"
configuration.cursor.execute_query(query)
query = "update lv1_os_and_call_history set phonenumber = regexp_replace(phonenumber, " \
"'((?:(?:0|\\\\+82)(?:10|2|3[1-3]|4[1-4]|5[0-5]|6[1-4]|70)-?)\\\\d{1,2})\\\\d{2}(-?)\\\\d{2}(\\\\d{2})', " \
"'\\\\1**\\\\2**\\\\3')"
configuration.cursor.execute_query(query)
query = "update lv1_os_and_call_history set file = regexp_replace(file, " \
"'(통화 녹음 )([가-힣]|(?:\\\\d{6}))(?:\\\\s|\\\\S)*(_\\\\d{6}_\\\\d{6})', " \
"'\\\\1\\\\2*\\\\3')"
configuration.cursor.execute_query(query)
query = "update lv1_os_and_call_history SET contents = if(CHAR_LENGTH(contents)-CHAR_LENGTH(REPLACE(contents,'|',''))=2," \
" CONCAT_WS('|'," \
" REGEXP_REPLACE(SUBSTRING_INDEX(contents, '|', 1)," \
" '(^\\\\S|\\\\s)(?:\\\\S|\\\\s)*(\\\\(contact_name:string_num\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 2), '|', -1)," \
" '(^\\\\S|\\\\s)(?:\\\\S|\\\\s)*(\\\\(contact_name:string\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 3), '|', -1)," \
" '(^\\\\S|\\\\s)(?:\\\\S|\\\\s)*(\\\\(contact_name:string_num_mixed\\\\))', '\\\\1*\\\\2')" \
" )," \
" CONCAT_WS('|'," \
" SUBSTRING_INDEX(contents, '|', 1)," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 2), '|', -1)," \
" '(^\\\\S|\\\\s)(?:\\\\S|\\\\s)*(\\\\(name:string\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 3), '|', -1)," \
" '(^(?:\\\\S|\\\\s){2})(?:\\\\S|\\\\s)*(\\\\(m_subject:string_num\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 4), '|', -1)," \
" '(^(?:\\\\S|\\\\s){2})(?:\\\\S|\\\\s)*(\\\\(m_subject:string\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 5), '|', -1)," \
" '(^(?:\\\\S|\\\\s){2})(?:\\\\S|\\\\s)*(\\\\(m_subject:string_num_mixed\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 6), '|', -1)," \
" '(^(?:\\\\S|\\\\s){3})(?:\\\\S|\\\\s)*(\\\\(m_content:string_num_mixed\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 7), '|', -1)," \
" '(^(?:\\\\S|\\\\s){3})(?:\\\\S|\\\\s)*(\\\\(m_content:string_num\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 8), '|', -1)," \
" '(^(?:\\\\S|\\\\s){3})(?:\\\\S|\\\\s)*(\\\\(m_content:string\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 9), '|', -1)," \
" '(^\\\\S|\\\\s)(?:\\\\S|\\\\s)*(\\\\(cnap_name:string_num_mixed\\\\))', '\\\\1*\\\\2')," \
" REGEXP_REPLACE(SUBSTRING_INDEX(SUBSTRING_INDEX(contents, '|', 10), '|', -1)," \
" '(^\\\\S|\\\\s)(?:\\\\S|\\\\s)*(\\\\(cnap_name:string\\\\))', '\\\\1*\\\\2')" \
" )" \
")"
configuration.cursor.execute_query(query)
manager.ModulesManager.RegisterModule(AndForensicsConnector)
def _convert_timestamp(timestamp):
if timestamp is None:
return 'N/A'
if isinstance(timestamp, tuple):
to_timestamp = []
for t in timestamp:
to_timestamp.append(datetime.fromtimestamp(t).strftime('%Y-%m-%dT%H:%M:%SZ'))
return tuple(to_timestamp)
else:
return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%dT%H:%M:%SZ')
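# Illustrative behaviour of _convert_timestamp (exact strings depend on the local timezone):
# _convert_timestamp(None)        -> 'N/A'
# _convert_timestamp(1535974584)  -> an ISO-style string such as '2018-09-03T...Z'
# _convert_timestamp((0, 86400))  -> a tuple of two ISO-style strings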
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
class bitz(Exchange):
def describe(self):
return self.deep_extend(super(bitz, self).describe(), {
'id': 'bitz',
'name': 'Bit-Z',
'countries': ['HK'],
'rateLimit': 2000,
'version': 'v2',
'userAgent': self.userAgents['chrome'],
'has': {
'fetchTickers': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchOrders': True,
'fetchOrder': True,
'createMarketOrder': False,
'fetchDeposits': True,
'fetchWithdrawals': True,
'fetchTransactions': False,
},
'timeframes': {
'1m': '1min',
'5m': '5min',
'15m': '15min',
'30m': '30min',
'1h': '60min',
'4h': '4hour',
'1d': '1day',
'5d': '5day',
'1w': '1week',
'1M': '1mon',
},
'hostname': 'apiv2.bitz.com',
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/35862606-4f554f14-0b5d-11e8-957d-35058c504b6f.jpg',
'api': {
'market': 'https://{hostname}',
'trade': 'https://{hostname}',
'assets': 'https://{hostname}',
},
'www': 'https://www.bitz.com',
'doc': 'https://apidoc.bitz.com/en/',
'fees': 'https://www.bitz.com/fee?type=1',
'referral': 'https://u.bitz.com/register?invite_code=1429193',
},
'api': {
'market': {
'get': [
'ticker',
'depth',
'order', # trades
'tickerall',
'kline',
'symbolList',
'currencyRate',
'currencyCoinRate',
'coinRate',
],
},
'trade': {
'post': [
'addEntrustSheet',
'cancelEntrustSheet',
'cancelAllEntrustSheet',
'getUserHistoryEntrustSheet', # closed orders
'getUserNowEntrustSheet', # open orders
'getEntrustSheetInfo', # order
'depositOrWithdraw', # transactions
],
},
'assets': {
'post': [
'getUserAssets',
],
},
},
'fees': {
'trading': {
'maker': 0.002,
'taker': 0.002,
},
'funding': {
'withdraw': {
'BTC': '0.5%',
'DKKT': '0.5%',
'ETH': 0.01,
'USDT': '0.5%',
'LTC': '0.5%',
'FCT': '0.5%',
'LSK': '0.5%',
'HXI': '0.8%',
'ZEC': '0.5%',
'DOGE': '0.5%',
'MZC': '0.5%',
'ETC': '0.5%',
'GXS': '0.5%',
'XPM': '0.5%',
'PPC': '0.5%',
'BLK': '0.5%',
'XAS': '0.5%',
'HSR': '0.5%',
'NULS': 5.0,
'VOISE': 350.0,
'PAY': 1.5,
'EOS': 0.6,
'YBCT': 35.0,
'OMG': 0.3,
'OTN': 0.4,
'BTX': '0.5%',
'QTUM': '0.5%',
'DASH': '0.5%',
'GAME': '0.5%',
'BCH': '0.5%',
'GNT': 9.0,
'SSS': 1500.0,
'ARK': '0.5%',
'PART': '0.5%',
'LEO': '0.5%',
'DGB': '0.5%',
'ZSC': 130.0,
'VIU': 350.0,
'BTG': '0.5%',
'ARN': 10.0,
'VTC': '0.5%',
'BCD': '0.5%',
'TRX': 200.0,
'HWC': '0.5%',
'UNIT': '0.5%',
'OXY': '0.5%',
'MCO': 0.3500,
'SBTC': '0.5%',
'BCX': '0.5%',
'ETF': '0.5%',
'PYLNT': 0.4000,
'XRB': '0.5%',
'ETP': '0.5%',
},
},
},
'precision': {
'amount': 8,
'price': 8,
},
'options': {
'fetchOHLCVVolume': True,
'fetchOHLCVWarning': True,
'lastNonceTimestamp': 0,
},
'commonCurrencies': {
# https://github.com/ccxt/ccxt/issues/3881
# https://support.bit-z.pro/hc/en-us/articles/360007500654-BOX-BOX-Token-
'BOX': 'BOX Token',
'LEO': 'LeoCoin',
'XRB': 'NANO',
'PXC': 'Pixiecoin',
'VTC': 'VoteCoin',
'TTC': 'TimesChain',
},
'exceptions': {
# '200': Success
'-102': ExchangeError, # Invalid parameter
'-103': AuthenticationError, # Verification failed
'-104': ExchangeNotAvailable, # Network Error-1
'-105': AuthenticationError, # Invalid api signature
'-106': ExchangeNotAvailable, # Network Error-2
'-109': AuthenticationError, # Invalid scretKey
'-110': DDoSProtection, # The number of access requests exceeded
'-111': PermissionDenied, # Current IP is not in the range of trusted IP
'-112': OnMaintenance, # Service is under maintenance
'-114': RateLimitExceeded, # The number of daily requests has reached the limit
'-117': AuthenticationError, # The apikey expires
'-100015': AuthenticationError, # Trade password error
'-100044': ExchangeError, # Fail to request data
'-100101': ExchangeError, # Invalid symbol
'-100201': ExchangeError, # Invalid symbol
'-100301': ExchangeError, # Invalid symbol
'-100401': ExchangeError, # Invalid symbol
'-100302': ExchangeError, # Type of K-line error
'-100303': ExchangeError, # Size of K-line error
'-200003': AuthenticationError, # Please set trade password
'-200005': PermissionDenied, # This account can not trade
'-200025': ExchangeNotAvailable, # Temporary trading halt
'-200027': InvalidOrder, # Price Error
'-200028': InvalidOrder, # Amount must be greater than 0
'-200029': InvalidOrder, # Number must be between %s and %d
'-200030': InvalidOrder, # Over price range
'-200031': InsufficientFunds, # Insufficient assets
'-200032': ExchangeError, # System error. Please contact customer service
'-200033': ExchangeError, # Fail to trade
'-200034': OrderNotFound, # The order does not exist
'-200035': OrderNotFound, # Cancellation error, order filled
'-200037': InvalidOrder, # Trade direction error
'-200038': ExchangeError, # Trading Market Error
'-200055': OrderNotFound, # Order record does not exist
'-300069': AuthenticationError, # api_key is illegal
'-300101': ExchangeError, # Transaction type error
'-300102': InvalidOrder, # Price or number cannot be less than 0
'-300103': AuthenticationError, # Trade password error
'-301001': ExchangeNotAvailable, # Network Error-3
},
})
def fetch_markets(self, params={}):
response = self.marketGetSymbolList(params)
#
# { status: 200,
# msg: "",
# data: { ltc_btc: { id: "1",
# name: "ltc_btc",
# coinFrom: "ltc",
# coinTo: "btc",
# numberFloat: "4",
# priceFloat: "8",
# status: "1",
# minTrade: "0.010",
# maxTrade: "500000000.000"},
# qtum_usdt: { id: "196",
# name: "qtum_usdt",
# coinFrom: "qtum",
# coinTo: "usdt",
# numberFloat: "4",
# priceFloat: "2",
# status: "1",
# minTrade: "0.100",
# maxTrade: "500000000.000"}, },
# time: 1535969146,
# microtime: "0.66955600 1535969146",
# source: "api" }
#
markets = self.safe_value(response, 'data')
ids = list(markets.keys())
result = []
for i in range(0, len(ids)):
id = ids[i]
market = markets[id]
numericId = self.safe_string(market, 'id')
baseId = self.safe_string(market, 'coinFrom')
quoteId = self.safe_string(market, 'coinTo')
base = baseId.upper()
quote = quoteId.upper()
base = self.safe_currency_code(base)
quote = self.safe_currency_code(quote)
symbol = base + '/' + quote
precision = {
'amount': self.safe_integer(market, 'numberFloat'),
'price': self.safe_integer(market, 'priceFloat'),
}
result.append({
'info': market,
'id': id,
'numericId': numericId,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': True,
'precision': precision,
'limits': {
'amount': {
'min': self.safe_float(market, 'minTrade'),
'max': self.safe_float(market, 'maxTrade'),
},
'price': {
'min': math.pow(10, -precision['price']),
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
})
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.assetsPostGetUserAssets(params)
#
# {
# status: 200,
# msg: "",
# data: {
# cny: 0,
# usd: 0,
# btc_total: 0,
# info: [{
# "name": "zpr",
# "num": "37.49067275",
# "over": "37.49067275",
# "lock": "0.00000000",
# "btc": "0.00000000",
# "usd": "0.00000000",
# "cny": "0.00000000",
# }],
# },
# time: 1535983966,
# microtime: "0.70400500 1535983966",
# source: "api",
# }
#
balances = self.safe_value(response['data'], 'info')
result = {'info': response}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'name')
code = self.safe_currency_code(currencyId)
account = self.account()
account['used'] = self.safe_float(balance, 'lock')
account['total'] = self.safe_float(balance, 'num')
account['free'] = self.safe_float(balance, 'over')
result[code] = account
return self.parse_balance(result)
def parse_ticker(self, ticker, market=None):
#
# { symbol: "eth_btc",
# quoteVolume: "3905.72",
# volume: "97058.21",
# priceChange: "-1.72",
# priceChange24h: "-1.65",
# askPrice: "0.03971272",
# askQty: "0.0663",
# bidPrice: "0.03961469",
# bidQty: "19.5451",
# open: "0.04036769",
# high: "0.04062988",
# low: "0.03956123",
# now: "0.03970100",
# firstId: 115567767,
# lastId: 115795316,
# dealCount: 14078,
# numberPrecision: 4,
# pricePrecision: 8,
# cny: "1959.05",
# usd: "287.10",
# krw: "318655.82" }
#
timestamp = None
symbol = None
if market is None:
marketId = self.safe_string(ticker, 'symbol')
market = self.safe_value(self.markets_by_id, marketId)
if market is not None:
symbol = market['symbol']
last = self.safe_float(ticker, 'now')
open = self.safe_float(ticker, 'open')
change = None
average = None
if last is not None and open is not None:
change = last - open
average = self.sum(last, open) / 2
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'bidPrice'),
'bidVolume': self.safe_float(ticker, 'bidQty'),
'ask': self.safe_float(ticker, 'askPrice'),
'askVolume': self.safe_float(ticker, 'askQty'),
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': self.safe_float(ticker, 'priceChange24h'),
'average': average,
'baseVolume': self.safe_float(ticker, 'volume'),
'quoteVolume': self.safe_float(ticker, 'quoteVolume'),
'info': ticker,
}
def parse_microtime(self, microtime):
if microtime is None:
return microtime
parts = microtime.split(' ')
milliseconds = float(parts[0])
seconds = int(parts[1])
total = self.sum(seconds, milliseconds)
return int(total * 1000)
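    # Example: parse_microtime("0.76341900 1535970397") -> 1535970397763, i.e. the exchange's
    # "fractional-seconds seconds" string converted to a Unix timestamp in milliseconds (truncated).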
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.marketGetTicker(self.extend(request, params))
#
# { status: 200,
# msg: "",
# data: { symbol: "eth_btc",
# quoteVolume: "3905.72",
# volume: "97058.21",
# priceChange: "-1.72",
# priceChange24h: "-1.65",
# askPrice: "0.03971272",
# askQty: "0.0663",
# bidPrice: "0.03961469",
# bidQty: "19.5451",
# open: "0.04036769",
# high: "0.04062988",
# low: "0.03956123",
# now: "0.03970100",
# firstId: 115567767,
# lastId: 115795316,
# dealCount: 14078,
# numberPrecision: 4,
# pricePrecision: 8,
# cny: "1959.05",
# usd: "287.10",
# krw: "318655.82" },
# time: 1535970397,
# microtime: "0.76341900 1535970397",
# source: "api" }
#
ticker = self.parse_ticker(response['data'], market)
timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
return self.extend(ticker, {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
})
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
request = {}
if symbols is not None:
ids = self.market_ids(symbols)
request['symbols'] = ','.join(ids)
response = self.marketGetTickerall(self.extend(request, params))
#
# { status: 200,
# msg: "",
# data: { ela_btc: { symbol: "ela_btc",
# quoteVolume: "0.00",
# volume: "3.28",
# priceChange: "0.00",
# priceChange24h: "0.00",
# askPrice: "0.00147984",
# askQty: "5.4580",
# bidPrice: "0.00120230",
# bidQty: "12.5384",
# open: "0.00149078",
# high: "0.00149078",
# low: "0.00149078",
# now: "0.00149078",
# firstId: 115581219,
# lastId: 115581219,
# dealCount: 1,
# numberPrecision: 4,
# pricePrecision: 8,
# cny: "73.66",
# usd: "10.79",
# krw: "11995.03" } },
# time: 1535971578,
# microtime: "0.39854200 1535971578",
# source: "api" }
#
tickers = self.safe_value(response, 'data')
timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
result = {}
ids = list(tickers.keys())
for i in range(0, len(ids)):
id = ids[i]
ticker = tickers[id]
market = None
if id in self.markets_by_id:
market = self.markets_by_id[id]
ticker = self.parse_ticker(tickers[id], market)
symbol = ticker['symbol']
if symbol is None:
if market is not None:
symbol = market['symbol']
else:
baseId, quoteId = id.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
if symbol is not None:
result[symbol] = self.extend(ticker, {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
})
return result
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
request = {
'symbol': self.market_id(symbol),
}
response = self.marketGetDepth(self.extend(request, params))
#
# { status: 200,
# msg: "",
# data: { asks: [["10.00000000", "0.4426", "4.4260"],
# ["1.00000000", "0.8339", "0.8339"],
# ["0.91700000", "0.0500", "0.0458"],
# ["0.20000000", "0.1000", "0.0200"],
# ["0.03987120", "16.1262", "0.6429"],
# ["0.03986120", "9.7523", "0.3887"] ],
# bids: [["0.03976145", "0.0359", "0.0014"],
# ["0.03973401", "20.9493", "0.8323"],
# ["0.03967970", "0.0328", "0.0013"],
# ["0.00000002", "10000.0000", "0.0002"],
# ["0.00000001", "231840.7500", "0.0023"]],
# coinPair: "eth_btc" },
# time: 1535974778,
# microtime: "0.04017400 1535974778",
# source: "api" }
#
orderbook = self.safe_value(response, 'data')
timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
return self.parse_order_book(orderbook, timestamp)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {id: 115807453,
# t: "19:36:24",
# T: 1535974584,
# p: "0.03983296",
# n: "0.1000",
# s: "buy" },
#
id = self.safe_string(trade, 'id')
timestamp = self.safe_timestamp(trade, 'T')
symbol = None
if market is not None:
symbol = market['symbol']
price = self.safe_float(trade, 'p')
amount = self.safe_float(trade, 'n')
cost = None
if price is not None:
if amount is not None:
cost = self.price_to_precision(symbol, amount * price)
side = self.safe_string(trade, 's')
return {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': id,
'order': None,
'type': 'limit',
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
'info': trade,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.marketGetOrder(self.extend(request, params))
#
# { status: 200,
# msg: "",
# data: [{id: 115807453,
# t: "19:36:24",
# T: 1535974584,
# p: "0.03983296",
# n: "0.1000",
# s: "buy" },
# {id: 115806811,
# t: "19:33:19",
# T: 1535974399,
# p: "0.03981135",
# n: "9.4612",
# s: "sell" } ],
# time: 1535974583,
# microtime: "0.57118100 1535974583",
# source: "api" }
#
return self.parse_trades(response['data'], market, since, limit)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
#
# {
# time: "1535973420000",
# open: "0.03975084",
# high: "0.03975084",
# low: "0.03967700",
# close: "0.03967700",
# volume: "12.4733",
# datetime: "2018-09-03 19:17:00"
# }
#
return [
self.safe_integer(ohlcv, 'time'),
self.safe_float(ohlcv, 'open'),
self.safe_float(ohlcv, 'high'),
self.safe_float(ohlcv, 'low'),
self.safe_float(ohlcv, 'close'),
self.safe_float(ohlcv, 'volume'),
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
duration = self.parse_timeframe(timeframe) * 1000
market = self.market(symbol)
request = {
'symbol': market['id'],
'resolution': self.timeframes[timeframe],
}
if limit is not None:
request['size'] = min(limit, 300) # 1-300
if since is not None:
request['to'] = self.sum(since, limit * duration * 1000)
else:
if since is not None:
raise ArgumentsRequired(self.id + ' fetchOHLCV requires a limit argument if the since argument is specified')
response = self.marketGetKline(self.extend(request, params))
#
# {
# status: 200,
# msg: "",
# data: {
# bars: [
# {time: "1535973420000", open: "0.03975084", high: "0.03975084", low: "0.03967700", close: "0.03967700", volume: "12.4733", datetime: "2018-09-03 19:17:00"},
# {time: "1535955480000", open: "0.04009900", high: "0.04016745", low: "0.04009900", close: "0.04012074", volume: "74.4803", datetime: "2018-09-03 14:18:00"},
# ],
# resolution: "1min",
# symbol: "eth_btc",
# from: "1535973420000",
# to: "1535955480000",
# size: 300
# },
# time: 1535973435,
# microtime: "0.56462100 1535973435",
# source: "api"
# }
#
data = self.safe_value(response, 'data', {})
bars = self.safe_value(data, 'bars', [])
return self.parse_ohlcvs(bars, market, timeframe, since, limit)
def parse_order_status(self, status):
statuses = {
'0': 'open',
'1': 'open', # partially filled
'2': 'closed', # filled
'3': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# "id": "693248739", # order id
# "uId": "2074056", # uid
# "price": "100", # price
# "number": "10", # number
# "numberOver": "10", # undealed
# "flag": "sale", # flag
# "status": "0", # unfilled
# "coinFrom": "vtc",
# "coinTo": "dkkt",
# "numberDeal": "0" # dealed
# }
#
id = self.safe_string(order, 'id')
symbol = None
if market is None:
baseId = self.safe_string(order, 'coinFrom')
quoteId = self.safe_string(order, 'coinTo')
if (baseId is not None) and (quoteId is not None):
marketId = baseId + '_' + quoteId
if marketId in self.markets_by_id:
market = self.safe_value(self.markets_by_id, marketId)
else:
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
if market is not None:
symbol = market['symbol']
side = self.safe_string(order, 'flag')
if side is not None:
side = 'sell' if (side == 'sale') else 'buy'
price = self.safe_float(order, 'price')
amount = self.safe_float(order, 'number')
remaining = self.safe_float(order, 'numberOver')
filled = self.safe_float(order, 'numberDeal')
timestamp = self.safe_integer(order, 'timestamp')
if timestamp is None:
timestamp = self.safe_timestamp(order, 'created')
cost = self.safe_float(order, 'orderTotalPrice')
if price is not None:
if filled is not None:
cost = filled * price
status = self.parse_order_status(self.safe_string(order, 'status'))
return {
'id': id,
'clientOrderId': None,
'datetime': self.iso8601(timestamp),
'timestamp': timestamp,
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': 'limit',
'side': side,
'price': price,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'trades': None,
'fee': None,
'info': order,
'average': None,
}
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
if type != 'limit':
raise ExchangeError(self.id + ' createOrder allows limit orders only')
market = self.market(symbol)
orderType = '1' if (side == 'buy') else '2'
if not self.password:
raise ExchangeError(self.id + ' createOrder() requires you to set exchange.password = "YOUR_TRADING_PASSWORD"(a trade password is NOT THE SAME as your login password)')
request = {
'symbol': market['id'],
'type': orderType,
'price': self.price_to_precision(symbol, price),
'number': self.amount_to_precision(symbol, amount),
'tradePwd': self.password,
}
response = self.tradePostAddEntrustSheet(self.extend(request, params))
#
# {
# "status": 200,
# "msg": "",
# "data": {
# "id": "693248739", # order id
# "uId": "2074056", # uid
# "price": "100", # price
# "number": "10", # number
# "numberOver": "10", # undealed
# "flag": "sale", # flag
# "status": "0", # unfilled
# "coinFrom": "vtc",
# "coinTo": "dkkt",
# "numberDeal": "0" # dealed
# },
# "time": "1533035297",
# "microtime": "0.41892000 1533035297",
# "source": "api",
# }
#
timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
order = self.extend({
'timestamp': timestamp,
}, response['data'])
return self.parse_order(order, market)
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
request = {
'entrustSheetId': id,
}
response = self.tradePostCancelEntrustSheet(self.extend(request, params))
#
# {
# "status":200,
# "msg":"",
# "data":{
# "updateAssetsData":{
# "coin":"bz",
# "over":"1000.00000000",
# "lock":"-1000.00000000"
# },
# "assetsInfo":{
# "coin":"bz",
# "over":"9999.99999999",
# "lock":"9999.99999999"
# }
# },
# "time":"1535464383",
# "microtime":"0.91558000 1535464383",
# "source":"api"
# }
#
return response
def cancel_orders(self, ids, symbol=None, params={}):
self.load_markets()
request = {
'ids': ','.join(ids),
}
response = self.tradePostCancelEntrustSheet(self.extend(request, params))
#
# {
# "status":200,
# "msg":"",
# "data":{
# "744173808":{
# "updateAssetsData":{
# "coin":"bz",
# "over":"100.00000000",
# "lock":"-100.00000000"
# },
# "assetsInfo":{
# "coin":"bz",
# "over":"899.99999999",
# "lock":"19099.99999999"
# }
# },
# "744173809":{
# "updateAssetsData":{
# "coin":"bz",
# "over":"100.00000000",
# "lock":"-100.00000000"
# },
# "assetsInfo":{
# "coin":"bz",
# "over":"999.99999999",
# "lock":"18999.99999999"
# }
# }
# },
# "time":"1535525649",
# "microtime":"0.05009400 1535525649",
# "source":"api"
# }
#
return response
def fetch_order(self, id, symbol=None, params={}):
self.load_markets()
request = {
'entrustSheetId': id,
}
response = self.tradePostGetEntrustSheetInfo(self.extend(request, params))
#
# {
# "status":200,
# "msg":"",
# "data":{
# "id":"708279852",
# "uId":"2074056",
# "price":"100.00000000",
# "number":"10.0000",
# "total":"0.00000000",
# "numberOver":"10.0000",
# "numberDeal":"0.0000",
# "flag":"sale",
# "status":"0", #0:unfilled, 1:partial deal, 2:all transactions, 3:already cancelled
# "coinFrom":"bz",
# "coinTo":"usdt",
# "orderTotalPrice":"0",
# "created":"1533279876"
# },
# "time":"1533280294",
# "microtime":"0.36859200 1533280294",
# "source":"api"
# }
#
return self.parse_order(response['data'])
def fetch_orders_with_method(self, method, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOpenOrders requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'coinFrom': market['baseId'],
'coinTo': market['quoteId'],
# 'type': 1, # optional integer, 1 = buy, 2 = sell
# 'page': 1, # optional integer
# 'pageSize': 100, # optional integer, max 100
# 'startTime': 1510235730, # optional integer timestamp in seconds
# 'endTime': 1510235730, # optional integer timestamp in seconds
}
if limit is not None:
request['page'] = 1
request['pageSize'] = limit
if since is not None:
request['startTime'] = int(since / 1000)
# request['endTime'] = int(since / 1000)
response = getattr(self, method)(self.extend(request, params))
#
# {
# "status": 200,
# "msg": "",
# "data": {
# "data": [
# {
# "id": "693248739",
# "uid": "2074056",
# "price": "100.00000000",
# "number": "10.0000",
# "total": "0.00000000",
# "numberOver": "0.0000",
# "numberDeal": "0.0000",
# "flag": "sale",
# "status": "3", # 0:unfilled, 1:partial deal, 2:all transactions, 3:already cancelled
# "isNew": "N",
# "coinFrom": "vtc",
# "coinTo": "dkkt",
# "created": "1533035300",
# },
# {
# "id": "723086996",
# "uid": "2074056",
# "price": "100.00000000",
# "number": "10.0000",
# "total": "0.00000000",
# "numberOver": "0.0000",
# "numberDeal": "0.0000",
# "flag": "sale",
# "status": "3",
# "isNew": "N",
# "coinFrom": "bz",
# "coinTo": "usdt",
# "created": "1533523568",
# },
# ],
# "pageInfo": {
# "limit": "10",
# "offest": "0",
# "current_page": "1",
# "page_size": "10",
# "total_count": "17",
# "page_count": "2",
# }
# },
# "time": "1533279329",
# "microtime": "0.15305300 1533279329",
# "source": "api"
# }
#
orders = self.safe_value(response['data'], 'data', [])
return self.parse_orders(orders, None, since, limit)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
return self.fetch_orders_with_method('tradePostGetUserHistoryEntrustSheet', symbol, since, limit, params)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
return self.fetch_orders_with_method('tradePostGetUserNowEntrustSheet', symbol, since, limit, params)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
return self.fetch_orders_with_method('tradePostGetUserHistoryEntrustSheet', symbol, since, limit, params)
def parse_transaction_status(self, status):
statuses = {
'1': 'pending',
'2': 'pending',
'3': 'pending',
'4': 'ok',
'5': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# {
# "id": '96275',
# "uid": '2109073',
# "wallet": '0xf4c4141c0127bc37b1d0c409a091920eba13ada7',
# "txid": '0xb7adfa52aa566f9ac112e3c01f77bd91179b19eab12092a9a5a8b33d5086e31d',
# "confirm": '12',
# "number": '0.50000000',
# "status": 4,
# "updated": '1534944168605',
# "addressUrl": 'https://etherscan.io/address/',
# "txidUrl": 'https://etherscan.io/tx/',
# "description": 'Ethereum',
# "coin": 'eth',
# "memo": ''
# }
#
# {
# "id":"397574",
# "uid":"2033056",
# "wallet":"1AG1gZvQAYu3WBvgg7p4BMMghQD2gE693k",
# "txid":"",
# "confirm":"0",
# "number":"1000.00000000",
# "status":1,
# "updated":"0",
# "addressUrl":"http://omniexplorer.info/lookupadd.aspx?address=",
# "txidUrl":"http://omniexplorer.info/lookuptx.aspx?txid=",
# "description":"Tether",
# "coin":"usdt",
# "memo":""
# }
#
# {
# "id":"153606",
# "uid":"2033056",
# "wallet":"1AG1gZvQAYu3WBvgg7p4BMMghQD2gE693k",
# "txid":"aa2b179f84cd6dedafd41845e0fbf7f01e14c0d71ea3140d03d6f5a9ccd93199",
# "confirm":"0",
# "number":"761.11110000",
# "status":4,
# "updated":"1536726133579",
# "addressUrl":"http://omniexplorer.info/lookupadd.aspx?address=",
# "txidUrl":"http://omniexplorer.info/lookuptx.aspx?txid=",
# "description":"Tether",
# "coin":"usdt",
# "memo":""
# }
#
timestamp = self.safe_integer(transaction, 'updated')
if timestamp == 0:
timestamp = None
currencyId = self.safe_string(transaction, 'coin')
code = self.safe_currency_code(currencyId, currency)
type = self.safe_string_lower(transaction, 'type')
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
return {
'id': self.safe_string(transaction, 'id'),
'txid': self.safe_string(transaction, 'txid'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': self.safe_string(transaction, 'wallet'),
'tag': self.safe_string(transaction, 'memo'),
'type': type,
'amount': self.safe_float(transaction, 'number'),
'currency': code,
'status': status,
'updated': timestamp,
'fee': None,
'info': transaction,
}
def parse_transactions_by_type(self, type, transactions, code=None, since=None, limit=None):
result = []
for i in range(0, len(transactions)):
transaction = self.parse_transaction(self.extend({
'type': type,
}, transactions[i]))
result.append(transaction)
return self.filter_by_currency_since_limit(result, code, since, limit)
def parse_transaction_type(self, type):
types = {
'deposit': 1,
'withdrawal': 2,
}
return self.safe_integer(types, type, type)
def fetch_deposits(self, code=None, since=None, limit=None, params={}):
return self.fetch_transactions_for_type('deposit', code, since, limit, params)
def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
return self.fetch_transactions_for_type('withdrawal', code, since, limit, params)
def fetch_transactions_for_type(self, type, code=None, since=None, limit=None, params={}):
if code is None:
raise ArgumentsRequired(self.id + ' fetchTransactions() requires a currency `code` argument')
self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
'type': self.parse_transaction_type(type),
}
if since is not None:
            request['startTime'] = int(since / 1000)
if limit is not None:
request['page'] = 1
request['pageSize'] = limit
response = self.tradePostDepositOrWithdraw(self.extend(request, params))
transactions = self.safe_value(response['data'], 'data', [])
return self.parse_transactions_by_type(type, transactions, code, since, limit)
def nonce(self):
currentTimestamp = self.seconds()
if currentTimestamp > self.options['lastNonceTimestamp']:
self.options['lastNonceTimestamp'] = currentTimestamp
self.options['lastNonce'] = 100000
self.options['lastNonce'] = self.sum(self.options['lastNonce'], 1)
return self.options['lastNonce']
def sign(self, path, api='market', method='GET', params={}, headers=None, body=None):
baseUrl = self.implode_params(self.urls['api'][api], {'hostname': self.hostname})
url = baseUrl + '/' + self.capitalize(api) + '/' + path
query = None
if api == 'market':
query = self.urlencode(params)
if len(query):
url += '?' + query
else:
self.check_required_credentials()
body = self.rawencode(self.keysort(self.extend({
'apiKey': self.apiKey,
'timeStamp': self.seconds(),
'nonce': self.nonce(),
}, params)))
body += '&sign=' + self.hash(self.encode(body + self.secret))
headers = {'Content-type': 'application/x-www-form-urlencoded'}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to default error handler
status = self.safe_string(response, 'status')
if status is not None:
feedback = self.id + ' ' + body
#
# {"status":-107,"msg":"","data":"","time":1535968848,"microtime":"0.89092200 1535968848","source":"api"}
#
if status == '200':
#
# {"status":200,"msg":"","data":-200031,"time":1535999806,"microtime":"0.85476800 1535999806","source":"api"}
#
code = self.safe_integer(response, 'data')
if code is not None:
self.throw_exactly_matched_exception(self.exceptions, code, feedback)
raise ExchangeError(feedback)
else:
return # no error
self.throw_exactly_matched_exception(self.exceptions, status, feedback)
raise ExchangeError(feedback)
|
""" linter and formatter of notebooks
"""
# Copyright (c) 2020 ipyradiant contributors.
# Distributed under the terms of the Modified BSD License.
import json
import shutil
import subprocess
import sys
from hashlib import sha256
from pathlib import Path
import black
import isort
import nbformat
from . import project as P
NODE = [shutil.which("node") or shutil.which("node.exe") or shutil.which("node.cmd")]
NB_METADATA_KEYS = ["kernelspec", "language_info"]
def blacken(source):
"""apply black to a source string"""
return black.format_str(source, mode=black.FileMode(line_length=88))
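# Illustrative example: blacken("x=1;y=2") returns "x = 1\ny = 2\n"
# (black normalises spacing and splits semicolon-separated statements onto separate lines).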
def nblint_one(nb_node):
"""format/lint one notebook"""
changes = 0
has_empty = 0
nb_metadata_keys = list(nb_node.metadata.keys())
for key in nb_metadata_keys:
if key not in NB_METADATA_KEYS:
nb_node.metadata.pop(key)
for cell in nb_node.cells:
cell_type = cell["cell_type"]
source = "".join(cell["source"])
if not source.strip():
has_empty += 1
if cell_type == "markdown":
args = [
*P.PRETTIER,
"--stdin-filepath",
"foo.md",
"--prose-wrap",
"always",
]
prettier = subprocess.Popen(
list(map(str, args)),
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
)
out, _err = prettier.communicate(source.encode("utf-8"))
new = out.decode("utf-8").rstrip()
if new != source:
cell["source"] = new.splitlines(True)
changes += 1
elif cell_type == "code":
if cell["outputs"] or cell["execution_count"]:
cell["outputs"] = []
cell["execution_count"] = None
changes += 1
if [line for line in source.splitlines() if line.strip().startswith("!")]:
continue
if source.startswith("%"):
continue
new = isort.SortImports(file_contents=source).output
new = blacken(new).rstrip()
if new != source:
cell["source"] = new.splitlines(True)
changes += 1
if has_empty:
changes += 1
nb_node.cells = [
cell for cell in nb_node.cells if "".join(cell["source"]).strip()
]
return nb_node
def nb_hash(nb_text):
"""hash one notebook"""
return sha256(nb_text.encode("utf-8")).hexdigest()
def nblint(nb_paths):
"""lint a number of notebook paths"""
nb_hashes = {}
if P.NBLINT_HASHES.exists():
nb_hashes = json.loads(P.NBLINT_HASHES.read_text())
len_paths = len(nb_paths)
for i, nb_path in enumerate(nb_paths):
hash_key = f"{nb_path}"
log_hash = nb_hashes.get(hash_key)
nb_text = nb_path.read_text()
pre_hash = nb_hash(nb_text)
print(f"[{i + 1} of {len_paths}] {nb_path}")
if log_hash == pre_hash:
continue
nb_node = nblint_one(nbformat.reads(nb_text, 4))
with nb_path.open("w") as fpt:
nbformat.write(nb_node, fpt)
post_hash = nb_hash(nb_path.read_text())
if post_hash != pre_hash:
print("\tformatted")
else:
print("\tno change")
nb_hashes[hash_key] = post_hash
P.NBLINT_HASHES.parent.mkdir(exist_ok=True, parents=True)
P.NBLINT_HASHES.write_text(json.dumps(nb_hashes, indent=2, sort_keys=True))
return 0
if __name__ == "__main__":
sys.exit(nblint([Path(p) for p in sys.argv[1:]] or P.EXAMPLE_IPYNB))
|
#!/usr/bin/env python
from CraftProtocol.Protocol.Packet.BasePacket import BasePacket
from CraftProtocol.Protocol.Packet.PacketDirection import PacketDirection
from CraftProtocol.StreamIO import StreamIO
class KeepAliveServerPacket(BasePacket):
PACKET_ID = 0x0B
PACKET_DIRECTION = PacketDirection.SERVERBOUND
def __init__(self, keepalive_id):
BasePacket.__init__(self)
self._id = long(keepalive_id)
def get_id(self):
return self._id
def set_id(self, keepalive_id):
self._id = long(keepalive_id)
@staticmethod
def write(stream, packet):
StreamIO.write_long(stream, packet.get_id())
@staticmethod
def read(stream, packet_size):
keepalive_id = StreamIO.read_long(stream)
return KeepAliveServerPacket(keepalive_id)
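# Illustrative round trip (Python 2 semantics, since the id is coerced with the built-in long();
# `stream` stands for any binary stream accepted by StreamIO and the size value is a placeholder):
# packet = KeepAliveServerPacket(12345)
# KeepAliveServerPacket.write(stream, packet)
# KeepAliveServerPacket.read(stream, packet_size=8).get_id()  # -> 12345L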
|
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .abstract_command_descriptor import AbstractCommandDescriptor
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ClassifyCommandDescriptor(AbstractCommandDescriptor):
"""
Command descriptor for querylanguage CLASSIFY command.
"""
def __init__(self, **kwargs):
"""
Initializes a new ClassifyCommandDescriptor object with values from keyword arguments. The default value of the :py:attr:`~oci.log_analytics.models.ClassifyCommandDescriptor.name` attribute
of this class is ``CLASSIFY`` and it should not be changed.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param name:
The value to assign to the name property of this ClassifyCommandDescriptor.
Allowed values for this property are: "COMMAND", "SEARCH", "STATS", "GEO_STATS", "TIME_STATS", "SORT", "FIELDS", "ADD_FIELDS", "LINK", "LINK_DETAILS", "CLUSTER", "CLUSTER_DETAILS", "CLUSTER_SPLIT", "EVAL", "EXTRACT", "JSON_EXTRACT", "XML_EXTRACT", "EVENT_STATS", "BUCKET", "CLASSIFY", "TOP", "BOTTOM", "HEAD", "TAIL", "FIELD_SUMMARY", "REGEX", "RENAME", "TIME_COMPARE", "WHERE", "CLUSTER_COMPARE", "DELETE", "DELTA", "DISTINCT", "SEARCH_LOOKUP", "LOOKUP", "DEMO_MODE", "MACRO", "MULTI_SEARCH", "HIGHLIGHT", "HIGHLIGHT_ROWS", "HIGHLIGHT_GROUPS", "CREATE_VIEW", "MAP", "NLP", "COMPARE"
:type name: str
:param display_query_string:
The value to assign to the display_query_string property of this ClassifyCommandDescriptor.
:type display_query_string: str
:param internal_query_string:
The value to assign to the internal_query_string property of this ClassifyCommandDescriptor.
:type internal_query_string: str
:param category:
The value to assign to the category property of this ClassifyCommandDescriptor.
:type category: str
:param referenced_fields:
The value to assign to the referenced_fields property of this ClassifyCommandDescriptor.
:type referenced_fields: list[oci.log_analytics.models.AbstractField]
:param declared_fields:
The value to assign to the declared_fields property of this ClassifyCommandDescriptor.
:type declared_fields: list[oci.log_analytics.models.AbstractField]
:param top_count:
The value to assign to the top_count property of this ClassifyCommandDescriptor.
:type top_count: int
:param bottom_count:
The value to assign to the bottom_count property of this ClassifyCommandDescriptor.
:type bottom_count: int
:param correlate:
The value to assign to the correlate property of this ClassifyCommandDescriptor.
:type correlate: list[oci.log_analytics.models.FieldsAddRemoveField]
"""
self.swagger_types = {
'name': 'str',
'display_query_string': 'str',
'internal_query_string': 'str',
'category': 'str',
'referenced_fields': 'list[AbstractField]',
'declared_fields': 'list[AbstractField]',
'top_count': 'int',
'bottom_count': 'int',
'correlate': 'list[FieldsAddRemoveField]'
}
self.attribute_map = {
'name': 'name',
'display_query_string': 'displayQueryString',
'internal_query_string': 'internalQueryString',
'category': 'category',
'referenced_fields': 'referencedFields',
'declared_fields': 'declaredFields',
'top_count': 'topCount',
'bottom_count': 'bottomCount',
'correlate': 'correlate'
}
self._name = None
self._display_query_string = None
self._internal_query_string = None
self._category = None
self._referenced_fields = None
self._declared_fields = None
self._top_count = None
self._bottom_count = None
self._correlate = None
self._name = 'CLASSIFY'
@property
def top_count(self):
"""
Gets the top_count of this ClassifyCommandDescriptor.
Value specified in CLASSIFY command in queryString if set limits the results returned to top N.
:return: The top_count of this ClassifyCommandDescriptor.
:rtype: int
"""
return self._top_count
@top_count.setter
def top_count(self, top_count):
"""
Sets the top_count of this ClassifyCommandDescriptor.
Value specified in CLASSIFY command in queryString if set limits the results returned to top N.
:param top_count: The top_count of this ClassifyCommandDescriptor.
:type: int
"""
self._top_count = top_count
@property
def bottom_count(self):
"""
Gets the bottom_count of this ClassifyCommandDescriptor.
Value specified in CLASSIFY command in queryString if set limits the results returned to bottom N.
:return: The bottom_count of this ClassifyCommandDescriptor.
:rtype: int
"""
return self._bottom_count
@bottom_count.setter
def bottom_count(self, bottom_count):
"""
Sets the bottom_count of this ClassifyCommandDescriptor.
Value specified in CLASSIFY command in queryString if set limits the results returned to bottom N.
:param bottom_count: The bottom_count of this ClassifyCommandDescriptor.
:type: int
"""
self._bottom_count = bottom_count
@property
def correlate(self):
"""
Gets the correlate of this ClassifyCommandDescriptor.
Fields specified in CLASSIFY command in queryString if set include / exclude fields in correlate results.
:return: The correlate of this ClassifyCommandDescriptor.
:rtype: list[oci.log_analytics.models.FieldsAddRemoveField]
"""
return self._correlate
@correlate.setter
def correlate(self, correlate):
"""
Sets the correlate of this ClassifyCommandDescriptor.
Fields specified in CLASSIFY command in queryString if set include / exclude fields in correlate results.
:param correlate: The correlate of this ClassifyCommandDescriptor.
:type: list[oci.log_analytics.models.FieldsAddRemoveField]
"""
self._correlate = correlate
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
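# Illustrative construction (values are placeholders; keyword arguments are applied through the
# property setters, most of which are inherited from AbstractCommandDescriptor):
# descriptor = ClassifyCommandDescriptor(top_count=10)
# descriptor.top_count  # -> 10
# descriptor.name       # -> 'CLASSIFY' (assuming the name property defined on the parent class)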
|
import datetime
import re
class PublicationUtils:
@staticmethod
def get_month(bibtex_entry):
month = bibtex_entry.get("month")
m = None
try:
m = int(month)
except Exception:
pass
try:
m = datetime.datetime.strptime(month, "%b").month
except Exception:
pass
try:
m = datetime.datetime.strptime(month, "%B").month
except Exception:
pass
return m
@staticmethod
def get_forum(bibtex_entry):
forum = []
if "journal" in bibtex_entry:
forum.append(bibtex_entry["journal"])
if "booktitle" in bibtex_entry:
forum.append(bibtex_entry["booktitle"])
if "series" in bibtex_entry:
forum.append(bibtex_entry["series"])
if "publisher" in bibtex_entry:
forum.append(bibtex_entry["publisher"])
if "school" in bibtex_entry:
forum.append(bibtex_entry["school"])
if "institution" in bibtex_entry:
forum.append(bibtex_entry["institution"])
if "address" in bibtex_entry:
forum.append(bibtex_entry["address"])
return ",".join(forum)
@staticmethod
def get_link(bibtex_entry):
if "note" in bibtex_entry:
m = re.search("^\\\\url{(.+?)}$", bibtex_entry["note"])
if m:
return m.group(1)
if "howpublished" in bibtex_entry:
m = re.search("^\\\\url{(.+?)}$", bibtex_entry["howpublished"])
if m:
return m.group(1)
return None
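# Illustrative inputs (bibtex_entry is a plain dict, e.g. as produced by a bibtex parser):
# PublicationUtils.get_month({"month": "Feb"})                             # -> 2
# PublicationUtils.get_forum({"journal": "JACM", "publisher": "ACM"})      # -> 'JACM,ACM'
# PublicationUtils.get_link({"note": "\\url{https://example.org/paper}"})  # -> 'https://example.org/paper'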
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-25 08:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0018_auto_20170625_1616'),
]
operations = [
migrations.AddField(
model_name='user',
name='avatar',
field=models.ImageField(blank=True, default='avatar/default.png', upload_to='avatar/%Y/%m'),
),
]
|
import json
import tempfile
import time
from collections import defaultdict
from os import path, remove
import numpy as np
import torch
import torch.distributed as dist
from PIL import Image
from pycocotools.coco import COCO as _COCO
from pycocotools.cocoeval import COCOeval
from pycocotools.mask import encode as mask_encode
from .bbx import invert_roi_bbx, extract_boxes
from .parallel import PackedSequence
from .roi_sampling import roi_sampling
def process_prediction(bbx_pred, cls_pred, obj_pred, msk_pred, img_size, idx, original_size):
# Move everything to CPU
bbx_pred, cls_pred, obj_pred = (t.cpu() for t in (bbx_pred, cls_pred, obj_pred))
msk_pred = msk_pred.cpu() if msk_pred is not None else None
if msk_pred is not None:
if isinstance(msk_pred, torch.Tensor):
            # ROI-style prediction
bbx_inv = invert_roi_bbx(bbx_pred, list(msk_pred.shape[-2:]), list(img_size))
bbx_idx = torch.arange(0, msk_pred.size(0), dtype=torch.long)
msk_pred = roi_sampling(msk_pred.unsqueeze(1).sigmoid(), bbx_inv, bbx_idx, list(img_size), padding="zero")
msk_pred = msk_pred.squeeze(1) > 0.5
elif isinstance(msk_pred, PackedSequence):
# Seeds-style prediction
msk_pred.data = msk_pred.data > 0.5
msk_pred_exp = msk_pred.data.new_zeros(len(msk_pred), img_size[0], img_size[1])
for it, (msk_pred_i, bbx_pred_i) in enumerate(zip(msk_pred, bbx_pred)):
i, j = int(bbx_pred_i[0].item()), int(bbx_pred_i[1].item())
msk_pred_exp[it, i:i + msk_pred_i.size(0), j:j + msk_pred_i.size(1)] = msk_pred_i
msk_pred = msk_pred_exp
# Convert bbx and redo clamping
bbx_pred[:, [0, 2]] = (bbx_pred[:, [0, 2]] / img_size[0] * original_size[0]).clamp(min=0, max=original_size[0])
bbx_pred[:, [1, 3]] = (bbx_pred[:, [1, 3]] / img_size[1] * original_size[1]).clamp(min=0, max=original_size[1])
bbx_pred_size = bbx_pred[:, 2:] - bbx_pred[:, :2]
outs = []
for i, (bbx_pred_i, bbx_pred_size_i, cls_pred_i, obj_pred_i) in \
enumerate(zip(bbx_pred, bbx_pred_size, cls_pred, obj_pred)):
out = dict(image_id=idx, category_id=int(cls_pred_i.item()), score=float(obj_pred_i.item()))
out["bbox"] = [
float(bbx_pred_i[1].item()),
float(bbx_pred_i[0].item()),
float(bbx_pred_size_i[1].item()),
float(bbx_pred_size_i[0].item()),
]
# Expand and convert mask if present
if msk_pred is not None:
segmentation = Image.fromarray(msk_pred[i].numpy()).resize(original_size[::-1], Image.NEAREST)
out["segmentation"] = mask_encode(np.asfortranarray(np.array(segmentation)))
out["segmentation"]["counts"] = str(out["segmentation"]["counts"], "utf-8")
outs.append(out)
return outs
def process_panoptic_prediction(panoptic_pred, num_stuff, idx, img_size, original_size):
# Extract panoptic prediction
msk_pred, cat_pred, obj_pred, iscrowd_pred = panoptic_pred
bbx_pred = extract_boxes(msk_pred, cat_pred.numel())
# Convert bbx and redo clamping
bbx_pred[:, [0, 2]] = (bbx_pred[:, [0, 2]] / img_size[0] * original_size[0]).clamp(min=0, max=original_size[0])
bbx_pred[:, [1, 3]] = (bbx_pred[:, [1, 3]] / img_size[1] * original_size[1]).clamp(min=0, max=original_size[1])
bbx_pred_size = bbx_pred[:, 2:] - bbx_pred[:, :2]
outs = []
for i, (obj_i, cat_i, bbx_i, iscrowd_i, bbx_size_i) in enumerate(zip(
obj_pred, cat_pred, bbx_pred, iscrowd_pred, bbx_pred_size)):
if iscrowd_i.item() == 1 or cat_i.item() < num_stuff or cat_i.item() == 255:
continue
out = dict(image_id=idx, category_id=int(cat_i.item()), score=float(obj_i.item()))
out["bbox"] = [
float(bbx_i[1].item()),
float(bbx_i[0].item()),
float(bbx_size_i[1].item()),
float(bbx_size_i[0].item()),
]
segmentation = msk_pred == i
segmentation = Image.fromarray(segmentation.numpy()).resize(original_size[::-1], Image.NEAREST)
out["segmentation"] = mask_encode(np.asfortranarray(np.array(segmentation)))
out["segmentation"]["counts"] = str(out["segmentation"]["counts"], "utf-8")
outs.append(out)
return outs
def summarize(predictions, annotations_file, img_list, mask=False):
msk_map = 0
with tempfile.NamedTemporaryFile("w") as fid:
json.dump(predictions, fid)
fid.flush()
# Detection
gt = COCO(annotations_file, img_list)
pred = gt.loadRes(fid.name)
pred_eval = COCOeval(gt, pred, "bbox")
pred_eval.evaluate()
pred_eval.accumulate()
pred_eval.summarize()
det_map = pred_eval.stats[0]
if mask:
pred_eval = COCOeval(gt, pred, "segm")
pred_eval.evaluate()
pred_eval.accumulate()
pred_eval.summarize()
msk_map = pred_eval.stats[0]
return det_map, msk_map
def summarize_mp(predictions, annotations_file, img_list, log_dir, mask=False):
# Write partial results to file (all workers)
rank = dist.get_rank()
with open(path.join(log_dir, "coco_ap_{:02d}.json".format(rank)), "w") as fid:
json.dump(predictions, fid)
with open(path.join(log_dir, "img_list_{:02d}.json".format(rank)), "w") as fid:
json.dump(img_list, fid)
dist.barrier()
# Merge results from all workers and run evaluation (only rank 0)
if rank == 0:
predictions = []
img_list = []
for i in range(dist.get_world_size()):
coco_ap_file = path.join(log_dir, "coco_ap_{:02d}.json".format(i))
with open(coco_ap_file) as fid:
predictions += json.load(fid)
remove(coco_ap_file)
img_list_file = path.join(log_dir, "img_list_{:02d}.json".format(i))
with open(img_list_file) as fid:
img_list += json.load(fid)
remove(img_list_file)
det_map, msk_map = summarize(predictions, annotations_file, img_list, mask)
else:
det_map, msk_map = 0, 0
dist.barrier()
return det_map, msk_map
class COCO(_COCO):
"""Modified COCO class that loads only a subset of"""
def __init__(self, annotation_file, img_list):
# load dataset
self.dataset, self.anns, self.cats, self.imgs = dict(), dict(), dict(), dict()
self.imgToAnns, self.catToImgs = defaultdict(list), defaultdict(list)
print('loading annotations into memory...')
tic = time.time()
dataset = json.load(open(annotation_file, 'r'))
assert type(dataset) == dict, 'annotation file format {} not supported'.format(type(dataset))
print('Done (t={:0.2f}s)'.format(time.time() - tic))
# Clean-up dataset, removing all images and annotations that are not in img_list
img_list = set(img_list)
dataset["images"] = [img for img in dataset["images"] if img["id"] in img_list]
dataset["annotations"] = [ann for ann in dataset["annotations"] if ann["image_id"] in img_list]
self.dataset = dataset
self.createIndex()
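# Illustrative usage of summarize() (file name and image ids are placeholders):
# det_map, msk_map = summarize(predictions, "instances_val.json", img_list=[42, 43], mask=True)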
|
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
class QuotesAvroPipeline:
def process_item(self, item, spider):
return item
|
from setuptools import setup
setup(
name="myhello",
version='0.1',
py_modules=['colors'],
include_package_data=True,
install_requires=[
'Click',
'colorama',
],
entry_points='''
[console_scripts]
myhello=hello:cli
''',
)
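# Typical workflow sketch (assumes a local hello.py module exposing a Click group named `cli`):
#   pip install --editable .
#   myhello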
|
try:
from zcrmsdk.src.com.zoho.crm.api.exception import SDKException
from zcrmsdk.src.com.zoho.crm.api.util import Constants
except Exception:
from ..exception import SDKException
from ..util import Constants
class Argument(object):
def __init__(self):
"""Creates an instance of Argument"""
self.__name = None
self.__value = None
self.__key_modified = dict()
def get_name(self):
"""
The method to get the name
Returns:
string: A string representing the name
"""
return self.__name
def set_name(self, name):
"""
The method to set the value to name
Parameters:
name (string) : A string representing the name
"""
if name is not None and not isinstance(name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: name EXPECTED TYPE: str', None, None)
self.__name = name
self.__key_modified['name'] = 1
def get_value(self):
"""
The method to get the value
Returns:
string: A string representing the value
"""
return self.__value
def set_value(self, value):
"""
The method to set the value to value
Parameters:
value (string) : A string representing the value
"""
if value is not None and not isinstance(value, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: value EXPECTED TYPE: str', None, None)
self.__value = value
self.__key_modified['value'] = 1
def is_key_modified(self, key):
"""
The method to check if the user has modified the given key
Parameters:
key (string) : A string representing the key
Returns:
int: An int representing the modification
"""
if key is not None and not isinstance(key, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None)
if key in self.__key_modified:
return self.__key_modified.get(key)
return None
def set_key_modified(self, key, modification):
"""
The method to mark the given key as modified
Parameters:
key (string) : A string representing the key
modification (int) : An int representing the modification
"""
if key is not None and not isinstance(key, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None)
if modification is not None and not isinstance(modification, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modification EXPECTED TYPE: int', None, None)
self.__key_modified[key] = modification
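# A minimal usage sketch (illustration only, not part of the generated SDK file):
# argument = Argument()
# argument.set_name("module")          # str values pass the isinstance check
# argument.set_value("Leads")
# argument.is_key_modified("name")     # returns 1 once set_name() has been called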
|
#
# Copyright (c) 2021 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class bfdsession(base_resource) :
""" Configuration for BFD configuration resource. """
def __init__(self) :
self._localip = None
self._remoteip = None
self._state = None
self._localport = None
self._remoteport = None
self._minimumtransmitinterval = None
self._negotiatedminimumtransmitinterval = None
self._minimumreceiveinterval = None
self._negotiatedminimumreceiveinterval = None
self._multiplier = None
self._remotemultiplier = None
self._vlan = None
self._localdiagnotic = None
self._localdiscriminator = None
self._remotediscriminator = None
self._passive = None
self._multihop = None
self._admindown = None
self._originalownerpe = None
self._currentownerpe = None
self._ownernode = None
self.___count = None
@property
def localip(self) :
r"""IPV4 or IPV6 Address of Local Node.
"""
try :
return self._localip
except Exception as e:
raise e
@localip.setter
def localip(self, localip) :
r"""IPV4 or IPV6 Address of Local Node.
"""
try :
self._localip = localip
except Exception as e:
raise e
@property
def remoteip(self) :
r"""IPV4 or IPV6 Address of Remote Node.
"""
try :
return self._remoteip
except Exception as e:
raise e
@remoteip.setter
def remoteip(self, remoteip) :
r"""IPV4 or IPV6 Address of Remote Node.
"""
try :
self._remoteip = remoteip
except Exception as e:
raise e
@property
def state(self) :
r"""Current state of the BFD session.<br/>Possible values = ADMIN DOWN, DOWN, INIT, UP.
"""
try :
return self._state
except Exception as e:
raise e
@property
def localport(self) :
r"""Source Port used by Local node to send Control packets for the BFD session.
"""
try :
return self._localport
except Exception as e:
raise e
@property
def remoteport(self) :
r"""Source Port used by Remote node to send Control packets for the BFD session.
"""
try :
return self._remoteport
except Exception as e:
raise e
@property
def minimumtransmitinterval(self) :
r"""Minimum trasmit interval, in milliseconds, the local node would like to use when transmitting BFD Control packets.
"""
try :
return self._minimumtransmitinterval
except Exception as e:
raise e
@property
def negotiatedminimumtransmitinterval(self) :
r"""Negotiated Minimum Transmit Interval. This is the interval at which the local node will be sending out BFD control packets.
"""
try :
return self._negotiatedminimumtransmitinterval
except Exception as e:
raise e
@property
def minimumreceiveinterval(self) :
r"""Minimum receive interval, in milliseconds, between received BFD Control packets that the local node is capable of supporting.
"""
try :
return self._minimumreceiveinterval
except Exception as e:
raise e
@property
def negotiatedminimumreceiveinterval(self) :
r"""Negotiated Minimum Receive Interval. This is the interval at which the local node will be expecting BFD control packets.
"""
try :
return self._negotiatedminimumreceiveinterval
except Exception as e:
raise e
@property
def multiplier(self) :
r"""Detection Multiplier. The negotiated transmit interval multiplied by Detection multiplier provides the Detection Time for the remote node for the BFD session.
"""
try :
return self._multiplier
except Exception as e:
raise e
@property
def remotemultiplier(self) :
r"""Your Multiplier. The negotiated receive interval multiplied by Your Multiplier provides the Detection Time for the local node for the BFD session.
"""
try :
return self._remotemultiplier
except Exception as e:
raise e
@property
def vlan(self) :
r"""VLAN ID on which the BDS session is configured.
"""
try :
return self._vlan
except Exception as e:
raise e
@property
def localdiagnotic(self) :
r"""Diagnostic Code specifying the local system's reason for the last change in session state.
"""
try :
return self._localdiagnotic
except Exception as e:
raise e
@property
def localdiscriminator(self) :
r"""A unique discriminator value generated by the local node for the session.
"""
try :
return self._localdiscriminator
except Exception as e:
raise e
@property
def remotediscriminator(self) :
r"""A unique discriminator value as received from the remote node for the session.
"""
try :
return self._remotediscriminator
except Exception as e:
raise e
@property
def passive(self) :
r"""Flag indicating that the session is passive.
"""
try :
return self._passive
except Exception as e:
raise e
@property
def multihop(self) :
r"""Flag indicating if the session is multihop.
"""
try :
return self._multihop
except Exception as e:
raise e
@property
def admindown(self) :
r"""Flag indicating if admin down is being sent.
"""
try :
return self._admindown
except Exception as e:
raise e
@property
def originalownerpe(self) :
r"""Original Owner PE of the BFD session.
"""
try :
return self._originalownerpe
except Exception as e:
raise e
@property
def currentownerpe(self) :
r"""Current Owner PE of the BFD session.
"""
try :
return self._currentownerpe
except Exception as e:
raise e
@property
def ownernode(self) :
r"""The owner node in a Cluster for this BFD session. Owner node can vary from 0 to 31. If ownernode is not specified then the session is treated as Striped session.<br/>Default value: 255.
"""
try :
return self._ownernode
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
r""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(bfdsession_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.bfdsession
except Exception as e :
raise e
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
r""" Use this API to fetch all the bfdsession resources that are configured on netscaler.
"""
try :
if not name :
obj = bfdsession()
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_args(cls, client, args) :
r""" Use this API to fetch all the bfdsession resources that are configured on netscaler.
# This uses bfdsession_args which is a way to provide additional arguments while fetching the resources.
"""
try :
obj = bfdsession()
option_ = options()
option_.args = nitro_util.object_to_string_withoutquotes(args)
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
r""" Use this API to fetch filtered set of bfdsession resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = bfdsession()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
r""" Use this API to count the bfdsession resources configured on NetScaler.
"""
try :
obj = bfdsession()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
r""" Use this API to count filtered the set of bfdsession resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = bfdsession()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class State:
ADMIN_DOWN = "ADMIN DOWN"
DOWN = "DOWN"
INIT = "INIT"
UP = "UP"
class bfdsession_response(base_response) :
def __init__(self, length=1) :
self.bfdsession = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.bfdsession = [bfdsession() for _ in range(length)]
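# A minimal usage sketch (assumption: client is an authenticated nitro_service
# instance created elsewhere; the filter value is illustrative):
# sessions = bfdsession.get(client)
# up_count = bfdsession.count_filtered(client, "state:UP")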
|
import datetime
import math
import requests_cache
from bs4 import BeautifulSoup
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from api.modules.holidays.constants import HOLIDAYS_PAGE_URL, HINDI_DAY_STRING_MAP, HINDI_MONTH_STRING_MAP
from api.modules.holidays.utils import load_url_content
week_difference = datetime.timedelta(days=7)
requests_cache.install_cache(expire_after=week_difference)
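# Parsing assumption derived from the code below: the holidays page renders a
# 4-column table, so the flattened <th>/<td> cells are read in groups of four as
# (date, day, holiday name, holiday type).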
@api_view(['GET'])
def get_upcoming_holidays(request, year):
"""
Returns a list of all the holidays in a given year
:param request:
:param year:
:return: 400 if unable to get response from Holidays Page
:return: 500 if unable to correctly parse Holidays Page
:return: 200 successful
"""
holiday_data = []
try:
html = load_url_content(HOLIDAYS_PAGE_URL.format(year))
if html:
soup = BeautifulSoup(html, 'html.parser')
cells = soup.findAll(['th', 'td'])
row_len = int(math.ceil(len(cells) / 4))
for ctr in range(row_len):
if ctr == 0:
continue
offset = ctr * 4
holiday_type = cells[offset + 3].text.split()
date_string = cells[offset + 0].text.strip().split(" ")
day_string = cells[offset + 1].text.strip()
# Check if HTML response is in Hindi
# If in Hindi, replace with English counterpart
if date_string[1] in HINDI_MONTH_STRING_MAP.keys():
date_string[1] = HINDI_MONTH_STRING_MAP[date_string[1]][:3]
day_string = HINDI_DAY_STRING_MAP[day_string]
try:
dt = datetime.datetime.strptime(" ".join(date_string), '%d %b')
except ValueError:
dt = datetime.datetime.strptime(" ".join(date_string), '%b %d')
holiday_obj = {
'month': dt.strftime('%B'),
'date': int(dt.strftime('%d')),
'day': day_string,
'name': cells[offset + 2].text.strip(),
'type': holiday_type[0]
}
holiday_data.append(holiday_obj)
else:
return Response(holiday_data, status=status.HTTP_400_BAD_REQUEST)
except Exception as e:
return Response(str(e), status=status.HTTP_500_INTERNAL_SERVER_ERROR)
return Response(holiday_data, status=status.HTTP_200_OK)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import numpy as np
from .. import Variable
from ..core.utils import (FrozenOrderedDict, Frozen,
NdimSizeLenMixin, DunderArrayMixin)
from ..core import indexing
from .common import AbstractDataStore, DataStorePickleMixin
class NioArrayWrapper(NdimSizeLenMixin, DunderArrayMixin):
def __init__(self, variable_name, datastore):
self.datastore = datastore
self.variable_name = variable_name
array = self.get_array()
self.shape = array.shape
self.dtype = np.dtype(array.typecode())
def get_array(self):
self.datastore.assert_open()
return self.datastore.ds.variables[self.variable_name]
def __getitem__(self, key):
with self.datastore.ensure_open(autoclose=True):
array = self.get_array()
if key == () and self.ndim == 0:
return array.get_value()
return array[key]
class NioDataStore(AbstractDataStore, DataStorePickleMixin):
"""Store for accessing datasets via PyNIO
"""
def __init__(self, filename, mode='r', autoclose=False):
import Nio
opener = functools.partial(Nio.open_file, filename, mode=mode)
self.ds = opener()
# xarray provides its own support for FillValue,
# so turn off PyNIO's support for the same.
self.ds.set_option('MaskedArrayMode', 'MaskedNever')
self._autoclose = autoclose
self._isopen = True
self._opener = opener
self._mode = mode
def open_store_variable(self, name, var):
data = indexing.LazilyIndexedArray(NioArrayWrapper(name, self))
return Variable(var.dimensions, data, var.attributes)
def get_variables(self):
with self.ensure_open(autoclose=False):
return FrozenOrderedDict((k, self.open_store_variable(k, v))
for k, v in self.ds.variables.items())
def get_attrs(self):
with self.ensure_open(autoclose=True):
return Frozen(self.ds.attributes)
def get_dimensions(self):
with self.ensure_open(autoclose=True):
return Frozen(self.ds.dimensions)
def get_encoding(self):
encoding = {}
encoding['unlimited_dims'] = set(
[k for k in self.ds.dimensions if self.ds.unlimited(k)])
return encoding
def close(self):
if self._isopen:
self.ds.close()
self._isopen = False
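# A minimal usage sketch (assumption: PyNIO is installed and "example.grb" is a file
# PyNIO can open; neither ships with this module):
# store = NioDataStore("example.grb")
# variables = store.get_variables()
# store.close()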
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
import os
import sys
import logging # noqa: F401
import json
import time
from concurrent.futures import ThreadPoolExecutor
from paho_client import PahoClient
"""
Uncomment the following lines to enable debug logging
"""
# logging.basicConfig(level=logging.INFO)
# logging.getLogger("paho").setLevel(level=logging.DEBUG)
##################################
# CREATE CLIENTS
##################################
client_1 = PahoClient.create_from_connection_string(
os.environ["CS_VEHICLE_1"], clean_session=True
)
client_2 = PahoClient.create_from_connection_string(
os.environ["CS_VEHICLE_2"], clean_session=True
)
all_clients = (client_1, client_2)
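# Note: CS_VEHICLE_1 / CS_VEHICLE_2 are assumed to hold standard Azure IoT Hub
# device connection strings, e.g. (placeholder values only):
# "HostName=<hub-name>.azure-devices.net;DeviceId=vehicle1;SharedAccessKey=<key>"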
def listen(client: PahoClient) -> None:
##################################
# CONNECT
##################################
print("{}: Connecting".format(client.auth.device_id))
client.start_connect()
if not client.connection_status.wait_for_connected(timeout=20):
print("{}: failed to connect. exiting".format(client.auth.device_id))
sys.exit(1)
##################################
# SUBSCRIBE
##################################
qos = 1
topic_filter = "fleet/alerts/#"
print(
"{}: Subscribing to {} at qos {}".format(
client.auth.device_id, topic_filter, qos
)
)
(rc, mid) = client.subscribe(topic_filter, qos)
ack_result = client.incoming_subacks.wait_for_ack(mid, timeout=20)
if not ack_result:
print("{}: SUBACK not received within 20 seconds".format(client.auth.device_id))
client.disconnect()
client.connection_status.wait_for_disconnected()
sys.exit(1)
elif ack_result[0] == -1:
print("{}: Subscription was rejected".format(client.auth.device_id))
client.disconnect()
client.connection_status.wait_for_disconnected()
sys.exit(1)
else:
print(
"{}: Subscription was granted with qos {}".format(
client.auth.device_id, ack_result[0]
)
)
##################################
# LISTEN
##################################
time_to_listen_in_seconds = 600
end_time = time.time() + time_to_listen_in_seconds
while time.time() <= end_time:
remaining_time = end_time - time.time()
message = client.incoming_messages.pop_next_message(timeout=remaining_time)
if message:
payload_object = json.loads(message.payload)
print(
"{}: Message received on topic {}: {}".format(
client.auth.device_id, message.topic, payload_object
)
)
##################################
# DISCONNECT
##################################
print("{}: Disconnecting".format(client.auth.device_id))
client.disconnect()
client.connection_status.wait_for_disconnected()
##################################
# CREATE_THREADS
##################################
with ThreadPoolExecutor() as tp:
for client in all_clients:
tp.submit(listen, client)
|
from typing import TYPE_CHECKING
from django.db import models
from posthog.models.utils import UUIDModel, sane_repr
if TYPE_CHECKING:
from posthog.models.organization import OrganizationMembership
class ExplicitTeamMembership(UUIDModel):
class Level(models.IntegerChoices):
"""Keep in sync with OrganizationMembership.Level (only difference being organizations having an Owner)."""
MEMBER = 1, "member"
ADMIN = 8, "administrator"
team: models.ForeignKey = models.ForeignKey(
"posthog.Team",
on_delete=models.CASCADE,
related_name="explicit_memberships",
related_query_name="explicit_membership",
)
parent_membership: models.ForeignKey = models.ForeignKey(
"posthog.OrganizationMembership",
on_delete=models.CASCADE,
related_name="explicit_team_memberships",
related_query_name="explicit_team_membership",
)
level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
default=Level.MEMBER, choices=Level.choices
)
joined_at: models.DateTimeField = models.DateTimeField(auto_now_add=True)
updated_at: models.DateTimeField = models.DateTimeField(auto_now=True)
class Meta:
constraints = [
models.UniqueConstraint(fields=["team", "parent_membership"], name="unique_explicit_team_membership"),
]
def __str__(self):
return str(self.Level(self.level))
@property
def effective_level(self) -> "OrganizationMembership.Level":
"""If organization level is higher than project level, then that takes precedence over explicit project level.
"""
return max(self.level, self.parent_membership.level)
__repr__ = sane_repr("team", "parent_membership", "level")
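# Illustration of effective_level (derived from the max() above, not part of the
# original model): a project-level MEMBER (1) whose parent organization membership
# is ADMIN (8) resolves to an effective_level of 8.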
|
import sys
class PrintUtils:
progress = 0
total_progress = 0
@classmethod
def print_progress_bar(cls, iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', print_end = "\r"):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
print_end - Optional : end character (e.g. "\r", "\r\n") (Str)
"""
cls.progress = iteration
cls.total_progress = total
if prefix != "":
prefix += " "
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
print(f'{prefix}[{bar}] {percent}% {suffix}', end = print_end)
if iteration == total:
print()
@classmethod
def smart_print(cls, msg, end = "\x1b[2K\r\n", file = sys.stdout):
if not isinstance(msg, str):
msg = str(msg)
print("\x1b[2K", end = "\r", flush = True)
if file == sys.stderr:
print("\x1b[31m" + msg + "\x1b[37m", file = sys.stderr, flush = True)
else:
print(msg, flush = True)
if cls.total_progress != 0:
cls.print_progress_bar(cls.progress, cls.total_progress)
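# A small self-contained demo of the call pattern described in the docstring above
# (not part of the original module; the work loop is simulated with time.sleep):
if __name__ == "__main__":
    import time
    total = 20
    for i in range(total + 1):
        PrintUtils.print_progress_bar(i, total, prefix="Copying", suffix="done", length=30)
        time.sleep(0.05)
    PrintUtils.smart_print("finished")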
|
print('Hello!\nI will help you calculate the raise you will receive.')
sal = float(input('What is your current salary? \n R$'))
if sal > 1250.00:
    print('Your final salary will be R${:.2f}, since your raise was 10%.'.format(sal + sal * 0.10))
else:
    print('Your final salary will be R${:.2f}, since your raise was 15%.'.format(sal + sal * 0.15))
|
"""
HttpCtrl library provides HTTP/HTTPS client and server API to Robot Framework to make REST API testing easy.
Authors: Andrei Novikov
Date: 2018-2021
Copyright: The 3-Clause BSD License
"""
from http.server import SimpleHTTPRequestHandler
from robot.api import logger
from HttpCtrl.internal_messages import TerminationRequest, IgnoreRequest
from HttpCtrl.request import Request
from HttpCtrl.request_storage import RequestStorage
from HttpCtrl.response_storage import ResponseStorage
class HttpHandler(SimpleHTTPRequestHandler):
def __init__(self, *args, **kwargs):
self.server_version = "HttpCtrl.Server/"
self.sys_version = ""
SimpleHTTPRequestHandler.__init__(self, *args, **kwargs)
def do_GET(self):
self.__default_handler('GET')
def do_POST(self):
self.__default_handler('POST')
def do_PUT(self):
self.__default_handler('PUT')
def do_OPTIONS(self):
self.__default_handler('OPTIONS')
def do_HEAD(self):
self.__default_handler('HEAD')
def do_PATCH(self):
self.__default_handler('PATCH')
def do_DELETE(self):
self.__default_handler('DELETE')
def log_message(self, format, *args):
return
def log_error(self, format, *args):
return
def log_request(self, code='-', size='-'):
return
def __extract_body(self):
body_length = int(self.headers.get('Content-Length', 0))
if body_length > 0:
return self.rfile.read(body_length).decode('utf-8')
return None
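    # Flow note (derived from the handler below): each incoming request is wrapped in a
    # Request object and pushed to RequestStorage; the handler then blocks on
    # ResponseStorage().pop() until test code supplies a response, or a
    # TerminationRequest / IgnoreRequest sentinel tells it to answer with nothing.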
def __default_handler(self, method):
host, port = self.client_address[:2]
body = self.__extract_body()
logger.info("'%s' request is received from '%s:%s'." % (method, host, port))
request = Request(host, port, method, self.path, self.headers, body)
RequestStorage().push(request)
response = ResponseStorage().pop()
if isinstance(response, TerminationRequest) or isinstance(response, IgnoreRequest):
return
try:
self.__send_response(response)
except Exception as exception:
logger.info("Response was not sent to client due to reason: '%s'." % str(exception))
def __send_response(self, response):
if response is None:
logger.error("Response is not provided for incoming request.")
return
self.send_response(response.get_status())
headers = response.get_headers()
for key, value in headers.items():
self.send_header(key, value)
body = response.get_body()
if body is not None:
if isinstance(response.get_body(), str):
body = response.get_body().encode("utf-8")
self.send_header('Content-Length', str(len(body)))
self.end_headers()
if body is not None:
self.wfile.write(body)
|
from henrio import *
import unittest
class QueueTest(unittest.TestCase):
def test_queue(self):
try:
l = get_default_loop()
q = HeapQueue(50)
print(q)
async def d():
return await q.get()
async def a(i):
await sleep(3)
await q.put(i)
for x in range(100):
l.create_task(a(x))
l.create_task(d())
async def task():
await sleep(5)
print(len(l._queue), len(l._tasks))
l.run_until_complete(task())
finally:
self.assertEqual(len(q), 0)
|
'''
samples from all raw data;
by default samples in a non-iid manner; namely, randomly selects users from
raw data until their cumulative amount of data exceeds the given number of
datapoints to sample (specified by --fraction argument);
ordering of original data points is not preserved in sampled data
'''
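# Example invocation (values are illustrative only; the listed flags are defined by
# the argparse setup below):
#   python sample.py --name sent140 --niid --fraction 0.1 --seed 42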
import argparse
import json
import os
import random
import time
from collections import OrderedDict
from constants import DATASETS, SEED_FILES
from util import iid_divide
parser = argparse.ArgumentParser()
parser.add_argument('--name',
help='name of dataset to parse; default: sent140;',
type=str,
choices=DATASETS,
default='sent140')
parser.add_argument('--iid',
help='sample iid;',
action="store_true")
parser.add_argument('--niid',
help="sample niid;",
dest='iid', action='store_false')
parser.add_argument('--union',
help="sample from union lists;",
dest='union', action='store_true')
parser.add_argument('--fraction',
help='fraction of all data to sample; default: 0.1;',
type=float,
default=0.1)
parser.add_argument('--u',
help=('number of users in iid data set; ignored in niid case;'
'represented as fraction of original total number of users; '
'default: 0.01;'),
type=float,
default=0.01)
parser.add_argument('--seed',
help='seed for random sampling of data',
type=int,
default=None)
parser.set_defaults(iid=False)
args = parser.parse_args()
print('------------------------------')
print('sampling data')
parent_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
data_dir = os.path.join(parent_path, args.name, 'data')
subdir = os.path.join(data_dir, 'all_data')
files = os.listdir(subdir)
files = [f for f in files if f.endswith('.json')]
rng_seed = (args.seed if (args.seed is not None and args.seed >= 0) else int(time.time()))
print ("Using seed {}".format(rng_seed))
rng = random.Random(rng_seed)
print (os.environ.get('LEAF_DATA_META_DIR'))
if os.environ.get('LEAF_DATA_META_DIR') is not None:
seed_fname = os.path.join(os.environ.get('LEAF_DATA_META_DIR'), SEED_FILES['sampling'])
with open(seed_fname, 'w+') as f:
f.write("# sampling_seed used by sampling script - supply as "
"--smplseed to preprocess.sh or --seed to utils/sample.py\n")
f.write(str(rng_seed))
print ("- random seed written out to {file}".format(file=seed_fname))
else:
print ("- using random seed '{seed}' for sampling".format(seed=rng_seed))
if args.union:
print("=== Sampling users for each union")
union_dir = os.path.join(data_dir, 'union')
path_file = os.path.join(union_dir, "union_list_path")
with open(path_file, "r") as f:
union_list_file = f.read()
os.remove(path_file)
with open(union_list_file, "r") as f:
union_list = json.load(f)
total_samples = sum(map(lambda union: sum(map(lambda c: c[1], union)), union_list))
print("total_samples:", total_samples)
unions = list(filter(lambda l: len(l) > 1, union_list))
singles = list(filter(lambda l: len(l) == 1, union_list))
print("Number of unions:", len(unions))
print("Number of singles:", len(singles))
union_num_samples = []
union_sample = []
samples_so_far = 0
for union in unions:
print("-"*80)
print("\tusers:", len(union))
samples_in_this_union = sum(map(lambda c: c[1], union))
print("\tsamples_in_this_union", samples_in_this_union)
frac = args.fraction * samples_in_this_union
print("\tfrac", frac)
selected_users = []
sample_count = 0
for id, samples in union:
if sample_count + samples > frac:
break
selected_users.append(id)
sample_count += samples
print("\tusers in sample:", len(selected_users))
print("\tsamples in sample:", sample_count)
union_sample.append(selected_users)
union_num_samples.append(sample_count)
samples_so_far += sample_count
samples_remain = total_samples * args.fraction - samples_so_far
print("samples remain:", samples_remain)
num_singles = 0
for single in singles:
samples_in_this_user = single[0][1]
id = single[0][0]
if samples_remain - samples_in_this_user < 0:
break
union_sample.append([id])
union_num_samples.append(samples_in_this_user)
samples_remain -= samples_in_this_user
num_singles += 1
union_names = ["union_%d" % i for i in range(len(unions))]
singles_names = ["single_%d" % i for i in range(num_singles)]
names = union_names + singles_names
print("NAMES AND LISTS MATCH:", len(union_sample) == len(names))
print("number of selected singles:", num_singles, "- total singles: ", len(singles))
union_data = dict([(name, {"x": [], "y": []}) for name in names])
for f in files:
print("Looking for users in",f)
file_dir = os.path.join(subdir, f)
with open(file_dir, 'r') as inf:
data = json.load(inf, object_pairs_hook=OrderedDict)
for user, user_data in data['user_data'].items():
for name, union in zip(names,union_sample):
if user in union:
union_data[name]['x'] += user_data['x']
union_data[name]['y'] += user_data['y']
#print([(n,len(d["x"])) for n,d in union_data.items()])
# ------------
# create .json file
all_data = {}
all_data['users'] = names
all_data['num_samples'] = union_num_samples
all_data['unions'] = union_sample
all_data['user_data'] = union_data
slabel = 'union'
arg_frac = str(args.fraction)
arg_frac = arg_frac[2:]
arg_nu = str(args.u)
arg_nu = arg_nu[2:]
arg_label = arg_frac
file_name = '%s_%s.json' % (slabel, arg_label)
ouf_dir = os.path.join(data_dir, 'sampled_data', file_name)
# NOTE: For now, we just write everything to one big json.
# This will give us issues if we use a large sample.
print('writing %s' % file_name)
with open(ouf_dir, 'w') as outfile:
json.dump(all_data, outfile)
if not args.union:
new_user_count = 0 # for iid case
for f in files:
file_dir = os.path.join(subdir, f)
with open(file_dir, 'r') as inf:
# Load data into an OrderedDict, to prevent ordering changes
# and enable reproducibility
data = json.load(inf, object_pairs_hook=OrderedDict)
num_users = len(data['users'])
tot_num_samples = sum(data['num_samples'])
num_new_samples = int(args.fraction * tot_num_samples)
hierarchies = None
if(args.iid):
raw_list = list(data['user_data'].values())
raw_x = [elem['x'] for elem in raw_list]
raw_y = [elem['y'] for elem in raw_list]
x_list = [item for sublist in raw_x for item in sublist] # flatten raw_x
y_list = [item for sublist in raw_y for item in sublist] # flatten raw_y
num_new_users = int(round(args.u * num_users))
if num_new_users == 0:
num_new_users += 1
indices = [i for i in range(tot_num_samples)]
new_indices = rng.sample(indices, num_new_samples)
users = [str(i+new_user_count) for i in range(num_new_users)]
user_data = {}
for user in users:
user_data[user] = {'x': [], 'y': []}
all_x_samples = [x_list[i] for i in new_indices]
all_y_samples = [y_list[i] for i in new_indices]
x_groups = iid_divide(all_x_samples, num_new_users)
y_groups = iid_divide(all_y_samples, num_new_users)
for i in range(num_new_users):
user_data[users[i]]['x'] = x_groups[i]
user_data[users[i]]['y'] = y_groups[i]
num_samples = [len(user_data[u]['y']) for u in users]
new_user_count += num_new_users
else:
ctot_num_samples = 0
users = data['users']
users_and_hiers = None
if 'hierarchies' in data:
users_and_hiers = list(zip(users, data['hierarchies']))
rng.shuffle(users_and_hiers)
else:
rng.shuffle(users)
user_i = 0
num_samples = []
user_data = {}
if 'hierarchies' in data:
hierarchies = []
while(ctot_num_samples < num_new_samples):
hierarchy = None
if users_and_hiers is not None:
user, hier = users_and_hiers[user_i]
else:
user = users[user_i]
cdata = data['user_data'][user]
cnum_samples = len(data['user_data'][user]['y'])
if (ctot_num_samples + cnum_samples > num_new_samples):
cnum_samples = num_new_samples - ctot_num_samples
indices = [i for i in range(cnum_samples)]
new_indices = rng.sample(indices, cnum_samples)
x = []
y = []
for i in new_indices:
x.append(data['user_data'][user]['x'][i])
y.append(data['user_data'][user]['y'][i])
cdata = {'x': x, 'y': y}
if 'hierarchies' in data:
hierarchies.append(hier)
num_samples.append(cnum_samples)
user_data[user] = cdata
ctot_num_samples += cnum_samples
user_i += 1
if 'hierarchies' in data:
users = [u for u, h in users_and_hiers][:user_i]
else:
users = users[:user_i]
# ------------
# create .json file
all_data = {}
all_data['users'] = users
if hierarchies is not None:
all_data['hierarchies'] = hierarchies
all_data['num_samples'] = num_samples
all_data['user_data'] = user_data
slabel = ''
if(args.iid):
slabel = 'iid'
else:
slabel = 'niid'
arg_frac = str(args.fraction)
arg_frac = arg_frac[2:]
arg_nu = str(args.u)
arg_nu = arg_nu[2:]
arg_label = arg_frac
if(args.iid):
arg_label = '%s_%s' % (arg_nu, arg_label)
file_name = '%s_%s_%s.json' % ((f[:-5]), slabel, arg_label)
ouf_dir = os.path.join(data_dir, 'sampled_data', file_name)
print('writing %s' % file_name)
with open(ouf_dir, 'w') as outfile:
json.dump(all_data, outfile)
|
# Author: Kelvin Lai <kelvin@firststreet.org>
# Copyright: This module is owned by First Street Foundation
# Standard Imports
import logging
# Internal Imports
from firststreet.api import csv_format
from firststreet.api.api import Api
from firststreet.errors import InvalidArgument
from firststreet.models.historic import HistoricEvent, HistoricSummary
class Historic(Api):
"""This class receives a list of search_items and handles the creation of a historic product from the request.
Methods:
get_event: Retrieves a list of Historic Event for the given list of IDs
get_summary: Retrieves a list of Historic Summary for the given list of IDs
"""
def get_event(self, search_item, csv=False, output_dir=None, extra_param=None):
"""Retrieves historic event product data from the First Street Foundation API given a list of search_items and
returns a list of Historic Event objects.
Args:
search_item (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
file of First Street Foundation IDs
csv (bool): To output extracted data to a csv or not
output_dir (str): The output directory to save the generated csvs
extra_param (str): Extra parameter to be added to the url
Returns:
A list of Historic Event
"""
# Get data from api and create objects
api_datas = self.call_api(search_item, "historic", "event", None, extra_param=extra_param)
product = [HistoricEvent(api_data) for api_data in api_datas]
if csv:
csv_format.to_csv(product, "historic", "event", output_dir=output_dir)
logging.info("Historic Event Data Ready.")
return product
def get_events_by_location(self, search_item, location_type, csv=False, output_dir=None, extra_param=None):
"""Retrieves historic summary product data from the First Street Foundation API given a list of location
search_items and returns a list of Historic Summary objects.
Args:
search_item (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
file of First Street Foundation IDs
location_type (str): The location lookup type
csv (bool): To output extracted data to a csv or not
output_dir (str): The output directory to save the generated csvs
extra_param (str): Extra parameter to be added to the url
Returns:
A list containing the list of Historic Summary objects and the list of Historic Event objects
Raises:
InvalidArgument: The location provided is empty
TypeError: The location provided is not a string
"""
if not location_type:
raise InvalidArgument(location_type)
elif not isinstance(location_type, str):
raise TypeError("location is not a string")
# Get data from api and create objects
api_datas = self.call_api(search_item, "historic", "summary", location_type)
summary = [HistoricSummary(api_data) for api_data in api_datas]
search_item = list(set([event.get("eventId") for sum_hist in summary if sum_hist.historic for
event in sum_hist.historic]))
if search_item:
api_datas_event = self.call_api(search_item, "historic", "event", None, extra_param=extra_param)
else:
api_datas_event = [{"eventId": None}]
event = [HistoricEvent(api_data) for api_data in api_datas_event]
if csv:
csv_format.to_csv([summary, event], "historic", "summary_event", location_type, output_dir=output_dir)
logging.info("Historic Summary Event Data Ready.")
return [summary, event]
def get_summary(self, search_item, location_type, csv=False, output_dir=None, extra_param=None):
"""Retrieves historic summary product data from the First Street Foundation API given a list of search_items and
returns a list of Historic Summary objects.
Args:
search_item (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
file of First Street Foundation IDs
location_type (str): The location lookup type
csv (bool): To output extracted data to a csv or not
output_dir (str): The output directory to save the generated csvs
extra_param (str): Extra parameter to be added to the url
Returns:
A list of Historic Summary
Raises:
InvalidArgument: The location provided is empty
TypeError: The location provided is not a string
"""
if not location_type:
raise InvalidArgument(location_type)
elif not isinstance(location_type, str):
raise TypeError("location is not a string")
# Get data from api and create objects
api_datas = self.call_api(search_item, "historic", "summary", location_type, extra_param=extra_param)
product = [HistoricSummary(api_data) for api_data in api_datas]
if csv:
csv_format.to_csv(product, "historic", "summary", location_type, output_dir=output_dir)
logging.info("Historic Summary Data Ready.")
return product
|
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from intersight.model.display_names import DisplayNames
from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
from intersight.model.mo_mo_ref import MoMoRef
from intersight.model.mo_tag import MoTag
from intersight.model.mo_version_context import MoVersionContext
from intersight.model.virtualization_bond_state import VirtualizationBondState
from intersight.model.virtualization_iwe_cluster_relationship import VirtualizationIweClusterRelationship
from intersight.model.virtualization_iwe_dv_uplink_relationship import VirtualizationIweDvUplinkRelationship
from intersight.model.virtualization_iwe_host_interface import VirtualizationIweHostInterface
from intersight.model.virtualization_iwe_host_relationship import VirtualizationIweHostRelationship
from intersight.model.virtualization_iwe_network_relationship import VirtualizationIweNetworkRelationship
globals()['DisplayNames'] = DisplayNames
globals()['MoBaseMoRelationship'] = MoBaseMoRelationship
globals()['MoMoRef'] = MoMoRef
globals()['MoTag'] = MoTag
globals()['MoVersionContext'] = MoVersionContext
globals()['VirtualizationBondState'] = VirtualizationBondState
globals()['VirtualizationIweClusterRelationship'] = VirtualizationIweClusterRelationship
globals()['VirtualizationIweDvUplinkRelationship'] = VirtualizationIweDvUplinkRelationship
globals()['VirtualizationIweHostInterface'] = VirtualizationIweHostInterface
globals()['VirtualizationIweHostRelationship'] = VirtualizationIweHostRelationship
globals()['VirtualizationIweNetworkRelationship'] = VirtualizationIweNetworkRelationship
class VirtualizationIweHostInterfaceRelationship(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('link_state',): {
'UNKNOWN': "unknown",
'UP': "up",
'DOWN': "down",
'DEGRADED': "degraded",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
validations = {
('mac',): {
'regex': {
'pattern': r'^$|^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$', # noqa: E501
},
},
}
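    # Illustrative values that satisfy the 'mac' pattern above (shown for readability only,
    # not part of the generated spec): "" (empty), "00:25:B5:0A:1B:2C", "00-25-b5-0a-1b-2c".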
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'class_id': (str,), # noqa: E501
'moid': (str,), # noqa: E501
'selector': (str,), # noqa: E501
'link': (str,), # noqa: E501
'account_moid': (str,), # noqa: E501
'create_time': (datetime,), # noqa: E501
'domain_group_moid': (str,), # noqa: E501
'mod_time': (datetime,), # noqa: E501
'owners': ([str], none_type,), # noqa: E501
'shared_scope': (str,), # noqa: E501
'tags': ([MoTag], none_type,), # noqa: E501
'version_context': (MoVersionContext,), # noqa: E501
'ancestors': ([MoBaseMoRelationship], none_type,), # noqa: E501
'parent': (MoBaseMoRelationship,), # noqa: E501
'permission_resources': ([MoBaseMoRelationship], none_type,), # noqa: E501
'display_names': (DisplayNames,), # noqa: E501
'bond_state': (VirtualizationBondState,), # noqa: E501
'host_name': (str,), # noqa: E501
'host_uuid': (str,), # noqa: E501
'if_type': (str,), # noqa: E501
'ip_addresses': ([str], none_type,), # noqa: E501
'link_state': (str,), # noqa: E501
'mac': (str,), # noqa: E501
'mtu': (int,), # noqa: E501
'name': (str,), # noqa: E501
'vlans': (str,), # noqa: E501
'cluster': (VirtualizationIweClusterRelationship,), # noqa: E501
'dv_uplink': (VirtualizationIweDvUplinkRelationship,), # noqa: E501
'host': (VirtualizationIweHostRelationship,), # noqa: E501
'network': (VirtualizationIweNetworkRelationship,), # noqa: E501
'object_type': (str,), # noqa: E501
}
@cached_property
def discriminator():
lazy_import()
val = {
'mo.MoRef': MoMoRef,
'virtualization.IweHostInterface': VirtualizationIweHostInterface,
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'moid': 'Moid', # noqa: E501
'selector': 'Selector', # noqa: E501
'link': 'link', # noqa: E501
'account_moid': 'AccountMoid', # noqa: E501
'create_time': 'CreateTime', # noqa: E501
'domain_group_moid': 'DomainGroupMoid', # noqa: E501
'mod_time': 'ModTime', # noqa: E501
'owners': 'Owners', # noqa: E501
'shared_scope': 'SharedScope', # noqa: E501
'tags': 'Tags', # noqa: E501
'version_context': 'VersionContext', # noqa: E501
'ancestors': 'Ancestors', # noqa: E501
'parent': 'Parent', # noqa: E501
'permission_resources': 'PermissionResources', # noqa: E501
'display_names': 'DisplayNames', # noqa: E501
'bond_state': 'BondState', # noqa: E501
'host_name': 'HostName', # noqa: E501
'host_uuid': 'HostUuid', # noqa: E501
'if_type': 'IfType', # noqa: E501
'ip_addresses': 'IpAddresses', # noqa: E501
'link_state': 'LinkState', # noqa: E501
'mac': 'Mac', # noqa: E501
'mtu': 'Mtu', # noqa: E501
'name': 'Name', # noqa: E501
'vlans': 'Vlans', # noqa: E501
'cluster': 'Cluster', # noqa: E501
'dv_uplink': 'DvUplink', # noqa: E501
'host': 'Host', # noqa: E501
'network': 'Network', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""VirtualizationIweHostInterfaceRelationship - a model defined in OpenAPI
Args:
Keyword Args:
class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "mo.MoRef", must be one of ["mo.MoRef", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
moid (str): The Moid of the referenced REST resource.. [optional] # noqa: E501
selector (str): An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. 1. If 'moid' is set this field is ignored. 1. If 'selector' is set and 'moid' is empty/absent from the request, Intersight determines the Moid of the resource matching the filter expression and populates it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.. [optional] # noqa: E501
link (str): A URL to an instance of the 'mo.MoRef' class.. [optional] # noqa: E501
account_moid (str): The Account ID for this managed object.. [optional] # noqa: E501
create_time (datetime): The time when this managed object was created.. [optional] # noqa: E501
domain_group_moid (str): The DomainGroup ID for this managed object.. [optional] # noqa: E501
mod_time (datetime): The time when this managed object was last modified.. [optional] # noqa: E501
owners ([str], none_type): [optional] # noqa: E501
shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional] # noqa: E501
tags ([MoTag], none_type): [optional] # noqa: E501
version_context (MoVersionContext): [optional] # noqa: E501
ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
parent (MoBaseMoRelationship): [optional] # noqa: E501
permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
display_names (DisplayNames): [optional] # noqa: E501
bond_state (VirtualizationBondState): [optional] # noqa: E501
host_name (str): The UUID of the host to which this interface belongs to.. [optional] # noqa: E501
host_uuid (str): The UUID of the host to which this interface belongs to.. [optional] # noqa: E501
if_type (str): A hint of the interface type, such as \"ovs-bond\", \"device\", \"openvswitch\".. [optional] # noqa: E501
ip_addresses ([str], none_type): [optional] # noqa: E501
link_state (str): Link state information such as \"up\", \"down\". * `unknown` - The interface line is unknown. * `up` - The interface line is up. * `down` - The interface line is down. * `degraded` - For a bond/team interface, not all member interface is up.. [optional] if omitted the server will use the default value of "unknown" # noqa: E501
mac (str): The MAC address of the interface.. [optional] # noqa: E501
mtu (int): The MTU size of the interface.. [optional] # noqa: E501
name (str): The name of the host to which this interface belongs to.. [optional] # noqa: E501
vlans (str): A list of vlans allowed on this interface.. [optional] # noqa: E501
cluster (VirtualizationIweClusterRelationship): [optional] # noqa: E501
dv_uplink (VirtualizationIweDvUplinkRelationship): [optional] # noqa: E501
host (VirtualizationIweHostRelationship): [optional] # noqa: E501
network (VirtualizationIweNetworkRelationship): [optional] # noqa: E501
object_type (str): The fully-qualified name of the remote type referred by this relationship.. [optional] # noqa: E501
"""
class_id = kwargs.get('class_id', "mo.MoRef")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
required_args = {
'class_id': class_id,
}
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(
constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in kwargs.items():
if var_name in unused_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
not self._additional_properties_model_instances:
# discard variable.
continue
setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
],
'oneOf': [
MoMoRef,
VirtualizationIweHostInterface,
none_type,
],
}
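    # Usage sketch (hypothetical values): a relationship is typically populated from a Moid
    # or an OData selector, e.g.
    #   VirtualizationIweHostInterfaceRelationship(
    #       class_id="mo.MoRef",
    #       object_type="virtualization.IweHostInterface",
    #       moid="0123456789abcdef01234567")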
|
#!/usr/bin/python
# The MIT License (MIT)
#
# Copyright (c) 2017 Oracle
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
__author__ = "Michael Shanley (Oracle A-Team)"
__copyright__ = "Copyright (c) 2017 Oracle"
__version__ = "1.0.0.0"
__date__ = "@BUILDDATE@"
__status__ = "Development"
__module__ = "switch_agent_datasource"
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bcc_switch_agent_datasource
short_description: Switch an agent's datasource
version_added: "1.0"
description:
    - Switch an agent's datasource
options:
action:
description:
- Action to be executed against the BCC
required: false
default: switch_agent_datasource
choices: ['switch_agent_datasource']
endpoint:
description:
- BCC REST Endpoint
required: true
default: null
type: str
cookie:
description:
- ATG Cookie
required: false
default: null
type: str
switchTargetID:
description:
- ID of the target the agent(s) you want to switch belong to
required: true
default: null
type: str
agentIDs:
description:
            - IDs of the agents you want to switch datasources on
            - Comma-separated list of IDs
required: true
default: null
type: str
requirements:
- "python >= 2.6"
- "ATG BCCTools module"
...
'''
EXAMPLES = '''
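# Hypothetical usage sketch; all values below are placeholders.
- name: Switch the datasource for two agents on a target
  bcc_switch_agent_datasource:
    endpoint: "http://bcc.example.com:8080"
    switchTargetID: "Production"
    agentIDs: "agent01,agent02"
    cookie: "{{ atg_session_cookie }}"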
'''
import json

from bcc_rest.bcc_get_agent_id import getAgentID
# Main processing function
def main():
module = AnsibleModule(
argument_spec = dict(
action = dict(default='switch_agent_datasource', choices=['switch_agent_datasource']),
endpoint = dict(required=True, type='str'),
switchTargetID = dict(required=True, type='str'),
agentIDs = dict(required=True, type='str'),
cookie = dict(required=False, type='str')
)
)
endpoint = module.params['endpoint']
switchTargetID = module.params['switchTargetID']
agentIDs = module.params['agentIDs']
cookie = module.params['cookie']
changed = False
try:
if module.params['action'] == 'switch_agent_datasource':
response = getAgentID(endpoint, switchTargetID, agentIDs, cookie)
jsonobj = json.loads(response.text)
if ('formExceptions' in jsonobj):
module.fail_json(msg=jsonobj)
module.exit_json(changed=changed, targetID=jsonobj)
else:
module.fail_json(msg="Unknown action")
except Exception as e:
        module.fail_json(msg=str(e))
return
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.splitter import *
# Main function to kick off processing
if __name__ == "__main__":
main()
|
from django.contrib import admin
from .models import ToDo
admin.site.register(ToDo)
|
"""
Generic RPC functions for labby
"""
import asyncio
import os
from pathlib import Path
from typing import Any, Callable, Dict, Iterable, List, Optional, Type, Union
import yaml
from attr import attrib, attrs
from autobahn.wamp.exception import ApplicationError
from labby.console import Console
from labby.resource import LabbyResource, NetworkSerialPort, PowerAction, power_resources, power_resource_from_name
from .labby_error import (LabbyError, failed, invalid_parameter,
not_found)
from .labby_types import (ExporterName, GroupName, LabbyPlace, PlaceName, PowerState, Resource,
ResourceName, Session, Place)
from .labby_util import flatten
def _check_not_none(*args, **kwargs) -> Optional[LabbyError]:
    """Return an invalid_parameter error for the first keyword argument that is None, else None."""
    return next((invalid_parameter(f"Missing required parameter: {name}.")
                 for name, val in kwargs.items() if val is None), None)
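# Usage sketch: _check_not_none(place=place) yields an invalid_parameter LabbyError for the
# first keyword argument that is None, and None when every value is set.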
@attrs()
class RPCDesc():
name: str = attrib(default=None)
endpoint: str = attrib(default=None)
remote_endpoint: str = attrib(default=None)
info: Optional[str] = attrib(default=None)
parameter: Optional[List[Dict[str, str]]] = attrib(default=None)
return_type: Optional[str] = attrib(default=None)
def _localfile(path):
return Path(os.path.dirname(os.path.realpath(__file__))).joinpath(path)
FUNCTION_INFO = {}
with open(_localfile('rpc_desc.yaml'), 'r', encoding='utf-8') as file:
FUNCTION_INFO = {key: RPCDesc(**val) for key, val in yaml.load(file,
yaml.loader.FullLoader).items() if val is not None}
# non exhaustive list of serializable primitive types
_serializable_primitive: List[Type] = [int, float, str, bool]
def invalidates_cache(attribute, *rec_args, reconstitute: Optional[Callable] = None):
"""
on call clear attribute (e.g. set to None)
"""
def decorator(func: Callable):
def wrapped(self: Session, *args, **kwargs):
setattr(self, attribute, None)
return func(self, *args, **kwargs)
return wrapped
return decorator
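# Usage sketch (hypothetical target function): clear a cached attribute whenever a mutating
# RPC runs, e.g.
#   @invalidates_cache("places")
#   async def some_mutating_rpc(context, ...): ...
# The wrapper only resets the named session attribute to None before delegating.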
def cached(attribute: str):
"""
    Decorator definition to cache data in the labby context and fetch data from the server
"""
assert attribute is not None
def decorator(func: Callable):
async def wrapped(context: Session, *args, **kwargs):
assert context is not None
if not hasattr(context, attribute):
context.__dict__.update({attribute: None})
data = None
else:
data: Optional[Dict] = context.__getattribute__(
attribute)
if data is None:
data: Optional[Dict] = await func(context, *args, **kwargs)
if not isinstance(data, LabbyError):
context.__setattr__(attribute, data)
return data
return wrapped
return decorator
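# Example of the pattern: fetch_peers below is wrapped with @cached("peers"); the first call
# stores the coordinator response on the session and later calls reuse it until the attribute
# is reset to None (e.g. by invalidates_cache above).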
def labby_serialized(func):
"""
Custom serializer decorator for labby rpc functions
to make sure returned values are cbor/json serializable
"""
async def wrapped(*args, **kwargs) -> Union[None, List, Dict, int, float, str, bool]:
ret = await func(*args, **kwargs)
if ret is None:
return None
if isinstance(ret, LabbyError):
return ret.to_json()
if isinstance(ret, LabbyPlace):
return ret.to_json()
if isinstance(ret, (dict, list)) or type(ret) in _serializable_primitive:
return ret
raise NotImplementedError(
f"{type(ret)} can currently not be serialized!")
return wrapped
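# Usage sketch (hypothetical RPC): dicts, lists and primitive values pass through unchanged,
# while LabbyError/LabbyPlace results are converted with .to_json() before being returned.
#   @labby_serialized
#   async def my_rpc(context): return {"ok": True}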
async def fetch(context: Session, attribute: str, endpoint: str, *args, **kwargs) -> Any:
"""
    QoL function to fetch data from the Coordinator and store it in the given attribute on the Session
"""
assert context is not None
assert attribute is not None
assert endpoint is not None
data: Optional[Dict] = getattr(context, attribute)
if data is None:
data: Optional[Dict] = await context.call(endpoint, *args, **kwargs)
setattr(context, attribute, data)
return data
async def fetch_places(context: Session,
place: Optional[PlaceName]) -> Union[Dict[PlaceName, Place], LabbyError]:
"""
Fetch places from coordinator, update if missing and handle possible errors
"""
assert context is not None
_data = await context.places.get(context) # type: ignore
if _data is None:
if place is None:
return not_found("Could not find any places.")
return not_found(f"Could not find place with name {place}.")
if place is not None:
if place in _data.keys():
return {place: _data[place]}
return not_found(f"Could not find place with name {place}.")
return _data
async def fetch_resources(context: Session,
place: Optional[PlaceName],
resource_key: Optional[ResourceName]) -> Union[Dict, LabbyError]:
"""
Fetch resources from coordinator, update if missing and handle possible errors
"""
assert context is not None
data: Optional[Dict] = await context.resources.get(context)
if data is None:
if place is None:
return not_found("Could not find any resources.")
return not_found(f"No resources found for place {place}.")
if place is not None:
data = {exporter: {k: v for k, v in exporter_data.items() if k == place and v}
for exporter, exporter_data in data.items()}
if resource_key is not None:
data = {exporter:
{place_name:
{k: v for k, v in place_res.items() if k == resource_key if v}
for place_name, place_res in exporter_data.items() if place_res}
for exporter, exporter_data in data.items()}
return data
@cached("peers")
async def fetch_peers(context: Session) -> Union[Dict, LabbyError]:
session_ids = await context.call("wamp.session.list")
sessions = {}
for sess in session_ids: # ['exact']:
tmp = await context.call("wamp.session.get", sess)
if tmp and 'authid' in tmp:
sessions[tmp['authid']] = tmp
return sessions
async def get_exporters(context: Session) -> Union[List[ExporterName], LabbyError]:
peers = await fetch_peers(context)
if isinstance(peers, LabbyError):
return peers
assert peers is not None
return [x.replace('exporter/', '') for x in peers if x.startswith('exporter')]
def _calc_power_for_place(place_name, resources: Iterable[Dict]):
pstate = False
for res in resources:
if isinstance(res['acquired'], Iterable):
pstate |= place_name in res['acquired']
else:
pstate |= res['acquired'] == place_name
return pstate
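# Example: with resources [{'acquired': 'place-a'}, {'acquired': ['place-b']}],
# _calc_power_for_place('place-a', ...) is True and _calc_power_for_place('place-c', ...) is False.
# (A plain string 'acquired' value is itself an Iterable, so it is checked with `in`.)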
@cached("power_states")
async def fetch_power_state(context: Session,
place: Optional[PlaceName]) -> Union[PowerState, LabbyError]:
"""
Use fetch resource to determine power state, this may update context.resource
"""
_resources = await fetch_resources(context=context, place=place, resource_key=None)
if isinstance(_resources, LabbyError):
return _resources
if len(_resources) > 0:
_resources = flatten(_resources)
_places = await fetch_places(context, place)
if isinstance(_places, LabbyError):
return _places
power_states = {}
assert _places
for place_name, place_data in _places.items():
if 'acquired_resources' in place_data:
if len(place_data['acquired_resources']) == 0 or place_name not in _resources:
power_states[place_name] = {'power_state': False}
continue
resources_to_check = ((v for k, v in _resources[place_name].items() if any(
(k in a for a in place_data['acquired_resources']))))
power_states[place_name] = {
'power_state': _calc_power_for_place(place_name, resources_to_check)}
return power_states
@labby_serialized
async def places(context: Session,
place: Optional[PlaceName] = None) -> Union[List[LabbyPlace], LabbyError]:
"""
returns registered places as dict of lists
"""
context.log.info("Fetching places.")
data = await fetch_places(context, place)
if isinstance(data, LabbyError):
return data
power_states = await fetch_power_state(context=context, place=place)
assert power_states is not None
if isinstance(power_states, LabbyError):
return power_states
await get_reservations(context)
def token_from_place(name):
return next((token for token, x in context.reservations.items()
if x['filters']['main']['name'] == name), None)
place_res = []
assert data
for place_name, place_data in data.items():
# append the place to acquired places if
# it has been acquired in a previous session
if (place_data and place_data['acquired'] == context.user_name
and place_name not in context.acquired_places
):
context.acquired_places.add(place_name)
if place is not None and place_name != place:
continue
# ??? (Kevin) what if there are more than one or no matches
        if len(place_data["matches"]) > 0 and 'exporter' in place_data["matches"][0]:
exporter = place_data["matches"][0]["exporter"]
else:
exporter = None
place_data.update({
"name": place_name,
"exporter": exporter,
"power_state": power_states.get(place_name, {}).get('power_state', None),
"reservation": token_from_place(place_name)
})
place_res.append(place_data)
return place_res
@labby_serialized
async def list_places(context: Session) -> List[PlaceName]:
"""
Return all place names
"""
await fetch_places(context, None)
return list(context.places.get_soft().keys()) if context.places else []
@labby_serialized
async def resource(context: Session,
place: Optional[PlaceName] = None,
resource_key=None
) -> Union[Dict[ResourceName, Resource], LabbyError]:
"""
rpc: returns resources registered for given place
"""
context.log.info(f"Fetching resources for {place}.")
resource_data = await fetch_resources(context=context, place=place, resource_key=resource_key)
if isinstance(resource_data, LabbyError):
return resource_data
if place is None:
return resource_data
if len(flatten(resource_data)) == 0:
return not_found(f"Place {place} not found.")
return resource_data
@labby_serialized
async def power_state(context: Session,
place: PlaceName,
) -> Union[PowerState, LabbyError]:
"""
rpc: return power state for a given place
"""
if place is None:
        return invalid_parameter("Missing required parameter: place.")
power_data = await fetch_power_state(context=context, place=place)
assert power_data is not None
if isinstance(power_data, LabbyError):
return power_data
if place not in power_data.keys():
        return not_found(f"Place {place} not found on Coordinator.")
return power_data[place]
@labby_serialized
async def resource_overview(context: Session,
place: Optional[PlaceName] = None,
) -> Union[List[Resource], LabbyError]:
"""
rpc: returns list of all resources on target
"""
context.log.info(f"Fetching resources overview for {place}.")
targets = await fetch_resources(context=context, place=place, resource_key=None)
if isinstance(targets, LabbyError):
return targets
ret = []
for exporter, resources in targets.items():
for res_place, res in resources.items():
if place is None or place == res_place:
ret.extend({'name': key, 'target': exporter,
'place': res_place, **values} for key, values in res.items())
return ret
@labby_serialized
async def resource_by_name(context: Session,
name: ResourceName, # filter by name
) -> Union[List[Resource], LabbyError]:
"""
rpc: returns list of all resources of given name on target
"""
if name is None:
return invalid_parameter("Missing required parameter: name.")
resource_data = await fetch_resources(context, place=None, resource_key=None)
if isinstance(resource_data, LabbyError):
return resource_data
ret = []
for target, resources in resource_data.items():
for place, res in resources.items():
ret.extend(
{'name': key, 'target': target, 'place': place, **values}
for key, values in res.items()
if name == key
)
return ret
@labby_serialized
async def resource_names(context: Session) -> List[Dict[str, str]]:
await fetch_resources(context, None, None)
data = context.resources or {}
def it(x): return x.items()
return [
{'exporter': exporter,
'group': grp_name,
'class': x.get('cls'),
'name': name,
}
for exporter, group in it(data) for grp_name, res in it(group) for name, x in it(res)
]
@labby_serialized
async def acquire(context: Session,
place: PlaceName) -> Union[bool, LabbyError]:
"""
rpc for acquiring places
"""
if place is None:
return invalid_parameter("Missing required parameter: place.")
if place in context.acquired_places:
return failed(f"Already acquired place {place}.")
# , group, resource_key, place)
context.log.info(f"Acquiring place {place}.")
try:
acquire_successful = await context.call("org.labgrid.coordinator.acquire_place", place)
except ApplicationError as err:
return failed(f"Got exception while trying to call org.labgrid.coordinator.acquire_place. {err}")
if acquire_successful:
context.acquired_places.add(place)
# remove the reservation if there was one
if token := next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == place), None,):
ret = await cancel_reservation(context, token)
if isinstance(ret, LabbyError):
# context.log.error(f"Could not cancel reservation after acquire: {ret}")
print(f"Could not cancel reservation after acquire: {ret}")
del context.reservations[token]
return acquire_successful
@labby_serialized
async def release(context: Session,
place: PlaceName) -> Union[bool, LabbyError]:
"""
rpc for releasing 'acquired' places
"""
if place is None:
return invalid_parameter("Missing required parameter: place.")
if place not in context.acquired_places:
return failed(f"Place {place} is not acquired")
context.log.info(f"Releasing place {place}.")
try:
release_successful = await context.call('org.labgrid.coordinator.release_place', place)
if place in context.acquired_places: # place update was quicker
context.acquired_places.remove(place)
except ApplicationError as err:
return failed(f"Got exception while trying to call org.labgrid.coordinator.release_place. {err}")
return release_successful
@labby_serialized
async def info(_context=None, func_key: Optional[str] = None) -> Union[List[Dict], LabbyError]:
"""
RPC call for general info for RPC function usage
"""
if func_key is None:
return [desc.__dict__ for desc in globals()["FUNCTION_INFO"].values()]
if func_key not in globals()["FUNCTION_INFO"]:
return not_found(f"Function {func_key} not found in registry.")
return globals()["FUNCTION_INFO"][func_key].__dict__
async def get_reservations(context: Session) -> Dict:
"""
RPC call to list current reservations on the Coordinator
"""
reservation_data: Dict = await context.call("org.labgrid.coordinator.get_reservations")
for token, data in reservation_data.items():
if (data['state'] in ('waiting', 'allocated', 'acquired')
and data['owner'] == context.user_name):
context.to_refresh.add(token)
context.reservations.update(**reservation_data)
return reservation_data
@labby_serialized
async def create_reservation(context: Session, place: PlaceName, priority: float = 0.) -> Union[Dict, LabbyError]:
# TODO figure out filters, priorities, etc
# TODO should multiple reservations be allowed?
if place is None:
return invalid_parameter("Missing required parameter: place.")
await get_reservations(context) # get current state from coordinator
if any((place == x['filters']['main']['name'] for x in context.reservations.values() if 'name' in x['filters']['main'] and x['state'] not in ('expired', 'invalid'))):
return failed(f"Place {place} is already reserved.")
reservation = await context.call("org.labgrid.coordinator.create_reservation",
f"name={place}",
prio=priority)
if not reservation:
return failed("Failed to create reservation")
context.reservations.update(reservation)
    context.to_refresh.add(next(iter(reservation)))
return reservation
async def refresh_reservations(context: Session):
while True:
to_remove = set()
context.reservations = await context.call("org.labgrid.coordinator.get_reservations")
for token in context.to_refresh:
if token in context.reservations:
# context.log.info(f"Refreshing reservation {token}")
state = context.reservations[token]['state']
place_name = context.reservations[token]['filters']['main']['name']
if state == 'waiting':
                    ret = await context.call("org.labgrid.coordinator.poll_reservation", token)
                    if not ret:
                        context.log.error(f"Failed to poll reservation {token}.")
                    else:
                        context.reservations[token] = ret
# acquire the resource, when it has been allocated by the coordinator
elif (context.reservations[token]['state'] == 'allocated'
or (context.reservations[token]['state'] == 'acquired' and place_name not in context.acquired_places)
):
ret = await acquire(context, place_name)
await cancel_reservation(context, place_name)
if not ret:
context.log.error(
f"Could not acquire reserved place {token}: {place_name}")
to_remove.add(token)
else:
to_remove.add(token)
else:
to_remove.add(token)
for token in to_remove:
context.to_refresh.remove(token)
await asyncio.sleep(1.) # !! TODO set to 10s
@labby_serialized
async def cancel_reservation(context: Session, place: PlaceName) -> Union[bool, LabbyError]:
if place is None:
return invalid_parameter("Missing required parameter: place.")
await get_reservations(context) # get current state from coordinator
token = next((token for token, x in context.reservations.items()
if x['filters']['main']['name'] == place), None)
if token is None:
return failed(f"No reservations available for place {place}.")
del context.reservations[token]
return await context.call("org.labgrid.coordinator.cancel_reservation", token)
@labby_serialized
async def poll_reservation(context: Session, place: PlaceName) -> Union[Dict, LabbyError]:
if place is None:
return invalid_parameter("Missing required parameter: place.")
token = next((token for token, x in context.reservations.items()
if x['filters']['main']['name'] == place), None)
    if token is None:
        return failed(f"No reservations available for place {place}.")
reservation = await context.call("org.labgrid.coordinator.poll_reservation", token)
context.reservations[token] = reservation
return reservation
@labby_serialized
async def reset(context: Session, place: PlaceName) -> Union[bool, LabbyError]:
"""
Send a reset request to a place matching a given place name
Note
"""
    check = _check_not_none(**vars())
if isinstance(check, LabbyError):
return check
context.log.info(f"Resetting place {place}")
release_later = False
if place not in context.acquired_places:
release_later = True
acq = await acquire(context, place)
        if isinstance(acq, LabbyError):
return acq
if not acq:
return failed(f"Could not acquire place {place}.")
res = await fetch_resources(context, place, None)
if isinstance(res, LabbyError):
return failed(f"Failed to get resources for place {place}.")
res = flatten(res, 2) # remove exporter and group from res
for resname, resdata in res.items():
if resname in power_resources:
try:
context.log.info(f"Resetting {place}/{resname}.")
power_resource = power_resource_from_name(resname, resdata)
url = power_resource.power(PowerAction.cycle)
assert (ssh_session := context.ssh_session) is not None
assert ssh_session.client
(_, _, serr) = ssh_session.client.exec_command(
command=f"curl -Ss '{url}' > /dev/null"
)
if len(msg := serr.read()) > 0:
context.log.error(
f"Got error while resetting console. {msg}")
            except ValueError:
                pass  # not a valid power resource after all
            except Exception:
                raise  # re-raise any other error
if release_later:
rel = await release(context, place)
if isinstance(rel, LabbyError) or not rel:
return failed(f"Failed to release place {place} after reset.")
return True
@labby_serialized
async def console(context: Session, place: PlaceName):
# TODO allow selection of resource to connect console to
if place is None:
return invalid_parameter("Missing required parameter: place.")
if place not in context.acquired_places:
ret = await acquire(context, place)
if isinstance(ret, LabbyError):
return ret
if not ret:
return failed("Failed to acquire Place (It may already have been acquired).")
if place in context.open_consoles:
return failed(f"There is already a console open for {place}.")
# check that place has a console
_resources = await fetch_resources(context, place, resource_key=None)
if isinstance(_resources, LabbyError):
return _resources
if len(_resources) == 0:
return failed(f"No resources on {place}.")
_resources = flatten(_resources, depth=2) # remove exporter and place
_resource: Optional[LabbyResource] = next(
(
NetworkSerialPort(
cls=data['cls'],
port=data['params']['port'],
host=data['params']['host'],
speed=data['params']['speed'],
protocol=data['params'].get('protocol', 'rfc2217'),
)
for _, data in _resources.items()
if 'cls' in data and data['cls'] == 'NetworkSerialPort'
),
None,
)
if _resource is None:
return failed(f"No network serial port on {place}.")
assert isinstance(_resource, NetworkSerialPort)
assert context.ssh_session.client
context.open_consoles[place] = (_con := Console(host=_resource.host or 'localhost',
speed=_resource.speed,
port=_resource.port,
ssh_session=context.ssh_session.client))
async def _read(read_fn,):
while place in context.open_consoles:
try:
data = await read_fn()
assert context.frontend
context.frontend.publish(f"localhost.consoles.{place}", data)
except (OSError, EOFError):
print_exception()
context.log.error(f"Console closed read on {place}.")
_con.close()
if place in context.open_consoles:
del context.open_consoles[place]
print("Closing read.")
            except Exception:
print_exception()
context.log.error(f"Console on {place} read failed.")
_con.close()
if place in context.open_consoles:
del context.open_consoles[place]
print("Closing read exc.")
asyncio.run_coroutine_threadsafe(
_read(_con.read_stdout), asyncio.get_event_loop())
asyncio.run_coroutine_threadsafe(
_read(_con.read_stderr), asyncio.get_event_loop())
return True
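# Note (illustrative): console output is published to the WAMP topic
# f"localhost.consoles.{place}" above. A frontend session could receive it
# with something like (autobahn, inside onJoin):
#
#     await self.subscribe(lambda data: print(data, end=""), f"localhost.consoles.{place}")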
@labby_serialized
async def console_write(context: Session, place: PlaceName, data: str) -> Union[bool, LabbyError]:
# TODO implement
if place not in context.acquired_places:
return failed(f"Place {place} is not acquired.")
if not (_console := context.open_consoles.get(place)):
return failed(f"Place {place} has no open consoles.")
if not data:
# data was empty
return failed(f"Could not write to Console {place}. Data was empty")
try:
_console.write_to_stdin(data)
except Exception as e:
context.log.exception(e)
return failed(f"Failed to write to Console {place}.")
context.log.info(f"Console on {place} received: {data}.")
return True
@labby_serialized
async def console_close(context: Session, place: PlaceName) -> Optional[LabbyError]:
if place not in context.acquired_places:
return failed(f"Place {place} is not acquired.")
if not context.open_consoles.get(place):
return failed(f"Place {place} has no open consoles.")
context.log.info(f"Closing console on {place}.")
context.open_consoles[place].close()
del context.open_consoles[place]
async def video(context: Session, *args):
pass
@labby_serialized
async def forward(context: Session, *args):
"""
Forward a rpc call to the labgrid coordinator
"""
return await context.call(*args)
@labby_serialized
async def create_place(context: Session, place: PlaceName) -> Union[bool, LabbyError]:
"""
Create a new place on the coordinator
"""
if place is None:
return invalid_parameter("Missing required parameter: place.")
_places = await fetch_places(context, place=None)
if isinstance(_places, LabbyError):
return _places
assert _places
if place in _places:
return failed(f"Place {place} already exists.")
return await context.call("org.labgrid.coordinator.add_place", place)
@labby_serialized
async def delete_place(context: Session, place: PlaceName) -> Union[bool, LabbyError]:
if place is None:
return invalid_parameter("Missing required parameter: place.")
_places = await fetch_places(context, place)
assert context.places # should have been set with fetch_places
if isinstance(_places, LabbyError):
return _places
return await context.call("org.labgrid.coordinator.del_place", place)
@labby_serialized
async def create_resource(context: Session, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]:
# TODO (Kevin) Find a way to do this without being a exporter/ delegate to exporter
if group_name is None:
return invalid_parameter("Missing required parameter: group_name.")
if resource_name is None:
return invalid_parameter("Missing required parameter: resource_name.")
ret = await context.call("org.labgrid.coordinator.set_resource", group_name, resource_name, {})
return ret
@labby_serialized
async def delete_resource(context: Session, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]:
# TODO (Kevin) Find a way to do this without being a exporter/ delegate to exporter
if group_name is None:
return invalid_parameter("Missing required parameter: group_name.")
if resource_name is None:
return invalid_parameter("Missing required parameter: resource_name.")
ret = await context.call("org.labgrid.coordinator.update_resource", group_name, resource_name, None)
return ret
@labby_serialized
async def places_names(context: Session) -> Union[List[PlaceName], LabbyError]:
_places = await fetch_places(context, None)
if isinstance(_places, LabbyError):
return _places
assert _places
return list(_places.keys())
@labby_serialized
async def get_alias(context: Session, place: PlaceName) -> Union[List[str], LabbyError]:
if place is None:
return invalid_parameter("Missing required parameter: place.")
data = await fetch_places(context, place)
if isinstance(data, LabbyError):
return data
assert data
if len(data) == 0:
return []
return [a for x in data.values() for a in x['aliases']]
@labby_serialized
async def add_match(context: Session,
place: PlaceName,
exporter: ExporterName,
group: GroupName,
cls: ResourceName,
name: ResourceName) -> Union[bool, LabbyError]:
    check = _check_not_none(**vars())
    if isinstance(check, LabbyError):
        return check
try:
return await context.call("org.labgrid.coordinator.add_place_match", place, f"{exporter}/{group}/{cls}/{name}")
    except Exception:
        return failed(f"Failed to add match {exporter}/{group}/{cls}/{name} to place {place}.")
@labby_serialized
async def del_match(context: Session,
place: PlaceName,
exporter: ExporterName,
group: GroupName,
cls: ResourceName,
name: ResourceName) -> Union[bool, LabbyError]:
    check = _check_not_none(**vars())
    if isinstance(check, LabbyError):
        return check
try:
return await context.call("org.labgrid.coordinator.del_place_match", place, f"{exporter}/{group}/{cls}/{name}")
    except Exception:
        return failed(f"Failed to delete match {exporter}/{group}/{cls}/{name} from place {place}.")
@labby_serialized
async def acquire_resource(context: Session,
place_name: PlaceName,
exporter: ExporterName,
group_name: GroupName,
resource_name: ResourceName) -> Union[bool, LabbyError]:
    check = _check_not_none(**vars())
    if isinstance(check, LabbyError):
        return check
try:
procedure = f"org.labgrid.exporter.{exporter}.acquire"
return await context.call(procedure, group_name, resource_name, place_name)
    except Exception:
return failed(f"Failed to acquire resource {exporter}/{place_name}/{resource_name}.")
@labby_serialized
async def release_resource(context: Session,
place_name: PlaceName,
exporter: ExporterName,
group_name: GroupName,
resource_name: ResourceName) -> Union[bool, LabbyError]:
    check = _check_not_none(**vars())
    if isinstance(check, LabbyError):
        return check
try:
procedure = f"org.labgrid.exporter.{exporter}.release"
return await context.call(procedure, group_name, resource_name, place_name)
except Exception:
return failed(f"Failed to release resource {exporter}/{place_name}/{resource_name}.")
@labby_serialized
async def cli_command(context: Session, command: str) -> Union[str, LabbyError]:
if command is None or not command:
return failed("Command must not be empty.")
assert (ssh_session := context.ssh_session).client
context.log.info(
f"Issuing labgrid-client command: labgrid-client {command}")
try:
(_, sout, serr) = ssh_session.client.exec_command(
command=f"export LG_USERNAME={context.user_name}; labgrid-client {command}")
so = str(sout.read(), encoding='utf-8')
if se := str(serr.read(), encoding='utf-8'):
so += f"\n\n{se}"
return so
except Exception:
return failed("Failed to execute cli command.")
@labby_serialized
async def username(context: Session) -> Union[str, LabbyError]:
return context.user_name or failed("Username has not been set correctly.")
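# ---------------------------------------------------------------------------
# Minimal usage sketch (assumptions clearly marked, not part of this module):
# how a WAMP client might call the RPCs defined above via autobahn. The router
# URL, the realm and the "localhost." URI prefix are guesses derived from the
# "localhost.consoles.<place>" topic used in console(); adjust them to match
# the actual registration prefix of your labby backend.
#
# from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
#
#
# class ExampleFrontend(ApplicationSession):
#     async def onJoin(self, details):
#         places = await self.call("localhost.places_names")
#         print("known places:", places)
#         if places:
#             acquired = await self.call("localhost.acquire", places[0])
#             print("acquired:", acquired)
#             await self.call("localhost.release", places[0])
#         self.leave()
#
#
# if __name__ == "__main__":
#     ApplicationRunner("ws://127.0.0.1:8083/ws", realm="frontend").run(ExampleFrontend)
# ---------------------------------------------------------------------------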
|
from datetime import date
import dateparser
from scrapy import FormRequest, Request
from gazette.items import Gazette
from gazette.spiders.base import BaseGazetteSpider
class VilaVelhaSpider(BaseGazetteSpider):
name = "es_vila_velha"
allowed_domains = ["www.vilavelha.es.gov.br"]
TERRITORY_ID = "3205200"
GAZETTE_URL_CSS = "td:last-child a::attr(href)"
GAZETTE_DATE_CSS = "td:nth-child(2) span b::text"
GAZETTE_ISSUE_CSS = "td:nth-child(3) span b::text"
JAVASCRIPT_POSTBACK_REGEX = r"javascript:__doPostBack\('(.*)',''\)"
start_date = date(2016, 7, 1)
def start_requests(self):
start_date = self.start_date.strftime("%d/%m/%Y")
end_date = self.end_date.strftime("%d/%m/%Y")
base_url = "https://www.vilavelha.es.gov.br/diariooficial/ConsultaDiario.aspx"
gazettes_url = f"{base_url}?dataInicial={start_date}&dataFinal={end_date}"
yield Request(gazettes_url)
def parse(self, response):
for element in response.css("#ctl00_cpConteudo_gvDocumentos tr"):
is_header = element.css("th").extract() != []
if is_header:
continue
            gazette_date = element.css(self.GAZETTE_DATE_CSS).get()
            gazette_date = dateparser.parse(gazette_date, languages=["pt"]).date()
event_target = element.css(self.GAZETTE_URL_CSS).re_first(
self.JAVASCRIPT_POSTBACK_REGEX
)
gazette_issue = element.css(self.GAZETTE_ISSUE_CSS).get()
is_extra = "EXTRA" in gazette_issue
edition_number = gazette_issue.split(" ")[0]
document_request = FormRequest.from_response(
response, formdata={"__EVENTTARGET": event_target}
)
yield Gazette(
                date=gazette_date,
file_requests=[document_request],
edition_number=edition_number,
is_extra_edition=is_extra,
power="executive_legislative",
)
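# Illustrative sketch (not used by the spider): what JAVASCRIPT_POSTBACK_REGEX
# extracts from a typical ASP.NET postback link. The href below is made up for
# demonstration; real event targets on the site will differ.
#
# import re
#
# href = "javascript:__doPostBack('ctl00$cpConteudo$gvDocumentos$ctl02$lnkBaixar','')"
# event_target = re.search(r"javascript:__doPostBack\('(.*)',''\)", href).group(1)
# assert event_target == "ctl00$cpConteudo$gvDocumentos$ctl02$lnkBaixar"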
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import sphinx_rtd_theme
import os
import sys
# -- Project information -----------------------------------------------------
project = 'GDScript to reStructured'
copyright = '2021, GDScript'
author = 'Nathan Lavato'
# The full version, including alpha/beta/rc tags
version = '0.1.0'
release = version
pygments_style = 'sphinx'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
needs_sphinx = '3.0'
sys.path.append(os.path.abspath("_extensions"))
extensions = [
'sphinx_tabs.tabs',
"notfound.extension",
"sphinx.ext.extlinks"
]
# The Sphinx Tabs extension warns when used with unknown builders (like the
# dummy builder). The warning is harmless, so silence it here and keep
# treating other warnings as errors.
sphinx_tabs_nowarn = True
extlinks = {
'godot_class' :
('https://docs.godotengine.org/en/stable/classes/class_%s.html', '')
}
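# Example (illustrative): with the extlink above, .rst sources can write
# :godot_class:`node2d`, which expands to a link to
# https://docs.godotengine.org/en/stable/classes/class_node2d.html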
# Custom 404 page HTML template.
# https://github.com/readthedocs/sphinx-notfound-page
notfound_context = {
"title": "Page not found",
"body": """
<h1>Page not found</h1>
<p>
Sorry, we couldn't find that page. It may have been renamed or removed
in the version of the documentation you're currently browsing.
</p>
<p>
If you're currently browsing the
<em>latest</em> version of the documentation, try browsing the
<a href="/en/stable/"><em>stable</em> version of the documentation</a>.
</p>
<p>
Alternatively, use the
<a href="#" onclick="$('#rtd-search-form [name=\\'q\\']').focus()">Search docs</a>
box on the left or <a href="/">go to the homepage</a>.
</p>
""",
}
# on_rtd is True when building on readthedocs.org; this check is taken from docs.readthedocs.org.
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if not on_rtd:
notfound_urls_prefix = ''
if not os.getenv("SPHINX_NO_GDSCRIPT"):
extensions.append("gdscript")
# if not os.getenv("SPHINX_NO_SEARCH"):
# extensions.append("sphinx_search.extension")
if not os.getenv("SPHINX_NO_DESCRIPTIONS"):
extensions.append("godot_descriptions")
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# You can specify multiple suffixes as a list of strings: ['.rst', '.md']
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_encoding = 'utf-8-sig'
# The master toctree document
master_doc = 'index'
# Parse Sphinx tags passed from RTD via environment
env_tags = os.getenv("SPHINX_TAGS")
if env_tags is not None:
for tag in env_tags.split(","):
print("Adding Sphinx tag: %s" % tag.strip())
        tags.add(tag.strip())  # noqa: F821
supported_languages = {
"en": "Godot Engine (%s) documentation in English",
}
language = os.getenv("READTHEDOCS_LANGUAGE", "en")
if language not in supported_languages:
print("Unknown language: " + language)
print("Supported languages: " + ", ".join(supported_languages.keys()))
print(
"The configured language is either wrong, or it should be added to supported_languages in conf.py. Falling back to 'en'."
)
language = "en"
is_i18n = tags.has("i18n") # noqa: F821
exclude_patterns = ["_build"]
# fmt: off
# These imports should *not* be moved to the start of the file,
# they depend on the sys.path.append call registering "_extensions".
# GDScript syntax highlighting
from gdscript import GDScriptLexer
from sphinx.highlighting import lexers
lexers["gdscript"] = GDScriptLexer()
# fmt: on
smartquotes = False
# Pygments (syntax highlighting) style to use
pygments_style = "sphinx"
highlight_language = "gdscript"
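# Example (illustrative): with the GDScript lexer registered above and
# highlight_language set to "gdscript", plain literal blocks and
# ".. code-block:: gdscript" directives are highlighted as GDScript, e.g.:
#
#     .. code-block:: gdscript
#
#        func _ready() -> void:
#            print("Hello, world!")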
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# These paths are either relative to html_static_path
# or fully qualified paths (eg. https://...)
html_css_files = [
"css/custom.css",
"css/my.css"
]
html_js_files = [
"js/custom.js",
]
html_theme_options = {
'logo_only': True,
'collapse_navigation': False
}
html_logo = "docs_logo.png"
latex_elements = {
'extraclassoptions': 'openany',
'preamble': r'''
\usepackage{subfig}
\usepackage{graphicx}
''',
'papersize': 'a4paper'
}
|
import os
from nornir.plugins.tasks import networking
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
class Test(object):
def test_netmiko_file_transfer(self, nornir):
source_file = os.path.join(THIS_DIR, "data", "test_file.txt")
dest_file = "test_file.txt"
result = nornir.filter(name="dev4.group_2").run(
networking.netmiko_file_transfer,
source_file=source_file,
dest_file=dest_file,
direction="put",
)
assert result
for h, r in result.items():
assert r.result
assert r.changed
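# Illustrative sketch (assumption): the `nornir` argument above is expected to
# be a pytest fixture provided elsewhere (typically conftest.py). A minimal
# version might look like this; the config file path is a placeholder.
#
# import pytest
# from nornir import InitNornir
#
#
# @pytest.fixture(scope="session")
# def nornir():
#     return InitNornir(config_file=os.path.join(THIS_DIR, "config.yaml"))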
|