#-----------------------------------------------------------------------------
# Copyright (c) 2015, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from PyInstaller.utils.hooks import collect_data_files
from PyInstaller.compat import is_win, is_darwin
import os
import sys
# The osgeo libraries require auxiliary data and may have hidden dependencies.
# There are several possible configurations in which these libraries can be
# deployed.
# This hook evaluates the cases when:
# - the `data` folder is present "in-source" (sharing the same namespace folder
# as the code libraries)
# - the `data` folder is present "out-source" (for instance, on Anaconda for
# Windows, in PYTHONHOME/Library/data)
# In the latter case, the hook also checks for the presence of the `proj`
# library (e.g., on Windows in PYTHONHOME) and, if found, adds it to the bundle.
#
# This hook has been tested with gdal (v.1.11.2 and 1.11.3) on:
# - Win7 64bit
# - Ubuntu 15.04 64bit
# - Mac OS X Yosemite 10.10
#
# TODO: Fix for gdal>=2.0: 'NameError: global name 'help' is not defined'
# flag used to identify an Anaconda environment
is_conda = False
# Auxiliary data:
#
# - general case (data in 'osgeo/data/gdal'):
datas = collect_data_files('osgeo', subdir=os.path.join('data', 'gdal'))
# check whether the data was actually found in 'osgeo/data/gdal'
if len(datas) == 0:
if hasattr(sys, 'real_prefix'): # check if in a virtual environment
root_path = sys.real_prefix
else:
root_path = sys.prefix
# - conda-specific
if is_win:
tgt_gdal_data = os.path.join('Library', 'data')
src_gdal_data = os.path.join(root_path, 'Library', 'data')
else: # both linux and darwin
tgt_gdal_data = os.path.join('share', 'gdal')
src_gdal_data = os.path.join(root_path, 'share', 'gdal')
if os.path.exists(src_gdal_data):
is_conda = True
datas.append((src_gdal_data, tgt_gdal_data))
# a runtime hook takes care of defining the path for `GDAL_DATA`
# Hidden dependencies
if is_conda:
# if `proj.4` is present, it provides additional functionality
if is_win:
proj4_lib = os.path.join(root_path, 'proj.dll')
elif is_darwin:
proj4_lib = os.path.join(root_path, 'lib', 'libproj.dylib')
else: # assumed linux-like settings
proj4_lib = os.path.join(root_path, 'lib', 'libproj.so')
if os.path.exists(proj4_lib):
binaries = [(proj4_lib, ""), ]
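# For reference, a runtime hook along the lines below (shipped as a separate
# rthook file) can point GDAL at the bundled data at startup. This is a
# hedged sketch, not the exact hook distributed with PyInstaller:
#
#     import os
#     import sys
#     if hasattr(sys, '_MEIPASS'):  # set by the PyInstaller bootloader
#         os.environ['GDAL_DATA'] = os.path.join(sys._MEIPASS, 'data', 'gdal')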
|
import matplotlib.pyplot as plt
import pandas as pd
from numpy import arange, array
import os
import logging
logging.basicConfig()
logger = logging.getLogger('PlotTimeCost')
logger.setLevel('INFO')
class PlotTimeCostBar:
def __init__(self, data, path, show=False):
self.data = data
self.path = path
self.show_flag = show
(filepath, tempfilename) = os.path.split(path)
if not os.path.exists(filepath):
os.makedirs(filepath)
(filename, extension) = os.path.splitext(tempfilename)
self.format = extension[1:]
def plot(self):
        data = array([0.0, 0.0, 0.0])  # float dtype so fractional seconds are not truncated below
        data[1:] = self.data['Time Cost'].values
fig = plt.figure(figsize=(6, 6))
ax = fig.add_subplot(111)
width = 0.5
xticks = self.data.index
n = data.shape[0]
ind = arange(n)
data = data / 3600
colors = ['black', 'tab:blue', 'tab:green', 'tab:red', 'tab:purple', 'tab:brown']
plt.bar(x=ind, height=data, width=width, color=colors)
ax.set_xticks(ind[1:])
ax.set_xticklabels(xticks)
# ax.set_xlabel('Multi-fidelity control strategy', fontsize=16)
ax.tick_params(labelsize=12)
ax.set_ylabel('Time Cost (h)', fontsize=16)
if self.show_flag:
plt.show()
fig.savefig(self.path, format=self.format, dpi=80, bbox_inches='tight')
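# A minimal usage sketch (the file name and timings below are made up): the
# class expects a DataFrame with a 'Time Cost' column in seconds, one row per bar.
if __name__ == '__main__':
    demo = pd.DataFrame({'Time Cost': [3600, 7200]}, index=['low-fi', 'high-fi'])
    PlotTimeCostBar(demo, './figures/time_cost.png').plot()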
|
"""
Copyright 2017 Arm Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
ARCH_SPECIFIC_LIBS = ['mkl', 'otherarch']
"""Libraries that are not available on aarch64."""
|
apiKey = 'yours'
apiSecret = 'yours'
callbackUrl = 'http://fledna.duapp.com/query'
openid = 'yours'
accessToken = 'yours'
|
# https://stockmarketmba.com/globalstockexchanges.php
exchanges = {
'USA': None,
'Germany': 'XETR',
'Hong Kong': 'XHKG',
'Japan': 'XTKS',
'France': 'XPAR',
'Canada': 'XTSE',
'United Kingdom': 'XLON',
'Switzerland': 'XSWX',
'Australia': 'XASX',
'South Korea': 'XKRX',
'The Netherlands': 'XAMS',
'Spain': 'XMAD',
'Russia': 'MISX',
'Italy': 'XMIL',
'Belgium': 'XBRU',
    'Mexico': 'XMEX',
'Sweden': 'XSTO',
'Norway': 'XOSL',
'Finland': 'XHEL',
'Denmark': 'XCSE',
'Austria': 'XWBO'
}
exchanges_untested = {
'Argentina': 'XBUE',
'Australia_XNEC': 'XNEC',
'Australia': 'XASX',
'Austria': 'XWBO',
'Bahrain': 'XBAH',
'Bangladesh': 'XDHA',
'Belgium': 'XBRU',
'Brazil': 'BVMF',
'Canada_XCNQ': 'XCNQ',
'Canada': 'XTSE',
'Canada_XTSX': 'XTSX',
'Canada_NEOE': 'NEOE',
'Chile': 'XSGO',
'China_SHG': 'XSHG',
'China': 'XSHE',
'Colombia': 'XBOG',
'Croatia': 'XZAG',
'Cyprus': 'XCYS',
'Czech Republic': 'XPRA',
'Denmark': 'XCSE',
'Egypt': 'XCAI',
'Finland': 'XHEL',
'France': 'XPAR',
'Germany_XEQT': 'XEQT',
'Germany_XBER': 'XBER',
'Germany_XDUS': 'XDUS',
'Germany_XFRA': 'XFRA',
'Germany_XMUN': 'XMUN',
'Germany_XSTU': 'XSTU',
'Germany': 'XETR',
'Germany_XQTX': 'XQTX',
'Greece': 'XATH',
'Hong Kong': 'XHKG',
'Hungary': 'XBUD',
'Iceland': 'XICE',
'India_XBOM': 'XBOM',
'India': 'XNSE',
'Indonesia': 'XIDX',
'Ireland': 'XDUB',
'Israel': 'XTAE',
'Italy': 'MTAA',
'Japan': 'XTKS',
'Jordan': 'XAMM',
'Kenya': 'XNAI',
'Kuwait': 'XKUW',
'Luxembourg': 'XLUX',
'Malaysia': 'XKLS',
'Mexico': 'XMEX',
'Morocco': 'XCAS',
'New Zealand': 'XNZE',
'Nigeria': 'XNSA',
'Norway': 'XOSL',
'Norway_NOTC': 'NOTC',
'Oman': 'XMUS',
'Pakistan': 'XKAR',
'Peru': 'XLIM',
'Philippines': 'XPHS',
'Poland': 'XWAR',
'Portugal': 'XLIS',
'Qatar': 'DSMD',
'Romania': 'XBSE',
'Russia': 'MISX',
'Saudi Arabia': 'XSAU',
'Senegal': 'XBRV',
'Singapore': 'XSES',
'Slovenia': 'XLJU',
'South Africa': 'XJSE',
'South Korea': 'XKRX',
'South Korea_XKOS': 'XKOS',
'Spain': 'XMAD',
'Sri Lanka': 'XCOL',
'Sweden_XNGM': 'XNGM',
'Sweden': 'XSTO',
'Switzerland': 'XSWX',
'Switzerland_XVTX': 'XVTX',
'Syria': 'XDSE',
'Taiwan': 'XTAI',
'Thailand': 'XBKK',
'The Netherlands_XTOMX': 'TOMX',
'The Netherlands': 'XAMS',
'Turkey': 'XIST',
'United Arab Emirates_XDFM': 'XDFM',
'United Arab Emirates_DIFX': 'DIFX',
'United Arab Emirates': 'XADS',
'United Kingdom_BATE': 'BATE',
'United Kingdom_CHIX': 'CHIX',
'United Kingdom': 'XLON',
'United Kingdom_XPOS': 'XPOS',
'United Kingdom_TRQX': 'TRQX',
'United Kingdom_BOAT': 'BOAT',
'USA_XASE': 'XASE',
'USA_BATS': 'BATS',
'USA_XNYS': 'XNYS',
'USA_ARCX': 'ARCX',
'USA_XNMS': 'XNMS',
'USA_XNCM': 'XNCM',
'USA_OOTC': 'OOTC',
'USA_XNGS': 'XNGS',
'USA': None,
'Vietnam': 'XSTC',
'Vietnam_HSTC': 'HSTC'
}
currencies = [
'ALL',
'AFN',
'ARS',
'AWG',
'AUD',
'AZN',
'BSD',
'BBD',
'BYN',
'BZD',
'BMD',
'BOB',
'BAM',
'BWP',
'BGN',
'BRL',
'BND',
'KHR',
'CAD',
'KYD',
'CLP',
'CNY',
'COP',
'CRC',
'HRK',
'CUP',
'CZK',
'DKK',
'DOP',
'XCD',
'EGP',
'SVC',
'EUR',
'FKP',
'FJD',
'GHS',
'GIP',
'GTQ',
'GGP',
'GYD',
'HNL',
'HKD',
'HUF',
'ISK',
'INR',
'IDR',
'IRR',
'IMP',
'ILS',
'JMD',
'JPY',
'JEP',
'KZT',
'KPW',
'KRW',
'KGS',
'LAK',
'LBP',
'LRD',
'MKD',
'MYR',
'MUR',
'MXN',
'MNT',
'MZN',
'NAD',
'NPR',
'ANG',
'NZD',
'NIO',
'NGN',
'NOK',
'OMR',
'PKR',
'PAB',
'PYG',
'PEN',
'PHP',
'PLN',
'QAR',
'RON',
'RUB',
'SHP',
'SAR',
'RSD',
'SCR',
'SGD',
'SBD',
'SOS',
'ZAR',
'LKR',
'SEK',
'CHF',
'SRD',
'SYP',
'TWD',
'THB',
'TTD',
'TRY',
'TVD',
'UAH',
'GBP',
'USD',
'UYU',
'UZS',
'VEF',
'VND',
'YER',
'ZWD'
]
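# A minimal lookup sketch (the helper below is an illustration, not part of
# the original data file): resolve a country name to its ISO 10383 MIC code,
# falling back to the untested table; 'USA' intentionally maps to None.
def mic_for(country):
    if country in exchanges:
        return exchanges[country]
    return exchanges_untested.get(country)

# mic_for('Germany') -> 'XETR'
# mic_for('Brazil')  -> 'BVMF' (only in the untested table)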
|
"""About and help services.
(help browser anyone?)
"""
import importlib.resources
import importlib_metadata
from gi.repository import Gtk
from gaphor.abc import ActionProvider, Service
from gaphor.core import action
class HelpService(Service, ActionProvider):
def __init__(self, session):
self.session = session
def shutdown(self):
pass
@property
def window(self):
return self.session.get_service("main_window").window
@action(name="app.about")
def about(self):
builder = Gtk.Builder()
with importlib.resources.path(
"gaphor.services.helpservice", "about.ui"
) as glade_file:
builder.add_objects_from_file(str(glade_file), ("about",))
about = builder.get_object("about")
about.set_version(importlib_metadata.version("gaphor"))
about.set_modal(True)
about.set_transient_for(self.window)
about.show()
@action(name="app.shortcuts")
def shortcuts(self):
builder = Gtk.Builder()
with importlib.resources.path(
"gaphor.services.helpservice", "shortcuts.ui"
) as glade_file:
builder.add_objects_from_file(str(glade_file), ("shortcuts-gaphor",))
shortcuts = builder.get_object("shortcuts-gaphor")
shortcuts.set_modal(True)
shortcuts.set_transient_for(self.window)
shortcuts.show()
return shortcuts
|
import requests
import zipfile
import io
import json
import os
import traceback
GITLAB_TOKEN=""
token_header = {'PRIVATE-TOKEN': GITLAB_TOKEN}
GROUP = 'zoe-apps'
ZAPP_STORE_PATH = '/mnt/cephfs/zoe-apps/'
def get_projects(group):
prj_list = []
r = requests.get("http://gitlab.eurecom.fr/api/v4/groups/{}/projects".format(group), headers=token_header)
for project in r.json():
prj_list.append((project['name'], project['id']))
return prj_list
def get_images_from_zapp(zapp):
images = []
for s in zapp['services']:
images.append(s['image'])
return images
def pull_images(images):
for image in images:
print(image)
os.system("docker -H 192.168.47.5:2380 pull {}".format(image))
def main(project_name, project):
r = requests.get("http://gitlab.eurecom.fr/api/v4/projects/{}/pipelines?status=success".format(project), headers=token_header)
pipelines = r.json()
if len(pipelines) > 0:
latest_pipeline_run = pipelines[0]['id']
else:
return
r = requests.get("http://gitlab.eurecom.fr/api/v4/projects/{}/pipelines/{}/jobs?scope=success".format(project, latest_pipeline_run), headers=token_header)
jobs = r.json()
if len(jobs) == 0:
return
for good_job in jobs:
r = requests.get("http://gitlab.eurecom.fr/api/v4/projects/{}/jobs/{}/artifacts".format(project, good_job['id']), headers=token_header)
artifact = r.content
f_obj = io.BytesIO(artifact)
zp = zipfile.ZipFile(f_obj)
for member in zp.namelist():
            if not member.endswith(".json") or member == "manifest.json":
continue
zapp_bytes = zp.read(member)
zapp = json.loads(zapp_bytes.decode('utf-8'))
images = get_images_from_zapp(zapp)
pull_images(images)
print(project_name + "/" + member)
            if os.path.exists(os.path.join(ZAPP_STORE_PATH, project_name)):
                with open(os.path.join(ZAPP_STORE_PATH, project_name, member), 'wb') as zapp_file:
                    zapp_file.write(zapp_bytes)
if __name__ == "__main__":
for p in get_projects(GROUP):
try:
main(*p)
except Exception as e:
traceback.print_exc()
continue
|
import os
import cv2
import time
import argparse
import numpy as np
from mtcnn import detect_face
import tensorflow as tf
from PIL import Image, ImageDraw
## MTCNN face localizer
def mtcnn_localize_faces(image, pnet, rnet, onet, minsize=20, threshold=[0.7, 0.8, 0.85], factor=0.75):
"""
    Localize faces and their landmarks in an image using MTCNN
Params
:image
:minsize - min. face size
:threshold - a list/array with 3 values. The thresholds for pnet, rnet & onet, respectively
    :factor - scaling factor for the image octave
Return
:bbs - list of bounding boxes
:lds - list of face landmarks
"""
image = image[:, :, 0:3]
bounding_boxes, landmarks = detect_face.detect_face(image, minsize, pnet, rnet, onet, threshold, factor)
nrof_faces = bounding_boxes.shape[0]
bbs = list()
lds = list()
if nrof_faces > 0:
det = bounding_boxes[:, 0:4]
bb = np.zeros((nrof_faces,4), dtype=np.int32)
lands = np.zeros((nrof_faces,10), dtype=np.int32)
landmarks = np.reshape(landmarks, (nrof_faces, 10))
for i in range(nrof_faces):
## Convert to int32
lands[i] = np.ravel(landmarks[i])
bb[i] = np.ravel(det[i])
# inner exception
if bb[i][0] <= 0 or bb[i][1] <= 0 or bb[i][2] >= len(image[0]) or bb[i][3] >= len(image):
                print('face is out of range!')
continue
else:
## get as top, right, bottom, left
bbs.append((bb[i][1], bb[i][2], bb[i][3], bb[i][0]))
lds.append(lands[i])
return bbs, lds
def load_images(images_path):
"""
Read images from directory
Params
:images_path - path to images
Return
:image_l - list of images as arrays
: images_name - list of images' file names
"""
# list of images, as arrays
images_l = []
# get images
images_name = os.listdir(images_path)
# read images
for i in images_name:
image = cv2.imread(os.path.join(images_path, i))
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# if image.endswith(".png"):
# images_l.append(image)
images_l.append(image)
return images_l, images_name
def main(args):
st = time.time()
#check if input directory exists
if not os.path.exists(args.input_directory):
print("Error! No input direcotory", args.input_directory)
return -1
# read images
images_l, images_paths = load_images(args.input_directory)
    # create a tensorflow session
with tf.Graph().as_default():
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.75)
sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options, log_device_placement=False))
with sess.as_default():
pnet, rnet, onet = detect_face.create_mtcnn(sess, './mtcnn')
#localize and blur faces, iterate over images
for image, image_path in zip(images_l, images_paths):
print("Processing", image_path + "...")
bbs, lds = mtcnn_localize_faces(image, pnet, rnet, onet, minsize=20, threshold=[0.7, 0.8, 0.85], factor=0.75)
                # skip the iteration if there's no face
if len(bbs) == 0:
print("Couldn't find faces!")
continue
#get faces
for bb, ld in zip(bbs, lds):
#get bounding box
                    # top, right, bottom, left
top = bb[0]
right = bb[1]
bottom = bb[2]
left = bb[3]
# build landmarks' x, y pairs
points = []
for x, y in zip(ld[:5], ld[5:]):
points.append(x)
points.append(y)
#get face thumbnail
face_image = image[top:bottom, left:right]
#blur face thumbnail
if args.blur > 0:
face_image = cv2.GaussianBlur(face_image, (105, 105), args.blur)
#black
else:
face_image = np.zeros(face_image.shape)
                    # write the blurred face back into the image
image[top:bottom, left:right] = face_image
#PIL image
# pil_image = Image.fromarray(image)
# pil_image_face = Image.fromarray(face_image)
#eyes' landmarks: first two pairs
# get larger rectangle
# points[0] = points[0] * 0.9
# points[1] = points[1] * 0.9
# points[2] = points[2] * 1.1
# points[3] = points[3] * 1.1
# draw = ImageDraw.Draw(pil_image)
#cover eyes with rectangle
# draw.rectangle(points[:4], fill="black")
#create output directory if it doesn't exist
if not os.path.exists(args.output_directory):
os.makedirs(args.output_directory)
#save image
pil_image = Image.fromarray(image)
pil_image.save(os.path.join(args.output_directory, image_path))
print("Total running time:", time.time() - st, "sec.")
return 0
if __name__=="__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-id', '--input_directory', type=str, nargs='?', default="./images")
parser.add_argument('-od', '--output_directory', type=str, nargs='?', default="./blurs")
parser.add_argument('-b', '--blur', type=int, nargs='?', default=46)
args = parser.parse_args()
main(args)
|
"""Encoder
Description:
This module encodes Planning Problem to Propositional Formulas in CNF
(Conjunctive Normal Form)
License:
Copyright 2021 Debby Nirwan
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .pddl_adapter import PlanningProblem
from enum import Enum
from itertools import combinations
class Operator(Enum):
    AND = 0  # note: trailing commas here would turn the enum values into tuples
    OR = 1
IMPLIES = 2
class Clause(object):
def __init__(self, fluent=None):
if fluent:
self._clause = [fluent]
self._single = True
else:
self._clause = []
self._single = False
def __repr__(self):
return f"Clause object. {self._clause}"
def __len__(self):
return len(self._clause)
def __getitem__(self, item):
return self._clause[item]
def __contains__(self, item):
        return item in self._clause
def __eq__(self, other):
        return self._single == other.is_single and self._clause == other.clause
def __ne__(self, other):
return not self.__eq__(other)
def add(self, fluent, operator: Operator):
if len(self._clause) == 0:
self._single = True
else:
self._single = False
self._clause.append(operator)
self._clause.append(fluent)
return self
@property
def clause(self):
return self._clause
@property
def is_single(self):
return self._single
@property
def empty(self):
return self._clause == []
class PlanningProblemEncoder(object):
def __init__(self, dom_file: str, problem_file: str, length=1):
self._problem = PlanningProblem(dom_file, problem_file)
self._length = length
self._propositional_formulas = self._encode()
def _encode(self):
actions = self._problem.actions
fluents = self._problem.fluents
# 1. encode initial state
init_state = list(self._problem.initial_state)
init_state_clauses = []
for fluent in list(fluents):
if fluent not in init_state:
fluent = ('not',) + fluent
fluent = fluent + ('0',)
init_state_clauses.append(Clause(fluent))
# 2. encode goal state
goal_state = list(self._problem.goal_state)
goal_state_clauses = []
for goal in goal_state:
goal_state_clauses.append(Clause(goal + (str(self._length),)))
enc_actions_clauses = []
explanatory_frame_axioms = []
complete_exclusion_axiom = []
for step in range(self._length):
# 3. encode actions
for act in actions:
if act.effect_pos.issubset(act.precondition_pos):
continue
action_tuple = ('not', act, str(step))
# preconditions
for p in act.precondition_pos:
if 'adjacent' in p:
continue
action_clause = Clause(action_tuple)
p = p + (str(step),)
action_clause.add(p, Operator.OR)
enc_actions_clauses.append(action_clause)
# positive effects
for e in act.effect_pos:
e = e + (str(step + 1),)
action_clause = Clause(action_tuple)
action_clause.add(e, Operator.OR)
enc_actions_clauses.append(action_clause)
# negative effects
for e in act.effect_neg:
e = ('not',) + e + (str(step + 1),)
action_clause = Clause(action_tuple)
action_clause.add(e, Operator.OR)
enc_actions_clauses.append(action_clause)
# 4. explanatory frame axioms
for fluent in fluents:
act_with_pos_effect = []
act_with_neg_effect = []
for act in actions:
if act.effect_pos.issubset(act.precondition_pos):
continue
if fluent in act.effect_pos:
act_with_pos_effect.append(act)
elif fluent in act.effect_neg:
act_with_neg_effect.append(act)
if act_with_pos_effect:
a_pos = fluent + (str(step),)
b_pos = ('not',) + fluent + (str(step + 1),)
clause_pos = Clause(a_pos)
clause_pos.add(b_pos, Operator.OR)
for act in act_with_pos_effect:
c_pos = (act, str(step))
clause_pos.add(c_pos, Operator.OR)
explanatory_frame_axioms.append(clause_pos)
if act_with_neg_effect:
a_neg = ('not',) + fluent + (str(step),)
b_neg = fluent + (str(step + 1),)
clause_neg = Clause(a_neg)
clause_neg.add(b_neg, Operator.OR)
for act in act_with_neg_effect:
c_neg = (act, str(step))
clause_neg.add(c_neg, Operator.OR)
explanatory_frame_axioms.append(clause_neg)
# 5. complete exclusion axiom
for action_pair in combinations(actions, 2):
if action_pair[0].effect_pos.issubset(
action_pair[0].precondition_pos):
continue
if action_pair[1].effect_pos.issubset(
action_pair[1].precondition_pos):
continue
action0_tuple = ('not', action_pair[0], str(step))
action1_tuple = ('not', action_pair[1], str(step))
action_pair_clause = Clause(action0_tuple)
action_pair_clause.add(action1_tuple, Operator.OR)
complete_exclusion_axiom.append(action_pair_clause)
proposition_formulas = init_state_clauses + goal_state_clauses + \
enc_actions_clauses + explanatory_frame_axioms + \
complete_exclusion_axiom
return proposition_formulas
@property
def propositional_formulas(self):
return self._propositional_formulas
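# A minimal usage sketch, assuming a matching PDDL domain/problem pair exists
# on disk (the file names below are hypothetical, not part of this module):
if __name__ == '__main__':
    encoder = PlanningProblemEncoder('domain.pddl', 'problem.pddl', length=2)
    for clause in encoder.propositional_formulas:
        print(clause)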
|
from src.main.config import config
import requests
import json
def validate_email(email):
try:
api_response = requests.post(
config.EMAIL_VERIFICATION_URL.format(config.NEVERBOUNCE_API_KEY, email)
).content
api_response = json.loads(api_response)
    except Exception:
        raise Exception('Error(s) happened when validating email')
    # check the result outside the try block so the specific 'Invalid email'
    # error is not swallowed by the generic handler above
    if api_response['result'] == 'invalid':
        raise Exception('Invalid email')
|
#!/usr/bin/env python
"""
This is the unittest for gridcellarea module.
python -m unittest -v tests/test_gridcellarea.py
python -m pytest --cov=pyjams --cov-report term-missing -v tests/test_gridcellarea.py
"""
import unittest
def _flatten(itr):
import numpy as np
fitr = np.array(itr).flatten()
if len(fitr) == 0:
return list(fitr)
else:
if isinstance(fitr[0], str):
return [ i for i in fitr ]
else:
return [ i if np.isfinite(i) else np.finfo(float).max
for i in fitr ]
class TestGridcellarea(unittest.TestCase):
"""
Tests for gridcellarea.py
"""
def test_gridcellarea(self):
import numpy as np
from pyjams import gridcellarea
lat = [0., 2.5, 5.0]
lon = [0., 3.75, 7.5]
rearth = 6371009.
fsoll = [[1.15906555e+11, 1.15906555e+11, 1.15906555e+11],
[1.15796237e+11, 1.15796237e+11, 1.15796237e+11],
[1.15465495e+11, 1.15465495e+11, 1.15465495e+11]]
rearth1 = 6371000.
fsoll1 = [[1.15906227e+11, 1.15906227e+11, 1.15906227e+11],
[1.15795910e+11, 1.15795910e+11, 1.15795910e+11],
[1.15465169e+11, 1.15465169e+11, 1.15465169e+11]]
# descending latitudes
dlat = [0., -2.5, -5.0]
# meridian within longitudes
lon360 = [360., 3.75, 7.5]
# dateline within longitudes
lon180 = [180., -180.+3.75, -180.+7.5]
# list
fout = gridcellarea(lat, lon)
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll))
# tuple, list
fout = gridcellarea(tuple(lat), lon)
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll))
# 2 tuple
fout = gridcellarea(tuple(lat), tuple(lon))
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll))
# array, list
fout = gridcellarea(np.array(lat), lon)
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll))
# 2 array
fout = gridcellarea(np.array(lat), np.array(lon))
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll))
# rearth
fout = gridcellarea(lat, lon, rearth=rearth)
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll))
# rearth classic
fout = gridcellarea(lat, lon, rearth=rearth1)
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll1))
# globe
fout = gridcellarea(lat, lon, globe=True)
fsoll2 = [[3.79774834e+12, 3.79774834e+12, 3.79774834e+12],
[1.15796240e+11, 1.15796240e+11, 1.15796240e+11],
[3.61823239e+12, 3.61823239e+12, 3.61823239e+12]]
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -4)), _flatten(fsoll2))
# descending lats
fout = gridcellarea(dlat, lon, globe=True)
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -4)), _flatten(fsoll2))
# meridian in lon
fout = gridcellarea(lat, lon360)
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll))
# date line in lon
fout = gridcellarea(lat, lon180)
assert isinstance(fout, np.ndarray)
self.assertEqual(_flatten(np.around(fout, -3)), _flatten(fsoll))
# errors
# lat > 90
lat1 = [0., 2.5, 95.0]
self.assertRaises(AssertionError, gridcellarea, lat1, lon)
if __name__ == "__main__":
unittest.main()
|
from .agent import A2CAgent
|
# This file gives a rough estimate of how much you need to pay per month, or for how many months, on a loan
import pandas as pd
import numpy as np
from IPython.display import display
def group(number):
"""show money in laks and crores (indian way of presenting money)"""
s = '%d' % number
groups = []
groups.append(s[-3:])
s = s[:-3]
while s and s[-1].isdigit():
groups.append(s[-2:])
s = s[:-2]
return s + ','.join(reversed(groups))
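# Example of group() output (Indian grouping: last three digits, then pairs):
#   group(12345678) -> '1,23,45,678'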
class loan:
def __init__(self, R=8.1, principal=30, years=5):
"""R is yearly interest
principal is principal amount in lakhs
years = number of years
"""
self.R = R * 0.01
self.r = R * 0.01 * (1 / 12)
self.principal = principal * 100000
self.years = years
self.num_months = self.years * 12
self.months = {"Jan": 31, "Feb": 28, "Mar": 31, "Apr": 30, "May": 31, "June": 30, "Jul": 31, "Aug": 31,
"Sep": 30, "Oct": 31, "Nov": 30, "Dec": 31}
def find_monthly_emi_flat(self, print_=True):
""" find how much emi need to be paid given some principal, interest, and number of months when the interest scheme is flat"""
total = self.principal * (1 + self.R * (self.num_months / 12))
if print_:
print("------------- flat interest -------------------")
print("total amount you are paying over full period:", total)
print("monthly installment/emi : {}".format(total / self.num_months))
return total, total / self.num_months
def num_months_emi_diminishing(self, emi, principal=0, interest=0, print_=True):
"""find the number of months you need to pay for, if you are paying emi every month"""
"""emi is in rupees, principal is in lakhs, interest is yearly interest"""
"""n = np.log((E/r)/(E/r -P))/np.log(1+r) """
if not principal:
principal = self.principal
if not interest:
interest = self.r
num_months = np.log((emi / interest) / (emi / interest - principal)) / np.log(1 + interest)
if print_:
print("------------- diminishing interest -------------------")
print("you need to pay {} monthly, for {} months".format(emi, num_months))
return num_months
def find_monthly_emi_diminishing(self, num_months=0, principal=0, print_=True):
""" find how much emi need to be paid given some principal, interest, and number of months when the interest scheme is flat"""
"""P*r*(1 + 1/(np.power(1+r,60)-1))"""
if not num_months:
num_months = self.num_months
if not principal:
principal = self.principal
else:
principal *= 100000
monthly_emi = principal * self.r * (1 + 1 / (np.power(1 + self.r, num_months) - 1))
if print_:
print("------------- diminishing interest -------------------")
print(" you need to pay {} monthly, for {} months".format(monthly_emi, num_months))
print("total amount you will pay over full period is roughly {}".format(monthly_emi * num_months))
return monthly_emi
def confirm_diminishing(self, emi, print_=False):
""" function to confirm if the interest scheme is dimishing"""
principal = self.principal
i = 1
while principal > 0:
principal += ((self.r) * principal - emi)
if print_:
print(i, principal)
i += 1
if abs(principal / self.principal) < 0.001:
print("final net amount is {} after {} months".format(principal, i - 1))
return principal, i
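# Note on the diminishing-balance EMI formula used above: for principal P,
# monthly rate r, and n monthly payments,
#   EMI = P*r*(1 + 1/((1+r)**n - 1)) = P*r*(1+r)**n / ((1+r)**n - 1),
# which is the standard annuity payment. num_months_emi_diminishing() is the
# same relation solved for n: n = log((E/r)/(E/r - P)) / log(1+r).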
## Usage
R = 10.5  # 10.5% yearly interest rate
principal = 30 # principal is 30 lakhs
years = 4.5 # loan term period is 4.5 years
loan1 = loan(R,principal,years) # initialize a loan instance
loan1.find_monthly_emi_flat()
loan1.num_months_emi_diminishing(35000)
loan1.find_monthly_emi_diminishing()
#-----------output-----------------------
# ------------- flat interest -------------------
# total amount you are paying over full period: 4417500.0
# monthly installment/emi : 81805.55555555556
# ------------- diminishing interest -------------------
# you need to pay 35000 monthly, for 159.1257820098328 months
# ------------- diminishing interest -------------------
# you need to pay 69948.58010333449 monthly, for 54.0 months
# total amount you will pay over full period is roughly 3777223.3255800623
def get_df():
    # make a table showing how much EMI is to be paid for different principals over different tenures/periods
loan1 = loan(10.5,principal = 30, years =5)
# print(loan1.find_monthly_emi_diminishing())
years = [2,3,4,5]
amounts = [15,20,25]
yearss = [str(x)+'y' for x in years]
df = pd.DataFrame(columns=yearss)
total = pd.DataFrame(columns = yearss)
for amount in amounts:
arr=[]
arr1 = []
for year in years:
temp = loan1.find_monthly_emi_diminishing(num_months=year*12, principal=amount,print_ = False)
arr.append(group(round(int(temp),-2))) # rounding to closest hundred
arr1.append(group(round(int(temp*year*12),-2)))
df.loc[str(amount)+'Lks']=arr
total.loc[str(amount)+'Lks']=arr1
print("--------------------- emi ------------------")
display(df)
print("---------------------- total ---------------------")
display(total)
# get_df()
|
import numpy as np
import matplotlib.pyplot as plt
from tqdm import trange
import seaborn as sns
import random
# ========================== CFG =======================
class CFG:
HIT = 1
STOP = 0
actions = [STOP, HIT]
WIN = 1
DRAW = 0
LOSE = -1
# ======================== function ======================
def random_card():
card = np.random.randint(13) + 1
card = min(card, 10)
return card
def value_card(card):
if (card == 1):
return 11
else:
return card
def random_play(policy_player, policy_dealer, init_state = None, debug = False):
player_ace = 0
player_ace_1 = 0
dealer_ace = 0
dealer_ace_1 = 0
player_sum = 0
dealer_sum = 0
dealer_show = 0
his = []
if (init_state):
(player_ace, dealer_show, player_sum, action) = init_state
if (debug):
print(f'player init {player_sum} dealer show {dealer_show} action {action}')
if (dealer_show == 1):
dealer_ace += 1
dealer_sum += value_card(dealer_show)
card = random_card()
if (card == 1):
dealer_ace += 1
dealer_sum += value_card(card)
if (dealer_sum > 21):
dealer_sum -= 10
dealer_ace_1 += 1
his.append((player_ace > player_ace_1, player_sum, dealer_show, action))
if (action == CFG.HIT):
card = random_card()
if (debug):
print(f'player {player_sum} {card}')
if (card == 1):
player_ace += 1
player_sum += value_card(card)
if (player_sum > 21 and player_ace > player_ace_1):
player_sum -= 10
player_ace_1 += 1
else:
while(player_sum <12):
card = random_card()
if (card == 1):
player_ace += 1
player_sum += value_card(card)
if (player_sum > 21):
player_sum -= 10
player_ace_1 += 1
if (True):
card = random_card()
dealer_show = card
if (card == 1):
dealer_ace += 1
dealer_sum += value_card(card)
card = random_card()
if (card == 1):
dealer_ace += 1
dealer_sum += value_card(card)
if (dealer_sum > 21):
dealer_sum -= 10
dealer_ace_1 += 1
while(True):
if (player_sum > 21):
if (debug):
                print(f'bust {player_sum}')
return his, -1
action = policy_player[int(player_ace > player_ace_1), player_sum, dealer_show]
his.append((player_ace > player_ace_1, player_sum, dealer_show, action))
if (action == CFG.STOP):
break
card = random_card()
if (debug):
print(f'player {player_sum} {card}')
if (card == 1):
player_ace += 1
player_sum += value_card(card)
if (player_sum > 21 and player_ace > player_ace_1):
player_sum -= 10
player_ace_1 += 1
while(True):
if (dealer_sum == 21):
if(debug):
print(f'player {player_sum} dealer {dealer_sum}')
if (player_sum == 21):
return his, 0
else:
return his, -1
if (dealer_sum > 21):
return his, 1
action = policy_dealer[dealer_sum]
if (action == CFG.STOP):
break
card = random_card()
if(debug):
print(f'dealer {dealer_sum} {card}')
if (card == 1):
dealer_ace += 1
dealer_sum += value_card(card)
if(dealer_sum > 21 and dealer_ace > dealer_ace_1):
dealer_sum -= 10
dealer_ace_1 += 1
if(debug):
print(f'player sum {player_sum} dealer sum {dealer_sum}')
if (player_sum < dealer_sum):
return his, -1
if (player_sum == dealer_sum):
return his, 0
if (player_sum > dealer_sum):
return his, 1
def MonteCarloPrediction(Num_iter, debug = False):
# ========================== init =======================
policy_dealer = np.zeros((22))
policy_dealer[:17] = CFG.HIT
policy_dealer[17:] = CFG.STOP
policy_player = np.zeros((2, 22, 11), dtype = int)
for i in range(2):
for j in range(22):
for k in range(11):
policy_player[i,j,k] = random.choice(CFG.actions)
value_action = np.zeros((2, 10, 10, 2))
cnt = np.ones((2, 10, 10, 2))
for iter in trange(Num_iter):
if (debug):
print(f'---------------- {iter} -------------------------')
check = set()
init_usable = random.choice(range(2))
init_show = random_card()
init_player_sum = random.choice(range(12,22))
init_action = random.choice(CFG.actions)
his, reward = random_play(policy_player, policy_dealer,
(init_usable, init_show, init_player_sum, init_action), debug)
if (debug):
print(his, reward)
for (usable, player_sum, dealer_show, action) in his:
if ((usable, player_sum, dealer_show, action) in check):
continue
check.add((usable, player_sum, dealer_show, action))
value_action[int(usable), player_sum - 12, dealer_show - 1, action] += reward
cnt[int(usable), player_sum - 12, dealer_show - 1, action] += 1
Q = np.zeros((2))
Q[0] = value_action[int(usable), player_sum - 12, dealer_show - 1, 0]/cnt[int(usable), player_sum - 12, dealer_show - 1, 0]
Q[1] = value_action[int(usable), player_sum - 12, dealer_show - 1, 1]/cnt[int(usable), player_sum - 12, dealer_show - 1, 1]
policy_player[int(usable), player_sum, dealer_show] = np.argmax(Q)
arr = value_action/cnt
return policy_player[0, 12:,1:], policy_player[1, 12:,1:], arr
# ======================== main ==========================
NoUsable500k, Usable500k, arr = MonteCarloPrediction(10000000)
value = np.zeros((2,10,10))
for i in range(2):
for j in range(10):
for k in range(10):
value[i,j,k] = np.max(arr[i,j,k,:])
ax = sns.heatmap(value[0,...], cmap="YlGnBu", xticklabels=range(1, 11)
,yticklabels=list(range(12, 22)))
plt.savefig('figure_5_5_value_NoUsable.png')
plt.close()
ax = sns.heatmap(value[1,...], cmap="YlGnBu", xticklabels=range(1, 11)
,yticklabels=list(range(12, 22)))
plt.savefig('figure_5_5_value_Usable.png')
plt.close()
ax = sns.heatmap(NoUsable500k, cmap="YlGnBu", xticklabels=range(1, 11)
,yticklabels=list(range(12, 22)))
plt.savefig('figure_5_5_policy_NoUsable.png')
plt.close()
ax = sns.heatmap(Usable500k, cmap="YlGnBu", xticklabels=range(1, 11)
,yticklabels=list(range(12, 22)))
plt.savefig('figure_5_5_policy_Usable.png')
plt.close()
|
# Copyright (c) 2021 ICHIRO ITS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# flake8: noqa
import yakusha.data_types
from yakusha.json_to_msg import dict_to_msg, json_to_msg
from yakusha.msg_to_json import msg_to_dict, msg_to_json
|
# Import the necessary modules
from sklearn.naive_bayes import MultinomialNB
from sklearn import metrics
# Instantiate a Multinomial Naive Bayes classifier: nb_classifier
nb_classifier = MultinomialNB()
# Fit the classifier to the training data
nb_classifier.fit(count_train, y_train)
# Create the predicted tags: pred
pred = nb_classifier.predict(count_test)
# Calculate the accuracy score: score
score = metrics.accuracy_score(y_test, pred)
print(score)
# Calculate the confusion matrix: cm
cm = metrics.confusion_matrix(y_test, pred, labels=['FAKE', 'REAL'])
print(cm)
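# The snippet above assumes count_train/count_test and y_train/y_test already
# exist. One plausible way they were built (the DataFrame `df` and its column
# names are assumptions, not shown here):
#
#     from sklearn.feature_extraction.text import CountVectorizer
#     from sklearn.model_selection import train_test_split
#     X_train, X_test, y_train, y_test = train_test_split(
#         df['text'], df['label'], test_size=0.33, random_state=53)
#     count_vectorizer = CountVectorizer(stop_words='english')
#     count_train = count_vectorizer.fit_transform(X_train)
#     count_test = count_vectorizer.transform(X_test)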
|
import torch
from torch.nn.modules.pooling import MaxPool2d
from .activation import ReLU6, Hardswish, ELU, LeakyReLU, Sigmoid
from .batchnorm import BatchNorm2d, BatchNorm3d
from .normalization import LayerNorm, GroupNorm, InstanceNorm1d, \
InstanceNorm2d, InstanceNorm3d
from .conv import _ConvNd, Conv1d, Conv2d, Conv3d
from .conv import ConvTranspose1d, ConvTranspose2d, ConvTranspose3d
from .linear import Linear
from .embedding_ops import Embedding, EmbeddingBag
from .functional_modules import FloatFunctional, FXFloatFunctional, QFunctional
class Quantize(torch.nn.Module):
r"""Quantizes an incoming tensor
Args:
`scale`: scale of the output Quantized Tensor
`zero_point`: zero_point of output Quantized Tensor
`dtype`: data type of output Quantized Tensor
Attributes:
`scale`, `zero_point`, `dtype`
Examples::
>>> t = torch.tensor([[1., -1.], [1., -1.]])
>>> scale, zero_point, dtype = 1.0, 2, torch.qint8
>>> qm = Quantize(scale, zero_point, dtype)
>>> qt = qm(t)
>>> print(qt)
tensor([[ 1., -1.],
[ 1., -1.]], size=(2, 2), dtype=torch.qint8, scale=1.0, zero_point=2)
"""
scale: torch.Tensor
zero_point: torch.Tensor
def __init__(self, scale, zero_point, dtype):
super(Quantize, self).__init__()
self.register_buffer('scale', torch.tensor([scale]))
self.register_buffer('zero_point', torch.tensor([zero_point], dtype=torch.long))
self.dtype = dtype
def forward(self, X):
return torch.quantize_per_tensor(X, float(self.scale),
int(self.zero_point), self.dtype)
@staticmethod
def from_float(mod):
assert hasattr(mod, 'activation_post_process')
scale, zero_point = mod.activation_post_process.calculate_qparams()
return Quantize(scale.float().item(), zero_point.long().item(), mod.activation_post_process.dtype)
def extra_repr(self):
return 'scale={}, zero_point={}, dtype={}'.format(self.scale, self.zero_point, self.dtype)
class DeQuantize(torch.nn.Module):
r"""Dequantizes an incoming tensor
Examples::
>>> input = torch.tensor([[1., -1.], [1., -1.]])
>>> scale, zero_point, dtype = 1.0, 2, torch.qint8
>>> qm = Quantize(scale, zero_point, dtype)
>>> quantized_input = qm(input)
>>> dqm = DeQuantize()
>>> dequantized = dqm(quantized_input)
>>> print(dequantized)
tensor([[ 1., -1.],
[ 1., -1.]], dtype=torch.float32)
"""
def __init__(self):
super(DeQuantize, self).__init__()
def forward(self, Xq):
return Xq.dequantize()
@staticmethod
def from_float(mod):
return DeQuantize()
__all__ = [
'BatchNorm2d',
'BatchNorm3d',
'_ConvNd',
'Conv1d',
'Conv2d',
'Conv3d',
'ConvTranspose1d',
'ConvTranspose2d',
'ConvTranspose3d',
'DeQuantize',
'ELU',
'Embedding',
'EmbeddingBag',
'GroupNorm',
'Hardswish',
'InstanceNorm1d',
'InstanceNorm2d',
'InstanceNorm3d',
'LayerNorm',
'LeakyReLU',
'Linear',
'MaxPool2d',
'Quantize',
'ReLU6',
'Sigmoid',
# Wrapper modules
'FloatFunctional',
'FXFloatFunctional',
'QFunctional',
]
|
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the Instance."""
import unittest

from tests.common.gcp_type.test_data import fake_instance
from tests.unittest_utils import ForsetiTestCase
from google.cloud.forseti.common.gcp_type import instance
class InstanceTest(ForsetiTestCase):
"""Test Instance class."""
def test_network_interface_creation(self):
"""Test that network_interface creation is correct."""
network_interfaces = (instance.Instance(
'name-0', **fake_instance.FAKE_INSTANCE_RESPONSE_1)
.create_network_interfaces())
self.assertEqual(len(network_interfaces), 1)
network_interface = network_interfaces[0]
self.assertEqual('compute#networkInterface', network_interface.kind)
self.assertEqual('nic0', network_interface.name)
self.assertEqual('https://www.googleapis.com/compute/v1/projects/'
'project-1/global/networks/network-1',
network_interface.network)
self.assertEqual('000.000.000.000', network_interface.network_ip)
self.assertEqual('https://www.googleapis.com/compute/v1/projects'
'/project-1/regions/datacenter'
'/subnetworks/subnetwork-1',
network_interface.subnetwork)
self.assertEqual([{u'kind': u'compute#accessConfig',
u'type': u'ONE_TO_ONE_NAT', u'name': u'External NAT',
u'natIP': u'000.000.000.001'}],
network_interface.access_configs)
def test_recognize_two_network_interfaces(self):
"""Test that it recognizes two network_interfaces."""
network_interfaces = (instance.Instance(
'name-1', **fake_instance.FAKE_INSTANCE_RESPONSE_2)
.create_network_interfaces())
self.assertEqual(len(network_interfaces), 2)
def test_legacy_networks(self):
""" Test legacy networks without a subnet works."""
network_interfaces = (instance.Instance(
'name-0', **fake_instance.FAKE_INSTANCE_RESPONSE_LEGACY)
.create_network_interfaces())
self.assertEqual(len(network_interfaces), 1)
network_interface = network_interfaces[0]
self.assertEqual('compute#networkInterface', network_interface.kind)
self.assertEqual('nic0', network_interface.name)
self.assertEqual('https://www.googleapis.com/compute/v1/projects/'
'project-1/global/networks/network-1',
network_interface.network)
self.assertEqual('000.000.000.000', network_interface.network_ip)
self.assertEqual([{u'kind': u'compute#accessConfig',
u'type': u'ONE_TO_ONE_NAT', u'name': u'External NAT',
u'natIP': u'000.000.000.001'}],
network_interface.access_configs)
if __name__ == '__main__':
unittest.main()
|
""" Script to fetch supplemental informations about all the samples that are linked with the geo series of interest"""
import numpy as n
import pandas as pd
import pickle
import os
import sys
import re
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
import time
print("\n## fetch detailed informations about all samples that are contained in the already fetched geo series ##\n")
## Set paths
# cdir = dir of this script
cdir = os.path.dirname(os.path.realpath(__file__))
# basedir = root dir of the repository
basedir = os.path.dirname(os.path.dirname(cdir))
#os.path.getmtime(path)
dir_data_in = basedir+"/data/interim/records_samples/"
dir_data_out = dir_data_in+"samples_suppl/"
# filenamebase for the large file containing all possibly relevant supplemental information about the samples
fnameb_suppl = """samples_suppl"""
# filenamebase for the smaller file containing the information about the sample title and the characteristics of the sample channels
fnameb_simple = 'samples_suppl_simple'
# if files were already written before, this variable is overwritten with the last geo series accession number from which samples have been read
last_gse_acc = ''
# number of already written files
fN = -1
if not os.path.exists(dir_data_out):
os.makedirs(dir_data_out)
else:
# check whether files have been produced before
pattern = re.compile(r"""^"""+fnameb_suppl+"""_(?P<N>\d*).pkl$""")
fN_list = []
for f in os.listdir(dir_data_out):
match = pattern.match(f)
if(match):
fN_list.append(int(match.group("N")))
if len(fN_list) > 0:
fN = max(fN_list)
last_file = pickle.load( open(dir_data_out+fnameb_suppl+'_'+str(fN)+'.pkl','rb') )
last_gse_acc = last_file['start_end_gse_acc'][-1]
def import_df(fname):
print("start load samples database")
df = pickle.load( open(dir_data_in+fname, 'rb') )
print("done")
return df
# import records dataframe
df_records = import_df('records.pkl')
if len(last_gse_acc) > 0:
start_ndx = df_records.index[df_records['Accession'] == last_gse_acc].to_list()[0]
if start_ndx != df_records.index[-1]:
df_records = df_records.iloc[(start_ndx+1):,:]
else:
sys.exit("\nAll suppl. GEO samples data has been fetched and their relevant data been stored\n")
# url base from which we query
urlbase = 'https://www.ncbi.nlm.nih.gov/geo/query/acc.cgi?'
# pattern to read the lines of the fetched data:
# tries to catch the key and the corresponding value of each line, both as strings
pattern = re.compile(r"""^[!\^]Sample[\_]*(?P<key>\D*?)(_ch)*(?P<ch>\d*)\s*=\s*(?P<value>.*)$""", re.IGNORECASE)
# the list of channel specific keys of samples for which we want to extract the values
ch_key_list = ['source_name', 'characteristics', 'treatment_protocol', 'growth_protocol', 'molecule', 'extract_protocol']
# the list of general keys of samples for which we want to extract the value
gn_key_list = ['title', 'status', 'submission_date', 'type', 'channel_count', 'description', 'data_processing']
# keys for which we try to flatten their list entries by joining them as strings
keys_flt_vals = ['treatment_protocol', 'extract_protocol', 'growth_protocol', 'data_processing']
# global list containing all sample accession numbers of all samples that will be fetched
glob_samples_acc_key_list = []
# global list containing all sample accession numbers of all samples that will be fetched. This one will be reset after each batchsize number of samples have been fetched
glob_samples_acc_key_list_reset = []
# global list containing as elements all possibly relevant entries of the samples as dicts of all samples that will be fetched
glob_samples_dict_list = []
# global list containing as elements the name of the samples and the channel characteristics as dicts of all samples that will be fetched
glob_samples_dict_list_simple = []
# count the number of fetched samples, reset after each 10000 fetched samples
cnt_samples = 0
# global count the number of fetched samples, not reset
cnt_samples_glob = 0
# counter for files to be pickled
cnt_file = fN + 1
# the number of samples whose information is contained in each part of the samples_suppl dict to be written
batchsize = 10000
# max number of samples to be fetched, if <= 0, fetch all available samples
max_samples = -1
# flag to break the for loops when max_samples samples have been fetched
flag_break = False
# flag for determining if batchsize of samples have been reached, use -1 to start the loop, 0 if within the loop the batchsize has not been reached and 1 else
reached_batchsize = -1
class Sample:
""" class to generate sample output from a list of strings that belong to the sample entry in the corresponding geo series and a regular expression pattern used for matching key, ch(annel number) and, (key) val(ue)
- output is
- self.out: dict of the gn_key_list keys and corresponding values together with the channel entries as further nested dicts
- self.out_simple: dict with title as key and corresponding value together with numbers as additional keys identifying the channels whose values are the channel characteristics
"""
def __init__(self, sample_lines, pattern):
""" intit class """
self.__lines = sample_lines[1:]
self.__loc_sample_nest_key_list = gn_key_list[:]
self.__loc_sample_nest_val_list = [[] for i in range(len(gn_key_list))]
self.key = ''
self.key_old = ''
self.ch_old = -1
self.ch = -1
self.val = -1
self.__parse_lines(pattern)
self.__out()
def __parse_lines(self, pattern):
""" parse the lines in and match key, ch(annel number) and, (key) val(ue) with the help of the regular expression pattern """
for line in self.__lines:
match = pattern.match(line)
if(bool(match)):
self.key = match.group('key')
self.ch = match.group('ch')
self.val = match.group('value')
self.__clean()
if len(self.ch) > 0:
self.ch = int(self.ch.strip())
else:
self.ch = 0
if self.key in gn_key_list:
if self.key != 'channel_count':
self.__loc_sample_nest_val_list[gn_key_list.index(self.key)].append(self.val)
else:
self.val = self.val.strip()
try:
self.val = int(self.val)
except:
pass
self.__loc_sample_nest_val_list[gn_key_list.index(self.key)] = self.val
else:
if self.ch > 0:
if self.key in ch_key_list:
if self.ch not in self.__loc_sample_nest_key_list:
self.__loc_sample_nest_key_list.append(self.ch)
self.__loc_sample_nest_val_list.append([[] for i in range(len(ch_key_list))])
self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch)][ch_key_list.index(self.key)].append(self.val)
else:
self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch)][ch_key_list.index(self.key)].append(self.val)
self.key_old = self.key
self.ch_old = self.ch
self.__clean()
def __clean(self):
""" clean the values list entries by joining string list entries to a single string list entry for certain keys """
if( self.key_old != self.key ):
if self.key_old in gn_key_list:
if isinstance(self.__loc_sample_nest_val_list[gn_key_list.index(self.key_old)], list):
l_tmp_list = len(self.__loc_sample_nest_val_list[gn_key_list.index(self.key_old)])
if l_tmp_list > 1:
if self.key_old in keys_flt_vals:
self.__loc_sample_nest_val_list[gn_key_list.index(self.key_old)] = [" ".join(self.__loc_sample_nest_val_list[gn_key_list.index(self.key_old)])]
else:
if self.ch_old > 0:
if self.key_old in ch_key_list:
if self.ch_old in self.__loc_sample_nest_key_list:
if isinstance(self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch_old)][ch_key_list.index(self.key_old)], list):
l_tmp_list = len(self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch_old)][ch_key_list.index(self.key_old)])
if l_tmp_list > 1:
if self.key_old in keys_flt_vals:
self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch_old)][ch_key_list.index(self.key_old)] = [" ".join(self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch_old)][ch_key_list.index(self.key_old)])]
def __out(self):
""" method to store the relevant sample informations for writing """
ch_count_list = list(set(self.__loc_sample_nest_key_list).difference(set(gn_key_list)))
self.__loc_sample_nest_key_list_simple = ['title']
self.__loc_sample_nest_val_list_simple = [self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index('title')]]
if len(ch_count_list) > 0:
for self.ch in ch_count_list:
self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch)] = dict(zip(ch_key_list, self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch)]))
self.__loc_sample_nest_key_list_simple.append(self.ch)
self.__loc_sample_nest_val_list_simple.append(self.__loc_sample_nest_val_list[self.__loc_sample_nest_key_list.index(self.ch)]['characteristics'])
self.out = dict(zip(self.__loc_sample_nest_key_list, self.__loc_sample_nest_val_list))
self.out_simple = dict(zip(self.__loc_sample_nest_key_list_simple, self.__loc_sample_nest_val_list_simple))
for acc_series in df_records['Accession']:
print(acc_series)
# urllib request string: acc_series is the geo series acc number
    # tries to fetch all information in the SOFT (form=text) format
request = 'acc='+acc_series+'&targ=all&form=text&view=brief'
if(reached_batchsize in [-1,1]):
start_acc_series = acc_series
reached_batchsize = 0
code=404
while(code == 404 or code == 501):
try:
urllib_query = urlopen(urlbase, request.encode())
except HTTPError as e:
code = e.code
print("HTTPError: "+str(code))
time.sleep(1)
        except URLError as e:
            # URLError carries a .reason, not a .code; report it and retry
            print("URLError: " + str(e.reason))
            time.sleep(1)
else:
code = urllib_query.getcode()
gse_str_entries_raw = urllib_query.read()
urllib_query.close()
gse_str_entries_raw = gse_str_entries_raw.decode('utf-8', 'replace' )
gse_str_entries = gse_str_entries_raw.splitlines()
ndxl = [i for i, x in enumerate(gse_str_entries) if x.startswith('^SAMPLE')]
ndxl.append(len(gse_str_entries))
for i in range(len(ndxl[0:-1])):
sample_lines = gse_str_entries[ndxl[i]:ndxl[i+1]]
match = pattern.match(sample_lines[0])
sample_acc_id = match.group('value')
if sample_acc_id not in glob_samples_acc_key_list:
glob_samples_acc_key_list.append(sample_acc_id)
glob_samples_acc_key_list_reset.append(sample_acc_id)
cnt_samples += 1
cnt_samples_glob += 1
print((cnt_samples_glob,sample_acc_id))
sample = Sample(sample_lines, pattern)
glob_samples_dict_list.append(sample.out)
glob_samples_dict_list_simple.append(sample.out_simple)
# break from inner loop when max_samples have been fetched
if max_samples > 0:
if cnt_samples_glob == max_samples:
flag_break = True
break
# break from loop over sample text paragraphs, if flag_break is true
if flag_break:
break
if cnt_samples > batchsize:
# full filename for the part of the sample_supp dictionary to be stored to a file
glob_samples_acc_key_list_reset.append('start_end_gse_acc')
glob_samples_dict_list.append([start_acc_series, acc_series])
glob_samples_dict_list_simple.append([start_acc_series, acc_series])
fname_suppl = fnameb_suppl+'_'+str(cnt_file)+'.pkl'
# write to pickle file part of the detailed info sample dict
f = open(dir_data_out+fname_suppl,"wb")
pickle.dump(dict(zip(glob_samples_acc_key_list_reset, glob_samples_dict_list)),f)
f.close()
# write to pickle file part of the simple info sample dict
fname_simple = fnameb_simple+'_'+str(cnt_file)+'.pkl'
# write to pickle file
f = open(dir_data_out+fname_simple,"wb")
pickle.dump(dict(zip(glob_samples_acc_key_list_reset, glob_samples_dict_list_simple)),f)
f.close()
# reset the global lists of acc keys and possibly relevant entries for all fetched samples so far
glob_samples_acc_key_list_reset = []
glob_samples_dict_list = []
glob_samples_dict_list_simple = []
# reset counter for samples
cnt_samples = 0
# increase counter that indicates the part of the samples_supp dict to be written to a file
cnt_file += 1
reached_batchsize = 1
# break from loop over geo series, if flag_break is true
if flag_break:
break
if 'start_end_gse_acc' not in glob_samples_acc_key_list_reset:
glob_samples_acc_key_list_reset.append('start_end_gse_acc')
glob_samples_dict_list.append([start_acc_series, acc_series])
glob_samples_dict_list_simple.append([start_acc_series, acc_series])
# write last part of the sample_supp dictionary to a file
fname_suppl = fnameb_suppl+'_'+str(cnt_file)+'.pkl'
# write to pickle file
f = open(dir_data_out+fname_suppl,"wb")
pickle.dump(dict(zip(glob_samples_acc_key_list_reset, glob_samples_dict_list)),f)
f.close()
# write the sample_supp dictionary to a file
fname_simple = fnameb_simple+'_'+str(cnt_file)+'.pkl'
# write to pickle file
f = open(dir_data_out+fname_simple,"wb")
pickle.dump(dict(zip(glob_samples_acc_key_list_reset, glob_samples_dict_list_simple)),f)
f.close()
|
'''Tests for bdpy.preprocessor'''
from unittest import TestCase, TestLoader, TextTestRunner
import numpy as np
from scipy.signal import detrend
from bdpy import preproc
class TestPreprocessor(TestCase):
'''Tests of 'preprocessor' module'''
@classmethod
def test_average_sample(cls):
'''Test for average_sample'''
x = np.random.rand(10, 100)
group = np.array([1, 1, 1, 1, 1, 2, 2, 2, 2, 2])
exp_output_x = np.vstack((np.average(x[0:5, :], axis=0),
np.average(x[5:10, :], axis=0)))
exp_output_ind = np.array([0, 5])
test_output_x, test_output_ind = preproc.average_sample(x, group,
verbose=True)
np.testing.assert_array_equal(test_output_x, exp_output_x)
np.testing.assert_array_equal(test_output_ind, exp_output_ind)
@classmethod
def test_detrend_sample_default(cls):
'''Test for detrend_sample (default)'''
x = np.random.rand(20, 10)
group = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2])
exp_output = np.vstack((detrend(x[0:10, :], axis=0, type='linear')
+ np.mean(x[0:10, :], axis=0),
detrend(x[10:20, :], axis=0, type='linear')
+ np.mean(x[10:20, :], axis=0)))
test_output = preproc.detrend_sample(x, group, verbose=True)
np.testing.assert_array_equal(test_output, exp_output)
@classmethod
def test_detrend_sample_nokeepmean(cls):
'''Test for detrend_sample (keep_mean=False)'''
x = np.random.rand(20, 10)
group = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2])
exp_output = np.vstack((detrend(x[0:10, :], axis=0, type='linear'),
detrend(x[10:20, :], axis=0, type='linear')))
test_output = preproc.detrend_sample(x, group, keep_mean=False,
verbose=True)
np.testing.assert_array_equal(test_output, exp_output)
@classmethod
def test_normalize_sample(cls):
'''Test for normalize_sample (default)'''
x = np.random.rand(20, 10)
group = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2])
mean_a = np.mean(x[0:10, :], axis=0)
mean_b = np.mean(x[10:20, :], axis=0)
exp_output = np.vstack((100 * (x[0:10, :] - mean_a) / mean_a,
100 * (x[10:20, :] - mean_b) / mean_b))
test_output = preproc.normalize_sample(x, group, verbose=True)
np.testing.assert_array_equal(test_output, exp_output)
@classmethod
def test_shift_sample_singlegroup(cls):
'''Test for shift_sample (single group, shift_size=1)'''
x = np.array([[1, 2, 3],
[11, 12, 13],
[21, 22, 23],
[31, 32, 33],
[41, 42, 43]])
grp = np.array([1, 1, 1, 1, 1])
exp_output_data = np.array([[11, 12, 13],
[21, 22, 23],
[31, 32, 33],
[41, 42, 43]])
exp_output_ind = [0, 1, 2, 3]
# Default shift_size = 1
test_output_data, test_output_ind = preproc.shift_sample(x, grp,
verbose=True)
np.testing.assert_array_equal(test_output_data, exp_output_data)
np.testing.assert_array_equal(test_output_ind, exp_output_ind)
@classmethod
def test_shift_sample_twogroup(cls):
'''Test for shift_sample (two groups, shift_size=1)'''
x = np.array([[1, 2, 3],
[11, 12, 13],
[21, 22, 23],
[31, 32, 33],
[41, 42, 43],
[51, 52, 53]])
grp = np.array([1, 1, 1, 2, 2, 2])
exp_output_data = np.array([[11, 12, 13],
[21, 22, 23],
[41, 42, 43],
[51, 52, 53]])
exp_output_ind = [0, 1, 3, 4]
# Default shift_size=1
test_output_data, test_output_ind = preproc.shift_sample(x, grp,
verbose=True)
np.testing.assert_array_equal(test_output_data, exp_output_data)
np.testing.assert_array_equal(test_output_ind, exp_output_ind)
@classmethod
def test_select_top_default(cls):
'''Test for select_top (default, axis=0)'''
test_data = np.array([[1, 2, 3, 4, 5],
[11, 12, 13, 14, 15],
[21, 22, 23, 24, 25],
[31, 32, 33, 34, 35],
[41, 42, 43, 44, 45]])
test_value = np.array([15, 3, 6, 20, 0])
test_num = 3
exp_output_data = np.array([[1, 2, 3, 4, 5],
[21, 22, 23, 24, 25],
[31, 32, 33, 34, 35]])
exp_output_index = np.array([0, 2, 3])
test_output_data, test_output_index = preproc.select_top(test_data,
test_value,
test_num)
np.testing.assert_array_equal(test_output_data, exp_output_data)
np.testing.assert_array_equal(test_output_index, exp_output_index)
@classmethod
def test_select_top_axisone(cls):
'''Test for select_top (axis=1)'''
test_data = np.array([[1, 2, 3, 4, 5],
[11, 12, 13, 14, 15],
[21, 22, 23, 24, 25],
[31, 32, 33, 34, 35],
[41, 42, 43, 44, 45]])
test_value = np.array([15, 3, 6, 20, 0])
test_num = 3
exp_output_data = np.array([[1, 3, 4],
[11, 13, 14],
[21, 23, 24],
[31, 33, 34],
[41, 43, 44]])
exp_output_index = np.array([0, 2, 3])
test_output_data, test_output_index = preproc.select_top(test_data,
test_value,
test_num,
axis=1)
np.testing.assert_array_equal(test_output_data, exp_output_data)
np.testing.assert_array_equal(test_output_index, exp_output_index)
if __name__ == '__main__':
test_suite = TestLoader().loadTestsFromTestCase(TestPreprocessor)
TextTestRunner(verbosity=2).run(test_suite)
|
"""
This file offers the methods to automatically retrieve the graph Streptomyces flavidovirens.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 22:51:05.041684
The undirected graph Streptomyces flavidovirens has 6208 nodes and 745893
weighted edges, of which none are self-loops. The graph is dense as it
has a density of 0.03871 and has 31 connected components, where the component
with most nodes has 6140 nodes and the component with the least nodes has
2 nodes. The graph median node degree is 201, the mean node degree is 240.30,
and the node degree mode is 2. The top 5 most central nodes are 1123319.AUBE01000020_gene3023
(degree 2822), 1123319.AUBE01000003_gene803 (degree 1858), 1123319.AUBE01000022_gene2882
(degree 1842), 1123319.AUBE01000016_gene5937 (degree 1794) and 1123319.AUBE01000016_gene5980
(degree 1776).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import StreptomycesFlavidovirens
# Then load the graph
graph = StreptomycesFlavidovirens()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def StreptomycesFlavidovirens(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Streptomyces flavidovirens graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
Whether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
Instance of Streptomyces flavidovirens graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 22:51:05.041684
The undirected graph Streptomyces flavidovirens has 6208 nodes and 745893
weighted edges, of which none are self-loops. The graph is dense as it
has a density of 0.03871 and has 31 connected components, where the component
with most nodes has 6140 nodes and the component with the least nodes has
2 nodes. The graph median node degree is 201, the mean node degree is 240.30,
and the node degree mode is 2. The top 5 most central nodes are 1123319.AUBE01000020_gene3023
(degree 2822), 1123319.AUBE01000003_gene803 (degree 1858), 1123319.AUBE01000022_gene2882
(degree 1842), 1123319.AUBE01000016_gene5937 (degree 1794) and 1123319.AUBE01000016_gene5980
(degree 1776).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import StreptomycesFlavidovirens
# Then load the graph
graph = StreptomycesFlavidovirens()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="StreptomycesFlavidovirens",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
|
# -*- coding: utf-8 -*-
# --------------------------
# Copyright © 2014 - Qentinel Group.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ---------------------------
import os
def is_root():
    try:
        # Windows doesn't have getuid. We just assume that user is not root. We
        # most likely won't need proper Windows support here anyway.
        uid = os.getuid()  # pylint: disable=no-member
    except AttributeError:
        return False
    # User id 0 is reserved for superuser aka root
    return uid == 0
def is_docker():
    path = '/proc/self/cgroup'
    if os.path.exists('/.dockerenv'):
        return True
    if os.path.isfile(path):
        # use a context manager so the cgroup file is closed deterministically
        with open(path) as cgroup:
            return any('docker' in line for line in cgroup)
    return False
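# Usage sketch (illustrative): these checks are typically consulted at start-up
# to adjust behaviour for privileged or containerised runs.
if __name__ == '__main__':
    print('running as root:', is_root())
    print('running inside docker:', is_docker())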
|
# Generated by Django 2.2.10 on 2020-02-24 11:38
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('finance', '0002_auto_20200224_1125'),
]
operations = [
migrations.AlterUniqueTogether(
name='category',
unique_together={('name', 'user')},
),
]
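# To apply this migration, run the standard Django command:
#     python manage.py migrate finance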
|
"""empty message
Revision ID: 156b555e16b7
Revises: fc1cedce5988
Create Date: 2020-05-04 10:39:56.803842
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '156b555e16b7'
down_revision = 'fc1cedce5988'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('posts', 'author',
existing_type=sa.VARCHAR(length=128),
nullable=False)
op.alter_column('posts', 'description',
existing_type=sa.VARCHAR(length=256),
nullable=False)
op.alter_column('posts', 'title',
existing_type=sa.VARCHAR(length=128),
nullable=False)
op.drop_index('ix_posts_timestamp', table_name='posts')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_index('ix_posts_timestamp', 'posts', ['timestamp'], unique=False)
op.alter_column('posts', 'title',
existing_type=sa.VARCHAR(length=128),
nullable=True)
op.alter_column('posts', 'description',
existing_type=sa.VARCHAR(length=256),
nullable=True)
op.alter_column('posts', 'author',
existing_type=sa.VARCHAR(length=128),
nullable=True)
# ### end Alembic commands ###
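# Standard Alembic CLI usage for this revision:
#     alembic upgrade 156b555e16b7     # apply this migration
#     alembic downgrade fc1cedce5988   # revert to the previous revision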
|
import sys
def error():
    sys.exit(f'Error on Line {line_num}:\n{line}')
__author__ = 'Aarav Dave'
if len(sys.argv) > 1:
    source_path = sys.argv[1]
else:
    source_path = 'code.qps'
variables = {}  # user-defined variables ('vars' would shadow the builtin)
nest = []
with open(source_path) as file:
    for line_num, line in enumerate(file, 1):
line = line.rstrip()
if (not line) or line.startswith('//'):
continue
line = line.lstrip()
current = ['']
in_string = 0
for char in line:
if char == '\'':
in_string = 1 - in_string
if char in '(). ' and not in_string:
current.append('')
continue
if char == ';':
break
current[-1] += char
while '' in current:
current.remove('')
main, *rest = current
if main == 'log':
if rest:
if len(rest) > 1:
if rest[0] in variables:
    rest[0] = variables[rest[0]]
print(rest[0].strip('\''))
else:
error()
else:
print()
if main == 'var':
    name, _, *rest = rest
    # store the declared value (assumed semantics: var name = value;)
    variables[name] = rest[0].strip('\'') if rest else None
else:
print(current)
|
from stanza.pipeline.core import Pipeline
from stanza.models.common.doc import Document
from stanza.utils.resources import download
from stanza._version import __version__, __resources_version__
import logging.config
logging.config.dictConfig(
{
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"standard": {
"format": "%(asctime)s %(levelname)s: %(message)s",
'datefmt': '%Y-%m-%d %H:%M:%S'
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "standard",
}
},
"loggers": {
"": {"handlers": ["console"]}
},
}
)
|
# NOTE: A place for helper utilities and decorators.
from wtoolzexceptions import exceptions
import flask
import marshmallow
def parse(schema, location):
if location == "args":
p = flask.request.args
elif location == "json":
p = flask.request.json
elif location == "view_args":
p = flask.request.view_args
else:
raise ValueError("location not args, json, or view_args.")
try:
return schema.load(p)
except marshmallow.ValidationError:
exceptions.ohoh(400)
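# Usage sketch (hypothetical schema and endpoint, for illustration only):
if __name__ == "__main__":
    class PageQuerySchema(marshmallow.Schema):
        page = marshmallow.fields.Int(required=True)
    app = flask.Flask(__name__)
    @app.route("/items")
    def list_items():
        # e.g. GET /items?page=2 -> {"page": 2}
        args = parse(PageQuerySchema(), "args")
        return {"page": args["page"]}
    app.run()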
|
#!/usr/bin/python
# Victor del Pino
import sys
import re
linea = 0
ignorar = ""
contador = 100
impreso=0
"""
f1 = open('../ext/movies/movies.csv', 'r')
for line in f1:
if linea == 0:
re.sub(r'^\W+|\W+$', '', ignorar)
linea = linea + 1
else:
line = re.sub(r'^\W+|\W+$', '', line)  # parse the line
numbers = line.split(',', 4)  # split the line to extract the fields
print(numbers[0] + "\t" + numbers[1] + "\t-1")
f1.close()
f2 = open('../ext/movies/ratings.csv', 'r')
for line in f2:
if linea == 0:
re.sub(r'^\W+|\W+$', '', ignorar)
linea = linea + 1
else:
line = re.sub(r'^\W+|\W+$', '', line)  # parse the line
numbers = line.split(',', 4)  # split the line to extract the fields
print(numbers[1] + "\t-1" + "\t" + numbers[2])
f2.close()
"""
"""
"""
f1 = open('../ext/movies/movies.csv', 'r')
f2 = open('../ext/movies/ratings.csv', 'r')
for line in f1:
if linea == 0:
re.sub(r'^\W+|\W+$', '', ignorar)
linea = linea + 1
else:
line = re.sub(r'^\W+|\W+$', '', line)  # parse the line
numbers = line.split(',', 4)  # split the line to extract the fields
linea2 = 0
for line2 in f2:
if linea2 == 0:
re.sub(r'^\W+|\W+$', '', ignorar)
linea2 = linea2 + 1
else:
line2 = re.sub(r'^\W+|\W+$', '', line2)  # parse the line
numbers2 = line2.split(',', 4)  # split the line to extract the fields
if numbers[0] == numbers2[1]:
print(numbers[1] + "\t" + numbers2[2])
impreso = 1
f2.seek(0)
if impreso == 0:
print(numbers[0] + "\t" + numbers[1] + "\t-1")
impreso = 0
f1.close()
f2.close()
"""
"""
"""
for line in sys.stdin:
if linea == 0:
re.sub(r'^\W+|\W+$', '', ignorar)
linea = linea + 1
else:
line = re.sub(r'^\W+|\W+$', '', line)  # parse the line
numbers = line.split(',', 4)  # split the line to extract the fields
print(numbers[1] + "\t" + numbers[2])
"""
|
"""
Functions for calculating LOFAR hardware specific properties.
"""
import tkp.telescope.lofar.antennaarrays
import tkp.telescope.lofar.beam
import tkp.telescope.lofar.noise
import tkp.telescope.lofar.quality
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2017, Data61
# Commonwealth Scientific and Industrial Research Organisation (CSIRO)
# ABN 41 687 119 230.
#
# This software may be distributed and modified according to the terms of
# the BSD 2-Clause license. Note that NO WARRANTY is provided.
# See "LICENSE_BSD2.txt" for details.
#
# @TAG(DATA61_BSD)
import six
import math
from PyQt5 import QtGui, QtWidgets, QtCore
import Connection_Widget
from Model import Common
from Interface.Property import PropertyInterface
from Instance_Property_Widget import InstancePropertyWidget
# TODO: Delete itself from all connections when __del__ ed
class InstanceWidget(QtWidgets.QGraphicsWidget, PropertyInterface):
"""
InstanceWidget - a View representation of camkes.ast.Instance.
If model changes, update the fields in InstanceWidget.
"""
# Constants and private class variables
_bounding_rect = None
_border_thickness = 7
@property
def velocity(self):
if self._velocity is None:
self._velocity = QtCore.QPointF(0, 0)
return self._velocity
@velocity.setter
def velocity(self, value):
assert isinstance(value, QtCore.QPointF)
self._velocity = value
# --- Information about Instance ---
@property
def name(self):
if self._name is None:
self._name = "Uninitialised widget"
return self._name
@name.setter
def name(self, value):
assert isinstance(value, six.string_types)
self._name = value
self.update_ui()
@property
def component_type(self):
if self._component_type is None:
self._component_type = "Uninitialised widget"
return self._component_type
@component_type.setter
def component_type(self, value):
assert isinstance(value, six.string_types)
self._component_type = value
self.update_ui()
@property
def control(self):
return self._control
@control.setter
def control(self, value):
assert isinstance(value, bool)
self._control = value
self.update_ui()
@property
def hardware(self):
return self._hardware
@hardware.setter
def hardware(self, value):
assert isinstance(value, bool)
self._hardware = value
self.update_ui()
@property
def hidden(self):
return self._hidden
@hidden.setter
def hidden(self, value):
assert isinstance(value, bool)
self._hidden = value
if value:
self.setZValue(3)
else:
self.setZValue(5)
for connection in self.connection_list:
# This will only be set if both source and destination are not hidden
connection.hidden = value
connection.update()
self.update()
# Provides
@property
def provides(self):
if self._provides is None:
self._provides = []
return self._provides
# TODO: Handle multiple connections
def add_provide(self, name, interface_type, connection=None):
assert isinstance(name, six.string_types)
assert isinstance(interface_type, six.string_types)
self.provides.append({'Name': name,
'Interface_type': interface_type,
'Connection_Widget': connection})
self.update_ui()
def add_provide_connection(self, interface_name, connection):
assert self._provides is not None
for dictionary in self.provides:
if dictionary['Name'] == interface_name:
dictionary['Connection_Widget'] = connection
break
def remove_provide_connection(self, interface_name, connection):
assert self._provides is not None
for dictionary in self.provides:
if dictionary['Name'] == interface_name and dictionary['Connection_Widget'] is connection:
dictionary['Connection_Widget'] = None
break
def delete_provide(self, name):
raise NotImplementedError
# Uses
@property
def uses(self):
if self._uses is None:
self._uses = []
return self._uses
def add_use(self, name, interface_type, connection=None):
assert isinstance(name, six.string_types)
assert isinstance(interface_type, six.string_types)
self.uses.append({'Name': name, 'Interface_type': interface_type, 'Connection_Widget': connection})
self.update_ui()
# TODO NotImplementedError
def add_use_connection(self, interface_name, connection):
assert self._uses is not None
for dictionary in self.uses:
if dictionary['Name'] == interface_name:
dictionary['Connection_Widget'] = connection
break
def remove_use_connection(self, interface_name, connection):
assert self._uses is not None
for dictionary in self.uses:
if dictionary['Name'] == interface_name and dictionary['Connection_Widget'] is connection:
dictionary['Connection_Widget'] = None
break
def delete_use(self, name):
raise NotImplementedError
# Emits
@property
def emits(self):
if self._emits is None:
self._emits = []
return self._emits
def add_emit(self, name, interface_type, connection=None):
assert isinstance(name, six.string_types)
assert isinstance(interface_type, six.string_types)
self.emits.append({'Name': name, 'Interface_type': interface_type, 'Connection_Widget': connection})
self.update_ui()
# TODO NotImplementedError
def add_emit_connection(self, interface_name, connection):
assert self._emits is not None
for dictionary in self.emits:
if dictionary['Name'] == interface_name:
dictionary['Connection_Widget'] = connection
break
def remove_emit_connection(self, interface_name, connection):
assert self._emits is not None
for dictionary in self.emits:
if dictionary['Name'] == interface_name and dictionary['Connection_Widget'] is connection:
dictionary['Connection_Widget'] = None
break
def delete_emit(self, name):
raise NotImplementedError
# Consumes
@property
def consumes(self):
if self._consumes is None:
self._consumes = []
return self._consumes
def add_consume(self, name, interface_type, optional, connection=None):
assert isinstance(name, six.string_types)
assert isinstance(interface_type, six.string_types)
assert isinstance(optional, bool)
self.consumes.append({'Name': name, 'Interface_type': interface_type, 'Optional': optional,
'Connection_Widget': connection})
self.update_ui()
# TODO NotImplementedError
def add_consume_connection(self, interface_name, connection):
assert self._consumes is not None
for dictionary in self.consumes:
if dictionary['Name'] == interface_name:
dictionary['Connection_Widget'] = connection
break
def remove_consume_connection(self, interface_name, connection):
assert self._consumes is not None
for dictionary in self.consumes:
if dictionary['Name'] == interface_name and \
dictionary['Connection_Widget'] is connection:
dictionary['Connection_Widget'] = None
break
def delete_consume(self, name):
raise NotImplementedError
# Dataport
@property
def dataport(self):
if self._dataport is None:
self._dataport = []
return self._dataport
def add_dataport(self, name, interface_type, optional, connection=None):
assert isinstance(name, six.string_types)
assert isinstance(interface_type, six.string_types)
assert isinstance(optional, bool)
if self._dataport is None:
self._dataport = []
self._dataport.append({'Name': name, 'Interface_type': interface_type, 'Optional': optional,
'Connection_Widget': connection})
self.update_ui()
# TODO NotImplementedError
def add_dataport_connection(self, interface_name, connection):
assert self._dataport is not None
for dictionary in self.dataport:
if dictionary['Name'] == interface_name:
dictionary['Connection_Widget'] = connection
break
def remove_dataport_connection(self, interface_name, connection):
assert self._dataport is not None
for dictionary in self.dataport:
if dictionary['Name'] == interface_name and \
dictionary['Connection_Widget'] is connection:
dictionary['Connection_Widget'] = None
break
def delete_dataport(self, name):
raise NotImplementedError
@property
def connection_list(self):
return self._connections_list
# TODO: connection overrides, for multiway connection. Eg. eigenConnection
def add_connection(self, connection):
assert isinstance(connection, Connection_Widget.ConnectionWidget)
if connection.source_instance_widget is self:
if connection.source_connection_type == Common.Event:
self.add_emit_connection(connection.source_interface_name, connection)
elif connection.source_connection_type == Common.Procedure:
self.add_use_connection(connection.source_interface_name, connection)
elif connection.source_connection_type == Common.Dataport:
self.add_dataport_connection(connection.source_interface_name, connection)
elif connection.dest_instance_widget is self:
if connection.dest_connection_type == Common.Event:
self.add_consume_connection(connection.dest_interface_name, connection)
elif connection.dest_connection_type == Common.Procedure:
self.add_provide_connection(connection.dest_interface_name, connection)
elif connection.dest_connection_type == Common.Dataport:
self.add_dataport_connection(connection.dest_interface_name, connection)
else:
raise NotImplementedError # Something is wrong
self._connections_list.append(connection)
self.update_connection_position(connection)
def remove_connection(self, connection):
assert isinstance(connection, Connection_Widget.ConnectionWidget)
if connection.source_instance_widget is self:
if connection.source_connection_type == Common.Event:
self.remove_emit_connection(connection.source_interface_name, connection)
elif connection.source_connection_type == Common.Procedure:
self.remove_use_connection(connection.source_interface_name, connection)
elif connection.source_connection_type == Common.Dataport:
self.remove_dataport_connection(connection.source_interface_name, connection)
elif connection.dest_instance_widget is self:
if connection.dest_connection_type == Common.Event:
self.remove_consume_connection(connection.dest_interface_name, connection)
elif connection.dest_connection_type == Common.Procedure:
self.remove_provide_connection(connection.dest_interface_name, connection)
elif connection.dest_connection_type == Common.Dataport:
self.remove_dataport_connection(connection.dest_interface_name, connection)
else:
raise NotImplementedError # Something is wrong
self._connections_list.remove(connection)
@property
def context_menu(self):
return self._context_menu
@context_menu.setter
def context_menu(self, value):
assert isinstance(value, QtWidgets.QGraphicsProxyWidget)
assert isinstance(value.widget(), QtWidgets.QMenu)
self._context_menu = value
@property
def property_widget(self):
self._property_widget = InstancePropertyWidget(self)
return self._property_widget
# -------
# Signals & Slots
widget_moved = QtCore.pyqtSignal()
# --- INITIALISATION
def __init__(self, context_menu, preferred_point=None):
super(InstanceWidget, self).__init__()
# Model
self._preferred_point = preferred_point
self._pinned = False
self._velocity = None
self._name = None
self._component_type = None
self._control = False
self._hardware = False
self._provides = None
self._uses = None
self._emits = None
self._consumes = None
self._dataport = None
self._context_menu = None
self.context_menu = context_menu
self._hidden = False
self._property_widget = None
self._connections_list = []
# GUI
self.color = QtGui.QColor(245,245,245)
self.setFlag(QtWidgets.QGraphicsWidget.ItemIsMovable)
self.update_ui()
# --- UI FUNCTIONS ---
def paint(self, painter, style_options, widget=None):
"""
Overridden function, paints the box with name, type and the C H symbols.
:param painter:
:param style_options:
:param widget:
:return:
"""
assert isinstance(painter, QtGui.QPainter)
assert isinstance(style_options, QtWidgets.QStyleOptionGraphicsItem)
# assert isinstance(widget, QtWidgets.QWidget)
super(InstanceWidget, self).paint(painter, style_options, widget)
painter.setRenderHint(QtGui.QPainter.Antialiasing)
# -- If hidden, changing alpha values to transparent --
color = self.color
if self.hidden:
color.setAlphaF(0.2)
else:
color.setAlphaF(1)
# Setting brush color
brush = painter.brush()
brush.setColor(color)
painter.setBrush(brush)
pen = painter.pen()
pen_color = pen.color()
if self.hidden:
pen_color.setAlphaF(0.2)
else:
pen_color.setAlphaF(1)
pen.setColor(pen_color)
painter.setPen(pen)
rounded_rect = QtGui.QPainterPath()
assert isinstance(rounded_rect, QtGui.QPainterPath)
# If the instance is control or hardware, the bounding rect will compensate for that.
inner_rect = self.boundingRect().adjusted(0, 0, 0, 0)  # hacky way to get a copy of the rect
if self.hardware or self.control:
inner_rect.adjust(2,2,-2,-2)
rounded_rect.addRoundedRect(inner_rect,5,5)
painter.fillPath(rounded_rect, color)
painter.drawPath(rounded_rect)
# Draw an outline if the instance is control or hardware
# Assumption is, an instance cannot be both control and hardware
outline_rect = inner_rect.adjusted(-1,-1,1,1)
outline_rect_path = QtGui.QPainterPath()
outline_rect_path.addRoundedRect(outline_rect, 5, 5)
stroker = QtGui.QPainterPathStroker()
stroker.setWidth(5)
outline_rounded_rect = stroker.createStroke(outline_rect_path)
# Draw outline to highlight control components
if self.control:
# Make a BLUE color pen
pen_color.setRed(30)
pen_color.setGreen(136)
pen_color.setBlue(229)
painter.fillPath(outline_rounded_rect, pen_color)
# Draw outline to highlight hardware components
if self.hardware:
pen_color.setRed(67)
pen_color.setGreen(160)
pen_color.setBlue(71)
painter.fillPath(outline_rounded_rect, pen_color)
# TODO IDEA: Update rect with new size
# Printing instance name
font = QtGui.QFont("Helvetica", 15, QtGui.QFont.Normal)
painter.setFont(font)
font_metrics = painter.fontMetrics()
assert isinstance(font_metrics, QtGui.QFontMetrics)
bounding_rect_font = painter.boundingRect(QtCore.QRectF(1, 1, 1, 1), QtCore.Qt.AlignCenter, self.name)
bounding_rect_font.moveTo(inner_rect.center().x() - bounding_rect_font.width() / 2,
inner_rect.center().y() - font_metrics.ascent())
painter.drawText(bounding_rect_font, QtCore.Qt.AlignCenter, self.name)
control_hardware_x_pos = bounding_rect_font.x()
# Printing component name
font.setPointSize(11)
painter.setFont(font)
bounding_rect_font = painter.boundingRect(QtCore.QRectF(1, 1, 1, 1), QtCore.Qt.AlignCenter, self.component_type)
bounding_rect_font.moveTo(inner_rect.center().x() - bounding_rect_font.width() / 2,
inner_rect.center().y() + font_metrics.descent())
painter.drawText(bounding_rect_font, QtCore.Qt.AlignCenter, self.component_type)
if bounding_rect_font.x() < control_hardware_x_pos:
control_hardware_x_pos = bounding_rect_font.x()
control_hardware_x_pos -= 5
# The C
font.setPointSize(12)
painter.setFont(font)
font_metrics = painter.fontMetrics()
bounding_rect_font = painter.boundingRect(QtCore.QRectF(1, 1, 1, 1), QtCore.Qt.AlignCenter, "C")
bounding_rect_font.moveTo(control_hardware_x_pos - bounding_rect_font.width(),
inner_rect.center().y() - font_metrics.ascent())
if self.control:
painter.drawText(bounding_rect_font, QtCore.Qt.AlignCenter, "C")
# The H
bounding_rect_font = painter.boundingRect(QtCore.QRectF(1, 1, 1, 1), QtCore.Qt.AlignCenter, "H")
bounding_rect_font.moveTo(control_hardware_x_pos - bounding_rect_font.width(),
inner_rect.center().y() + font_metrics.descent())
if self.hardware:
painter.drawText(bounding_rect_font, QtCore.Qt.AlignCenter, "H")
def update_ui(self):
"""
Recalculates the expected size of the view, and calls a repaint.
:return:
"""
# Calculate rect for instance name
practise_font = QtGui.QFont("Helvetica", 15, QtGui.QFont.Normal)
practise_font_metrics = QtGui.QFontMetrics(practise_font)
instance_name_rect = practise_font_metrics.boundingRect(self.name)
# Calculate rect for component type
practise_font.setPointSize(11)
practise_font_metrics = QtGui.QFontMetrics(practise_font)
component_name_rect = practise_font_metrics.boundingRect(self.component_type)
# Calculate rects for control and hardware symbols
practise_font.setPointSize(12)
practise_font_metrics = QtGui.QFontMetrics(practise_font)
control_rect = practise_font_metrics.boundingRect("C")
hardware_rect = practise_font_metrics.boundingRect("H")
# Find the max height
max_height = 2 * self._border_thickness + instance_name_rect.height() + hardware_rect.height() + 7
# Find the max width
max_width = 2 * self._border_thickness + 2 * control_rect.width() + 10
if instance_name_rect.width() > component_name_rect.width():
max_width = max_width + instance_name_rect.width()
else:
max_width = max_width + component_name_rect.width()
# Set bounding rect to new max width and height
self._bounding_rect = QtCore.QRectF(self.scenePos().x(), self.scenePos().y(), max_width, max_height)
# Adjust for new hardware or control border
if self.hardware or self.control:
self._bounding_rect.adjust(-2,-2,2,2)
self.setPreferredSize(self._bounding_rect.width(), self._bounding_rect.height())
self.update() # Call a repaint
def boundingRect(self):
"""
:return: QRect - bounding rectangle of this widget
"""
return self._bounding_rect
def update_connections(self):
"""
Forces all connections and connecting instances to update.
:return:
"""
for connection in self.connection_list:
self.update_connection_position(connection)
if connection.source_instance_widget is self:
connection.dest_instance_widget.update_connection_position(connection)
else:
connection.source_instance_widget.update_connection_position(connection)
def update_connection_position(self, connection):
"""
Updates the touching point between the connection and this widget.
:param connection: The connection to be updated
:return:
"""
assert isinstance(connection, Connection_Widget.ConnectionWidget)
decrease_angle = None
# Find the direction of the angle on the other end - if it is set.
if connection.source_instance_widget is self:
other_widget = connection.dest_instance_widget
if connection.dest_angle:
decrease_angle = connection.dest_angle >= 0
else:
other_widget = connection.source_instance_widget
if connection.source_angle:
decrease_angle = connection.source_angle >= 0
# --- Find position based on straight line distance between this and other widget ---
# -- Vector between other and this --
assert isinstance(other_widget, InstanceWidget)
our_pos = self.scenePos()
# Get middle of widget
our_pos.setX(our_pos.x() + self.boundingRect().width() / 2)
our_pos.setY(our_pos.y() + self.boundingRect().height() / 2)
other_widget_pos = other_widget.scenePos()
# Get middle of widget
other_widget_pos.setX(other_widget_pos.x() + other_widget.boundingRect().width() / 2)
other_widget_pos.setY(other_widget_pos.y() + other_widget.boundingRect().height() / 2)
vector = other_widget_pos - our_pos
# -- Finding intersection between vector and edge of this widget --
final_pos = self.edge_intersection(our_pos, vector)
# Check if final_pos is inside other_widget
# If inside, use the centre of this widget instead.
other_widget_top_left = other_widget.scenePos()
other_widget_bottom_right = QtCore.QPointF(other_widget.scenePos().x() + \
other_widget.boundingRect().width(),
other_widget.scenePos().y() + \
other_widget.boundingRect().height())
if final_pos.x() >= other_widget_top_left.x() and \
final_pos.x() <= other_widget_bottom_right.x() and \
final_pos.y() >= other_widget_top_left.y() and \
final_pos.y() <= other_widget_bottom_right.y():
final_pos = our_pos
# Find unclashing angle
angle = self.find_free_angle(final_pos, connection, decrease_angle)
# Set our newly found position and angle (at the appropriate side of the connection)
if connection.source_instance_widget is self:
connection.set_source_pos_angle(final_pos, angle)
else:
connection.set_dest_pos_angle(final_pos, angle)
# TODO: Potentially inefficient algorithm
def edge_intersection(self, our_pos, vector):
"""
Finding the intersection between the vector + pos , to the edge of the widget
:param our_pos: The starting position of the vector (usually centre of widget)
:param vector: The vector from the starting position
:return: PyQt5.QPointF - The position of the intersection with the edge.
"""
# Consider the case where x is bigger than y
# .
# . .
# . .
# ............
# We reduce y, proportional to x, such that x is equal to width of widget.
# If the x is 0, then it is a horizontal
if vector.x() == 0:
y_pos = self.boundingRect().height()
# If original y is negative, new y must also be negative
if vector.y() < 0:
y_pos = -y_pos
else:
# Using ratios to get y value
y_pos = vector.y() * math.fabs((self.boundingRect().width() / 2) / vector.x())
half_height = self.boundingRect().height() / 2 + 1 # Bit of room for rounding
# If y is within the box then above assumption is correct
if -half_height <= y_pos <= half_height:
vector.setY(y_pos)
if vector.x() < 0:
vector.setX(-self.boundingRect().width() / 2)
else:
vector.setX(self.boundingRect().width() / 2)
else:
# If y wasn't within the box, then the assumption is wrong: y is bigger than x
# .
# .
# . .
# .
# . .
# .
# ......
# We reduce x, proportional to y, such that y is equal to height.
# If y is 0, then it is vertical
if vector.y() == 0:
x_pos = self.boundingRect().width()
if vector.x() < 0:
x_pos = -x_pos
else:
# Using ratios to get x value
x_pos = vector.x() * math.fabs((self.boundingRect().height() / 2) / vector.y())
vector.setX(x_pos)
if vector.y() < 0:
vector.setY(-self.boundingRect().height() / 2)
else:
vector.setY(self.boundingRect().height() / 2)
# We got a vector from the center, now we get the final position
final_pos = our_pos + vector
return final_pos
# TODO: Potentially inefficient algorithm
def find_free_angle(self, pos, connection, decrease_angle=None):
"""
Find a angle which doesn't collide with any other connection
at the same position.
:param pos: Position to find angle
:param connection: The current connection we are checking for
:param decrease_angle: If a specific direction is required, then use this variable
to specify whether the final angle is positive or negative.
Default is None.
"""
angle = 0
if decrease_angle is None:
decrease_angle = False
angle_set = False
else:
angle_set = True
# Choose an angle, start with 0 degrees, and search through all connection points,
# looking for clashes
for compare in self.connection_list:
assert isinstance(compare, Connection_Widget.ConnectionWidget)
if compare is connection:
continue # Not interested in the same connection, find others
# Get the current position and angle of the potential clashing connection
if compare.source_instance_widget is self:
compare_pos = compare.source_pos
compare_angle = compare.source_angle
elif compare.dest_instance_widget is self:
compare_pos = compare.dest_pos
compare_angle = compare.dest_angle
else:
raise NotImplementedError # Something went wrong
if compare_pos != pos:
continue # Does not clash, continue searching
# If clashing, find an angle which doesn't clash
while compare_angle == angle:
if angle_set:
if decrease_angle:
angle -= 35
else:
angle += 35
else:
# If angle is not set, try 0, -35, 35, -70, 70 etc
# In order to alternate between positive and negative,
# use decrease_angle as a toggle
angle = -angle
if decrease_angle:
angle -= 35
decrease_angle = not decrease_angle
return angle
# --- EVENTS ---
def itemChange(self, change, value):
"""
Deals with position changes. Updates connections when ever position changes
:param change:
:param value:
:return:
"""
if change == QtWidgets.QGraphicsWidget.ItemPositionHasChanged:
self.update_connections()
return super(InstanceWidget, self).itemChange(change, value)
def mousePressEvent(self, mouse_event):
"""
Deals with instances being pressed. Right now doesn't do anything special other than
printing the name, type and number of connections
:param mouse_event:
:return:
"""
string = " "
for connection in self.connection_list:
string += "%s " % connection.name
print "%s contains: %s" % (self.name, string)
no_of_connections = len(self.dataport) + len(self.provides) + len(self.consumes) + len(self.uses) + \
len(self.emits)
print "\tNumber of connections is: %s" % str(no_of_connections)
print "\tdataport: %s" % str(len(self.dataport))
print "\tprovides: %s" % str(len(self.provides))
print "\tconsumes: %s" % str(len(self.consumes))
print "\tuses: %s" % str(len(self.uses))
print "\temits: %s" % str(len(self.emits))
def mouseMoveEvent(self, mouse_event):
"""
Deals with this instance being clicked and dragged. Emits a signal that component was moved.
:param mouse_event:
:return:
"""
self.widget_moved.emit()
super(InstanceWidget, self).mouseMoveEvent(mouse_event)
def contextMenuEvent(self, event):
"""
Shows a context menu for this instance, asking to either show or hide the component.
Uses context menu given by graph widget.
:param event:
:return:
"""
assert isinstance(event, QtWidgets.QGraphicsSceneContextMenuEvent)
# Get menu widget from proxy widget
menu = self.context_menu.widget()
assert isinstance(menu, QtWidgets.QMenu)
# If current hidden, action is "Show" otherwise "Hide"
menu.clear()
if self.hidden:
showComponentAction = menu.addAction("Show component")
showComponentAction.triggered.connect(self.show_component)
else:
hideComponentAction = menu.addAction("Hide component")
hideComponentAction.triggered.connect(self.hide_component)
# Set the current position [of proxy widget] to mouse click position
self.context_menu.setPos(event.scenePos())
menu.exec_()
def show_component(self):
self.hidden = False
def hide_component(self):
self.hidden = True
|
from collections import namedtuple
Item = namedtuple('Item', ['item', 'at'])
ObsItem = namedtuple('ObsItem', ['at'])
Link = namedtuple('Link', ['from_x', 'from_y', 'to_x', 'to_y'])
class Observable(object):
def __init__(self, start, is_child=False):
self.label = None
self.start = start
self.end = start
self.is_child = is_child
self.items = []
self.completed = None
self.error = None
def set_label(self, label):
self.label = label
def on_next_at(self, item, at):
self.items.append(Item(item, at))
def on_observable_at(self, at):
self.items.append(ObsItem(at))
def on_completed_at(self, at):
self.completed = at
self.end = at
def on_error_at(self, at):
self.error = at
self.end = at
def on_continued_at(self, at):
self.end = at
class Operator(object):
def __init__(self, start, end, text):
self.start = start
self.end = end
self.text = text
class Marble(object):
def __init__(self):
self.layers = []
self.higher_order_links = []
self.item_links = []
self.label_links = []
def add_observable(self, observable):
self.layers.append(observable)
def add_operator(self, operator):
self.layers.append(operator)
def _compute_higher_order_links(self):
def nearest_links(parents, children):
links = []
for parent in parents:
dist = None
nearest = None
for child in children:
d = abs(parent[0] - child[0])
if nearest is None or d < dist:
dist = d
nearest = child
if nearest is not None:
links.append(Link(
from_x=parent[0], from_y=parent[1],
to_x=nearest[0], to_y=nearest[1],
))
return links
children = []
parents = []
links = []
for layer_index, layer in enumerate(self.layers):
if type(layer) is Operator:
links.extend(nearest_links(parents, children))
children.clear()
parents.clear()
elif type(layer) is Observable:
if layer.is_child is True:
children.append((layer.start, layer_index))
else:
for item in layer.items:
if type(item) is ObsItem:
parents.append((item.at, layer_index))
links.extend(nearest_links(parents, children))
return links
@staticmethod
def _append_links(links, top_layer, bottom_layer, items):
for item in items:
if top_layer is not None:
links.append(Link(
from_x=item[0], from_y=top_layer,
to_x=item[0], to_y=item[1],
))
if bottom_layer is not None:
links.append(Link(
from_x=item[0], from_y=item[1],
to_x=item[0], to_y=bottom_layer,
))
return links
def _compute_item_links(self):
top_layer = None
items = []
links = []
for layer_index, layer in enumerate(self.layers):
if type(layer) is Operator:
Marble._append_links(links, top_layer, layer_index, items)
items.clear()
top_layer = layer_index
elif type(layer) is Observable:
if layer.label is None:
for item in layer.items:
items.append((item.at, layer_index))
Marble._append_links(links, top_layer, None, items)
return links
def _compute_label_links(self):
top_layer = None
items = []
links = []
for layer_index, layer in enumerate(self.layers):
if type(layer) is Operator:
Marble._append_links(links, top_layer, layer_index, items)
items.clear()
top_layer = layer_index
elif type(layer) is Observable:
if layer.label is not None:
items.append((layer.start, layer_index))
Marble._append_links(links, top_layer, None, items)
return links
def build(self):
self.higher_order_links = self._compute_higher_order_links()
self.item_links = self._compute_item_links()
self.label_links = self._compute_label_links()
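# Usage sketch (illustrative; assumed workflow): describe a source observable,
# an operator, and its result, then build the link layout for rendering.
if __name__ == '__main__':
    source = Observable(start=0)
    source.on_next_at('a', at=1)
    source.on_next_at('b', at=3)
    source.on_completed_at(at=5)
    op = Operator(start=0, end=5, text='map(x => x.upper())')
    result = Observable(start=0)
    result.on_next_at('A', at=1)
    result.on_next_at('B', at=3)
    result.on_completed_at(at=5)
    marble = Marble()
    marble.add_observable(source)
    marble.add_operator(op)
    marble.add_observable(result)
    marble.build()
    print(marble.item_links)  # vertical links between items and the operator row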
|
# RUN WITH /usr/bin/python3 minet.py (python 3.6)
import sys
import numpy as np
from sklearn.metrics import roc_curve, auc
import pandas as pd
def compute_aggregated_matrix(matrixfiles_num, matrixfiles, savematrixfile, saveresultfile, coeffs=[1, 1, 1, 1]):
    # NOTE: `matrixfiles_num` and `savematrixfile` are currently unused;
    # the aggregated matrix is written to `saveresultfile`.
# matrixfiles_num = int(sys.argv[1])
# matrixfiles = [sys.argv[i] for i in range(2, matrixfiles_num + 2)]
# savematrixfile = sys.argv[matrixfiles_num + 2]
# saveresultfile = sys.argv[matrixfiles_num + 3]
matrices = [pd.read_csv(f, index_col=0, sep='\t') for f in matrixfiles]
genes = matrices[0].index
# print(genes)
# print(matrices)
sz = len(matrices[0])
for matrix in matrices:
assert len(matrix) == sz
for matrix in matrices:
for column in matrix:
temp = matrix[column].argsort()
ranks = np.empty_like(temp)
ranks[temp] = np.arange(len(matrix[column]))
matrix[column] = ranks
res = np.zeros(shape=(sz, sz))
for s in range(sz):
for i, matrix in enumerate(matrices):
res[s] += matrix.iloc[:, s].values * coeffs[i]
res[s] /= len(matrices)
for row in res:
row /= row.sum()
result_df = pd.DataFrame(res, columns=genes, index=genes)
result_df.to_csv(saveresultfile, index=True, header=True, sep='\t')
# print(result_df)
return result_df
matricesdirname = "/home/user/Sirius/gene_network_sirius_2019/Matrices_1"
savematricesdirname = "/home/user/Sirius/gene_network_sirius_2019/Matrices_6"
predictedfilename = matricesdirname + "/{1}_{0}_predicted.txt"
truefilename = matricesdirname + "/{1}_{0}_true.txt"
savematricesfilename = savematricesdirname + "/{0}_predicted.txt"
# datalist = ['exps_10', 'exps_10_2', 'exps_10_bgr', 'exps_50', 'exps_50_2', 'exps_50_bgr', 'exps_100', 'exps_100_2', 'exps_100_bgr', 'genes_200_exps_10_bgr', 'genes_400_exps_10_bgr', 'genes_600_exps_10_bgr', 'genes_700_exps_10_bgr', 'genes_1000_exps_10_bgr']
datalist = ['genes_200_exps_10_bgr', 'genes_200_exps_20_bgr', 'genes_200_exps_40_bgr', 'genes_400_exps_10_bgr', 'genes_400_exps_40_bgr', 'genes_400_exps_80_bgr', 'genes_500_exps_10_bgr', 'genes_500_exps_50_bgr', 'genes_500_exps_100_bgr']
algolist = ['aracne', 'mrnet', 'mrnetb']
saveresultsfile = "/home/user/Sirius/gene_network_sirius_2019/RankAggregation/res_arrgeg_on_petr_big_data_many_exps.txt"
tmpfile = "/home/user/Sirius/gene_network_sirius_2019/RankAggregation/data/tmp5.txt"
if __name__ == "__main__":
results = np.zeros(shape=(len(datalist)))
for i, dataname in enumerate(datalist):
true_df = pd.read_csv(truefilename.format(dataname, algolist[1]), index_col=0, sep='\t')
predicted_df = compute_aggregated_matrix(len(algolist), [predictedfilename.format(dataname, algo) for algo in algolist], tmpfile, savematricesfilename.format(dataname))
true_df.to_csv(savematricesdirname + "/{0}_true.txt".format(dataname), index=True, header=True, sep='\t')
# print(true_df)
true_array = true_df.values[np.triu_indices(true_df.values.shape[0], k=1)]
predicted_array = predicted_df.values[np.triu_indices(predicted_df.values.shape[0], k=1)]
roc_auc = 0
# try:
# fpr, tpr, thresholds = roc_curve(true_array, predicted_array)
# roc_auc = auc(fpr, tpr)
# except:
# print("error", dataname, algo)
fpr, tpr, thresholds = roc_curve(true_array, predicted_array)
roc_auc = auc(fpr, tpr)
results[i] = roc_auc
with open(savematricesdirname + "/{0}_auc.txt".format(dataname), 'w') as f:
f.write(str(roc_auc) + '\n')
print("done", dataname, results[i])
with open(saveresultsfile, "a") as f:
f.write("done " + dataname + str(results[i]))
# print("done", dataname, algo)
print(results)
|
from django.core.exceptions import ObjectDoesNotExist
from django.http import JsonResponse
from rest_framework import status, permissions
from rest_framework.decorators import api_view, permission_classes
from task.models import TaskModel, Status
from worker import WORKER_LIST
from worker.settings import NEW_TASK_EVENT
@api_view(['POST'])
@permission_classes((permissions.IsAuthenticated,))
def create_task(request):
task = TaskModel(status=Status.IN_QUEUE)
task.save()
if NEW_TASK_EVENT.is_set():
NEW_TASK_EVENT.clear()
NEW_TASK_EVENT.set()
return JsonResponse({"task_id": task.pk})
@api_view(['GET'])
def get_info(request, task_id):
try:
task = TaskModel.objects.get(pk=task_id) # type: TaskModel
return JsonResponse({
'status': task.status.label,
'create_time': task.create_time,
'start_time': task.start_time,
'time_to_execute': str(task.exec_time - task.start_time) if task.exec_time is not None else None,
})
except ObjectDoesNotExist:
return JsonResponse({"message": "Task #%s does not exists" % task_id}, status=status.HTTP_400_BAD_REQUEST)
@api_view(['POST'])
@permission_classes((permissions.IsAdminUser,))
def start_workers(request):
try:
for x in WORKER_LIST:
x.start()
return JsonResponse({"message": "workers are running"})
except Exception as e:
return JsonResponse({"message": str(e)}, status=status.HTTP_400_BAD_REQUEST)
@api_view(['POST'])
@permission_classes((permissions.IsAdminUser,))
def disable_workers(request):
try:
for x in WORKER_LIST:
x.disable()
return JsonResponse({"message": "workers are disabled"})
except Exception as e:
return JsonResponse({"message": str(e)}, status=status.HTTP_400_BAD_REQUEST)
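# Hypothetical URL wiring for these views (not part of this module):
#     from django.urls import path
#     from . import views
#     urlpatterns = [
#         path('tasks/', views.create_task),
#         path('tasks/<int:task_id>/', views.get_info),
#         path('workers/start/', views.start_workers),
#         path('workers/disable/', views.disable_workers),
#     ]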
|
import logging
import json
from typing import List, Type, Union
from keras.models import Model
from keras.layers.merge import Concatenate
from keras.layers import (
Dense, LSTM, Bidirectional, Embedding, Input, Dropout,
TimeDistributed
)
import delft.sequenceLabelling.wrapper
from delft.utilities.layers import ChainCRF
from delft.sequenceLabelling.models import BaseModel
from delft.sequenceLabelling.models import get_model as _get_model, BidLSTM_CRF_FEATURES
from sciencebeam_trainer_delft.sequence_labelling.config import ModelConfig
LOGGER = logging.getLogger(__name__)
class CustomModel(BaseModel):
def __init__(
self, config, ntags,
require_casing: bool = False,
use_crf: bool = False,
supports_features: bool = False,
require_features_indices_input: bool = False,
stateful: bool = False):
super().__init__(config, ntags)
self.require_casing = require_casing
self.use_crf = use_crf
self.supports_features = supports_features
self.require_features_indices_input = require_features_indices_input
self.stateful = stateful
def _concatenate_inputs(inputs: list, **kwargs):
if len(inputs) == 1:
return inputs[0]
return Concatenate(**kwargs)(inputs)
# renamed copy of BidLSTM_CRF to demonstrate a custom model
class CustomBidLSTM_CRF(CustomModel):
"""
A Keras implementation of BidLSTM-CRF for sequence labelling.
References
--
Guillaume Lample, Miguel Ballesteros, Sandeep Subramanian, Kazuya Kawakami, Chris Dyer.
"Neural Architectures for Named Entity Recognition". Proceedings of NAACL 2016.
https://arxiv.org/abs/1603.01360
"""
def __init__(self, config: ModelConfig, ntags=None):
super().__init__(
config, ntags,
require_casing=False, use_crf=True, supports_features=True,
stateful=config.stateful
)
stateful = self.stateful
# stateful RNNs require the batch size to be passed in
input_batch_size = config.batch_size if stateful else None
model_inputs = []
lstm_inputs = []
# build input, directly feed with word embedding by the data generator
word_input = Input(
shape=(None, config.word_embedding_size),
batch_shape=(input_batch_size, None, config.word_embedding_size),
name='word_input'
)
model_inputs.append(word_input)
lstm_inputs.append(word_input)
# build character based embedding
char_input = Input(
shape=(None, config.max_char_length),
batch_shape=(input_batch_size, None, config.max_char_length),
dtype='int32',
name='char_input'
)
model_inputs.append(char_input)
if config.char_embedding_size:
assert config.char_vocab_size, 'config.char_vocab_size required'
char_embeddings = TimeDistributed(Embedding(
input_dim=config.char_vocab_size,
output_dim=config.char_embedding_size,
mask_zero=config.char_input_mask_zero,
name='char_embeddings_embedding'
), name='char_embeddings')(char_input)
chars = TimeDistributed(
Bidirectional(LSTM(
config.num_char_lstm_units,
dropout=config.char_input_dropout,
recurrent_dropout=config.char_lstm_dropout,
return_sequences=False
)),
name='char_lstm'
)(char_embeddings)
lstm_inputs.append(chars)
# length of sequence not used for the moment (but used for f1 communication)
length_input = Input(batch_shape=(None, 1), dtype='int32', name='length_input')
# combine characters and word embeddings
LOGGER.debug('model, config.use_features: %s', config.use_features)
if config.use_features:
LOGGER.info('model using features')
assert config.max_feature_size > 0
features_input = Input(
batch_shape=(input_batch_size, None, config.max_feature_size),
name='features_input'
)
model_inputs.append(features_input)
features = features_input
if config.features_embedding_size:
features = TimeDistributed(Dense(
config.features_embedding_size,
name='features_embeddings_dense'
), name='features_embeddings')(features)
LOGGER.info(
'word_input=%s, chars=%s, features=%s',
word_input, chars, features
)
lstm_inputs.append(features)
x = _concatenate_inputs(lstm_inputs, name='word_lstm_input')
x = Dropout(config.dropout, name='word_lstm_input_dropout')(x)
x = Bidirectional(LSTM(
units=config.num_word_lstm_units,
return_sequences=True,
recurrent_dropout=config.recurrent_dropout,
stateful=stateful,
), name='word_lstm')(x)
x = Dropout(config.dropout, name='word_lstm_output_dropout')(x)
x = Dense(
config.num_word_lstm_units, name='word_lstm_dense', activation='tanh'
)(x)
x = Dense(ntags, name='dense_ntags')(x)
self.crf = ChainCRF(name='crf')
pred = self.crf(x)
model_inputs.append(length_input)
self.model = Model(inputs=model_inputs, outputs=[pred])
self.config = config
# copied from
# https://github.com/kermitt2/delft/blob/d2f8390ac01779cab959f57aa6e1a8f1d2723505/
# delft/sequenceLabelling/models.py
class CustomBidLSTM_CRF_FEATURES(CustomModel):
"""
A Keras implementation of BidLSTM-CRF for sequence labelling which create features
from additional orthogonal information generated by GROBID.
References
--
Guillaume Lample, Miguel Ballesteros, Sandeep Subramanian, Kazuya Kawakami, Chris Dyer.
"Neural Architectures for Named Entity Recognition". Proceedings of NAACL 2016.
https://arxiv.org/abs/1603.01360
"""
name = 'CustomBidLSTM_CRF_FEATURES'
def __init__(self, config, ntags=None):
super().__init__(
config, ntags,
require_casing=False, use_crf=True, supports_features=True,
require_features_indices_input=True
)
# build input, directly feed with word embedding by the data generator
word_input = Input(shape=(None, config.word_embedding_size), name='word_input')
# build character based embedding
char_input = Input(shape=(None, config.max_char_length), dtype='int32', name='char_input')
char_embeddings = TimeDistributed(Embedding(
input_dim=config.char_vocab_size,
output_dim=config.char_embedding_size,
mask_zero=True,
name='char_embeddings'
))(char_input)
chars = TimeDistributed(Bidirectional(LSTM(
config.num_char_lstm_units,
return_sequences=False
)))(char_embeddings)
# layout features input and embeddings
features_input = Input(
shape=(None, len(config.features_indices)),
dtype='float32',
name='features_input'
)
assert config.features_vocabulary_size, "config.features_vocabulary_size required"
assert config.features_embedding_size, "config.features_embedding_size required"
# features_vocabulary_size (default 12) * number_of_features + 1
# (the zero is reserved for masking / padding)
features_embedding = TimeDistributed(
Embedding(
input_dim=config.features_vocabulary_size * len(config.features_indices) + 1,
output_dim=config.features_embedding_size,
mask_zero=True,
trainable=True,
name='features_embedding'),
name="features_embedding_td_1"
)(features_input)
assert config.features_lstm_units, "config.features_lstm_units required"
features_embedding_bd = TimeDistributed(
Bidirectional(LSTM(config.features_lstm_units, return_sequences=False)),
name="features_embedding_td_2"
)(features_embedding)
features_embedding_out = Dropout(config.dropout)(features_embedding_bd)
# length of sequence not used for the moment (but used for f1 communication)
length_input = Input(batch_shape=(None, 1), dtype='int32', name='length_input')
# combine characters and word embeddings
x = Concatenate()([word_input, chars, features_embedding_out])
x = Dropout(config.dropout)(x)
x = Bidirectional(LSTM(
units=config.num_word_lstm_units,
return_sequences=True,
recurrent_dropout=config.recurrent_dropout
))(x)
x = Dropout(config.dropout)(x)
x = Dense(config.num_word_lstm_units, activation='tanh')(x)
x = Dense(ntags)(x)
self.crf = ChainCRF()
pred = self.crf(x)
self.model = Model(
inputs=[word_input, char_input, features_input, length_input],
outputs=[pred]
)
self.config = config
DEFAULT_MODEL_NAMES = [
'BidLSTM_CRF', 'BidLSTM_CNN', 'BidLSTM_CNN_CRF', 'BidGRU_CRF', 'BidLSTM_CRF_CASING',
BidLSTM_CRF_FEATURES.name
]
MODEL_MAP = {
'CustomBidLSTM_CRF': CustomBidLSTM_CRF,
CustomBidLSTM_CRF_FEATURES.name: CustomBidLSTM_CRF_FEATURES
}
IMPLICIT_MODEL_CONFIG_PROPS_MAP = {
BidLSTM_CRF_FEATURES.name: dict(
use_features=True,
use_features_indices_input=True
),
CustomBidLSTM_CRF_FEATURES.name: dict(
use_features=True,
use_features_indices_input=True
)
}
def register_model(name: str, model_class: Type[CustomModel]):
MODEL_MAP[name] = model_class
def updated_implicit_model_config_props(model_config: ModelConfig):
implicit_model_config_props = IMPLICIT_MODEL_CONFIG_PROPS_MAP.get(model_config.model_type)
if not implicit_model_config_props:
return
for key, value in implicit_model_config_props.items():
setattr(model_config, key, value)
def _create_model(
model_class: Type[CustomModel],
config: ModelConfig,
ntags=None) -> CustomModel:
return model_class(config, ntags=ntags)
def is_model_stateful(model: Union[BaseModel, CustomModel]) -> bool:
try:
return model.stateful
except AttributeError:
return False
def get_model(config, preprocessor, ntags=None):
LOGGER.info(
'get_model, config: %s, ntags=%s',
json.dumps(vars(config), indent=4),
ntags
)
model_class = MODEL_MAP.get(config.model_type)
if not model_class:
return _get_model(config, preprocessor, ntags=ntags)
model = _create_model(model_class, config, ntags=ntags)
config.use_crf = model.use_crf
preprocessor.return_casing = model.require_casing
if config.use_features and not model.supports_features:
LOGGER.warning('features enabled but not supported by model (disabling)')
config.use_features = False
preprocessor.return_features = config.use_features
return model
def get_model_names() -> List[str]:
return sorted(set(DEFAULT_MODEL_NAMES) | set(MODEL_MAP.keys()))
def patch_get_model():
delft.sequenceLabelling.wrapper.get_model = get_model
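# Illustrative sketch (not part of the original module): registering a
# hypothetical custom model class so that get_model can resolve it by name.
# "MyCustomModel" and its implementation are assumptions for illustration.
#
# class MyCustomModel(CustomModel):
#     ...  # implement __init__(config, ntags) and the model graph
#
# register_model('MyCustomModel', MyCustomModel)
# config.model_type = 'MyCustomModel'
# model = get_model(config, preprocessor, ntags=10)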
|
# -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import time
from rqalpha.utils.datetime_func import TimeRange
TRADING_PERIOD_DICT = dict()
STOCK_TRADING_PERIOD = [
TimeRange(start=time(9, 31), end=time(11, 30)),
TimeRange(start=time(13, 1), end=time(15, 0)),
]
# | Commodity futures WR, FU, CS, C, L, V, PP, BB, FB, JD, WH, PM, RI, SF, SM, RS, JR, LR, AP | 09:01~10:15, 10:31~11:30, 13:31~15:00 |
time_period1 = [
TimeRange(start=time(9, 1), end=time(10, 15)),
TimeRange(start=time(10, 31), end=time(11, 30)),
TimeRange(start=time(13, 31), end=time(15, 0)),
]
TRADING_PERIOD_DICT.update({
underlying_symbol: time_period1
for underlying_symbol in
["WR", "FU", "CS", "C", "L", "V", "PP", "BB", "FB", "JD", "WH", "PM", "RI", "SF", "SM", "RS", "JR", "LR", "AP"]
})
# | Commodity futures Y, M, A, B, P, J, JM, I, CF, SR, OI, TA, MA, ZC, FG, RM, CY | 21:01~23:30, 09:01~10:15, 10:31~11:30, 13:31~15:00 |
time_period2 = [
TimeRange(start=time(21, 1), end=time(23, 30)),
TimeRange(start=time(9, 1), end=time(10, 15)),
TimeRange(start=time(10, 31), end=time(11, 30)),
TimeRange(start=time(13, 31), end=time(15, 0)),
]
TRADING_PERIOD_DICT.update({
underlying_symbol: time_period2
for underlying_symbol in ["Y", "M", "A", "B", "P", "J", "JM", "I", "CF", "SR", "OI", "TA", "MA", "ZC", "FG", "RM", "CY"]
})
# | Commodity futures CU, AL, ZN, PB, SN, NI | 21:01~1:00, 09:01~10:15, 10:31~11:30, 13:31~15:00 |
time_period3 = [
TimeRange(start=time(21, 1), end=time(23, 59)),
TimeRange(start=time(0, 0), end=time(1, 0)),
TimeRange(start=time(9, 1), end=time(10, 15)),
TimeRange(start=time(10, 31), end=time(11, 30)),
TimeRange(start=time(13, 31), end=time(15, 0)),
]
TRADING_PERIOD_DICT.update(
{underlying_symbol: time_period3
for underlying_symbol in ["CU", "AL", "ZN", "PB", "SN", "NI"]})
# | Commodity futures RB, HC, BU, RU | 21:01~23:00, 09:01~10:15, 10:31~11:30, 13:31~15:00 |
time_period4 = [
TimeRange(start=time(21, 1), end=time(23, 0)),
TimeRange(start=time(9, 1), end=time(10, 15)),
TimeRange(start=time(10, 31), end=time(11, 30)),
TimeRange(start=time(13, 31), end=time(15, 0)),
]
TRADING_PERIOD_DICT.update({underlying_symbol: time_period4 for underlying_symbol in ["RB", "HC", "BU", "RU"]})
# | Commodity futures AU, AG, SC | 21:01~2:30, 09:01~10:15, 10:31~11:30, 13:31~15:00 |
time_period5 = [
TimeRange(start=time(21, 1), end=time(23, 59)),
TimeRange(start=time(0, 0), end=time(2, 30)),
TimeRange(start=time(9, 1), end=time(10, 15)),
TimeRange(start=time(10, 31), end=time(11, 30)),
TimeRange(start=time(13, 31), end=time(15, 0)),
]
TRADING_PERIOD_DICT.update({underlying_symbol: time_period5 for underlying_symbol in ["AU", "AG", "SC"]})
# | Stock index futures product='Index' | 09:31~11:30, 13:01~15:00 |
time_period6 = [
TimeRange(start=time(9, 31), end=time(11, 30)),
TimeRange(start=time(13, 1), end=time(15, 0)),
]
TRADING_PERIOD_DICT.update({underlying_symbol: time_period6 for underlying_symbol in ["IF", "IH", "IC"]})
# | Government bond futures product='Government' | 09:16~11:30, 13:01~15:15 |
time_period7 = [
TimeRange(start=time(9, 16), end=time(11, 30)),
TimeRange(start=time(13, 1), end=time(15, 15)),
]
TRADING_PERIOD_DICT.update({underlying_symbol: time_period7 for underlying_symbol in ["T", "TF", "TS"]})
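# Illustrative helper (not part of the original module), assuming TimeRange
# exposes `start` and `end` datetime.time fields as used above: check whether
# a given time falls inside one of a symbol's registered trading periods.
def is_in_trading_period(underlying_symbol, t):
    return any(
        time_range.start <= t <= time_range.end
        for time_range in TRADING_PERIOD_DICT.get(underlying_symbol, [])
    )

# e.g. is_in_trading_period("CU", time(22, 30)) -> True (night session)
# e.g. is_in_trading_period("IF", time(22, 30)) -> False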
|
from django.apps import AppConfig
from django.db import connections as djcs
from django.core.exceptions import ImproperlyConfigured
class ExplorerAppConfig(AppConfig):
name = 'explorer'
def ready(self):
from explorer.schema import build_async_schemas
_validate_connections()
build_async_schemas()
def _get_default():
from explorer.app_settings import EXPLORER_DEFAULT_CONNECTION
return EXPLORER_DEFAULT_CONNECTION
def _get_explorer_connections():
from explorer.app_settings import EXPLORER_CONNECTIONS
return EXPLORER_CONNECTIONS
def _validate_connections():
# Validate connections
if _get_default() not in _get_explorer_connections().values():
raise ImproperlyConfigured(
'EXPLORER_DEFAULT_CONNECTION is %s, but that alias is not present in the values of EXPLORER_CONNECTIONS'
% _get_default())
for name, conn_name in _get_explorer_connections().items():
if conn_name not in djcs:
raise ImproperlyConfigured(
'EXPLORER_CONNECTIONS contains (%s, %s), but %s is not a valid Django DB connection.'
% (name, conn_name, conn_name))
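# Illustrative settings that satisfy the validation above (assumed values):
# the default alias must appear among the *values* of EXPLORER_CONNECTIONS,
# and every value must name a configured Django database connection.
#
# EXPLORER_CONNECTIONS = {'Default': 'default'}
# EXPLORER_DEFAULT_CONNECTION = 'default'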
|
# #beautifulsoup does not work with dynamically created sites
# import requests
# from bs4 import BeautifulSoup
# # The url to scrape added in tasks.py
# # URL = "https://www.opendatani.gov.uk/dataset?q=defib|AED|defibrilator"
#
#
# def scrape(url):
# # set the page and use the requests page on the url
# page = requests.get(url)
# # create a BeautifulSoup object to load and parse the page content
# soup = BeautifulSoup(page.content, "html.parser")
# # id="", class_=""
# results = soup.find(class_="dataset-resources unstyled")
# print(results)
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
def scrape(url):
options = webdriver.ChromeOptions()
    options.add_argument("--incognito")
browser = webdriver.Chrome(executable_path='./chromedriver', chrome_options=options)
browser.get(url)
timeout = 10
try:
WebDriverWait(browser, timeout).until(
EC.visibility_of_element_located(
(By.XPATH, "//ul[@class='dataset-resources unstyled']")
)
)
    except TimeoutException:
        print("Timed out")
        browser.quit()
        return
    # find_elements (plural) returns a list of matches we can iterate over
    data_elements = browser.find_elements_by_xpath("//ul[@class='dataset-resources unstyled']")
    for data_element in data_elements:
        defib_location = data_element.text
        print(defib_location)
|
import dateparser
from gazette.items import Gazette
from gazette.spiders.base import BaseGazetteSpider
class MgContagemSpider(BaseGazetteSpider):
TERRITORY_ID = "3118601"
name = "mg_contagem"
allowed_domains = ["contagem.mg.gov.br"]
start_urls = ["http://www.contagem.mg.gov.br/?se=doc"]
def parse(self, response):
"""
@url http://www.contagem.mg.gov.br/?se=doc&pagina=2
@returns items 15 15
@scrapes date file_urls is_extra_edition power
"""
anchor_elements = response.css(".texto11pt a")
urls = [
response.urljoin(url)
for url in anchor_elements.css("::attr(href)").re(".+pdf")
]
extra_editions = ["complementar" in url for url in urls]
dates_in_sentence = anchor_elements.css("p span:last-child ::text").re(
"(\d{1,2}\s+de\s+\w+\s+de\s+\d{4})"
)
dates = [
dateparser.parse(date, languages=["pt"]).date()
for date in dates_in_sentence
]
for url, date, is_extra_edition in zip(urls, dates, extra_editions):
yield Gazette(
date=date,
file_urls=[url],
is_extra_edition=is_extra_edition,
power="executive_legislature",
)
number_of_pages = int(
response.css("table.subtitulo12pt tr:first-child td ::text").extract()[-1]
)
for next_page in range(2, number_of_pages + 1):
next_page_url = f"{self.start_urls[0]}&pagina={next_page}"
yield response.follow(next_page_url, callback=self.parse)
|
"""Source code for distributed attentional actor architecture (DA3) model.
Author: Yoshinari Motokawa <yoshinari.moto@fuji.waseda.jp>
"""
from typing import List
import torch
from core.utils.logging import initialize_logging
from omegaconf import DictConfig
from torch import nn
from ..hard_shrink_attention import HardShrinkBlock
from ..vit import Block, PatchEmbed
logger = initialize_logging(__name__)
class DA3(nn.Module):
def __init__(self, config: DictConfig, input_shape: List[int], output_size: int):
super().__init__()
patched_size_x = input_shape[1] // config.model.patch_size
patched_size_y = input_shape[2] // config.model.patch_size
self.view_method = config.observation_area_mask
self.patch_embed = PatchEmbed(
patch_size=config.model.patch_size,
in_chans=input_shape[0],
embed_dim=config.model.embed_dim,
)
self.saliency_vector = nn.Parameter(torch.zeros(1, 1, config.model.embed_dim))
self.pos_embed = nn.Parameter(
torch.zeros(1, patched_size_x * patched_size_y + 1, config.model.embed_dim)
)
block = HardShrinkBlock if config.model.attention == "hard" else Block
self.blocks = nn.ModuleList(
[
block(
dim=config.model.embed_dim,
num_heads=config.model.num_heads,
mlp_ratio=config.model.mlp_ratio,
**{"af_lambd": config.model.af_lambd}
)
for _ in range(config.model.block_loop)
]
)
self.norm = nn.LayerNorm(config.model.embed_dim)
self.head = nn.Linear(config.model.embed_dim, output_size)
def forward(self, state):
x = self.state_encoder(state)
out = self.patch_embed(x)
saliency_vector = self.saliency_vector.expand(out.shape[0], -1, -1)
out = torch.cat((saliency_vector, out), dim=1)
out = out + self.pos_embed
for blk in self.blocks:
out = blk(out)
out = self.norm(out)
out = out[:, 0]
out = self.head(out)
return out
def forward_attn(self, state):
x = self.state_encoder(state)
out = self.patch_embed(x)
saliency_vector = self.saliency_vector.expand(out.shape[0], -1, -1)
out = torch.cat((saliency_vector, out), dim=1)
out = out + self.pos_embed
attns = list()
for blk in self.blocks:
out, attn = blk.forward_attn(out)
attns.append(attn.detach())
out = self.norm(out)
out = out[:, 0]
out = self.head(out)
return out, [attns]
def state_encoder(self, state):
return state[self.view_method]
|
import logging
import shelve
from ftplib import FTP
import requests
import requests_cache
from io import BytesIO
_cache_file_path = None
def set_cache_http(cache_file_path):
requests_cache.install_cache(cache_file_path)
def open_url(url):
return requests.get(url).text
def set_cache_ftp(cache_file_path):
global _cache_file_path
_cache_file_path = cache_file_path
def ftp_retrieve(server, path, filename):
logging.info('loading: ftp://%s/%s/%s' % (server, path, filename))
ftp = FTP(server)
ftp.login()
ftp.cwd(path)
buffer = BytesIO()
ftp.retrbinary('RETR %s' % filename, buffer.write)
return buffer
def download_ftp(server, path, filename, refresh_cache=False):
"""
TODO: drop shelve (too unstable) and use a simple filesystem implementation.
:param server:
:param path:
:param filename:
:param refresh_cache:
:return:
"""
if _cache_file_path:
with shelve.open(_cache_file_path) as url_cache:
location = '/'.join([server, path, filename])
if location not in url_cache or refresh_cache:
url_cache[location] = ftp_retrieve(server, path, filename)
try:
output = url_cache[location]
except KeyError:
del url_cache[location]
raise
except EOFError:
del url_cache[location]
raise
else:
output = ftp_retrieve(server, path, filename)
return output
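# Illustrative usage (server, path and filename are assumptions):
#
# set_cache_ftp('/tmp/ftp_cache')
# buffer = download_ftp('ftp.example.org', 'pub/data', 'readme.txt')
# print(buffer.getvalue()[:100])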
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) Megvii, Inc. and its affiliates.
import os
import torch.nn as nn
from yolox.exp import Exp as MyExp
class Exp(MyExp):
def __init__(self):
super(Exp, self).__init__()
self.depth = 0.33
self.width = 0.25
self.scale = (0.5, 1.5)
self.random_size = (10, 20)
self.test_size = (416, 416)
self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
self.enable_mixup = False
def get_model(self, sublinear=False):
def init_yolo(M):
for m in M.modules():
if isinstance(m, nn.BatchNorm2d):
m.eps = 1e-3
m.momentum = 0.03
if "model" not in self.__dict__:
from yolox.models import YOLOX, YOLOPAFPN, YOLOXHead
in_channels = [256, 512, 1024]
            # The NANO model uses depthwise=True, which is the main difference.
backbone = YOLOPAFPN(self.depth, self.width, in_channels=in_channels, depthwise=True)
head = YOLOXHead(self.num_classes, self.width, in_channels=in_channels, depthwise=True)
self.model = YOLOX(backbone, head)
self.model.apply(init_yolo)
self.model.head.initialize_biases(1e-2)
return self.model
|
import dsz
MENU_TEXT = 'Run shares commands'
def main():
dsz.ui.Echo('Running shares -list and shares -query...', dsz.GOOD)
dsz.control.echo.Off()
dsz.cmd.Run('background log shares -list', dsz.RUN_FLAG_RECORD)
dsz.cmd.Run('background log shares -query', dsz.RUN_FLAG_RECORD)
dsz.control.echo.On()
if (__name__ == '__main__'):
main()
|
from leapp.actors import Actor
from leapp.libraries.actor.library import remove_boot_files
from leapp.models import BootContent
from leapp.tags import IPUWorkflowTag, PreparationPhaseTag
class RemoveBootFiles(Actor):
"""
Remove Leapp provided initramfs from boot partition.
Since Leapp provided initramfs and kernel are already loaded into RAM at this phase, remove
them to have as little space requirements for boot partition as possible.
"""
name = 'remove_boot_files'
consumes = (BootContent,)
produces = ()
tags = (IPUWorkflowTag, PreparationPhaseTag)
def process(self):
remove_boot_files()
|
# Generated by Django 2.2.6 on 2019-10-13 23:29
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Core_sample',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('global_id', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
('name', models.CharField(max_length=50, verbose_name='Название')),
('deposit', models.PositiveIntegerField(verbose_name='Месторождение')),
('hole', models.PositiveIntegerField(verbose_name='Скважина')),
('top', models.FloatField(verbose_name='Вверх')),
('bottom', models.FloatField(verbose_name='Низ')),
('status', models.IntegerField(choices=[(1, 'notAnalysed'), (2, 'analysed'), (3, 'inProcess'), (4, 'error')], default=1, verbose_name='Статус')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Пользователь')),
],
options={
'verbose_name': 'Керн',
'verbose_name_plural': 'Керны',
},
),
migrations.CreateModel(
name='Fragment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dl_src', models.FilePathField(verbose_name='ДС изображение')),
('uv_src', models.FilePathField(verbose_name='УФ изображение')),
('top', models.FloatField(verbose_name='Вверх')),
('bottom', models.FloatField(verbose_name='Низ')),
('cs', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core_sample.Core_sample', verbose_name='Керн')),
],
options={
'verbose_name': 'Фрагмент керна',
'verbose_name_plural': 'Фрагменты керна',
},
),
]
|
"""Defines basic light string data and functions."""
import os
import sys
import atexit
import inspect
import time
import logging
from typing import Any, Optional, Sequence, Union, overload
from nptyping import NDArray
import numpy as np
from LightBerries.LightBerryExceptions import LightStringException
from LightBerries.RpiWS281xPatch import rpi_ws281x
from LightBerries.LightPixels import Pixel, PixelColors
LOGGER = logging.getLogger("LightBerries")
class LightString(Sequence[np.int_]):
"""Defines basic LED array data and functions."""
def __init__(
self,
ledCount: Optional[int] = None,
        pixelStrip: Optional[rpi_ws281x.PixelStrip] = None,
simulate: bool = False,
) -> None:
"""Creates a pixel array using the rpipixelStrip library and Pixels.
Args:
ledCount: the number of LEDs desired in the LightString
pixelStrip: the ws281x object that actually controls the LED signaling
            simulate: don't use GPIO (simulation mode)
Raises:
Warning: if something unexpected could happen
SystemExit: if exiting
KeyboardInterrupt: if user quits
LightStringException: if something bad happens
"""
        # can't run GPIO stuff without root; tell the user if they forgot
# linux check is just for debugging with fake GPIO on windows
if sys.platform == "linux" and not os.getuid() == 0: # pylint: disable = no-member
raise LightStringException(
"GPIO functionality requires root privilege. Please run command again as root"
)
# catch error cases first
if ledCount is None and pixelStrip is None and simulate is False:
raise LightStringException(
"Cannot create LightString object without ledCount or " + "pixelStrip object being specified"
)
# catch error cases first
# if ledCount is not None and pixelStrip is not None:
# raise Warning(
# "ledCount is overridden when pixelStrip is and ledcount "
# + "are both passed to LightString constructor"
# )
try:
self.simulate = simulate
# use passed led count if it is valid
if ledCount is not None:
self._ledCount = ledCount
# used passed pixel strip if it is not none
if pixelStrip is not None:
self.pixelStrip = pixelStrip
self.pixelStrip.begin()
self._ledCount = self.pixelStrip.numPixels()
LOGGER.debug(
"%s.%s Created WS281X object",
self.__class__.__name__,
inspect.stack()[0][3],
)
except SystemExit: # pylint:disable=try-except-raise
raise
except KeyboardInterrupt: # pylint:disable=try-except-raise
raise
except Exception as ex:
LOGGER.exception(
"%s.%s Exception: %s",
self.__class__.__name__,
inspect.stack()[0][3],
ex,
)
raise LightStringException(str(ex)).with_traceback(ex.__traceback__)
try:
# validate led count
if not isinstance(self._ledCount, int):
raise LightStringException(
f'Cannot create LightString object with LED count "{self._ledCount}"',
)
# if led count is good, create our pixel sequence
            self.rgbArray: NDArray[(3, Any), np.int32] = np.zeros((self._ledCount, 3), dtype=np.int32)
            self.rgbArray[:] = np.array([Pixel().array for _ in range(self._ledCount)])
LOGGER.debug(
"%s.%s Created Numpy Light array",
self.__class__.__name__,
inspect.stack()[0][3],
)
except SystemExit: # pylint:disable=try-except-raise
raise
except KeyboardInterrupt: # pylint:disable=try-except-raise
raise
except Exception as ex:
LOGGER.exception(
"%s.%s Exception: %s",
self.__class__.__name__,
inspect.stack()[0][3],
ex,
)
raise LightStringException(str(ex)).with_traceback(ex.__traceback__)
# try to force cleanup of underlying c objects when user exits
atexit.register(self.__del__)
def __del__(
self,
) -> None:
"""Properly disposes of the rpipixelStrip object.
Prevents memory leaks (hopefully) that were happening in the rpi.PixelStrip module.
Raises:
SystemExit: if exiting
KeyboardInterrupt: if user quits
LightStringException: if something bad happens
"""
# check if pixel strip has been created
if isinstance(self.pixelStrip, rpi_ws281x.PixelStrip):
# turn off leds
self.off()
# cleanup c memory usage
try:
self.pixelStrip._cleanup()
except SystemExit: # pylint:disable=try-except-raise
raise
except KeyboardInterrupt: # pylint:disable=try-except-raise
raise
except Exception as ex:
LOGGER.exception("Failed to clean up WS281X object: %s", str(ex))
raise LightStringException(str(ex)).with_traceback(ex.__traceback__)
def __len__(
self,
) -> int:
"""Return length of the light string (the number of LEDs).
Returns:
the number of LEDs in the array
"""
if self.rgbArray is not None:
return len(self.rgbArray)
else:
return 0
@overload
def __getitem__( # noqa D105
self,
idx: int,
) -> NDArray[(3,), np.int32]:
... # pylint: disable=pointless-statement
@overload
def __getitem__( # noqa D105 # pylint: disable=function-redefined
self,
s: slice,
) -> NDArray[(3, Any), np.int32]:
... # pylint: disable=pointless-statement
def __getitem__( # pylint: disable=function-redefined
self, key: Union[int, slice]
) -> Union[NDArray[(3,), np.int32], NDArray[(3, Any), np.int32]]:
"""Return a LED index or slice from LED array.
Args:
key: an index of a single LED, or a slice specifying a range of LEDs
Returns:
the LED value or values as requested
Raises:
SystemExit: if exiting
KeyboardInterrupt: if user quits
LightStringException: if something bad happens
"""
try:
if isinstance(self.rgbArray, np.ndarray):
                return self.rgbArray[key]
else:
raise LightStringException("Cannot index into uninitialized LightString object")
except SystemExit: # pylint:disable=try-except-raise
raise
except KeyboardInterrupt: # pylint:disable=try-except-raise
raise
except Exception as ex:
LOGGER.exception('Failed to get key "%s" from %s: %s', key, self.rgbArray, ex)
raise LightStringException(str(ex)).with_traceback(ex.__traceback__)
def __setitem__(
self,
key: Union[int, slice],
value: Union[NDArray[(3,), np.int32], NDArray[(3, Any), np.int32]],
) -> None:
"""Set LED value(s) in the array.
Args:
key: the index or slice specifying one or more LED indices
value: the RGB value or values to assign to the given LED indices
Raises:
SystemExit: if exiting
KeyboardInterrupt: if user quits
LightStringException: if something bad happens
"""
try:
if isinstance(self.rgbArray, np.ndarray):
if isinstance(key, slice):
if isinstance(value, np.ndarray):
self.rgbArray.__setitem__(key, value)
elif isinstance(value, Sequence):
self.rgbArray.__setitem__(key, [Pixel(v).array for v in value])
else:
raise LightStringException(
"Cannot assign multiple indices of LightString using a single value"
)
else:
if isinstance(value, np.ndarray):
self.rgbArray.__setitem__(key, value)
elif isinstance(value, Pixel):
self.rgbArray.__setitem__(key, Pixel(value).array)
else:
raise LightStringException(
"Cannot assign single index of LightString using multiple values"
)
else:
raise LightStringException("Cannot index into uninitialized LightString object")
except SystemExit: # pylint:disable=try-except-raise
raise
except KeyboardInterrupt: # pylint:disable=try-except-raise
raise
except Exception as ex:
LOGGER.exception("Failed to set light %s to value %s: %s", key, value, ex)
raise LightStringException(str(ex)).with_traceback(ex.__traceback__)
def __enter__(
self,
) -> "LightString":
"""Get an instance of this object object.
Returns:
an instance of LightString
"""
return self
def __exit__(
self,
*args,
) -> None:
"""Cleanup the instance of this object.
Args:
args: ignored
"""
self.__del__()
def off(
self,
) -> None:
"""Turn all of the LEDs in the LightString off.
Raises:
SystemExit: if exiting
KeyboardInterrupt: if user quits
LightStringException: if something bad happens
"""
for index in range(len(self.rgbArray)):
try:
self[index] = PixelColors.OFF.array
except SystemExit: # pylint:disable=try-except-raise
raise
except KeyboardInterrupt: # pylint:disable=try-except-raise
raise
except Exception as ex:
LOGGER.exception(
"Failed to set pixel %s in WS281X to value %s: %s",
index,
                    PixelColors.OFF.array,
ex,
)
raise LightStringException(str(ex)).with_traceback(ex.__traceback__)
self.refresh()
def refresh(
self,
) -> None:
"""Update the ws281x signal using the numpy array.
Raises:
SystemExit: if exiting
KeyboardInterrupt: if user quits
LightStringException: if something bad happens
"""
try:
# define callback for map method (fast iterator)
if self.simulate is False:
def SetPixel(irgb):
try:
i = irgb[0]
rgb = irgb[1]
value = (int(rgb[0]) << 16) + (int(rgb[1]) << 8) + int(rgb[2])
self.pixelStrip.setPixelColor(i, value)
except SystemExit: # pylint:disable=try-except-raise
raise
except KeyboardInterrupt: # pylint:disable=try-except-raise
raise
except Exception as ex:
LOGGER.exception(
"Failed to set pixel %d in WS281X to value %d: %s",
i,
value,
str(ex),
)
raise LightStringException(str(ex)).with_traceback(ex.__traceback__)
# copy this class's array into the ws281x array
if self.simulate is False:
list(
map(
SetPixel,
enumerate(self.rgbArray),
)
)
# send the signal out
self.pixelStrip.show()
except SystemExit: # pylint:disable=try-except-raise
raise
except KeyboardInterrupt: # pylint:disable=try-except-raise
raise
except Exception as ex:
LOGGER.exception('Function call "show" in WS281X object failed: %s', str(ex))
raise LightStringException(str(ex)).with_traceback(ex.__traceback__)
if __name__ == "__main__":
LOGGER.info("Running LightString")
# the number of pixels in the light string
PIXEL_COUNT = 100
# GPIO pin to use for PWM signal
GPIO_PWM_PIN = 18
# DMA channel
DMA_CHANNEL = 5
# frequency to run the PWM signal at
PWM_FREQUENCY = 800000
GAMMA = None
LED_STRIP_TYPE = None
INVERT = False
PWM_CHANNEL = 0
with LightString(
pixelStrip=rpi_ws281x.PixelStrip(
num=PIXEL_COUNT,
pin=GPIO_PWM_PIN,
dma=DMA_CHANNEL,
freq_hz=PWM_FREQUENCY,
channel=PWM_CHANNEL,
invert=INVERT,
gamma=GAMMA,
strip_type=LED_STRIP_TYPE,
),
) as liteStr:
liteStr.refresh()
p = Pixel((255, 0, 0))
liteStr[4] = PixelColors.RED
liteStr.refresh()
time.sleep(1)
|
# Generated by Django 2.0.5 on 2018-07-05 16:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('aventuras', '0012_auto_20180705_1244'),
]
operations = [
migrations.AddField(
model_name='evento',
name='ourEvent',
field=models.BooleanField(default=False),
),
]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Kamaelia.Chassis.Graphline import Graphline
from Kamaelia.Internet.TCPClient import TCPClient
from Kamaelia.Util.Console import ConsoleEchoer, ConsoleReader
from Kamaelia.Util.OneShot import OneShot
print """
This is a simple demonstration program that shows that it is possible to
build simple clients for manually connecting to SSL based sources - such
as HTTPS sources.
This program connects to the subversion server for Kamaelia on port
443 on sourceforge - i.e. on kamaelia.svn.sourceforge.net. Once you are
connected, the connection is encrypted, which means
you could type the following and get code back from the server:
GET /svnroot/kamaelia/trunk/Code/Python/Kamaelia/Examples/SimpleGraphicalApps/Ticker/Ulysses HTTP/1.0
Host: kamaelia.svn.sourceforge.net
That's pretty much the purpose of this example program.
"""
Graphline(
MAKESSL = OneShot(" make ssl "), # The actual message here is not necessary
CONSOLE = ConsoleReader(),
ECHO = ConsoleEchoer(),
CONNECTION = TCPClient("kamaelia.svn.sourceforge.net", 443),
linkages = {
("MAKESSL", "outbox"): ("CONNECTION", "makessl"),
("CONSOLE", "outbox"): ("CONNECTION", "inbox"),
("CONSOLE", "signal"): ("CONNECTION", "control"),
("CONNECTION", "outbox"): ("ECHO", "inbox"),
("CONNECTION", "signal"): ("ECHO", "control"),
}
).run()
|
# -*- encoding: utf-8 -*-
# pylint: disable=E0203,E1101,C0111
"""
@file
@brief Runtime operator.
"""
from scipy.linalg import solve
from ._op import OpRunBinaryNum
from ._new_ops import OperatorSchema
class Solve(OpRunBinaryNum):
atts = {'lower': False,
'transposed': False}
def __init__(self, onnx_node, desc=None, **options):
OpRunBinaryNum.__init__(self, onnx_node, desc=desc,
expected_attributes=Solve.atts,
**options)
def _find_custom_operator_schema(self, op_name):
if op_name == "Solve":
return SolveSchema()
raise RuntimeError( # pragma: no cover
"Unable to find a schema for operator '{}'.".format(op_name))
def _run(self, a, b): # pylint: disable=W0221
if self.inplaces.get(1, False):
return (solve(a, b, overwrite_b=True, lower=self.lower,
transposed=self.transposed), )
return (solve(a, b, lower=self.lower, transposed=self.transposed), )
def _infer_shapes(self, a, b): # pylint: disable=W0221
"""
Returns the shapes.
"""
return (b, )
def to_python(self, inputs):
return ('from scipy.linalg import solve',
"return solve({}, {}, lower={}, transposed={})".format(
inputs[0], inputs[1], self.lower, self.transposed))
class SolveSchema(OperatorSchema):
"""
Defines a schema for operators added in this package
such as @see cl TreeEnsembleClassifierDouble.
"""
def __init__(self):
OperatorSchema.__init__(self, 'Solve')
self.attributes = Solve.atts
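# A minimal sketch (illustrative values) of the computation this operator
# wraps: solving a linear system with scipy.linalg.solve, mirroring the
# 'lower' and 'transposed' attributes defined above.
if __name__ == "__main__":
    import numpy
    a = numpy.array([[3., 1.], [1., 2.]])
    b = numpy.array([9., 8.])
    print(solve(a, b, lower=False, transposed=False))  # -> [2. 3.]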
|
"""[Lambda Expressions]
Lambda expressions are simply another way to create functions anonymous functions
keyword \ parameter list optional
\ \ the : is required, even for zero arguments
\ \ / / this expression is evaluated and returned when the lambda function is called. (think of it as "the body" of the function)
lambda [parameter list]: expression
\
the expression returns a function object
that evaluates and returns the expression when it is called
Examples
from tkinter import Y
from unittest import FunctionTestCase
lambda x: x**2
lambda x, y: x + y
lambda : 'hello'
lambda s: s[::-1].upper()
type(lambda x: x**2) -> function
Note that these expressions are function objects, but are not "named"
-> anonymous Functions
lambdas, or anonymous functions, are NOT equivalent to closures
Assigning a Lambda to a Variable name
my_func = lambda x: x**2
type(my_func) -> fuunction
my_func(3) -> 9
my_func(4) -> 16
# identical to:
def my_func(x):
return x**2
type(my_func) -> function
my_func(3) -> 9
my_finc(4) -> 16
Passing as an argument to another function
"""
def apply_func(x, fn):
return fn(x)
# Using lambda
apply_func(3, lambda x: x**2) # 9
apply_func(2, lambda x: x + 5) # 7
apply_func('abc', lambda x: x[1:] * 3) # bcbcbc
# equivalently, using a named function (the name is illustrative):
def fn_square(x):
    return x**2

apply_func(3, fn_square)  # 9
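# A lambda can also be a closure when it captures a free variable, but the
# two concepts are independent (illustrative example):
def multiplier(n):
    return lambda x: x * n  # closure: captures n from the enclosing scope

double = multiplier(2)
double(5)  # 10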
|
from os.path import join,exists,realpath,dirname,basename
from os import makedirs,listdir, system
import numpy as np, _pickle as cPickle, editdistance, seaborn as sns
import matplotlib.pyplot as plt, pandas as pd, itertools, glob, h5py
from scipy.stats import entropy
from matplotlib.font_manager import FontProperties
from IPython.display import display
from collections import defaultdict
# from itertools import izip
from scipy.stats import ranksums
import multiprocessing as mp
from PIL import Image
import inception_score
rundir = 'cifar10/'
e = 100
def get_score(improved_keras_dir, t_n_epoch):
score = []
for i in range(t_n_epoch-9, t_n_epoch):
print(i)
# scorefile = join(improved_keras_dir, 'epoch_{}.score'.format(i))
# if not exists(scorefile):
datafile = join(improved_keras_dir, 'epoch_{}.pkl'.format(i))
if not exists(datafile):
break
with open(datafile, 'rb') as f:
sample = cPickle.load(f)
print(len(list(sample)))
t_score = inception_score.get_inception_score(list(sample), 1)[0]
# with open(scorefile, 'w') as f:
        # f.write('%f\n' % t_score)
# else:
# with open(scorefile) as f:
# t_score = float(f.readline())
score.append(t_score)
return max(score)
expt2plot = ['optimAdam_ratio1']
for expt in expt2plot:
score = get_score(join(rundir, expt), e)
print(expt, score)
|
from geco.mips.loading.miplib import *
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Vision Related User-defined Types:
- :py:class:`Image`
"""
from __future__ import annotations
from io import IOBase
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Union
from urllib.parse import urlparse
# Third-party libraries
import numpy as np
from PIL import Image as PILImage
# Rikai
from rikai.internal.uri_utils import normalize_uri
from rikai.io import copy
from rikai.mixin import Asset, Displayable, ToNumpy, ToPIL
from rikai.spark.types import ImageType
__all__ = ["Image"]
class Image(ToNumpy, ToPIL, Asset, Displayable):
"""An external Image Asset.
It contains a reference URI to an image stored on the remote system.
Parameters
----------
image : bytes, file-like object, str or :py:class:`~pathlib.Path`
        It can be the content of an image, or a URI / Path of an image.
"""
__UDT__ = ImageType()
def __init__(
self,
image: Union[bytes, bytearray, IOBase, str, Path],
):
data, uri = None, None
if isinstance(image, IOBase):
data = image.read()
elif isinstance(image, (bytes, bytearray)):
data = image
else:
uri = image
super().__init__(data=data, uri=uri)
@classmethod
def from_array(
cls,
array: np.ndarray,
uri: Union[str, Path],
mode: str = None,
format: str = None,
**kwargs,
) -> Image:
"""Create an image in memory from numpy array.
Parameters
----------
array : np.ndarray
Array data
uri : str or Path
The external URI to store the data.
mode : str, optional
The mode which PIL used to create image. See supported
`modes on PIL document <https://pillow.readthedocs.io/en/stable/handbook/concepts.html#concept-modes>`_.
format : str, optional
The image format to save as. See
`supported formats <https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.Image.save>`_ for details.
kwargs : dict, optional
Optional arguments to pass to `PIL.Image.save <https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.Image.save>`_.
See Also
--------
:py:class:`PIL.Image.fromarray`
:py:func:`~rikai.spark.functions.vision.numpy_to_image`
""" # noqa: E501
assert array is not None
img = PILImage.fromarray(array, mode=mode)
return cls.from_pil(img, uri, format=format, **kwargs)
@staticmethod
def from_pil(
img: PILImage, uri: Union[str, Path], format: str = None, **kwargs
) -> Image:
"""Create an image in memory from a :py:class:`PIL.Image`.
Parameters
----------
img : :py:class:`PIL.Image`
            A PIL Image instance
uri : str or Path
The URI to store the image externally.
format : str, optional
The image format to save as. See
`supported formats <https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.Image.save>`_ for details.
kwargs : dict, optional
Optional arguments to pass to `PIL.Image.save <https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.Image.save>`_.
""" # noqa: E501
parsed = urlparse(normalize_uri(uri))
if parsed.scheme == "file":
img.save(uri, format=format, **kwargs)
else:
with NamedTemporaryFile() as fobj:
img.save(fobj, format=format, **kwargs)
fobj.flush()
copy(fobj.name, uri)
return Image(uri)
def display(self, **kwargs):
"""
Custom visualizer for this image in jupyter notebook
Parameters
----------
kwargs: dict
Optional display arguments
Returns
-------
img: IPython.display.Image
"""
from IPython.display import Image
with self.open() as fobj:
return Image(fobj.read(), **kwargs)
def __repr__(self) -> str:
return f"Image(uri={self.uri})"
def _repr_html_(self):
"""Default visualizer for remote ref (or local ref under cwd)"""
return self.display()._repr_html_()
def _repr_mimebundle_(self, include=None, exclude=None):
"""default visualizer for embedded mime bundle"""
return self.display()._repr_mimebundle_(
include=include, exclude=exclude
)
def _repr_jpeg_(self):
"""default visualizer for embedded jpeg"""
return self.display()._repr_jpeg_()
def _repr_png_(self):
"""default visualizer for embedded png"""
return self.display()._repr_png_()
def __eq__(self, other) -> bool:
return isinstance(other, Image) and super().__eq__(other)
def to_pil(self) -> PILImage:
"""Return an PIL image.
Note
----
The caller should close the image.
https://pillow.readthedocs.io/en/stable/reference/open_files.html#image-lifecycle
"""
return PILImage.open(self.open())
def to_numpy(self) -> np.ndarray:
"""Convert this image into an :py:class:`numpy.ndarray`."""
with self.to_pil() as pil_img:
return np.asarray(pil_img)
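# Illustrative usage (the URI below is an assumption; it must be writable):
#
# arr = np.zeros((4, 4, 3), dtype=np.uint8)
# img = Image.from_array(arr, "/tmp/black.png", format="PNG")
# img.to_numpy().shape  # -> (4, 4, 3)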
|
#------------------------------------------------------------------------------
# Copyright (c) 2008 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Model view menus, menu items and toolbars. """
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from os.path import join, dirname
from enthought.pyface.api import ImageResource
from enthought.traits.ui.menu import MenuBar, ToolBar, Menu, Action
#------------------------------------------------------------------------------
# File actions:
#------------------------------------------------------------------------------
new_action = Action(name="&New", accelerator="Ctrl+N", action="new_model",
image=ImageResource("new"), tooltip="New (Ctrl+N)")
open_action = Action(name="&Open", accelerator="Ctrl+O", action="open_file",
image=ImageResource("open"), tooltip="Open (Ctrl+O)")
save_action = Action(name="&Save", accelerator="Ctrl+S",
action="save", image=ImageResource("save"), tooltip="Save (Ctrl+S)")
save_as_action = Action(name="Save &As", accelerator="Ctrl+Shift+S",
action="save_as", image=ImageResource("save"),
tooltip="Save As (Ctrl+Shift+S)")
# Action to revert all changes.
revert_action = Action(name="Revert", action="_on_revert",
defined_when="ui.history is not None", enabled_when="ui.history.can_undo")
# Action to close the view window.
close_action = Action(name="E&xit", accelerator="Alt+X", action="on_exit",
image=ImageResource("exit"), tooltip="Exit (Alt+X)")
#------------------------------------------------------------------------------
# Edit actions:
#------------------------------------------------------------------------------
# Action to undo last change.
undo_action = Action(name="Undo", action="_on_undo", accelerator="Ctrl+Z",
defined_when="ui.history is not None", enabled_when="ui.history.can_undo",
# image=ImageResource("undo"),
tooltip="Undo (Ctrl+Z)")
# Action to redo last undo.
redo_action = Action(name="Redo", action="_on_redo", accelerator="Ctrl+Y",
defined_when="ui.history is not None", enabled_when="ui.history.can_redo",
# image=ImageResource("redo.png"),
tooltip="Redo (Ctrl+Y)")
options_action = Action(name="Prefere&nces", action="godot_options")
#------------------------------------------------------------------------------
# View actions:
#------------------------------------------------------------------------------
tree_view_action = Action(
name="Tree", accelerator="Ctrl+T", action="toggle_tree",
tooltip="Tree view (Ctrl+T)", #image=ImageResource("tree"),
style="toggle", checked=True
)
configure_graph_action = Action(name="&Graph Attributes",
accelerator="Ctrl+G",
action="configure_graph", image=ImageResource("graph"),
tooltip="Graph Attributes (Ctrl+G)")
configure_nodes_action = Action(name="&Node Table",
accelerator="Ctrl+Shift+N",
action="configure_nodes", image=ImageResource("node"),
tooltip="Nodes (Ctrl+Shift+N)")
configure_edges_action = Action(name="&Edge Table",
accelerator="Ctrl+Shift+E",
action="configure_edges", image=ImageResource("edge"),
tooltip="Edges (Ctrl+Shift+E)")
configure_dot_code_action = Action(name="&Dot Editor", accelerator="Ctrl+D",
action="configure_dot_code", image=ImageResource("graph"),
tooltip="Dot Editor (Ctrl+D)")
#------------------------------------------------------------------------------
# Graph actions:
#------------------------------------------------------------------------------
node_action = Action(name="&Node", accelerator="Alt+N", action="add_node",
image=ImageResource("node"), tooltip="Node (Alt+N)")
edge_action = Action(name="&Edge", accelerator="Alt+E", action="add_edge",
image=ImageResource("edge"), tooltip="Edge (Alt+E)")
subgraph_action = Action(name="&Subgraph", accelerator="Alt+S",
action="add_subgraph", image=ImageResource("subgraph"),
tooltip="Subgraph (Alt+S)")
cluster_action = Action(name="&Cluster", accelerator="Alt+C",
action="add_cluster", image=ImageResource("cluster"),
tooltip="Cluster (Alt+C)")
#------------------------------------------------------------------------------
# Help actions:
#------------------------------------------------------------------------------
# Action to show help for the graph.
help_action = Action(name="Help", action="show_help",
image=ImageResource("help.png"), tooltip="Help")
about_action = Action(name="About Godot", action="about_godot",
image=ImageResource("about"), tooltip="About Godot")
#------------------------------------------------------------------------------
# Menus:
#------------------------------------------------------------------------------
file_menu = Menu(
"|", # Hack suggested by Brennan Williams to achieve correct ordering
new_action, open_action, "_",
save_action, save_as_action, revert_action, "_",
close_action, name="&File"
)
edit_menu = Menu("|", undo_action, redo_action, "_", options_action,
name="&Edit")
view_menu = Menu("|", tree_view_action, "_", configure_graph_action,
configure_nodes_action, configure_edges_action, configure_dot_code_action,
name="&View")
graph_menu = Menu("|", node_action, edge_action, subgraph_action,
cluster_action, name="&Graph")
help_menu = Menu("|", #help_action, "_",
about_action, name="&Help")
menubar = MenuBar(file_menu, edit_menu, view_menu, graph_menu, help_menu)
#------------------------------------------------------------------------------
# Godot "ToolBar" instance:
#------------------------------------------------------------------------------
toolbar = ToolBar(
"|", #close_action, "_",
new_action, open_action, save_action, save_as_action, "_",
undo_action, redo_action, "_",
node_action, edge_action,
configure_graph_action,
configure_nodes_action,
configure_edges_action,
show_tool_names=False, #show_divider=False
)
# EOF -------------------------------------------------------------------------
|
# Copyright 2022 The BladeDISC Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import unittest
import torch
from torch.nn import functional as F
from torch_blade import tensorrt
from torch_blade import utils
from torch_blade import tools
from torch_blade import Config
from torch_blade.logging import logger
from torch_blade.testing.common_utils import Feedforward, TestCase
from tests.tensorrt import skipIfNoTensorRT
from torch_blade.onnx_backends.backend_testbed import OnnxBackendChecker
@skipIfNoTensorRT()
class TestTensorRTSupportInfo(TestCase):
def test_support_info(self):
input = torch.ones([10, 10]).cuda()
net = Feedforward(10, 10)
net.eval().cuda()
module = torch.jit.trace(net, input)
module = tools.freeze_module(module._c, disableShapePeephole=False)
graph = module.forward.graph
unsupported = tensorrt.get_unsupported_nodes(graph)
self.assertEqual(len(unsupported), 0)
def test_empty_onnx_export(self):
class Model(torch.nn.Module):
def __init__(self):
super(Model, self).__init__()
self.linear = torch.nn.Linear(3, 4)
self.dropout = torch.nn.Dropout(p=0.8)
def forward(self, x):
x = self.linear(x)
x = self.dropout(x)
return x.contiguous().detach()
model = Model().cuda().eval()
module = torch.jit.trace(model, torch.ones([2, 3]).cuda())
module = tools.freeze_module(module._c, disableShapePeephole=False)
graph = module.forward.graph
unsupported = tensorrt.get_unsupported_nodes(graph)
self.assertEqual(len(unsupported), 0)
def test_inplace_safety(self):
class BasicBlock(torch.nn.Module):
def __init__(self):
super(BasicBlock, self).__init__()
self.conv1 = torch.nn.Conv2d(3, 10, kernel_size=3, padding=1)
self.conv2 = torch.nn.Conv2d(10, 3, kernel_size=3, padding=1)
self.conv3 = torch.nn.Conv2d(3, 3, kernel_size=3, padding=1)
self.bnorm = torch.nn.BatchNorm2d(3)
def forward_inplace(self, x):
out = self.conv1(x)
# this inplace bias is supported
out += 1
# this inplace relu_ is supported
out = F.relu_(out)
out = self.conv2(out)
# this inplace relu_ is supported
out = F.relu_(out)
shortcut = out
# this inplace add_ is supported
out += shortcut
shortcut = out
out = self.conv3(out)
out = self.bnorm(out)
# this inplace add_ is supported
out += shortcut
out1 = out[:, :1, :, :]
out2 = out[:, 1:, :, :]
out1 = F.relu_(out1)
out2 = F.relu_(out2)
out[:, :1, :, :] = out1
out[:, 1:, :, :] = out2
return out
def forward_no_inplace(self, x):
out = self.conv1(x)
out = out + 1
out = F.relu(out)
out = self.conv2(out)
out = F.relu(out)
shortcut = out
out = out + shortcut
shortcut = out
out = self.conv3(out)
out = self.bnorm(out)
out = out + shortcut
out = F.relu(out)
return out
class Model(torch.nn.Module):
def __init__(self):
super(Model, self).__init__()
self.block1 = BasicBlock()
self.block2 = BasicBlock()
def forward(self, x):
out1 = self.block1.forward_inplace(x)
out1 = self.block2.forward_inplace(out1)
out2 = self.block1.forward_no_inplace(x)
out2 = self.block2.forward_no_inplace(out2)
return out1, out2
model = Model()
model.eval()
model.cuda()
batch = torch.ones([1, 3, 224, 224])
batch = batch.cuda()
out1, out2 = model(batch)
self.assertEqual(out1, out2)
traced_model = torch.jit.trace(model, batch)
frozen_module = tools.freeze_module(traced_model._c, disableShapePeephole=False)
graph = frozen_module.forward.graph
ops_counter = utils.list_ops_count(graph)
unspt_counter = collections.Counter()
unsupported = tensorrt.get_unsupported_nodes(graph)
for node in unsupported:
unspt_counter[node.kind()] += 1
self.assertEqual(ops_counter["aten::slice"], unspt_counter["aten::slice"])
self.assertEqual(ops_counter["aten::view"], unspt_counter["aten::view"])
self.assertEqual(ops_counter["aten::copy_"], unspt_counter["aten::copy_"])
self.assertEqual(ops_counter["aten::expand"], unspt_counter["aten::expand"])
self.assertEqual(unspt_counter["aten::relu_"], 4)
logger.info(ops_counter)
logger.info(unspt_counter)
self.assertEqual(unspt_counter["aten::add_"], 0)
def test_inplace_safety_another(self):
def op(x):
return x + 1
def op_(x):
x -= 1
return x
def _count_unsupported(unspt):
unspt_counter = collections.Counter()
for node in unspt:
unspt_counter[node.kind()] += 1
return unspt_counter
def _count_graph(graph):
unsupported = tensorrt.get_unsupported_nodes(graph, ignore_device=True)
return _count_unsupported(unsupported)
def _count_model(model):
model.eval().cuda()
input = torch.zeros([4]).cuda()
output = model(input)
traced_module = torch.jit.trace(model, (input,))
graph = traced_module.graph
return _count_graph(graph)
class Model1(torch.nn.Module):
"""
            Within this model, torch.jit.trace will produce a graph like:
                %2 : Float = aten::add(%1, some_constant)
                %3 : Float = aten::sub_(%2, some_constant)
                %4 : Float = aten::add(%3, some_constant)
            The input of the third node is %3 instead of %2, which is not consistent with the definition of the
            corresponding nn.Module. So the inplace node aten::sub_ is the last consumer of its inputs, which makes it
            inplace-safe, and therefore all the nodes in this graph are inplace-safe.
            The same phenomenon occurs in Model2. So we manually add two graphs that have 'correct' topology structures
            matching the corresponding nn.Modules (i.e. Model1 and Model2) and use them as UTs.
"""
def forward(self, x):
x1 = op(x)
x2 = op_(x1)
x3 = op(x1)
return x3
class Model2(torch.nn.Module):
def forward(self, x):
x1 = op(x)
x2 = op_(x1) # support
x3 = op_(x2) # support
x4 = op(x3)
x5 = op_(x3) # not support
x6 = op_(x5) # not support
x7 = op(x3)
return x7
unspt_counter = _count_model(Model1())
self.assertEqual(unspt_counter["aten::sub_"], 0)
unspt_counter = _count_model(Model2())
self.assertEqual(unspt_counter["aten::sub_"], 0)
if utils.torch_version_number() >= utils.parse_version("1.8.1"):
graph1 = torch.parse_ir(
"""
graph( %x.1 : Float(4)):
%1 : int = prim::Constant[value=1]()
%2 : Float(4) = aten::add(%x.1, %1, %1)
%3 : int = prim::Constant[value=1]()
%4 : Float(4) = aten::sub_(%2, %3, %3)
%5 : int = prim::Constant[value=1]()
%6 : Float(4) = aten::add(%2, %5, %5)
return (%6)
"""
)
graph2 = torch.parse_ir(
"""
graph( %x.1 : Float(4)):
%1 : int = prim::Constant[value=1]()
%2 : Float(4) = aten::add(%x.1, %1, %1)
%3 : int = prim::Constant[value=1]()
%4 : Float(4) = aten::sub_(%2, %3, %3)
%5 : int = prim::Constant[value=1]()
%6 : Float(4) = aten::sub_(%4, %5, %5)
%7 : int = prim::Constant[value=1]()
%8 : Float(4) = aten::add(%6, %7, %7)
%9 : int = prim::Constant[value=1]()
%10 : Float(4) = aten::sub_(%6, %9, %9)
%11 : int = prim::Constant[value=1]()
%12 : Float(4) = aten::sub_(%10, %11, %11)
%13 : int = prim::Constant[value=1]()
%14 : Float(4) = aten::add(%6, %13, %13)
return (%14)
"""
)
else:
graph1 = torch.parse_ir(
"""
graph( %x.1 : Float(4:1)):
%1 : int = prim::Constant[value=1]()
%2 : Float(4:1) = aten::add(%x.1, %1, %1)
%3 : int = prim::Constant[value=1]()
%4 : Float(4:1) = aten::sub_(%2, %3, %3)
%5 : int = prim::Constant[value=1]()
%6 : Float(4:1) = aten::add(%2, %5, %5)
return (%6)
"""
)
graph2 = torch.parse_ir(
"""
graph( %x.1 : Float(4:1)):
%1 : int = prim::Constant[value=1]()
%2 : Float(4:1) = aten::add(%x.1, %1, %1)
%3 : int = prim::Constant[value=1]()
%4 : Float(4:1) = aten::sub_(%2, %3, %3)
%5 : int = prim::Constant[value=1]()
%6 : Float(4:1) = aten::sub_(%4, %5, %5)
%7 : int = prim::Constant[value=1]()
%8 : Float(4:1) = aten::add(%6, %7, %7)
%9 : int = prim::Constant[value=1]()
%10 : Float(4:1) = aten::sub_(%6, %9, %9)
%11 : int = prim::Constant[value=1]()
%12 : Float(4:1) = aten::sub_(%10, %11, %11)
%13 : int = prim::Constant[value=1]()
%14 : Float(4:1) = aten::add(%6, %13, %13)
return (%14)
"""
)
unspt_counter = _count_graph(graph1)
self.assertEqual(unspt_counter["aten::sub_"], 1)
unspt_counter = _count_graph(graph2)
self.assertEqual(unspt_counter["aten::sub_"], 2)
def test_graph_input_inplace_safe(self):
class Model(torch.nn.Module):
def forward(self, x):
return F.relu_(x)
batch = torch.Tensor([1, -1, 1, -1])
batch = batch.cuda()
model = Model().eval().cuda()
traced_model = torch.jit.trace(model, batch)
self.assertEqual(batch, torch.Tensor([1, 0, 1, 0]))
frozen_module = torch._C._freeze_module(traced_model._c)
graph = frozen_module.forward.graph
unspt_counter = collections.Counter()
unsupported = tensorrt.get_unsupported_nodes(graph)
for node in unsupported:
unspt_counter[node.kind()] += 1
self.assertEqual(unspt_counter["aten::relu_"], 1)
def test_view_kinds_0(self):
if utils.torch_version_number() >= utils.parse_version("1.8.1"):
graph = torch.parse_ir(
"""
graph( %x.1 : Float(1, 1, 1)):
%1 : int = prim::Constant[value=0]()
%2 : int = prim::Constant[value=1]()
%3 : Float(1, 1) = aten::select(%x.1, %1, %2)
%4 : int = prim::Constant[value=0]()
%5 : int = prim::Constant[value=1]()
%6 : Float(1) = aten::select(%3, %4, %5)
%7 : int = prim::Constant[value=1]()
%8 : int = prim::Constant[value=1]()
%9 : Float(1) = aten::add(%6, %7, %8)
return (%9)
"""
)
else:
graph = torch.parse_ir(
"""
graph( %x.1 : Float(1:1, 1:1, 1:1)):
%1 : int = prim::Constant[value=0]()
%2 : int = prim::Constant[value=1]()
%3 : Float(1:1, 1:1) = aten::select(%x.1, %1, %2)
%4 : int = prim::Constant[value=0]()
%5 : int = prim::Constant[value=1]()
%6 : Float(1:1) = aten::select(%3, %4, %5)
%7 : int = prim::Constant[value=1]()
%8 : int = prim::Constant[value=1]()
%9 : Float(1:1) = aten::add(%6, %7, %8)
return (%9)
"""
)
unsupported = tensorrt.get_unsupported_nodes(graph, True)
self.assertEqual(len(unsupported), 0)
def test_view_kinds_1(self):
if utils.torch_version_number() >= utils.parse_version("1.8.1"):
graph = torch.parse_ir(
"""
graph( %x.1 : Float(1, 1, 1)):
%1 : int = prim::Constant[value=0]()
%2 : int = prim::Constant[value=1]()
%3 : Float(1, 1) = aten::select(%x.1, %1, %2)
%4 : int = prim::Constant[value=0]()
%5 : int = prim::Constant[value=1]()
%6 : Float(1) = aten::select(%3, %4, %5)
%7 : int = prim::Constant[value=1]()
%8 : int = prim::Constant[value=1]()
%9 : Float(1) = aten::add_(%6, %7, %8)
return (%9)
"""
)
else:
graph = torch.parse_ir(
"""
graph( %x.1 : Float(1:1, 1:1, 1:1)):
%1 : int = prim::Constant[value=0]()
%2 : int = prim::Constant[value=1]()
%3 : Float(1:1, 1:1) = aten::select(%x.1, %1, %2)
%4 : int = prim::Constant[value=0]()
%5 : int = prim::Constant[value=1]()
%6 : Float(1:1) = aten::select(%3, %4, %5)
%7 : int = prim::Constant[value=1]()
%8 : int = prim::Constant[value=1]()
%9 : Float(1:1) = aten::add_(%6, %7, %8)
return (%9)
"""
)
unsupported = tensorrt.get_unsupported_nodes(graph, True)
self.assertEqual(len(unsupported), 3)
def test_view_kinds_2(self):
if utils.torch_version_number() >= utils.parse_version("1.8.1"):
graph = torch.parse_ir(
"""
graph( %x.1 : Float(1, 1, 1)):
%1 : int = prim::Constant[value=0]()
%2 : int = prim::Constant[value=1]()
%3 : Float(1, 1) = aten::select(%x.1, %1, %2)
%4 : int = prim::Constant[value=0]()
%5 : int = prim::Constant[value=1]()
%6 : Float(1, 1) = aten::add_(%3, %4, %5)
%7 : int = prim::Constant[value=1]()
%8 : int = prim::Constant[value=1]()
%9 : Float(1) = aten::select(%3, %7, %8)
return (%9)
"""
)
else:
graph = torch.parse_ir(
"""
graph( %x.1 : Float(1:1, 1:1, 1:1)):
%1 : int = prim::Constant[value=0]()
%2 : int = prim::Constant[value=1]()
%3 : Float(1:1, 1:1) = aten::select(%x.1, %1, %2)
%4 : int = prim::Constant[value=0]()
%5 : int = prim::Constant[value=1]()
%6 : Float(1:1, 1:1) = aten::add_(%3, %4, %5)
%7 : int = prim::Constant[value=1]()
%8 : int = prim::Constant[value=1]()
%9 : Float(1:1) = aten::select(%3, %7, %8)
return (%9)
"""
)
unsupported = tensorrt.get_unsupported_nodes(graph, True)
self.assertEqual(len(unsupported), 3)
# NOTE: this unsupported set length should be 3 (two aten::select and one aten::add_)
        # However, due to a flaw in the inplace safety check algorithm, aten::add_ is excluded
        # from the set.
# todo: fix this error.
# graph = torch.parse_ir(
# '''
# graph( %x.1 : Float(1:1, 1:1, 1:1)):
# %1 : int = prim::Constant[value=0]()
# %2 : int = prim::Constant[value=1]()
# %3 : Float(1:1, 1:1, 1:1) = aten::add(%x.1, %1, %2)
# %4 : int = prim::Constant[value=0]()
# %5 : int = prim::Constant[value=1]()
# %6 : Float(1:1, 1:1) = aten::select(%3, %4, %5)
# %7 : Float(1:1, 1:1) = aten::add_(%3, %4, %5)
# %8 : int = prim::Constant[value=1]()
# %9 : int = prim::Constant[value=1]()
# %10 : Float(1:1) = aten::select(%6, %8, %9)
# return (%9)
# '''
# )
# unsupported = tensorrt.get_unsupported_nodes(graph, True)
# self.assertEqual(len(unsupported), 2)
@skipIfNoTensorRT()
class TestManRules(TestCase):
def _make_check(self, graph, target):
checker = OnnxBackendChecker(graph, tensorrt.is_onnx2trt_supported, "TensorRT")
is_supported = checker()
self.assertEqual(is_supported, target)
def test_aten_mul(self):
graph = torch.parse_ir(
"""
graph(%0 : int[]):
%1 : int = prim::Constant[value=1]()
%3 : int = aten::mul(%0, %1)
return (%3)
"""
)
self._make_check(graph, False)
def test_aten_add(self):
graph = torch.parse_ir(
"""
graph(%0 : int[], %1 : int[]):
%2 : int[] = aten::add(%0, %1)
return (%2)
"""
)
self._make_check(graph, False)
def test_aten_eq(self):
graph = torch.parse_ir(
"""
graph(%0 : int[]):
%1 : int = prim::Constant[value=1]()
%2 : int[] = prim::ListConstruct(%1)
%3 : bool = aten::eq(%0, %2)
return (%3)
"""
)
self._make_check(graph, False)
def test_const_fold_before_export(self):
if utils.torch_version_number() >= utils.parse_version("1.8.1"):
graph = torch.parse_ir(
"""
graph(%input0.2 : Float(1, 512, 18, 18, requires_grad=0, device=cuda:0)):
%1 : None = prim::Constant() # :0:0
%2 : bool = prim::Constant[value=1]()
%3 : float[] = prim::Constant[value=[2., 2.]]()
%x1.3 : Float(1, 512, 36, 36, requires_grad=0, device=cuda:0) = aten::upsample_bilinear2d(%input0.2, %1, %2, %3)
return (%x1.3)
"""
)
else:
graph = torch.parse_ir(
"""
graph(%input0.2 : Float(1:165888, 512:324, 18:18, 18:1, requires_grad=0, device=cuda:0)):
%1 : None = prim::Constant() # :0:0
%2 : bool = prim::Constant[value=1]()
%3 : float[] = prim::Constant[value=[2., 2.]]()
%x1.3 : Float(1:663552, 512:1296, 36:36, 36:1, requires_grad=0, device=cuda:0) = aten::upsample_bilinear2d(%input0.2, %1, %2, %3)
return (%x1.3)
"""
)
cfg = Config.get_current_context_or_new().clone()
cfg.customize_onnx_opset_version = 11
with cfg:
self._make_check(graph, True)
def test_scalar_input_on_graph(self):
if utils.torch_version_number() >= utils.parse_version("1.8.1"):
graph = torch.parse_ir(
"""
graph(%x.3 : Float(1, 64, 1, 1, requires_grad=0, device=cuda:0),
%1 : int):
%2 : int = prim::Constant[value=-1]()
%3 : int[] = prim::ListConstruct(%1, %2)
%input.14 : Float(1, 64, requires_grad=0, device=cuda:0) = aten::view(%x.3, %3)
return (%input.14)
"""
)
else:
graph = torch.parse_ir(
"""
graph(%x.3 : Float(1:64, 64:1, 1:1, 1:1, requires_grad=0, device=cuda:0),
%1 : int):
%2 : int = prim::Constant[value=-1]()
%3 : int[] = prim::ListConstruct(%1, %2)
%input.14 : Float(1:64, 64:1, requires_grad=0, device=cuda:0) = aten::view(%x.3, %3)
return (%input.14)
"""
)
self._make_check(graph, True)
if __name__ == "__main__":
unittest.main()
|
"""Collectors to crawl free IP proxies from the internet
"""
|
# Dealing with unexpected results: exception handling is
# great for writing complex, robust programs.
try:
    print(a)  # raises an exception because `a` is not defined
except:
    print("a is not defined")
# Instead of crashing the program, we can ask Python
# to tell us what the problem is.
try:
    print(a)
except NameError:  # if this is the error...
    print("a still isn't defined")
except:  # if not...
    print("Something else is wrong")
print(a)  # this unguarded line will not work and will
          # BREAK the program
|
# Copyright 2020 The PEGASUS Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Summarization params of baseline models for downstream datasets."""
import functools
from pegasus.data import parsers
from pegasus.eval import estimator_metrics
from pegasus.eval import text_eval
from pegasus.models import transformer
from pegasus.ops import public_parsing_ops
from pegasus.params import pegasus_params
from pegasus.params import registry
from tensorflow.contrib import training as contrib_training
def transformer_params(patterns, param_overrides):
"""Params for TransformerEncoderDecoderMLModel.
Args:
patterns: a dict include train_pattern, dev_pattern, test_pattern
param_overrides: a string, comma separated list of name=value
Returns:
A instance of HParams
"""
hparams = contrib_training.HParams(
train_pattern=patterns["train_pattern"],
dev_pattern=patterns["dev_pattern"],
test_pattern=patterns["test_pattern"],
vocab_filename="pegasus/ops/testdata/sp_test.model",
encoder_type="sentencepiece_newline",
length_bucket_size=0,
add_task_id=False,
batch_size=patterns["batch_size"],
max_input_len=patterns["max_input_len"],
max_target_len=patterns["max_output_len"],
max_decode_len=patterns["max_output_len"],
hidden_size=768,
filter_size=3072,
num_heads=12,
num_encoder_layers=12,
num_decoder_layers=12,
beam_size=1,
beam_start=5,
beam_alpha=0.6,
beam_min=0,
beam_max=-1,
temperature=0.0,
top_k=0,
top_p=0.0,
optimizer_name="adafactor",
train_steps=patterns["train_steps"],
learning_rate=patterns["learning_rate"],
label_smoothing=0.0,
dropout=0.1,
eval_max_predictions=patterns.get("eval_steps", 1000),
use_bfloat16=False,
model=None,
parser=None,
encoder=None,
estimator_prediction_fn=None,
eval=None,
estimator_eval_metrics_fn=estimator_metrics.gen_eval_metrics_fn,
)
if param_overrides:
hparams.parse(param_overrides)
hparams.parser = functools.partial(
parsers.supervised_strings_parser,
hparams.vocab_filename,
hparams.encoder_type,
hparams.max_input_len,
hparams.max_target_len,
length_bucket_size=hparams.length_bucket_size,
length_bucket_start_id=pegasus_params.LENGTH_BUCKET_START_ID,
length_bucket_max_id=pegasus_params.TASK_START_ID - 1,
add_task_id=hparams.add_task_id,
task_start_id=pegasus_params.TASK_START_ID)
hparams.encoder = public_parsing_ops.create_text_encoder(
hparams.encoder_type, hparams.vocab_filename)
hparams.model = functools.partial(
transformer.TransformerEncoderDecoderModel, hparams.encoder.vocab_size,
hparams.hidden_size, hparams.filter_size, hparams.num_heads,
hparams.num_encoder_layers, hparams.num_decoder_layers,
hparams.label_smoothing, hparams.dropout)
beam_keys = ("beam_start", "beam_alpha", "beam_min", "beam_max",
"temperature", "top_k", "top_p")
beam_kwargs = {k: hparams.get(k) for k in beam_keys if k in hparams.values()}
def decode_fn(features):
return hparams.model().predict(features, hparams.max_decode_len,
hparams.beam_size, **beam_kwargs)
hparams.estimator_prediction_fn = decode_fn
hparams.eval = functools.partial(
text_eval.text_eval,
hparams.encoder,
num_reserved=pegasus_params.NUM_RESERVED_TOKENS)
return hparams
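# Usage sketch: the configs below are resolved by name through
# pegasus.params.registry; assuming a `get_params`-style lookup (as used by
# the PEGASUS training binaries), a config can be built and overridden so:
#   hparams = registry.get_params("MSR")("learning_rate=0.001,batch_size=64")
#   model = hparams.model()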
@registry.register("MSR")
def msr_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:MSR-train",
"dev_pattern": "tfds:MSR-train",
"test_pattern": "tfds:MSR-train",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 1000000,
"learning_rate": 0.01,
"batch_size": 128,
}, param_overrides)
@registry.register("PN_Summary")
def pn_summary_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:PN_Summary-train",
"dev_pattern": "tfds:PN_Summary-validation",
"test_pattern": "tfds:PN_Summary-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 50000,
"learning_rate": 5e-4,
"batch_size": 128,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("tebyan")
def tebyan_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:Tebyan-train",
"dev_pattern": "tfds:Tebyan-validation",
"test_pattern": "tfds:Tebyan-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 50000,
"learning_rate": 5e-4,
"batch_size": 128,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("perkey_summary")
def perkey_summary_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:perkey_summary-train",
"dev_pattern": "tfds:perkey_summary-validation",
"test_pattern": "tfds:perkey_summary-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 50000,
"learning_rate": 5e-4,
"batch_size": 128,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("perkey_title")
def perkey_title_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:perkey_title-train",
"dev_pattern": "tfds:perkey_title-validation",
"test_pattern": "tfds:perkey_title-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 50000,
"learning_rate": 5e-4,
"batch_size": 128,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("parsi_nlu_entailment")
def parsi_nlu_entailment_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:ParsiNLU_Entailment-train",
"dev_pattern": "tfds:ParsiNLU_Entailment-validation",
"test_pattern": "tfds:ParsiNLU_Entailment-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 20000,
"learning_rate": 5e-4,
"batch_size": 48,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("parsi_nlu_mch")
def parsi_nlu_mch_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:ParsiNLU_MCH-train",
"dev_pattern": "tfds:ParsiNLU_MCH-validation",
"test_pattern": "tfds:ParsiNLU_MCH-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 20000,
"learning_rate": 5e-4,
"batch_size": 48,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("parsi_nlu_qqp")
def parsi_nlu_qqp_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:ParsiNLU_QQP-train",
"dev_pattern": "tfds:ParsiNLU_QQP-validation",
"test_pattern": "tfds:ParsiNLU_QQP-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 20000,
"learning_rate": 5e-4,
"batch_size": 48,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("parsi_nlu_sentence_sentiment_movie")
def parsi_nlu_sentence_sentiment_movie_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:ParsiNLU_sentence_sentiment_Movie-train",
"dev_pattern": "tfds:ParsiNLU_sentence_sentiment_Movie-validation",
"test_pattern": "tfds:ParsiNLU_sentence_sentiment_Movie-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 20000,
"learning_rate": 5e-4,
"batch_size": 48,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("parsi_nlu_sentence_sentiment_food")
def parsi_nlu_sentence_sentiment_food_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:ParsiNLU_sentence_sentiment_Food-train",
"dev_pattern": "tfds:ParsiNLU_sentence_sentiment_Food-validation",
"test_pattern": "tfds:ParsiNLU_sentence_sentiment_Food-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 20000,
"learning_rate": 5e-4,
"batch_size": 48,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("wiki_summary_v1")
def wiki_summary_v1_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:wiki_summary_v1-train",
"dev_pattern": "tfds:wiki_summary_v1-validation",
"test_pattern": "tfds:wiki_summary_v1-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 50000,
"learning_rate": 5e-4,
"batch_size": 64,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
@registry.register("voa_headlines")
def voa_headlines_transformer(param_overrides):
return transformer_params(
{
"train_pattern": "tfds:VOA_headlines-train",
"dev_pattern": "tfds:VOA_headlines-validation",
"test_pattern": "tfds:VOA_headlines-test",
"max_input_len": 512,
"max_output_len": 256,
"train_steps": 20000,
"learning_rate": 5e-4,
"batch_size": 64,
"label_smoothing" : 0.1,
"beam_alpha" : 0.8
}, param_overrides)
|
from matplotlib import pyplot as plt
import figlatex
import afterpulse_tile21
import textbox
import colormap
vov = 5.5
################
ap21 = afterpulse_tile21.AfterPulseTile21(vov)
fig = plt.figure(num='figlaserpos-0', clear=True, figsize=[4.5, 3])
ap21.sim.hist('mainpos-offset', 'mainnpe==1', fig=fig, selection=False)
ax, = fig.get_axes()
textbox.textbox(ax, f'{vov} VoV', fontsize='medium', loc='lower center')
ax.set_xlabel('Laser peak position [ns]')
figs = [fig]
fig = plt.figure(num='figlaserpos-1', clear=True, figsize=[4.5, 3])
ap21.sim.hist2d('mainpos-offset', 'mainampl', '(mainnpe==1)&(length==128)', fig=fig, cmap=colormap.uniform(), selection=False)
ax, _ = fig.get_axes()
textbox.textbox(ax, f'{vov} VoV', fontsize='medium', loc='lower center')
ax.set_xlabel('Laser peak position [ns]')
ax.set_ylabel('Peak height')
figs.append(fig)
for fig in figs:
fig.tight_layout()
fig.show()
figlatex.save([figs])
|
import random
number = random.randint(1, 10)  # randint includes both endpoints, matching the prompt
guess = input("Guess a number from 1 to 10: ")
guess = int(guess)
if guess == number:
print("Great job! You got it!")
else:
print("Sorry, better luck next time.")
print("The number was " + str(number))
|
# Your Romeo API key, required for accessing the RoMEO API
# override this in your local config
ROMEO_API_KEY = ""
ROMEO_API_BASE_URL = "http://www.sherpa.ac.uk/romeo/api29.php"
ROMEO_DOWNLOAD_BASE_URL = "http://www.sherpa.ac.uk/downloads/"
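# Example (sketch, assuming the `requests` package is available): querying
# RoMEO for a journal by ISSN; `ak` is the access-key parameter of the v2.9 API.
# import requests
# response = requests.get(ROMEO_API_BASE_URL, params={"issn": "0000-0000", "ak": ROMEO_API_KEY})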
|
from abc import ABC, abstractmethod
from pathlib import Path
from virtool_workflow.data_model import Index
from virtool_workflow.data_model.files import VirtoolFileFormat
class AbstractIndexProvider(ABC):
@abstractmethod
async def get(self) -> Index:
"""Get the current index."""
...
@abstractmethod
async def upload(self, path: Path, format: VirtoolFileFormat) -> Path:
"""Upload a file associated with the index."""
...
@abstractmethod
async def download(self, target_path: Path, *names) -> Path:
"""Download files associated with the index."""
...
@abstractmethod
async def finalize(self):
"""Mark that the index associated with the current job has a json representation of the reference available."""
...
def __await__(self):
return self.get().__await__()
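# A minimal concrete provider sketch (hypothetical, not part of
# virtool_workflow): the Index object and a local working directory are
# supplied by the caller, and "upload"/"download" are plain file copies.
import shutil


class LocalIndexProvider(AbstractIndexProvider):
    def __init__(self, index: Index, work_path: Path):
        self._index = index
        self._work_path = work_path

    async def get(self) -> Index:
        return self._index

    async def upload(self, path: Path, format: VirtoolFileFormat) -> Path:
        # Simulate an upload by copying into the working directory.
        return Path(shutil.copy(path, self._work_path))

    async def download(self, target_path: Path, *names) -> Path:
        # Copy the requested files out of the working directory.
        for name in names:
            shutil.copy(self._work_path / name, target_path)
        return target_path

    async def finalize(self):
        # Nothing to finalize for a purely local index.
        pass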
|
import os
import io
import struct
import bson
class Packet:
def __init__(self, packet_id=0, status_code=0, packet_name="", body_type=0, body=b""):
self.packet_id = packet_id
self.status_code = status_code
self.packet_name = packet_name
self.body_type = body_type
self.body_size = 0
self.body = body
def to_loco_packet(self):
f = io.BytesIO()
f.write(struct.pack("<I", self.packet_id))
f.write(struct.pack("<H", self.status_code))
if (11-len(self.packet_name)) < 0:
raise Exception("invalid packetName")
f.write(self.packet_name.encode("utf-8"))
f.write(b"\x00" * (11 - len(self.packet_name)))
f.write(struct.pack("<b", self.body_type))
f.write(struct.pack("<i", len(self.body)))
f.write(self.body)
return f.getvalue()
def read_loco_packet(self, packet):
self.packet_id = struct.unpack("<I", packet[:4])[0]
self.status_code = struct.unpack("<H", packet[4:6])[0]
self.packet_name = packet[6:17].decode().replace("\0", "")
self.body_type = struct.unpack("<b", packet[17:18])[0]
self.body_size = struct.unpack("<i", packet[18:22])[0]
self.body = packet[22:]
def to_encrypted_loco_packet(self, crypto):
iv = os.urandom(16)
encrypted_packet = crypto.aes_encrypt(self.to_loco_packet(), iv)
f = io.BytesIO()
f.write(struct.pack("<I", len(encrypted_packet)+len(iv)))
f.write(iv)
f.write(encrypted_packet)
return f.getvalue()
def read_encrypted_loco_packet(self, packet, crypto):
packetLen = struct.unpack(">I", packet[0:4])[0]
iv = packet[4:20]
data = packet[20:packetLen-16]
dec = crypto.aes_decrypt(data, iv)
try:
self.read_loco_packet(dec)
except Exception as e:
print(str(e))
def to_json_body(self):
return bson.decode(self.body)
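# Round-trip sketch (illustrative values): serialize a packet without
# encryption and parse it back.
if __name__ == "__main__":
    ping = Packet(packet_id=1, status_code=0, packet_name="PING",
                  body_type=0, body=bson.encode({"msg": "hello"}))
    parsed = Packet()
    parsed.read_loco_packet(ping.to_loco_packet())
    print(parsed.packet_name, parsed.to_json_body())  # PING {'msg': 'hello'}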
|
"""
This test module will only run on a POSIX system. Windows support *may* be added at some point in the future.
"""
# Global imports
import json, operator, os, signal, sys
from argparse import ArgumentParser
from datetime import datetime
from pathlib import Path
from time import sleep
from time import time
# local imports
from surfdebugnode import DebugNode
from surfapi.surfnoderpc import SurfNodeRPC
WAITING = True
def main( ):
    """
    This example contains a simple parser to obtain the locations of both surfd and the data directory,
    creates and runs a new debug node, replays all of the blocks in the data directory, and finally waits
    for the user to interact with it outside of the script. Sending SIGINT successfully and cleanly terminates
    the program.
    """
    global WAITING
if( os.name != "posix" ):
print( "This script only works on POSIX systems" )
return
parser = ArgumentParser( description='Run a Debug Node on an existing chain. This simply replays all blocks ' + \
'and then waits indefinitely to allow user interaction through RPC calls and ' + \
'the CLI wallet' )
parser.add_argument( '--surfd', '-s', type=str, required=True, help='The location of a surfd binary to run the debug node' )
parser.add_argument( '--data-dir', '-d', type=str, required=True, help='The location of an existing data directory. ' + \
'The debug node will pull blocks from this directory when replaying the chain. The directory ' + \
'will not be changed.' )
parser.add_argument( '--plugins', '-p', type=str, required=False, help='A list of plugins to load. witness and ' + \
'debug_node are always loaded.' )
parser.add_argument( '--apis', '-a', type=str, required=False, help='A list of apis to load. database_api, login_api, ' + \
'and debug_node_api are always loaded' )
args = parser.parse_args()
surfd = Path( args.surfd )
if( not surfd.exists() ):
print( 'Error: surfd does not exist.' )
return
surfd = surfd.resolve()
if( not surfd.is_file() ):
print( 'Error: surfd is not a file.' )
return
data_dir = Path( args.data_dir )
    if( not data_dir.exists() ):
        print( 'Error: data_dir does not exist or is not a properly constructed surfd data directory' )
        return
data_dir = data_dir.resolve()
if( not data_dir.is_dir() ):
print( 'Error: data_dir is not a directory' )
plugins = list()
if( args.plugins ):
plugins = args.plugins.split()
apis = list()
if( args.apis ):
apis = args.apis.split()
signal.signal( signal.SIGINT, sigint_handler )
print( 'Creating and starting debug node' )
debug_node = DebugNode( str( surfd ), str( data_dir ), plugins=plugins, apis=apis, args='--replay', surfd_err=sys.stderr )
with debug_node:
debug_node.debug_generate_blocks_until( int( time() ), True )
debug_node.debug_set_hardfork( 14 )
print( 'Done!' )
        print( 'Feel free to interact with this node via RPC calls or the CLI wallet.' )
print( 'To shutdown the node, send SIGINT with Ctrl + C to this script. It will shut down safely.' )
while( WAITING ):
assert( debug_node.debug_generate_blocks( 1 ) == 1 )
sleep( 3 )
def sigint_handler( signum, frame ):
global WAITING
WAITING = False
sleep( 3 )
sys.exit( 0 )
main()
|
"""
Bundesagentur für Arbeit: Jobsuche API
    Search Germany's largest job database, retrieve details on job postings and information about employers. <br><br> Authentication works via OAuth 2 client credentials with JWTs. The following client credentials can be used:<br><br> **ClientID:** c003a37f-024f-462a-b36d-b001be4cd24a <br> **ClientSecret:** 32a39620-32b3-4307-9aa1-511e3d7f48a8 # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from deutschland.jobsuche.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from deutschland.jobsuche.model_utils import OpenApiModel
from deutschland.jobsuche.exceptions import ApiAttributeError
def lazy_import():
from deutschland.jobsuche.model.job_search_response_embedded_jobs import (
JobSearchResponseEmbeddedJobs,
)
globals()["JobSearchResponseEmbeddedJobs"] = JobSearchResponseEmbeddedJobs
class JobSearchResponseEmbedded(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute;
          for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute;
          for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (
bool,
date,
datetime,
dict,
float,
int,
list,
str,
none_type,
) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
"jobs": ([JobSearchResponseEmbeddedJobs],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
"jobs": "jobs", # noqa: E501
}
read_only_vars = {}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""JobSearchResponseEmbedded - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
jobs ([JobSearchResponseEmbeddedJobs]): [optional] # noqa: E501
"""
_check_type = kwargs.pop("_check_type", True)
_spec_property_naming = kwargs.pop("_spec_property_naming", False)
_path_to_item = kwargs.pop("_path_to_item", ())
_configuration = kwargs.pop("_configuration", None)
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set(
[
"_data_store",
"_check_type",
"_spec_property_naming",
"_path_to_item",
"_configuration",
"_visited_composed_classes",
]
)
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""JobSearchResponseEmbedded - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
jobs ([JobSearchResponseEmbeddedJobs]): [optional] # noqa: E501
"""
_check_type = kwargs.pop("_check_type", True)
_spec_property_naming = kwargs.pop("_spec_property_naming", False)
_path_to_item = kwargs.pop("_path_to_item", ())
_configuration = kwargs.pop("_configuration", None)
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(
f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes."
)
|
TESTBED_TEMPLATE = \
r"""
{
"name" : "WCB Test",
"description" : "Run WCB for {{ duration_minutes }} minutes",
"start_time" : "{{ start_time }}",
"duration" : {{ duration_seconds }},
"binaries" : {
"hardware" : "firefly",
"bin_file": "{{ abs_bin_path }}",
"programAddress": "0x00200000",
"targets": {{ targets }}
},
"logs": 0,
"orchestrator" : {
"type" : "python",
"file" : "TO BE MODIFIED: (absolute) path to tcp_orchestrator.py",
"init" : "init_test",
"init_kargs" : {
"scenario" : "{{ version }}",
"seed" : {{ seed }}
},
"run" : "run_test"
},
"extra_files" : "TO BE MODIFIED: (absolute) path to the data.mat file were several control variables (e.g., K, Nt, Qt, ...) are defined"
}
"""
|
"""
Generates a cluster using a Plummer model with a Salpeter initial mass function.
Compares the generated IMF against the expected power-law line.
"""
import numpy
from matplotlib import pyplot
from amuse.units import units
from amuse.units import nbody_system
from amuse.ic.plummer import new_plummer_model
from amuse.ic.salpeter import new_salpeter_mass_distribution
def new_cluster(number_of_stars = 1000):
masses = new_salpeter_mass_distribution(
number_of_stars,
mass_min = 0.1 | units.MSun,
mass_max = 125.0 | units.MSun,
alpha = -2.35
)
nbody_converter = nbody_system.nbody_to_si(masses.sum(), 1 | units.parsec)
particles = new_plummer_model(number_of_stars, nbody_converter)
particles.mass = masses
particles.move_to_center()
return particles
def plot_particles_and_mass_distribution(particles):
figure = pyplot.figure(figsize= (12,6))
subplot = figure.add_subplot(1, 2, 1)
subplot.scatter(
particles.x.value_in(units.parsec),
particles.y.value_in(units.parsec),
s = particles.mass.value_in(units.MSun),# * len(particles),
edgecolors = 'red',
facecolors = 'red'
)
subplot.set_xlim(-4,4)
subplot.set_ylim(-4,4)
subplot.set_xlabel('x (parsec)')
subplot.set_ylabel('y (parsec)')
subplot = figure.add_subplot(1, 2, 2)
masses = particles.mass.value_in(units.MSun)
bins = 10**numpy.linspace(-1, 2, 100)
    number_of_particles, bin_edges = numpy.histogram(masses, bins = bins)
bin_sizes = bin_edges[1:] - bin_edges[:-1]
y = number_of_particles / bin_sizes
x = (bin_edges[1:] + bin_edges[:-1]) / 2.0
y = y[number_of_particles > 10.0]
x = x[number_of_particles > 10.0]
subplot.scatter(x, y)
    # Normalization constant of the Salpeter IMF: the integral of M**-2.35
    # from 0.1 to 125 solar masses.
    c = ((0.1**-1.35) - (125.0**-1.35)) / 1.35
    subplot.plot(x, len(particles)/ c * (x**-2.35))
subplot.set_xscale('log')
subplot.set_yscale('log')
subplot.set_xlabel(u'M [M\u2299]')
subplot.set_ylabel('N')
pyplot.show()
if __name__ == "__main__":
particles = new_cluster(20000)
plot_particles_and_mass_distribution(particles)
|
import torch
import numpy
# codes of this function are borrowed from https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/master/models/pointnet2_utils.py
def index_points(device, points, idx):
"""
Input:
points: input points data, [B, N, C]
idx: sample index data, [B, S]
Return:
new_points:, indexed points data, [B, S, C]
"""
B = points.shape[0]
view_shape = list(idx.shape)
view_shape[1:] = [1] * (len(view_shape) - 1)
repeat_shape = list(idx.shape)
repeat_shape[0] = 1
    # Build indices on the device passed by the caller so the function also works on CPU.
    batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape)
new_points = points[batch_indices, idx, :]
return new_points
def knn_l2(device, net, k, u):
'''
Input:
k: int32, number of k in k-nn search
net: (batch_size, npoint, c) float32 array, points
u: int32, block size
Output:
idx: (batch_size, npoint, k) int32 array, indices to input points
'''
INF = 1e8
batch_size = net.size(0)
npoint = net.size(1)
n_channel = net.size(2)
square = torch.pow(torch.norm(net, dim=2,keepdim=True),2)
def u_block(batch_size, npoint, u):
block = numpy.zeros([batch_size, npoint, npoint])
n = npoint // u
for i in range(n):
block[:, (i*u):(i*u+u), (i*u):(i*u+u)] = numpy.ones([batch_size, u, u]) * (-INF)
return block
    # Build the block mask on the caller's device rather than assuming CUDA.
    minus_distance = 2 * torch.matmul(net, net.transpose(2,1)) - square - square.transpose(2,1) + torch.Tensor(u_block(batch_size, npoint, u)).to(device)
_, indices = torch.topk(minus_distance, k, largest=True, sorted=False)
return indices
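# Usage sketch on CPU (assumes the device-aware lines above; values are random):
if __name__ == "__main__":
    device = torch.device("cpu")
    points = torch.rand(2, 8, 3)            # [B, N, C]
    idx = torch.tensor([[0, 3], [1, 7]])    # [B, S]
    print(index_points(device, points, idx).shape)  # torch.Size([2, 2, 3])
    print(knn_l2(device, points, k=4, u=4).shape)   # torch.Size([2, 8, 4])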
|
import numpy as np
from torch import Tensor, FloatTensor
from kospeech.data.audio.core import load_audio
from kospeech.data.audio.augment import NoiseInjector, SpecAugment
from kospeech.data.audio.feature import MelSpectrogram, MFCC, Spectrogram, FilterBank
class AudioParser(object):
"""
    Provides an interface for audio parsers.
Note:
Do not use this class directly, use one of the sub classes.
Method:
- **parse_audio()**: abstract method. you have to override this method.
- **parse_transcript()**: abstract method. you have to override this method.
"""
def __init__(self, dataset_path, noiseset_size, sample_rate=16000, noise_level=0.7, noise_augment=False):
if noise_augment:
self.noise_injector = NoiseInjector(dataset_path, noiseset_size, sample_rate, noise_level)
def parse_audio(self, *args, **kwargs):
raise NotImplementedError
def parse_transcript(self, *args, **kwargs):
raise NotImplementedError
class SpectrogramParser(AudioParser):
"""
Parses audio file into (spectrogram / mel spectrogram / mfcc) with various options.
Args:
transform_method (str): which feature to use (default: mel)
sample_rate (int): Sample rate of audio signal. (Default: 16000)
n_mels (int): Number of mfc coefficients to retain. (Default: 40)
frame_length (int): frame length for spectrogram (ms) (Default : 20)
frame_shift (int): Length of hop between STFT windows. (ms) (Default: 10)
feature_extract_by (str): which library to use for feature extraction(default: librosa)
        del_silence (bool): flag indicating whether to delete silence or not (default: True)
        input_reverse (bool): flag indicating whether to reverse the input or not (default: True)
        normalize (bool): flag indicating whether to normalize the spectrum or not (default: True)
        time_mask_para (int): hyperparameter for time masking to limit the time masking length
        freq_mask_para (int): hyperparameter for frequency masking to limit the freq masking length
        time_mask_num (int): how many time-masked areas to make
        freq_mask_num (int): how many freq-masked areas to make
        sos_id (int): identification of the start-of-sentence token
        eos_id (int): identification of the end-of-sentence token
target_dict (dict): dictionary of filename and labels
"""
VANILLA = 0 # Not apply augmentation
SPEC_AUGMENT = 1 # SpecAugment
NOISE_INJECTION = 2 # Noise Injection
HYBRID_AUGMENT = 3 # Noise Injection & SpecAugment
def __init__(self, feature_extract_by: str = 'librosa', sample_rate: int = 16000,
n_mels: int = 80, frame_length: int = 20, frame_shift: int = 10,
del_silence: bool = False, input_reverse: bool = True,
normalize: bool = False, transform_method: str = 'mel',
time_mask_para: int = 70, freq_mask_para: int = 12, time_mask_num: int = 2, freq_mask_num: int = 2,
sos_id: int = 1, eos_id: int = 2, target_dict: dict = None, noise_augment: bool = False,
dataset_path: str = None, noiseset_size: int = 0, noise_level: float = 0.7) -> None:
super(SpectrogramParser, self).__init__(dataset_path, noiseset_size, sample_rate, noise_level, noise_augment)
self.del_silence = del_silence
self.input_reverse = input_reverse
self.normalize = normalize
self.sos_id = sos_id
self.eos_id = eos_id
self.target_dict = target_dict
self.spec_augment = SpecAugment(time_mask_para, freq_mask_para, time_mask_num, freq_mask_num)
if transform_method.lower() == 'mel':
self.transforms = MelSpectrogram(sample_rate, n_mels, frame_length, frame_shift, feature_extract_by)
elif transform_method.lower() == 'mfcc':
self.transforms = MFCC(sample_rate, n_mels, frame_length, frame_shift, feature_extract_by)
elif transform_method.lower() == 'spect':
self.transforms = Spectrogram(sample_rate, frame_length, frame_shift, feature_extract_by)
elif transform_method.lower() == 'fbank':
self.transforms = FilterBank(sample_rate, n_mels, frame_length, frame_shift)
else:
raise ValueError("Unsupported feature : {0}".format(transform_method))
def parse_audio(self, audio_path: str, augment_method: int) -> Tensor:
"""
Parses audio.
Args:
audio_path (str): path of audio file
augment_method (int): flag indication which augmentation method to use.
Returns: feature_vector
- **feature_vector** (torch.FloatTensor): feature from audio file.
"""
signal = load_audio(audio_path, self.del_silence)
if signal is None:
return None
if augment_method == SpectrogramParser.NOISE_INJECTION or augment_method == SpectrogramParser.HYBRID_AUGMENT:
signal = self.noise_injector(signal)
feature_vector = self.transforms(signal)
if self.normalize:
feature_vector -= feature_vector.mean()
        if self.input_reverse:  # refer to the "Sequence to Sequence Learning with Neural Networks" paper
feature_vector = feature_vector[:, ::-1]
feature_vector = FloatTensor(np.ascontiguousarray(np.swapaxes(feature_vector, 0, 1)))
else:
feature_vector = FloatTensor(feature_vector).transpose(0, 1)
if augment_method == SpectrogramParser.SPEC_AUGMENT or augment_method == SpectrogramParser.HYBRID_AUGMENT:
feature_vector = self.spec_augment(feature_vector)
return feature_vector
def parse_transcript(self, *args, **kwargs):
raise NotImplementedError
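# Usage sketch (hypothetical path; requires the kospeech audio stack):
# parser = SpectrogramParser(transform_method='mel', n_mels=80, normalize=True)
# feature = parser.parse_audio('/path/to/sample.pcm', SpectrogramParser.VANILLA)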
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import vlag
import snmp
import switchport_basic
import switchport
import mac_learning
import ip
import ipv6
import track
import edge_loop_detection
import fcoeport
import mac
import hide_vrrp_holer
import ip_acl_interface
import service_policy
import port_profile_to_interface_associations
import qos
import vlan
import bpdu_drop
import tunnel
import spanning_tree
class port_channel(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /interface/port-channel. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The list of port-channels in the managed device. Each
entry represents a port-channel.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__name','__cee','__vlag','__po_speed','__ifindex','__description','__shutdown','__minimum_links','__snmp','__mtu','__switchport_basic','__switchport','__mac_learning','__ip','__ipv6','__track','__edge_loop_detection','__load_balance','__fcoeport','__mac','__hide_vrrp_holer','__ip_acl_interface','__service_policy','__port_profile_port','__port_profile_to_interface_associations','__priority_tag_enable','__qos','__vlan','__bpdu_drop','__tunnel','__spanning_tree',)
_yang_name = 'port-channel'
_rest_name = 'Port-channel'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__spanning_tree = YANGDynClass(base=spanning_tree.spanning_tree, is_container='container', presence=False, yang_name="spanning-tree", rest_name="spanning-tree", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Spanning tree commands', u'sort-priority': u'98', u'callpoint': u'po-stp-config', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
self.__minimum_links = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(1), is_leaf=True, yang_name="minimum-links", rest_name="minimum-links", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Least number of operationally UP links to declare \nport-channel UP', u'callpoint': u'interface_po', u'cli-completion-actionpoint': u'getinterfaceall-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint32', is_config=True)
self.__service_policy = YANGDynClass(base=service_policy.service_policy, is_container='container', presence=False, yang_name="service-policy", rest_name="service-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Attach Input/Output Policy Map', u'callpoint': u'interface_po', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='container', is_config=True)
self.__ip_acl_interface = YANGDynClass(base=ip_acl_interface.ip_acl_interface, is_container='container', presence=False, yang_name="ip-acl-interface", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'109'}}, namespace='urn:brocade.com:mgmt:brocade-ip-access-list', defining_module='brocade-ip-access-list', yang_type='container', is_config=True)
self.__ip = YANGDynClass(base=ip.ip, is_container='container', presence=False, yang_name="ip", rest_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Internet Protocol (IP).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IP_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__edge_loop_detection = YANGDynClass(base=edge_loop_detection.edge_loop_detection, is_container='container', presence=False, yang_name="edge-loop-detection", rest_name="edge-loop-detection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable edge-loop-detection on the selected interface', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_ELD', u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__cee = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\.\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,31})'}), is_leaf=True, yang_name="cee", rest_name="cee", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u"Apply default CEE map 'default'"}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='cee:cee-map-name-type', is_config=True)
self.__shutdown = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="shutdown", rest_name="shutdown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Shutdown the selected interface', u'cli-show-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_SHUT_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='empty', is_config=True)
self.__qos = YANGDynClass(base=qos.qos, is_container='container', presence=False, yang_name="qos", rest_name="qos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Quality of Service (QoS)', u'cli-incomplete-no': None, u'callpoint': u'interface_po', u'sort-priority': u'93'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='container', is_config=True)
self.__switchport_basic = YANGDynClass(base=switchport_basic.switchport_basic, is_container='container', presence=False, yang_name="switchport-basic", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MODE_SWITCHPORT_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__hide_vrrp_holer = YANGDynClass(base=hide_vrrp_holer.hide_vrrp_holer, is_container='container', presence=False, yang_name="hide-vrrp-holer", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'display-when': u'/vcsmode/vcs-mode = "false"'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='container', is_config=True)
self.__port_profile_port = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="port-profile-port", rest_name="port-profile-port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the interface to AMPP profile mode', u'sort-priority': u'114'}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='empty', is_config=True)
self.__snmp = YANGDynClass(base=snmp.snmp, is_container='container', presence=False, yang_name="snmp", rest_name="snmp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Simple Network Management Protocol (SNMP).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__bpdu_drop = YANGDynClass(base=bpdu_drop.bpdu_drop, is_container='container', presence=False, yang_name="bpdu-drop", rest_name="bpdu-drop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Drop received BPDUs', u'callpoint': u'po-stp-config', u'sort-priority': u'98', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
self.__port_profile_to_interface_associations = YANGDynClass(base=port_profile_to_interface_associations.port_profile_to_interface_associations, is_container='container', presence=False, yang_name="port-profile-to-interface-associations", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='container', is_config=True)
self.__ipv6 = YANGDynClass(base=ipv6.ipv6, is_container='container', presence=False, yang_name="ipv6", rest_name="ipv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Internet Protocol version 6(IPv6).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IP_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__vlag = YANGDynClass(base=vlag.vlag, is_container='container', presence=False, yang_name="vlag", rest_name="vlag", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual LAG', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__description = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1 .. 63']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='string', is_config=True)
self.__track = YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Track interface', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__vlan = YANGDynClass(base=vlan.vlan, is_container='container', presence=False, yang_name="vlan", rest_name="vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Vlan commands', u'cli-incomplete-no': None, u'callpoint': u'VlanClassifierActivateCallpointWorker_po', u'sort-priority': u'97'}}, namespace='urn:brocade.com:mgmt:brocade-vlan', defining_module='brocade-vlan', yang_type='container', is_config=True)
self.__mac_learning = YANGDynClass(base=mac_learning.mac_learning, is_container='container', presence=False, yang_name="mac-learning", rest_name="mac-learning", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAC learning.', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MAC_LEARNING_DISABLE_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__mac = YANGDynClass(base=mac.mac, is_container='container', presence=False, yang_name="mac", rest_name="mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAC parameters', u'callpoint': u'MacaclAccessgroupIntPoCP', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MAC_ACL_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-mac-access-list', defining_module='brocade-mac-access-list', yang_type='container', is_config=True)
self.__load_balance = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'src-dst-ip-port': {'value': 6}, u'src-mac-vid': {'value': 2}, u'src-dst-ip': {'value': 4}, u'src-dst-ip-mac-vid': {'value': 5}, u'dst-mac-vid': {'value': 1}, u'src-dst-mac-vid': {'value': 3}, u'src-dst-ip-mac-vid-port': {'value': 7}},), default=unicode("src-dst-ip-mac-vid-port"), is_leaf=True, yang_name="load-balance", rest_name="load-balance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Load balancing Commands'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)
self.__po_speed = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'40000': {'value': 3}, u'100': {'value': 5}, u'10000': {'value': 2}, u'100000': {'value': 4}, u'1000': {'value': 1}},), default=unicode("10000"), is_leaf=True, yang_name="po-speed", rest_name="speed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set speed informational parameter', u'alt-name': u'speed'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)
self.__fcoeport = YANGDynClass(base=fcoeport.fcoeport, is_container='container', presence=False, yang_name="fcoeport", rest_name="fcoeport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the LAG to enable FCoE', u'display-when': u'(/vcsmode/vcs-mode = "true") or (/fcoe-fsb/fcoe-fsb-enable)', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_FEATURE_FCOE', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'callpoint': u'fcoeport_attr_lag_cp'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)
self.__name = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..6144']}), is_leaf=True, yang_name="name", rest_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None, u'cli-custom-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='portchannel-type', is_config=True)
self.__switchport = YANGDynClass(base=switchport.switchport, is_container='container', presence=False, yang_name="switchport", rest_name="switchport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the switching characteristics of the Layer2 \ninterface', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MODE_SWITCHPORT_CONFIG', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
self.__tunnel = YANGDynClass(base=tunnel.tunnel, is_container='container', presence=False, yang_name="tunnel", rest_name="tunnel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'po-stp-config', u'sort-priority': u'98', u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-incomplete-no': None, u'info': u'Tunneling parameters'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
self.__mtu = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1522..9216']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(2500), is_leaf=True, yang_name="mtu", rest_name="mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set mtu value to interface'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='mtu-type', is_config=True)
self.__ifindex = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="ifindex", rest_name="ifindex", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint64', is_config=False)
self.__priority_tag_enable = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="priority-tag-enable", rest_name="priority-tag", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure 802.1p priority tagging', u'cli-full-command': None, u'callpoint': u'interface_po', u'alt-name': u'priority-tag'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='empty', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'interface', u'port-channel']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'interface', u'Port-channel']
def _get_name(self):
"""
Getter method for name, mapped from YANG variable /interface/port_channel/name (portchannel-type)
YANG Description: The port-channel identifier.
"""
return self.__name
def _set_name(self, v, load=False):
"""
Setter method for name, mapped from YANG variable /interface/port_channel/name (portchannel-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_name() directly.
YANG Description: The port-channel identifier.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..6144']}), is_leaf=True, yang_name="name", rest_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None, u'cli-custom-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='portchannel-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """name must be of a type compatible with portchannel-type""",
'defined-type': "brocade-interface:portchannel-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..6144']}), is_leaf=True, yang_name="name", rest_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None, u'cli-custom-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='portchannel-type', is_config=True)""",
})
self.__name = t
if hasattr(self, '_set'):
self._set()
def _unset_name(self):
self.__name = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..6144']}), is_leaf=True, yang_name="name", rest_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None, u'cli-custom-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='portchannel-type', is_config=True)
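# Editorial sketch (assumed instance name `pc`): `name` is the list key for
# this container, restricted to the range 1..6144, and keys cannot be changed
# once the object sits inside an instantiated list (see the AttributeError
# guard in _set_name above):
#
#     pc._set_name(10)     # OK on a detached object; 10 is within 1..6144
#     pc._set_name(7000)   # raises ValueError: outside portchannel-type range
#     # with a _parent set, _set_name() raises AttributeError (keys are fixed)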
def _get_cee(self):
"""
Getter method for cee, mapped from YANG variable /interface/port_channel/cee (cee:cee-map-name-type)
YANG Description: The CEE map associated with this port-channel
interface.
"""
return self.__cee
def _set_cee(self, v, load=False):
"""
Setter method for cee, mapped from YANG variable /interface/port_channel/cee (cee:cee-map-name-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_cee is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_cee() directly.
YANG Description: The CEE map associated with this port-channel
interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\.\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,31})'}), is_leaf=True, yang_name="cee", rest_name="cee", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u"Apply default CEE map 'default'"}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='cee:cee-map-name-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """cee must be of a type compatible with cee:cee-map-name-type""",
'defined-type': "cee:cee-map-name-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\.\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,31})'}), is_leaf=True, yang_name="cee", rest_name="cee", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u"Apply default CEE map 'default'"}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='cee:cee-map-name-type', is_config=True)""",
})
self.__cee = t
if hasattr(self, '_set'):
self._set()
def _unset_cee(self):
self.__cee = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'[a-zA-Z]{1}([-a-zA-Z0-9\\.\\\\\\\\@#\\+\\*\\(\\)=\\{~\\}%<>=$_\\[\\]\\|]{0,31})'}), is_leaf=True, yang_name="cee", rest_name="cee", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u"Apply default CEE map 'default'"}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='cee:cee-map-name-type', is_config=True)
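# Editorial sketch (assumed instance name `pc`): cee-map-name-type is a
# pattern-restricted string -- it must start with a letter and may run to at
# most 32 characters in total:
#
#     pc._set_cee(u"default")    # matches the pattern
#     pc._set_cee(u"1badname")   # raises ValueError: leading char not a-zA-Z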
def _get_vlag(self):
"""
Getter method for vlag, mapped from YANG variable /interface/port_channel/vlag (container)
YANG Description: The vLAG properties for this port-channel.
"""
return self.__vlag
def _set_vlag(self, v, load=False):
"""
Setter method for vlag, mapped from YANG variable /interface/port_channel/vlag (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_vlag is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vlag() directly.
YANG Description: The vLAG properties for this port-channel.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=vlag.vlag, is_container='container', presence=False, yang_name="vlag", rest_name="vlag", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual LAG', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vlag must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=vlag.vlag, is_container='container', presence=False, yang_name="vlag", rest_name="vlag", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual LAG', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__vlag = t
if hasattr(self, '_set'):
self._set()
def _unset_vlag(self):
self.__vlag = YANGDynClass(base=vlag.vlag, is_container='container', presence=False, yang_name="vlag", rest_name="vlag", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Virtual LAG', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_po_speed(self):
"""
Getter method for po_speed, mapped from YANG variable /interface/port_channel/po_speed (enumeration)
YANG Description: This specifies the administratively configured
bandwidth for this physical interface.
"""
return self.__po_speed
def _set_po_speed(self, v, load=False):
"""
Setter method for po_speed, mapped from YANG variable /interface/port_channel/po_speed (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_po_speed is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_po_speed() directly.
YANG Description: This specifies the administratively configured
bandwidth for this physical interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'40000': {'value': 3}, u'100': {'value': 5}, u'10000': {'value': 2}, u'100000': {'value': 4}, u'1000': {'value': 1}},), default=unicode("10000"), is_leaf=True, yang_name="po-speed", rest_name="speed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set speed informational parameter', u'alt-name': u'speed'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """po_speed must be of a type compatible with enumeration""",
'defined-type': "brocade-interface:enumeration",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'40000': {'value': 3}, u'100': {'value': 5}, u'10000': {'value': 2}, u'100000': {'value': 4}, u'1000': {'value': 1}},), default=unicode("10000"), is_leaf=True, yang_name="po-speed", rest_name="speed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set speed informational parameter', u'alt-name': u'speed'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)""",
})
self.__po_speed = t
if hasattr(self, '_set'):
self._set()
def _unset_po_speed(self):
self.__po_speed = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'40000': {'value': 3}, u'100': {'value': 5}, u'10000': {'value': 2}, u'100000': {'value': 4}, u'1000': {'value': 1}},), default=unicode("10000"), is_leaf=True, yang_name="po-speed", rest_name="speed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set speed informational parameter', u'alt-name': u'speed'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)
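# Editorial sketch (assumed instance name `pc`): po-speed is a dict_key
# enumeration, so only the listed tokens are accepted, and the default is
# u"10000":
#
#     pc._set_po_speed(u"40000")   # accepted enumeration member
#     pc._set_po_speed(u"2500")    # raises ValueError: not in the enumeration
#     pc._unset_po_speed()         # restores the u"10000" default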
def _get_ifindex(self):
"""
Getter method for ifindex, mapped from YANG variable /interface/port_channel/ifindex (uint64)
"""
return self.__ifindex
def _set_ifindex(self, v, load=False):
"""
Setter method for ifindex, mapped from YANG variable /interface/port_channel/ifindex (uint64)
If this variable is read-only (config: false) in the
source YANG file, then _set_ifindex is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ifindex() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="ifindex", rest_name="ifindex", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ifindex must be of a type compatible with uint64""",
'defined-type': "uint64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="ifindex", rest_name="ifindex", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint64', is_config=False)""",
})
self.__ifindex = t
if hasattr(self, '_set'):
self._set()
def _unset_ifindex(self):
self.__ifindex = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="ifindex", rest_name="ifindex", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint64', is_config=False)
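# Editorial sketch: ifindex is operational state (is_config=False), so it is
# normally populated by a backend rather than by user configuration, as the
# docstring above notes. Assuming an instance `pc`:
#
#     pc._set_ifindex(1073741824)   # backends call the setter directly
#     pc._get_ifindex()             # -> 1073741824 (uint64-restricted)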
def _get_description(self):
"""
Getter method for description, mapped from YANG variable /interface/port_channel/description (string)
"""
return self.__description
def _set_description(self, v, load=False):
"""
Setter method for description, mapped from YANG variable /interface/port_channel/description (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_description is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_description() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1 .. 63']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """description must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1 .. 63']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='string', is_config=True)""",
})
self.__description = t
if hasattr(self, '_set'):
self._set()
def _unset_description(self):
self.__description = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1 .. 63']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='string', is_config=True)
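# Editorial sketch (assumed instance name `pc`): description is a string
# leaf with a YANG length restriction of 1..63 characters:
#
#     pc._set_description(u"uplink to core")   # within the length bound
#     pc._set_description(u"")                 # raises ValueError: length < 1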
def _get_shutdown(self):
"""
Getter method for shutdown, mapped from YANG variable /interface/port_channel/shutdown (empty)
"""
return self.__shutdown
def _set_shutdown(self, v, load=False):
"""
Setter method for shutdown, mapped from YANG variable /interface/port_channel/shutdown (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_shutdown is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_shutdown() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="shutdown", rest_name="shutdown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Shutdown the selected interface', u'cli-show-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_SHUT_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """shutdown must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="shutdown", rest_name="shutdown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Shutdown the selected interface', u'cli-show-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_SHUT_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='empty', is_config=True)""",
})
self.__shutdown = t
if hasattr(self, '_set'):
self._set()
def _unset_shutdown(self):
self.__shutdown = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="shutdown", rest_name="shutdown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Shutdown the selected interface', u'cli-show-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_SHUT_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='empty', is_config=True)
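# Editorial sketch: YANG `empty` leaves are modelled here as YANGBool, so
# presence is expressed by setting True. Assuming an instance `pc`:
#
#     pc._set_shutdown(True)   # leaf present: interface administratively down
#     pc._unset_shutdown()     # leaf absent: interface not shut down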
def _get_minimum_links(self):
"""
Getter method for minimum_links, mapped from YANG variable /interface/port_channel/minimum_links (uint32)
YANG Description: The minimum number of operationally 'UP' member links
required for the port-channel to be declared UP.
"""
return self.__minimum_links
def _set_minimum_links(self, v, load=False):
"""
Setter method for minimum_links, mapped from YANG variable /interface/port_channel/minimum_links (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_minimum_links is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_minimum_links() directly.
YANG Description: The minimum number of operationally 'UP' member links
required for the port-channel to be declared UP.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(1), is_leaf=True, yang_name="minimum-links", rest_name="minimum-links", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Least number of operationally UP links to declare \nport-channel UP', u'callpoint': u'interface_po', u'cli-completion-actionpoint': u'getinterfaceall-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """minimum_links must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(1), is_leaf=True, yang_name="minimum-links", rest_name="minimum-links", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Least number of operationally UP links to declare \nport-channel UP', u'callpoint': u'interface_po', u'cli-completion-actionpoint': u'getinterfaceall-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint32', is_config=True)""",
})
self.__minimum_links = t
if hasattr(self, '_set'):
self._set()
def _unset_minimum_links(self):
self.__minimum_links = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(1), is_leaf=True, yang_name="minimum-links", rest_name="minimum-links", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Least number of operationally UP links to declare \nport-channel UP', u'callpoint': u'interface_po', u'cli-completion-actionpoint': u'getinterfaceall-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint32', is_config=True)
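# Editorial sketch (assumed instance name `pc`): minimum-links is a uint32
# leaf with a default of 1:
#
#     pc._set_minimum_links(2)    # declare the port-channel UP only with >= 2 links
#     pc._unset_minimum_links()   # falls back to the default of 1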
def _get_snmp(self):
"""
Getter method for snmp, mapped from YANG variable /interface/port_channel/snmp (container)
YANG Description: The SNMP configurations for an interface.
"""
return self.__snmp
def _set_snmp(self, v, load=False):
"""
Setter method for snmp, mapped from YANG variable /interface/port_channel/snmp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_snmp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_snmp() directly.
YANG Description: The SNMP configurations for an interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=snmp.snmp, is_container='container', presence=False, yang_name="snmp", rest_name="snmp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Simple Network Management Protocol (SNMP).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """snmp must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=snmp.snmp, is_container='container', presence=False, yang_name="snmp", rest_name="snmp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Simple Network Management Protocol (SNMP).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__snmp = t
if hasattr(self, '_set'):
self._set()
def _unset_snmp(self):
self.__snmp = YANGDynClass(base=snmp.snmp, is_container='container', presence=False, yang_name="snmp", rest_name="snmp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Simple Network Management Protocol (SNMP).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_BASIC_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_mtu(self):
"""
Getter method for mtu, mapped from YANG variable /interface/port_channel/mtu (mtu-type)
YANG Description: The size of the largest packet which can be sent/
received on the interface, specified in bytes.
For interfaces that are used for transmitting network
datagrams, this is the size of the largest network
datagram that can be sent on the interface.
"""
return self.__mtu
def _set_mtu(self, v, load=False):
"""
Setter method for mtu, mapped from YANG variable /interface/port_channel/mtu (mtu-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_mtu is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mtu() directly.
YANG Description: The size of the largest packet which can be sent/
received on the interface, specified in bytes.
For interfaces that are used for transmitting network
datagrams, this is the size of the largest network
datagram that can be sent on the interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1522..9216']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(2500), is_leaf=True, yang_name="mtu", rest_name="mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set mtu value to interface'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='mtu-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mtu must be of a type compatible with mtu-type""",
'defined-type': "brocade-interface:mtu-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1522..9216']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(2500), is_leaf=True, yang_name="mtu", rest_name="mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set mtu value to interface'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='mtu-type', is_config=True)""",
})
self.__mtu = t
if hasattr(self, '_set'):
self._set()
def _unset_mtu(self):
self.__mtu = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1522..9216']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(2500), is_leaf=True, yang_name="mtu", rest_name="mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set mtu value to interface'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='mtu-type', is_config=True)
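# Editorial sketch (assumed instance name `pc`): mtu-type narrows uint32 to
# the range 1522..9216, with a default of 2500:
#
#     pc._set_mtu(9216)   # accepted: upper bound of the range
#     pc._set_mtu(1500)   # raises ValueError: below the 1522 minimum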
def _get_switchport_basic(self):
"""
Getter method for switchport_basic, mapped from YANG variable /interface/port_channel/switchport_basic (container)
"""
return self.__switchport_basic
def _set_switchport_basic(self, v, load=False):
"""
Setter method for switchport_basic, mapped from YANG variable /interface/port_channel/switchport_basic (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_switchport_basic is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_switchport_basic() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=switchport_basic.switchport_basic, is_container='container', presence=False, yang_name="switchport-basic", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MODE_SWITCHPORT_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """switchport_basic must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=switchport_basic.switchport_basic, is_container='container', presence=False, yang_name="switchport-basic", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MODE_SWITCHPORT_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__switchport_basic = t
if hasattr(self, '_set'):
self._set()
def _unset_switchport_basic(self):
self.__switchport_basic = YANGDynClass(base=switchport_basic.switchport_basic, is_container='container', presence=False, yang_name="switchport-basic", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MODE_SWITCHPORT_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_switchport(self):
"""
Getter method for switchport, mapped from YANG variable /interface/port_channel/switchport (container)
YANG Description: The L2 switching characteristics of an interface.
"""
return self.__switchport
def _set_switchport(self, v, load=False):
"""
Setter method for switchport, mapped from YANG variable /interface/port_channel/switchport (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_switchport is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_switchport() directly.
YANG Description: The L2 switching characteristics of an interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=switchport.switchport, is_container='container', presence=False, yang_name="switchport", rest_name="switchport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the switching characteristics of the Layer2 \ninterface', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MODE_SWITCHPORT_CONFIG', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """switchport must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=switchport.switchport, is_container='container', presence=False, yang_name="switchport", rest_name="switchport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the switching characteristics of the Layer2 \ninterface', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MODE_SWITCHPORT_CONFIG', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__switchport = t
if hasattr(self, '_set'):
self._set()
def _unset_switchport(self):
self.__switchport = YANGDynClass(base=switchport.switchport, is_container='container', presence=False, yang_name="switchport", rest_name="switchport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the switching characteristics of the Layer2 \ninterface', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MODE_SWITCHPORT_CONFIG', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_mac_learning(self):
"""
Getter method for mac_learning, mapped from YANG variable /interface/port_channel/mac_learning (container)
"""
return self.__mac_learning
def _set_mac_learning(self, v, load=False):
"""
Setter method for mac_learning, mapped from YANG variable /interface/port_channel/mac_learning (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mac_learning is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mac_learning() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=mac_learning.mac_learning, is_container='container', presence=False, yang_name="mac-learning", rest_name="mac-learning", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAC learning.', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MAC_LEARNING_DISABLE_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mac_learning must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=mac_learning.mac_learning, is_container='container', presence=False, yang_name="mac-learning", rest_name="mac-learning", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAC learning.', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MAC_LEARNING_DISABLE_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__mac_learning = t
if hasattr(self, '_set'):
self._set()
def _unset_mac_learning(self):
self.__mac_learning = YANGDynClass(base=mac_learning.mac_learning, is_container='container', presence=False, yang_name="mac-learning", rest_name="mac-learning", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAC learning.', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MAC_LEARNING_DISABLE_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_ip(self):
"""
Getter method for ip, mapped from YANG variable /interface/port_channel/ip (container)
YANG Description: The IP configurations for an interface.
"""
return self.__ip
def _set_ip(self, v, load=False):
"""
Setter method for ip, mapped from YANG variable /interface/port_channel/ip (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip() directly.
YANG Description: The IP configurations for an interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ip.ip, is_container='container', presence=False, yang_name="ip", rest_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Internet Protocol (IP).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IP_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ip must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ip.ip, is_container='container', presence=False, yang_name="ip", rest_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Internet Protocol (IP).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IP_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__ip = t
if hasattr(self, '_set'):
self._set()
def _unset_ip(self):
self.__ip = YANGDynClass(base=ip.ip, is_container='container', presence=False, yang_name="ip", rest_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Internet Protocol (IP).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IP_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_ipv6(self):
"""
Getter method for ipv6, mapped from YANG variable /interface/port_channel/ipv6 (container)
YANG Description: The IPv6 configurations for an interface.
"""
return self.__ipv6
def _set_ipv6(self, v, load=False):
"""
Setter method for ipv6, mapped from YANG variable /interface/port_channel/ipv6 (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ipv6 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ipv6() directly.
YANG Description: The IPv6 configurations for an interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ipv6.ipv6, is_container='container', presence=False, yang_name="ipv6", rest_name="ipv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Internet Protocol version 6(IPv6).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IP_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ipv6 must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ipv6.ipv6, is_container='container', presence=False, yang_name="ipv6", rest_name="ipv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Internet Protocol version 6(IPv6).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IP_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__ipv6 = t
if hasattr(self, '_set'):
self._set()
def _unset_ipv6(self):
self.__ipv6 = YANGDynClass(base=ipv6.ipv6, is_container='container', presence=False, yang_name="ipv6", rest_name="ipv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The Internet Protocol version 6(IPv6).', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_IP_CONFIG', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_track(self):
"""
Getter method for track, mapped from YANG variable /interface/port_channel/track (container)
YANG Description: Track interface
"""
return self.__track
def _set_track(self, v, load=False):
"""
Setter method for track, mapped from YANG variable /interface/port_channel/track (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_track is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_track() directly.
YANG Description: Track interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Track interface', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """track must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Track interface', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__track = t
if hasattr(self, '_set'):
self._set()
def _unset_track(self):
self.__track = YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Track interface', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_edge_loop_detection(self):
"""
Getter method for edge_loop_detection, mapped from YANG variable /interface/port_channel/edge_loop_detection (container)
YANG Description: Enable edge-loop-detection on the selected interface
"""
return self.__edge_loop_detection
def _set_edge_loop_detection(self, v, load=False):
"""
Setter method for edge_loop_detection, mapped from YANG variable /interface/port_channel/edge_loop_detection (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_edge_loop_detection is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_edge_loop_detection() directly.
YANG Description: Enable edge-loop-detection on the selected interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=edge_loop_detection.edge_loop_detection, is_container='container', presence=False, yang_name="edge-loop-detection", rest_name="edge-loop-detection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable edge-loop-detection on the selected interface', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_ELD', u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """edge_loop_detection must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=edge_loop_detection.edge_loop_detection, is_container='container', presence=False, yang_name="edge-loop-detection", rest_name="edge-loop-detection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable edge-loop-detection on the selected interface', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_ELD', u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__edge_loop_detection = t
if hasattr(self, '_set'):
self._set()
def _unset_edge_loop_detection(self):
self.__edge_loop_detection = YANGDynClass(base=edge_loop_detection.edge_loop_detection, is_container='container', presence=False, yang_name="edge-loop-detection", rest_name="edge-loop-detection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable edge-loop-detection on the selected interface', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_ELD', u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
def _get_load_balance(self):
"""
Getter method for load_balance, mapped from YANG variable /interface/port_channel/load_balance (enumeration)
"""
return self.__load_balance
def _set_load_balance(self, v, load=False):
"""
Setter method for load_balance, mapped from YANG variable /interface/port_channel/load_balance (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_load_balance is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_load_balance() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'src-dst-ip-port': {'value': 6}, u'src-mac-vid': {'value': 2}, u'src-dst-ip': {'value': 4}, u'src-dst-ip-mac-vid': {'value': 5}, u'dst-mac-vid': {'value': 1}, u'src-dst-mac-vid': {'value': 3}, u'src-dst-ip-mac-vid-port': {'value': 7}},), default=unicode("src-dst-ip-mac-vid-port"), is_leaf=True, yang_name="load-balance", rest_name="load-balance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Load balancing Commands'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """load_balance must be of a type compatible with enumeration""",
'defined-type': "brocade-interface:enumeration",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'src-dst-ip-port': {'value': 6}, u'src-mac-vid': {'value': 2}, u'src-dst-ip': {'value': 4}, u'src-dst-ip-mac-vid': {'value': 5}, u'dst-mac-vid': {'value': 1}, u'src-dst-mac-vid': {'value': 3}, u'src-dst-ip-mac-vid-port': {'value': 7}},), default=unicode("src-dst-ip-mac-vid-port"), is_leaf=True, yang_name="load-balance", rest_name="load-balance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Load balancing Commands'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)""",
})
self.__load_balance = t
if hasattr(self, '_set'):
self._set()
def _unset_load_balance(self):
self.__load_balance = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'src-dst-ip-port': {'value': 6}, u'src-mac-vid': {'value': 2}, u'src-dst-ip': {'value': 4}, u'src-dst-ip-mac-vid': {'value': 5}, u'dst-mac-vid': {'value': 1}, u'src-dst-mac-vid': {'value': 3}, u'src-dst-ip-mac-vid-port': {'value': 7}},), default=unicode("src-dst-ip-mac-vid-port"), is_leaf=True, yang_name="load-balance", rest_name="load-balance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Load balancing Commands'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='enumeration', is_config=True)
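# Editorial sketch (assumed instance name `pc`): load-balance is an
# enumeration of frame-hashing schemes, defaulting to
# u"src-dst-ip-mac-vid-port":
#
#     pc._set_load_balance(u"src-dst-ip")    # one of the seven listed tokens
#     pc._set_load_balance(u"round-robin")   # raises ValueError: not a member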
def _get_fcoeport(self):
"""
Getter method for fcoeport, mapped from YANG variable /interface/port_channel/fcoeport (container)
"""
return self.__fcoeport
def _set_fcoeport(self, v, load=False):
"""
Setter method for fcoeport, mapped from YANG variable /interface/port_channel/fcoeport (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_fcoeport is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_fcoeport() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=fcoeport.fcoeport, is_container='container', presence=False, yang_name="fcoeport", rest_name="fcoeport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the LAG to enable FCoE', u'display-when': u'(/vcsmode/vcs-mode = "true") or (/fcoe-fsb/fcoe-fsb-enable)', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_FEATURE_FCOE', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'callpoint': u'fcoeport_attr_lag_cp'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """fcoeport must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=fcoeport.fcoeport, is_container='container', presence=False, yang_name="fcoeport", rest_name="fcoeport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the LAG to enable FCoE', u'display-when': u'(/vcsmode/vcs-mode = "true") or (/fcoe-fsb/fcoe-fsb-enable)', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_FEATURE_FCOE', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'callpoint': u'fcoeport_attr_lag_cp'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)""",
})
self.__fcoeport = t
if hasattr(self, '_set'):
self._set()
def _unset_fcoeport(self):
self.__fcoeport = YANGDynClass(base=fcoeport.fcoeport, is_container='container', presence=False, yang_name="fcoeport", rest_name="fcoeport", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the LAG to enable FCoE', u'display-when': u'(/vcsmode/vcs-mode = "true") or (/fcoe-fsb/fcoe-fsb-enable)', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_FEATURE_FCOE', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'callpoint': u'fcoeport_attr_lag_cp'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)
def _get_mac(self):
"""
Getter method for mac, mapped from YANG variable /interface/port_channel/mac (container)
"""
return self.__mac
def _set_mac(self, v, load=False):
"""
Setter method for mac, mapped from YANG variable /interface/port_channel/mac (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mac is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mac() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=mac.mac, is_container='container', presence=False, yang_name="mac", rest_name="mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAC parameters', u'callpoint': u'MacaclAccessgroupIntPoCP', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MAC_ACL_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-mac-access-list', defining_module='brocade-mac-access-list', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mac must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=mac.mac, is_container='container', presence=False, yang_name="mac", rest_name="mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAC parameters', u'callpoint': u'MacaclAccessgroupIntPoCP', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MAC_ACL_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-mac-access-list', defining_module='brocade-mac-access-list', yang_type='container', is_config=True)""",
})
self.__mac = t
if hasattr(self, '_set'):
self._set()
def _unset_mac(self):
self.__mac = YANGDynClass(base=mac.mac, is_container='container', presence=False, yang_name="mac", rest_name="mac", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAC parameters', u'callpoint': u'MacaclAccessgroupIntPoCP', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_MAC_ACL_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-mac-access-list', defining_module='brocade-mac-access-list', yang_type='container', is_config=True)
def _get_hide_vrrp_holer(self):
"""
Getter method for hide_vrrp_holer, mapped from YANG variable /interface/port_channel/hide_vrrp_holer (container)
"""
return self.__hide_vrrp_holer
def _set_hide_vrrp_holer(self, v, load=False):
"""
Setter method for hide_vrrp_holer, mapped from YANG variable /interface/port_channel/hide_vrrp_holer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_hide_vrrp_holer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_hide_vrrp_holer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=hide_vrrp_holer.hide_vrrp_holer, is_container='container', presence=False, yang_name="hide-vrrp-holer", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'display-when': u'/vcsmode/vcs-mode = "false"'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """hide_vrrp_holer must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=hide_vrrp_holer.hide_vrrp_holer, is_container='container', presence=False, yang_name="hide-vrrp-holer", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'display-when': u'/vcsmode/vcs-mode = "false"'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='container', is_config=True)""",
})
self.__hide_vrrp_holer = t
if hasattr(self, '_set'):
self._set()
def _unset_hide_vrrp_holer(self):
self.__hide_vrrp_holer = YANGDynClass(base=hide_vrrp_holer.hide_vrrp_holer, is_container='container', presence=False, yang_name="hide-vrrp-holer", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'display-when': u'/vcsmode/vcs-mode = "false"'}}, namespace='urn:brocade.com:mgmt:brocade-vrrp', defining_module='brocade-vrrp', yang_type='container', is_config=True)
def _get_ip_acl_interface(self):
"""
Getter method for ip_acl_interface, mapped from YANG variable /interface/port_channel/ip_acl_interface (container)
"""
return self.__ip_acl_interface
def _set_ip_acl_interface(self, v, load=False):
"""
Setter method for ip_acl_interface, mapped from YANG variable /interface/port_channel/ip_acl_interface (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip_acl_interface is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip_acl_interface() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ip_acl_interface.ip_acl_interface, is_container='container', presence=False, yang_name="ip-acl-interface", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'109'}}, namespace='urn:brocade.com:mgmt:brocade-ip-access-list', defining_module='brocade-ip-access-list', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ip_acl_interface must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ip_acl_interface.ip_acl_interface, is_container='container', presence=False, yang_name="ip-acl-interface", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'109'}}, namespace='urn:brocade.com:mgmt:brocade-ip-access-list', defining_module='brocade-ip-access-list', yang_type='container', is_config=True)""",
})
self.__ip_acl_interface = t
if hasattr(self, '_set'):
self._set()
def _unset_ip_acl_interface(self):
self.__ip_acl_interface = YANGDynClass(base=ip_acl_interface.ip_acl_interface, is_container='container', presence=False, yang_name="ip-acl-interface", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'109'}}, namespace='urn:brocade.com:mgmt:brocade-ip-access-list', defining_module='brocade-ip-access-list', yang_type='container', is_config=True)
def _get_service_policy(self):
"""
Getter method for service_policy, mapped from YANG variable /interface/port_channel/service_policy (container)
"""
return self.__service_policy
def _set_service_policy(self, v, load=False):
"""
Setter method for service_policy, mapped from YANG variable /interface/port_channel/service_policy (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_service_policy is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_service_policy() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=service_policy.service_policy, is_container='container', presence=False, yang_name="service-policy", rest_name="service-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Attach Input/Output Policy Map', u'callpoint': u'interface_po', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """service_policy must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=service_policy.service_policy, is_container='container', presence=False, yang_name="service-policy", rest_name="service-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Attach Input/Output Policy Map', u'callpoint': u'interface_po', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='container', is_config=True)""",
})
self.__service_policy = t
if hasattr(self, '_set'):
self._set()
def _unset_service_policy(self):
self.__service_policy = YANGDynClass(base=service_policy.service_policy, is_container='container', presence=False, yang_name="service-policy", rest_name="service-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Attach Input/Output Policy Map', u'callpoint': u'interface_po', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='container', is_config=True)
def _get_port_profile_port(self):
"""
Getter method for port_profile_port, mapped from YANG variable /interface/port_channel/port_profile_port (empty)
YANG Description: This specifies whether a physical/logical port can be
enabled for port-profiling. The presence of this
leaf indicates that the port is enabled for
port-profiling; otherwise, it is not enabled.
Enabling a port for port-profiling results in the
application of network policies (as per PP-MAC mapping)
following the MAC learning process.
"""
return self.__port_profile_port
def _set_port_profile_port(self, v, load=False):
"""
Setter method for port_profile_port, mapped from YANG variable /interface/port_channel/port_profile_port (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_port_profile_port is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_port_profile_port() directly.
YANG Description: This specifies whether a physical/logical port can be
enabled for port-profiling. The presence of this
leaf indicates that the port is enabled for
port-profiling; otherwise, it is not enabled.
Enabling a port for port-profiling results in the
application of network policies (as per PP-MAC mapping)
following the MAC learning process.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="port-profile-port", rest_name="port-profile-port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the interface to AMPP profile mode', u'sort-priority': u'114'}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """port_profile_port must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="port-profile-port", rest_name="port-profile-port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the interface to AMPP profile mode', u'sort-priority': u'114'}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='empty', is_config=True)""",
})
self.__port_profile_port = t
if hasattr(self, '_set'):
self._set()
def _unset_port_profile_port(self):
self.__port_profile_port = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="port-profile-port", rest_name="port-profile-port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set the interface to AMPP profile mode', u'sort-priority': u'114'}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='empty', is_config=True)
def _get_port_profile_to_interface_associations(self):
"""
Getter method for port_profile_to_interface_associations, mapped from YANG variable /interface/port_channel/port_profile_to_interface_associations (container)
"""
return self.__port_profile_to_interface_associations
def _set_port_profile_to_interface_associations(self, v, load=False):
"""
Setter method for port_profile_to_interface_associations, mapped from YANG variable /interface/port_channel/port_profile_to_interface_associations (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_port_profile_to_interface_associations is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_port_profile_to_interface_associations() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=port_profile_to_interface_associations.port_profile_to_interface_associations, is_container='container', presence=False, yang_name="port-profile-to-interface-associations", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """port_profile_to_interface_associations must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=port_profile_to_interface_associations.port_profile_to_interface_associations, is_container='container', presence=False, yang_name="port-profile-to-interface-associations", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='container', is_config=True)""",
})
self.__port_profile_to_interface_associations = t
if hasattr(self, '_set'):
self._set()
def _unset_port_profile_to_interface_associations(self):
self.__port_profile_to_interface_associations = YANGDynClass(base=port_profile_to_interface_associations.port_profile_to_interface_associations, is_container='container', presence=False, yang_name="port-profile-to-interface-associations", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-port-profile', defining_module='brocade-port-profile', yang_type='container', is_config=True)
def _get_priority_tag_enable(self):
"""
Getter method for priority_tag_enable, mapped from YANG variable /interface/port_channel/priority_tag_enable (empty)
"""
return self.__priority_tag_enable
def _set_priority_tag_enable(self, v, load=False):
"""
Setter method for priority_tag_enable, mapped from YANG variable /interface/port_channel/priority_tag_enable (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_priority_tag_enable is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_priority_tag_enable() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="priority-tag-enable", rest_name="priority-tag", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure 802.1p priority tagging', u'cli-full-command': None, u'callpoint': u'interface_po', u'alt-name': u'priority-tag'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """priority_tag_enable must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="priority-tag-enable", rest_name="priority-tag", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure 802.1p priority tagging', u'cli-full-command': None, u'callpoint': u'interface_po', u'alt-name': u'priority-tag'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='empty', is_config=True)""",
})
self.__priority_tag_enable = t
if hasattr(self, '_set'):
self._set()
def _unset_priority_tag_enable(self):
self.__priority_tag_enable = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="priority-tag-enable", rest_name="priority-tag", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure 802.1p priority tagging', u'cli-full-command': None, u'callpoint': u'interface_po', u'alt-name': u'priority-tag'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='empty', is_config=True)
def _get_qos(self):
"""
Getter method for qos, mapped from YANG variable /interface/port_channel/qos (container)
"""
return self.__qos
def _set_qos(self, v, load=False):
"""
Setter method for qos, mapped from YANG variable /interface/port_channel/qos (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_qos is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_qos() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=qos.qos, is_container='container', presence=False, yang_name="qos", rest_name="qos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Quality of Service (QoS)', u'cli-incomplete-no': None, u'callpoint': u'interface_po', u'sort-priority': u'93'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """qos must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=qos.qos, is_container='container', presence=False, yang_name="qos", rest_name="qos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Quality of Service (QoS)', u'cli-incomplete-no': None, u'callpoint': u'interface_po', u'sort-priority': u'93'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='container', is_config=True)""",
})
self.__qos = t
if hasattr(self, '_set'):
self._set()
def _unset_qos(self):
self.__qos = YANGDynClass(base=qos.qos, is_container='container', presence=False, yang_name="qos", rest_name="qos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Quality of Service (QoS)', u'cli-incomplete-no': None, u'callpoint': u'interface_po', u'sort-priority': u'93'}}, namespace='urn:brocade.com:mgmt:brocade-qos', defining_module='brocade-qos', yang_type='container', is_config=True)
def _get_vlan(self):
"""
Getter method for vlan, mapped from YANG variable /interface/port_channel/vlan (container)
"""
return self.__vlan
def _set_vlan(self, v, load=False):
"""
Setter method for vlan, mapped from YANG variable /interface/port_channel/vlan (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_vlan is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vlan() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=vlan.vlan, is_container='container', presence=False, yang_name="vlan", rest_name="vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Vlan commands', u'cli-incomplete-no': None, u'callpoint': u'VlanClassifierActivateCallpointWorker_po', u'sort-priority': u'97'}}, namespace='urn:brocade.com:mgmt:brocade-vlan', defining_module='brocade-vlan', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vlan must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=vlan.vlan, is_container='container', presence=False, yang_name="vlan", rest_name="vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Vlan commands', u'cli-incomplete-no': None, u'callpoint': u'VlanClassifierActivateCallpointWorker_po', u'sort-priority': u'97'}}, namespace='urn:brocade.com:mgmt:brocade-vlan', defining_module='brocade-vlan', yang_type='container', is_config=True)""",
})
self.__vlan = t
if hasattr(self, '_set'):
self._set()
def _unset_vlan(self):
self.__vlan = YANGDynClass(base=vlan.vlan, is_container='container', presence=False, yang_name="vlan", rest_name="vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Vlan commands', u'cli-incomplete-no': None, u'callpoint': u'VlanClassifierActivateCallpointWorker_po', u'sort-priority': u'97'}}, namespace='urn:brocade.com:mgmt:brocade-vlan', defining_module='brocade-vlan', yang_type='container', is_config=True)
def _get_bpdu_drop(self):
"""
Getter method for bpdu_drop, mapped from YANG variable /interface/port_channel/bpdu_drop (container)
"""
return self.__bpdu_drop
def _set_bpdu_drop(self, v, load=False):
"""
Setter method for bpdu_drop, mapped from YANG variable /interface/port_channel/bpdu_drop (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_bpdu_drop is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_bpdu_drop() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=bpdu_drop.bpdu_drop, is_container='container', presence=False, yang_name="bpdu-drop", rest_name="bpdu-drop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Drop received BPDUs', u'callpoint': u'po-stp-config', u'sort-priority': u'98', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """bpdu_drop must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=bpdu_drop.bpdu_drop, is_container='container', presence=False, yang_name="bpdu-drop", rest_name="bpdu-drop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Drop received BPDUs', u'callpoint': u'po-stp-config', u'sort-priority': u'98', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)""",
})
self.__bpdu_drop = t
if hasattr(self, '_set'):
self._set()
def _unset_bpdu_drop(self):
self.__bpdu_drop = YANGDynClass(base=bpdu_drop.bpdu_drop, is_container='container', presence=False, yang_name="bpdu-drop", rest_name="bpdu-drop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Drop received BPDUs', u'callpoint': u'po-stp-config', u'sort-priority': u'98', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
def _get_tunnel(self):
"""
Getter method for tunnel, mapped from YANG variable /interface/port_channel/tunnel (container)
"""
return self.__tunnel
def _set_tunnel(self, v, load=False):
"""
Setter method for tunnel, mapped from YANG variable /interface/port_channel/tunnel (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_tunnel is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_tunnel() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=tunnel.tunnel, is_container='container', presence=False, yang_name="tunnel", rest_name="tunnel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'po-stp-config', u'sort-priority': u'98', u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-incomplete-no': None, u'info': u'Tunneling parameters'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """tunnel must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=tunnel.tunnel, is_container='container', presence=False, yang_name="tunnel", rest_name="tunnel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'po-stp-config', u'sort-priority': u'98', u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-incomplete-no': None, u'info': u'Tunneling parameters'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)""",
})
self.__tunnel = t
if hasattr(self, '_set'):
self._set()
def _unset_tunnel(self):
self.__tunnel = YANGDynClass(base=tunnel.tunnel, is_container='container', presence=False, yang_name="tunnel", rest_name="tunnel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'po-stp-config', u'sort-priority': u'98', u'display-when': u'/vcsmode/vcs-mode = "true"', u'cli-incomplete-no': None, u'info': u'Tunneling parameters'}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
def _get_spanning_tree(self):
"""
Getter method for spanning_tree, mapped from YANG variable /interface/port_channel/spanning_tree (container)
"""
return self.__spanning_tree
def _set_spanning_tree(self, v, load=False):
"""
Setter method for spanning_tree, mapped from YANG variable /interface/port_channel/spanning_tree (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_spanning_tree is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_spanning_tree() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=spanning_tree.spanning_tree, is_container='container', presence=False, yang_name="spanning-tree", rest_name="spanning-tree", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Spanning tree commands', u'sort-priority': u'98', u'callpoint': u'po-stp-config', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """spanning_tree must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=spanning_tree.spanning_tree, is_container='container', presence=False, yang_name="spanning-tree", rest_name="spanning-tree", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Spanning tree commands', u'sort-priority': u'98', u'callpoint': u'po-stp-config', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)""",
})
self.__spanning_tree = t
if hasattr(self, '_set'):
self._set()
def _unset_spanning_tree(self):
self.__spanning_tree = YANGDynClass(base=spanning_tree.spanning_tree, is_container='container', presence=False, yang_name="spanning-tree", rest_name="spanning-tree", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Spanning tree commands', u'sort-priority': u'98', u'callpoint': u'po-stp-config', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
name = __builtin__.property(_get_name, _set_name)
cee = __builtin__.property(_get_cee, _set_cee)
vlag = __builtin__.property(_get_vlag, _set_vlag)
po_speed = __builtin__.property(_get_po_speed, _set_po_speed)
ifindex = __builtin__.property(_get_ifindex)
description = __builtin__.property(_get_description, _set_description)
shutdown = __builtin__.property(_get_shutdown, _set_shutdown)
minimum_links = __builtin__.property(_get_minimum_links, _set_minimum_links)
snmp = __builtin__.property(_get_snmp, _set_snmp)
mtu = __builtin__.property(_get_mtu, _set_mtu)
switchport_basic = __builtin__.property(_get_switchport_basic, _set_switchport_basic)
switchport = __builtin__.property(_get_switchport, _set_switchport)
mac_learning = __builtin__.property(_get_mac_learning, _set_mac_learning)
ip = __builtin__.property(_get_ip, _set_ip)
ipv6 = __builtin__.property(_get_ipv6, _set_ipv6)
track = __builtin__.property(_get_track, _set_track)
edge_loop_detection = __builtin__.property(_get_edge_loop_detection, _set_edge_loop_detection)
load_balance = __builtin__.property(_get_load_balance, _set_load_balance)
fcoeport = __builtin__.property(_get_fcoeport, _set_fcoeport)
mac = __builtin__.property(_get_mac, _set_mac)
hide_vrrp_holer = __builtin__.property(_get_hide_vrrp_holer, _set_hide_vrrp_holer)
ip_acl_interface = __builtin__.property(_get_ip_acl_interface, _set_ip_acl_interface)
service_policy = __builtin__.property(_get_service_policy, _set_service_policy)
port_profile_port = __builtin__.property(_get_port_profile_port, _set_port_profile_port)
port_profile_to_interface_associations = __builtin__.property(_get_port_profile_to_interface_associations, _set_port_profile_to_interface_associations)
priority_tag_enable = __builtin__.property(_get_priority_tag_enable, _set_priority_tag_enable)
qos = __builtin__.property(_get_qos, _set_qos)
vlan = __builtin__.property(_get_vlan, _set_vlan)
bpdu_drop = __builtin__.property(_get_bpdu_drop, _set_bpdu_drop)
tunnel = __builtin__.property(_get_tunnel, _set_tunnel)
spanning_tree = __builtin__.property(_get_spanning_tree, _set_spanning_tree)
_pyangbind_elements = {'name': name, 'cee': cee, 'vlag': vlag, 'po_speed': po_speed, 'ifindex': ifindex, 'description': description, 'shutdown': shutdown, 'minimum_links': minimum_links, 'snmp': snmp, 'mtu': mtu, 'switchport_basic': switchport_basic, 'switchport': switchport, 'mac_learning': mac_learning, 'ip': ip, 'ipv6': ipv6, 'track': track, 'edge_loop_detection': edge_loop_detection, 'load_balance': load_balance, 'fcoeport': fcoeport, 'mac': mac, 'hide_vrrp_holer': hide_vrrp_holer, 'ip_acl_interface': ip_acl_interface, 'service_policy': service_policy, 'port_profile_port': port_profile_port, 'port_profile_to_interface_associations': port_profile_to_interface_associations, 'priority_tag_enable': priority_tag_enable, 'qos': qos, 'vlan': vlan, 'bpdu_drop': bpdu_drop, 'tunnel': tunnel, 'spanning_tree': spanning_tree, }
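# Editor's sketch (not generated code) of how the pyangbind properties above
# are typically driven; the enclosing class name `port_channel` and its
# importability are assumptions for illustration only:
#
#     pc = port_channel()
#     pc.description = 'uplink to core'  # string leaf, set via its property
#     pc.port_profile_port = True        # 'empty' leaf: presence enables AMPP
#     pc._unset_port_profile_port()      # remove the presence leaf again
#
# Read-only leaves such as `ifindex` bind only a getter, so assigning to
# them raises AttributeError.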
|
# -*- encoding: utf-8 -*-
#
# Copyright © 2013 Intel Corp.
#
# Authors: Yunhong Jiang <yunhong.jiang@intel.com>
# Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from stevedore import extension
from ceilometer.openstack.common.fixture import mockpatch
from ceilometer.openstack.common import test
from ceilometer.openstack.common import timeutils
from ceilometer import pipeline
from ceilometer import publisher
from ceilometer.publisher import test as test_publisher
from ceilometer import sample
from ceilometer import transformer
from ceilometer.transformer import accumulator
from ceilometer.transformer import conversions
class TestTransformerAccumulator(test.BaseTestCase):
def test_handle_sample(self):
test_sample = sample.Sample(
name='a',
type=sample.TYPE_GAUGE,
volume=1,
unit='B',
user_id="test_user",
project_id="test_proj",
resource_id="test_resource",
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
)
# Test when size is set to less than 1.
tf = accumulator.TransformerAccumulator(size=0)
self.assertEqual(tf.handle_sample(None, test_sample), test_sample)
self.assertFalse(hasattr(tf, 'samples'))
# Test when size is set to greater or equal than 1.
tf = accumulator.TransformerAccumulator(size=2)
tf.handle_sample(None, test_sample)
self.assertEqual(len(tf.samples), 1)
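        # Behaviour pinned by this test: with size < 1 the accumulator is a
        # pass-through (handle_sample returns the sample unchanged and never
        # creates a buffer), while size >= 1 buffers samples on tf.samples
        # until a flush.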
class TestPipeline(test.BaseTestCase):
def fake_tem_init(self):
"""Fake a transformerManager for pipeline
The faked entry point setting is below:
update: TransformerClass
except: TransformerClassException
drop: TransformerClassDrop
"""
pass
def fake_tem_get_ext(self, name):
class_name_ext = {
'update': self.TransformerClass,
'except': self.TransformerClassException,
'drop': self.TransformerClassDrop,
'cache': accumulator.TransformerAccumulator,
'unit_conversion': conversions.ScalingTransformer,
'rate_of_change': conversions.RateOfChangeTransformer,
}
if name in class_name_ext:
return extension.Extension(name, None,
class_name_ext[name],
None,
)
raise KeyError(name)
def get_publisher(self, url, namespace=''):
fake_drivers = {'test://': test_publisher.TestPublisher,
'new://': test_publisher.TestPublisher,
'except://': self.PublisherClassException}
return fake_drivers[url](url)
class PublisherClassException(publisher.PublisherBase):
def publish_samples(self, ctxt, counters):
raise Exception()
class TransformerClass(transformer.TransformerBase):
samples = []
def __init__(self, append_name='_update'):
self.__class__.samples = []
self.append_name = append_name
def flush(self, ctxt):
return []
def handle_sample(self, ctxt, counter):
self.__class__.samples.append(counter)
newname = getattr(counter, 'name') + self.append_name
return sample.Sample(
name=newname,
type=counter.type,
volume=counter.volume,
unit=counter.unit,
user_id=counter.user_id,
project_id=counter.project_id,
resource_id=counter.resource_id,
timestamp=counter.timestamp,
resource_metadata=counter.resource_metadata,
)
class TransformerClassDrop(transformer.TransformerBase):
samples = []
def __init__(self):
self.__class__.samples = []
def handle_sample(self, ctxt, counter):
self.__class__.samples.append(counter)
class TransformerClassException(object):
def handle_sample(self, ctxt, counter):
raise Exception()
def setUp(self):
super(TestPipeline, self).setUp()
self.test_counter = sample.Sample(
name='a',
type=sample.TYPE_GAUGE,
volume=1,
unit='B',
user_id="test_user",
project_id="test_proj",
resource_id="test_resource",
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
)
self.useFixture(mockpatch.PatchObject(
transformer.TransformerExtensionManager, "__init__",
side_effect=self.fake_tem_init))
self.useFixture(mockpatch.PatchObject(
transformer.TransformerExtensionManager, "get_ext",
side_effect=self.fake_tem_get_ext))
self.useFixture(mockpatch.PatchObject(
publisher, 'get_publisher', side_effect=self.get_publisher))
self.transformer_manager = transformer.TransformerExtensionManager()
self.pipeline_cfg = [{
'name': "test_pipeline",
'interval': 5,
'counters': ['a'],
'transformers': [
{'name': "update",
'parameters': {}}
],
'publishers': ["test://"],
}, ]
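        # Schema of the pipeline definitions exercised below: 'name' and
        # 'interval' identify the pipeline and its polling interval (a
        # non-integer interval is rejected), 'counters' selects meters and
        # supports '*' wildcards and '!' exclusions, 'transformers' is an
        # ordered chain of {'name', 'parameters'} entries resolved via the
        # faked extension manager, and 'publishers' lists URLs resolved by
        # get_publisher above.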
def _exception_create_pipelinemanager(self):
self.assertRaises(pipeline.PipelineException,
pipeline.PipelineManager,
self.pipeline_cfg,
self.transformer_manager)
def test_no_counters(self):
del self.pipeline_cfg[0]['counters']
self._exception_create_pipelinemanager()
def test_no_transformers(self):
del self.pipeline_cfg[0]['transformers']
self._exception_create_pipelinemanager()
def test_no_name(self):
del self.pipeline_cfg[0]['name']
self._exception_create_pipelinemanager()
def test_no_interval(self):
del self.pipeline_cfg[0]['interval']
self._exception_create_pipelinemanager()
def test_no_publishers(self):
del self.pipeline_cfg[0]['publishers']
self._exception_create_pipelinemanager()
def test_invalid_resources(self):
invalid_resource = {'invalid': 1}
self.pipeline_cfg[0]['resources'] = invalid_resource
self._exception_create_pipelinemanager()
def test_check_counters_include_exclude_same(self):
counter_cfg = ['a', '!a']
self.pipeline_cfg[0]['counters'] = counter_cfg
self._exception_create_pipelinemanager()
def test_check_counters_include_exclude(self):
counter_cfg = ['a', '!b']
self.pipeline_cfg[0]['counters'] = counter_cfg
self._exception_create_pipelinemanager()
def test_check_counters_wildcard_included(self):
counter_cfg = ['a', '*']
self.pipeline_cfg[0]['counters'] = counter_cfg
self._exception_create_pipelinemanager()
def test_check_publishers_invalid_publisher(self):
publisher_cfg = ['test_invalid']
self.pipeline_cfg[0]['publishers'] = publisher_cfg
def test_invalid_string_interval(self):
self.pipeline_cfg[0]['interval'] = 'string'
self._exception_create_pipelinemanager()
def test_check_transformer_invalid_transformer(self):
transformer_cfg = [
{'name': "test_invalid",
'parameters': {}}
]
self.pipeline_cfg[0]['transformers'] = transformer_cfg
self._exception_create_pipelinemanager()
def test_get_interval(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
self.assertTrue(pipe.get_interval() == 5)
def test_publisher_transformer_invoked(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertEqual(getattr(publisher.samples[0], "name"), 'a_update')
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
def test_multiple_included_counters(self):
counter_cfg = ['a', 'b']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.test_counter = sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.assertEqual(len(publisher.samples), 2)
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertEqual(getattr(publisher.samples[0], "name"), 'a_update')
self.assertEqual(getattr(publisher.samples[1], "name"), 'b_update')
def test_counter_dont_match(self):
counter_cfg = ['nomatch']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 0)
self.assertEqual(publisher.calls, 0)
def test_wildcard_counter(self):
counter_cfg = ['*']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertEqual(getattr(publisher.samples[0], "name"), 'a_update')
def test_wildcard_excluded_counters(self):
counter_cfg = ['*', '!a']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertFalse(pipeline_manager.pipelines[0].support_meter('a'))
def test_wildcard_excluded_counters_not_excluded(self):
counter_cfg = ['*', '!b']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(len(self.TransformerClass.samples), 1)
self.assertEqual(getattr(publisher.samples[0], "name"),
'a_update')
def test_all_excluded_counters_not_excluded(self):
counter_cfg = ['!b', '!c']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertEqual(getattr(publisher.samples[0], "name"), 'a_update')
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
def test_all_excluded_counters_is_excluded(self):
counter_cfg = ['!a', '!c']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertFalse(pipeline_manager.pipelines[0].support_meter('a'))
self.assertTrue(pipeline_manager.pipelines[0].support_meter('b'))
self.assertFalse(pipeline_manager.pipelines[0].support_meter('c'))
def test_wildcard_and_excluded_wildcard_counters(self):
counter_cfg = ['*', '!disk.*']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertFalse(pipeline_manager.pipelines[0].
support_meter('disk.read.bytes'))
self.assertTrue(pipeline_manager.pipelines[0].support_meter('cpu'))
def test_included_counter_and_wildcard_counters(self):
counter_cfg = ['cpu', 'disk.*']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertTrue(pipeline_manager.pipelines[0].
support_meter('disk.read.bytes'))
self.assertTrue(pipeline_manager.pipelines[0].support_meter('cpu'))
self.assertFalse(pipeline_manager.pipelines[0].
support_meter('instance'))
def test_excluded_counter_and_excluded_wildcard_counters(self):
counter_cfg = ['!cpu', '!disk.*']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertFalse(pipeline_manager.pipelines[0].
support_meter('disk.read.bytes'))
self.assertFalse(pipeline_manager.pipelines[0].support_meter('cpu'))
self.assertTrue(pipeline_manager.pipelines[0].
support_meter('instance'))
def test_multiple_pipeline(self):
self.pipeline_cfg.append({
'name': 'second_pipeline',
'interval': 5,
'counters': ['b'],
'transformers': [{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
}],
'publishers': ['new'],
})
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.test_counter = sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(publisher.calls, 1)
self.assertEqual(getattr(publisher.samples[0], "name"), 'a_update')
new_publisher = pipeline_manager.pipelines[1].publishers[0]
self.assertEqual(len(new_publisher.samples), 1)
self.assertEqual(new_publisher.calls, 1)
self.assertEqual(getattr(new_publisher.samples[0], "name"), 'b_new')
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
self.assertTrue(getattr(self.TransformerClass.samples[1], "name")
== 'b')
def test_multiple_pipeline_exception(self):
self.pipeline_cfg.append({
'name': "second_pipeline",
"interval": 5,
'counters': ['b'],
'transformers': [{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
}],
'publishers': ['except'],
})
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.test_counter = sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(publisher.calls, 1)
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(getattr(publisher.samples[0], "name"), 'a_update')
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
self.assertTrue(getattr(self.TransformerClass.samples[1], "name")
== 'b')
def test_none_transformer_pipeline(self):
self.pipeline_cfg[0]['transformers'] = None
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(publisher.calls, 1)
self.assertEqual(getattr(publisher.samples[0], 'name'), 'a')
def test_empty_transformer_pipeline(self):
self.pipeline_cfg[0]['transformers'] = []
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(publisher.calls, 1)
self.assertEqual(getattr(publisher.samples[0], 'name'), 'a')
def test_multiple_transformer_same_class(self):
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'update',
'parameters': {}
},
{
'name': 'update',
'parameters': {}
},
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(publisher.calls, 1)
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(getattr(publisher.samples[0], 'name'),
'a_update_update')
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.TransformerClass.samples[0], 'name')
== 'a')
self.assertTrue(getattr(self.TransformerClass.samples[1], 'name')
== 'a_update')
def test_multiple_transformer_same_class_different_parameter(self):
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'update',
'parameters':
{
"append_name": "_update",
}
},
{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
},
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.TransformerClass.samples[0], 'name')
== 'a')
self.assertTrue(getattr(self.TransformerClass.samples[1], 'name')
== 'a_update')
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(getattr(publisher.samples[0], 'name'),
'a_update_new')
def test_multiple_transformer_drop_transformer(self):
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'update',
'parameters':
{
"append_name": "_update",
}
},
{
'name': 'drop',
'parameters': {}
},
{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
},
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 0)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertTrue(getattr(self.TransformerClass.samples[0], 'name')
== 'a')
self.assertTrue(len(self.TransformerClassDrop.samples) == 1)
self.assertTrue(getattr(self.TransformerClassDrop.samples[0], 'name')
== 'a_update')
def test_multiple_publisher(self):
self.pipeline_cfg[0]['publishers'] = ['test://', 'new://']
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
new_publisher = pipeline_manager.pipelines[0].publishers[1]
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(len(new_publisher.samples), 1)
self.assertEqual(getattr(new_publisher.samples[0], 'name'),
'a_update')
self.assertEqual(getattr(publisher.samples[0], 'name'),
'a_update')
def test_multiple_publisher_isolation(self):
self.pipeline_cfg[0]['publishers'] = ['except://', 'new://']
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
new_publisher = pipeline_manager.pipelines[0].publishers[1]
self.assertEqual(len(new_publisher.samples), 1)
self.assertEqual(getattr(new_publisher.samples[0], 'name'),
'a_update')
def test_multiple_counter_pipeline(self):
self.pipeline_cfg[0]['counters'] = ['a', 'b']
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter,
sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 2)
self.assertEqual(getattr(publisher.samples[0], 'name'), 'a_update')
self.assertEqual(getattr(publisher.samples[1], 'name'), 'b_update')
def test_flush_pipeline_cache(self):
CACHE_SIZE = 10
self.pipeline_cfg[0]['transformers'].extend([
{
'name': 'cache',
'parameters': {
'size': CACHE_SIZE,
}
},
{
'name': 'update',
'parameters':
{
'append_name': '_new'
}
}, ]
)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
pipe.publish_sample(None, self.test_counter)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 0)
pipe.flush(None)
self.assertEqual(len(publisher.samples), 0)
pipe.publish_sample(None, self.test_counter)
pipe.flush(None)
self.assertEqual(len(publisher.samples), 0)
for i in range(CACHE_SIZE - 2):
pipe.publish_sample(None, self.test_counter)
pipe.flush(None)
self.assertEqual(len(publisher.samples), CACHE_SIZE)
self.assertTrue(getattr(publisher.samples[0], 'name')
== 'a_update_new')
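        # Arithmetic behind this test: 1 + 1 + (CACHE_SIZE - 2) publishes
        # fill the accumulator to exactly CACHE_SIZE samples; only then does
        # flush() release the whole batch, each sample renamed to
        # 'a_update_new' by the 'update' transformer that follows the cache.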
def test_flush_pipeline_cache_multiple_counter(self):
CACHE_SIZE = 3
self.pipeline_cfg[0]['transformers'].extend([
{
'name': 'cache',
'parameters': {
'size': CACHE_SIZE
}
},
{
'name': 'update',
'parameters':
{
'append_name': '_new'
}
}, ]
)
self.pipeline_cfg[0]['counters'] = ['a', 'b']
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter,
sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 0)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.assertEqual(len(publisher.samples), CACHE_SIZE)
self.assertEqual(getattr(publisher.samples[0], 'name'),
'a_update_new')
self.assertEqual(getattr(publisher.samples[1], 'name'),
'b_update_new')
def test_flush_pipeline_cache_before_publisher(self):
self.pipeline_cfg[0]['transformers'].append({
'name': 'cache',
'parameters': {}
})
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
publisher = pipe.publishers[0]
pipe.publish_sample(None, self.test_counter)
self.assertEqual(len(publisher.samples), 0)
pipe.flush(None)
self.assertEqual(len(publisher.samples), 1)
self.assertEqual(getattr(publisher.samples[0], 'name'),
'a_update')
def test_variable_counter(self):
self.pipeline_cfg = [{
'name': "test_pipeline",
'interval': 5,
'counters': ['a:*'],
'transformers': [
{'name': "update",
'parameters': {}}
],
'publishers': ["test://"],
}, ]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.test_counter = sample.Sample(
name='a:b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertEqual(getattr(publisher.samples[0], "name"),
'a:b_update')
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a:b')
def test_global_unit_conversion(self):
scale = 'volume / ((10**6) * 60)'
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'unit_conversion',
'parameters': {
'source': {},
'target': {'name': 'cpu_mins',
'unit': 'min',
'scale': scale},
}
},
]
self.pipeline_cfg[0]['counters'] = ['cpu']
counters = [
sample.Sample(
name='cpu',
type=sample.TYPE_CUMULATIVE,
volume=1200000000,
unit='ns',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
),
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
pipe.publish_samples(None, counters)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 1)
pipe.flush(None)
self.assertEqual(len(publisher.samples), 1)
cpu_mins = publisher.samples[-1]
self.assertEqual(getattr(cpu_mins, 'name'), 'cpu_mins')
self.assertEqual(getattr(cpu_mins, 'unit'), 'min')
self.assertEqual(getattr(cpu_mins, 'type'), sample.TYPE_CUMULATIVE)
self.assertEqual(getattr(cpu_mins, 'volume'), 20)
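        # Worked numbers for the scale above: 1200000000 ns * 1/((10**6)*60)
        # = 1200000000 / 60000000 = 20, the 'volume' asserted on 'cpu_mins'.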
def test_unit_identified_source_unit_conversion(self):
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'unit_conversion',
'parameters': {
'source': {'unit': '°C'},
'target': {'unit': '°F',
'scale': '(volume * 1.8) + 32'},
}
},
]
self.pipeline_cfg[0]['counters'] = ['core_temperature',
'ambient_temperature']
counters = [
sample.Sample(
name='core_temperature',
type=sample.TYPE_GAUGE,
volume=36.0,
unit='°C',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
),
sample.Sample(
name='ambient_temperature',
type=sample.TYPE_GAUGE,
volume=88.8,
unit='°F',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
),
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
pipe.publish_samples(None, counters)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 2)
core_temp = publisher.samples[1]
self.assertEqual(getattr(core_temp, 'name'), 'core_temperature')
self.assertEqual(getattr(core_temp, 'unit'), '°F')
self.assertEqual(getattr(core_temp, 'volume'), 96.8)
amb_temp = publisher.samples[0]
self.assertEqual(getattr(amb_temp, 'name'), 'ambient_temperature')
self.assertEqual(getattr(amb_temp, 'unit'), '°F')
self.assertEqual(getattr(amb_temp, 'volume'), 88.8)
def _do_test_rate_of_change_conversion(self, prev, curr, type, expected,
offset=1, weight=None):
s = "(resource_metadata.user_metadata.autoscaling_weight or 1.0)" \
"* (resource_metadata.non.existent or 1.0)" \
"* (100.0 / (10**9 * (resource_metadata.cpu_number or 1)))"
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'rate_of_change',
'parameters': {
'source': {},
'target': {'name': 'cpu_util',
'unit': '%',
'type': sample.TYPE_GAUGE,
'scale': s},
}
},
]
self.pipeline_cfg[0]['counters'] = ['cpu']
now = timeutils.utcnow()
later = now + datetime.timedelta(minutes=offset)
um = {'autoscaling_weight': weight} if weight else {}
counters = [
sample.Sample(
name='cpu',
type=type,
volume=prev,
unit='ns',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=now.isoformat(),
resource_metadata={'cpu_number': 4,
'user_metadata': um},
),
sample.Sample(
name='cpu',
type=type,
volume=prev,
unit='ns',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource2',
timestamp=now.isoformat(),
resource_metadata={'cpu_number': 2,
'user_metadata': um},
),
sample.Sample(
name='cpu',
type=type,
volume=curr,
unit='ns',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=later.isoformat(),
resource_metadata={'cpu_number': 4,
'user_metadata': um},
),
sample.Sample(
name='cpu',
type=type,
volume=curr,
unit='ns',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource2',
timestamp=later.isoformat(),
resource_metadata={'cpu_number': 2,
'user_metadata': um},
),
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
pipe.publish_samples(None, counters)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 2)
pipe.flush(None)
self.assertEqual(len(publisher.samples), 2)
cpu_util = publisher.samples[0]
self.assertEqual(getattr(cpu_util, 'name'), 'cpu_util')
self.assertEqual(getattr(cpu_util, 'resource_id'), 'test_resource')
self.assertEqual(getattr(cpu_util, 'unit'), '%')
self.assertEqual(getattr(cpu_util, 'type'), sample.TYPE_GAUGE)
self.assertEqual(getattr(cpu_util, 'volume'), expected)
cpu_util = publisher.samples[1]
self.assertEqual(getattr(cpu_util, 'name'), 'cpu_util')
self.assertEqual(getattr(cpu_util, 'resource_id'), 'test_resource2')
self.assertEqual(getattr(cpu_util, 'unit'), '%')
self.assertEqual(getattr(cpu_util, 'type'), sample.TYPE_GAUGE)
self.assertEqual(getattr(cpu_util, 'volume'), expected * 2)
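        # Worked example for the scale string above (cumulative case with
        # prev=120e9 ns, curr=180e9 ns, offset=1 minute): the rate is
        # (180e9 - 120e9) / 60 s = 1e9 ns/s, and with cpu_number=4 the factor
        # 100.0 / (10**9 * 4) gives 1e9 * 2.5e-8 = 25.0 (%). The second
        # resource has cpu_number=2, hence the asserted `expected * 2`.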
def test_rate_of_change_conversion(self):
self._do_test_rate_of_change_conversion(120000000000,
180000000000,
sample.TYPE_CUMULATIVE,
25.0)
def test_rate_of_change_conversion_weight(self):
self._do_test_rate_of_change_conversion(120000000000,
180000000000,
sample.TYPE_CUMULATIVE,
27.5,
weight=1.1)
def test_rate_of_change_conversion_negative_cumulative_delta(self):
self._do_test_rate_of_change_conversion(180000000000,
120000000000,
sample.TYPE_CUMULATIVE,
50.0)
def test_rate_of_change_conversion_negative_gauge_delta(self):
self._do_test_rate_of_change_conversion(180000000000,
120000000000,
sample.TYPE_GAUGE,
-25.0)
def test_rate_of_change_conversion_zero_delay(self):
self._do_test_rate_of_change_conversion(120000000000,
120000000000,
sample.TYPE_CUMULATIVE,
0.0,
offset=0)
def test_rate_of_change_no_predecessor(self):
s = "100.0 / (10**9 * resource_metadata.get('cpu_number', 1))"
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'rate_of_change',
'parameters': {
'source': {},
'target': {'name': 'cpu_util',
'unit': '%',
'type': sample.TYPE_GAUGE,
'scale': s}
}
},
]
self.pipeline_cfg[0]['counters'] = ['cpu']
now = timeutils.utcnow()
counters = [
sample.Sample(
name='cpu',
type=sample.TYPE_CUMULATIVE,
volume=120000000000,
unit='ns',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=now.isoformat(),
resource_metadata={'cpu_number': 4}
),
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
pipe.publish_samples(None, counters)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(len(publisher.samples), 0)
pipe.flush(None)
self.assertEqual(len(publisher.samples), 0)
def test_resources(self):
resources = ['test1://', 'test2://']
self.pipeline_cfg[0]['resources'] = resources
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertEqual(pipeline_manager.pipelines[0].resources,
resources)
def test_no_resources(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertEqual(len(pipeline_manager.pipelines[0].resources),
0)
|
# -*- coding: utf-8 -*-
import unittest
from tennis import TennisGame1
test_cases = [
(0, 0, "Love-All", '0-0', 'player1', 'player2'),
(1, 1, "Fifteen-All", '0-0', 'player1', 'player2'),
(2, 2, "Thirty-All", '0-0', 'player1', 'player2'),
(3, 3, "Deuce", '0-0', 'player1', 'player2'),
(4, 4, "Deuce", '0-0', 'player1', 'player2'),
(1, 0, "Fifteen-Love", '0-0', 'player1', 'player2'),
(0, 1, "Love-Fifteen", '0-0', 'player1', 'player2'),
(2, 0, "Thirty-Love", '0-0', 'player1', 'player2'),
(0, 2, "Love-Thirty", '0-0', 'player1', 'player2'),
(3, 0, "Forty-Love", '0-0', 'player1', 'player2'),
(0, 3, "Love-Forty", '0-0', 'player1', 'player2'),
(4, 0, "Love-All", '1-0', 'player1', 'player2'),
(0, 4, "Love-All", '0-1', 'player1', 'player2'),
(2, 1, "Thirty-Fifteen", '0-0', 'player1', 'player2'),
(1, 2, "Fifteen-Thirty", '0-0', 'player1', 'player2'),
(3, 1, "Forty-Fifteen", '0-0', 'player1', 'player2'),
(1, 3, "Fifteen-Forty", '0-0', 'player1', 'player2'),
(4, 1, "Love-All", '1-0', 'player1', 'player2'),
(1, 4, "Love-All", '0-1', 'player1', 'player2'),
(3, 2, "Forty-Thirty", '0-0', 'player1', 'player2'),
(2, 3, "Thirty-Forty", '0-0', 'player1', 'player2'),
(4, 2, "Love-All", '1-0', 'player1', 'player2'),
(2, 4, "Love-All", '0-1', 'player1', 'player2'),
(4, 3, "Advantage player1", '0-0', 'player1', 'player2'),
(3, 4, "Advantage player2", '0-0', 'player1', 'player2'),
(5, 4, "Advantage player1", '0-0', 'player1', 'player2'),
(4, 5, "Advantage player2", '0-0', 'player1', 'player2'),
(15, 14, "Advantage player1", '0-0', 'player1', 'player2'),
(14, 15, "Advantage player2", '0-0', 'player1', 'player2'),
(6, 4, 'Love-All', '1-0', 'player1', 'player2'),
(4, 6, 'Love-All', '0-1', 'player1', 'player2'),
(16, 14, 'Love-All', '1-0', 'player1', 'player2'),
(14, 16, 'Love-All', '0-1', 'player1', 'player2'),
(6, 4, 'Love-All', '1-0', 'One', 'player2'),
(4, 6, 'Love-All', '0-1', 'player1', 'Two'),
(6, 5, 'Advantage One', '0-0', 'One', 'player2'),
(5, 6, 'Advantage Two', '0-0', 'player1', 'Two'),
(15, 0, 'Forty-Love', '3-0', 'player1', 'Two')
]
def play_game(TennisGame, p1Points, p2Points, p1Name, p2Name):
game = TennisGame(p1Name, p2Name)
for i in range(max(p1Points, p2Points)):
if i < p1Points:
game.won_point(p1Name)
if i < p2Points:
game.won_point(p2Name)
return game
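# play_game interleaves the points: e.g. play_game(TennisGame1, 4, 3,
# 'player1', 'player2') awards points in the order p1,p2,p1,p2,p1,p2,p1,
# after which game.score() should return "Advantage player1" (matching the
# (4, 3, ...) test case above).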
class TestTennis(unittest.TestCase):
def test_current_game_scores(self):
for testcase in test_cases:
(p1Points, p2Points, score, game_score, p1Name, p2Name) = testcase
game = play_game(TennisGame1, p1Points, p2Points, p1Name, p2Name)
self.assertEqual(score, game.score())
def test_games_scores(self):
for testcase in test_cases:
(p1Points, p2Points, score, game_score, p1Name, p2Name) = testcase
game = play_game(TennisGame1, p1Points, p2Points, p1Name, p2Name)
self.assertEqual(game_score, game.games_score())
if __name__ == "__main__":
unittest.main()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittest for cros_test_lib (tests for tests? Who'd a thunk it)."""
from __future__ import print_function
import os
import subprocess
import sys
import time
import unittest
import mock
from chromite.lib import cros_test_lib
from chromite.lib import cros_build_lib
from chromite.lib import osutils
from chromite.lib import partial_mock
from chromite.lib import timeout_util
# Convenience alias
Dir = cros_test_lib.Directory
class CrosTestCaseTest(cros_test_lib.TestCase):
"""Test the cros_test_lib.TestCase."""
def testAssertStartsWith(self):
s = 'abcdef'
prefix = 'abc'
self.assertStartsWith(s, prefix)
prefix = 'def'
self.assertRaises(AssertionError, self.assertStartsWith, s, prefix)
def testAssertEndsWith(self):
s = 'abcdef'
suffix = 'abc'
self.assertRaises(AssertionError, self.assertEndsWith, s, suffix)
suffix = 'def'
self.assertEndsWith(s, suffix)
class TruthTableTest(cros_test_lib.TestCase):
"""Test TruthTable functionality."""
def _TestTableSanity(self, tt, lines):
"""Run the given truth table through basic sanity checks.
Args:
tt: A TruthTable object.
lines: The expected input lines, in order (list of tuples).
"""
# Check that more than one iterable can be used at once.
iter1 = iter(tt)
iter2 = iter(tt)
self.assertEqual(lines[0], next(iter1))
self.assertEqual(lines[0], next(iter2))
self.assertEqual(lines[1], next(iter2))
# Check that iterating a second time works.
for ix, line in enumerate(tt):
self.assertEqual(lines[ix], line)
# Check direct access of input lines.
for i in range(len(tt)):
self.assertEqual(lines[i], tt.GetInputs(i))
# Check assertions on bad input to GetInputs.
self.assertRaises(ValueError, tt.GetInputs, -1)
self.assertRaises(ValueError, tt.GetInputs, len(tt))
def testTwoDimensions(self):
"""Test TruthTable behavior for two boolean inputs."""
tt = cros_test_lib.TruthTable(inputs=[(True, True), (True, False)])
self.assertEqual(len(tt), pow(2, 2))
# Check truth table output.
self.assertFalse(tt.GetOutput((False, False)))
self.assertFalse(tt.GetOutput((False, True)))
self.assertTrue(tt.GetOutput((True, False)))
self.assertTrue(tt.GetOutput((True, True)))
# Check assertions on bad input to GetOutput.
self.assertRaises(TypeError, tt.GetOutput, True)
self.assertRaises(ValueError, tt.GetOutput, (True, True, True))
# Check iteration over input lines.
lines = list(tt)
self.assertEqual((False, False), lines[0])
self.assertEqual((False, True), lines[1])
self.assertEqual((True, False), lines[2])
self.assertEqual((True, True), lines[3])
self._TestTableSanity(tt, lines)
def testFourDimensions(self):
"""Test TruthTable behavior for four boolean inputs."""
false1 = (True, True, True, False)
false2 = (True, False, True, False)
true1 = (False, True, False, True)
true2 = (True, True, False, False)
tt = cros_test_lib.TruthTable(inputs=(false1, false2), input_result=False)
self.assertEqual(len(tt), pow(2, 4))
# Check truth table output.
self.assertFalse(tt.GetOutput(false1))
self.assertFalse(tt.GetOutput(false2))
self.assertTrue(tt.GetOutput(true1))
self.assertTrue(tt.GetOutput(true2))
# Check assertions on bad input to GetOutput.
self.assertRaises(TypeError, tt.GetOutput, True)
self.assertRaises(ValueError, tt.GetOutput, (True, True, True))
# Check iteration over input lines.
lines = list(tt)
self.assertEqual((False, False, False, False), lines[0])
self.assertEqual((False, False, False, True), lines[1])
self.assertEqual((False, True, True, True), lines[7])
self.assertEqual((True, True, True, True), lines[15])
self._TestTableSanity(tt, lines)
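# A sketch of the TruthTable contract exercised above: the `inputs` rows
# enumerate the combinations whose output is `input_result` (True unless
# overridden), and every other combination yields the opposite value, e.g.:
#   tt = cros_test_lib.TruthTable(inputs=[(True, False)])
#   tt.GetOutput((True, False))   # -> True
#   tt.GetOutput((False, False))  # -> False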
class VerifyTarballTest(cros_test_lib.MockTempDirTestCase):
"""Test tarball verification functionality."""
TARBALL = 'fake_tarball'
def setUp(self):
self.rc_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
def _MockTarList(self, files):
"""Mock out tarball content list call.
Args:
files: A list of contents to return.
"""
self.rc_mock.AddCmdResult(
partial_mock.ListRegex('tar -tf'), output='\n'.join(files))
def testNormPath(self):
"""Test path normalization."""
tar_contents = ['./', './foo/', './foo/./a', './foo/./b']
dir_struct = [Dir('.', []), Dir('foo', ['a', 'b'])]
self._MockTarList(tar_contents)
cros_test_lib.VerifyTarball(self.TARBALL, dir_struct)
def testDuplicate(self):
"""Test duplicate detection."""
tar_contents = ['a', 'b', 'a']
dir_struct = ['a', 'b']
self._MockTarList(tar_contents)
self.assertRaises(AssertionError, cros_test_lib.VerifyTarball, self.TARBALL,
dir_struct)
class MockTestCaseTest(cros_test_lib.TestCase):
"""Tests MockTestCase functionality."""
class MyMockTestCase(cros_test_lib.MockTestCase):
"""Helper class for testing MockTestCase."""
def testIt(self):
pass
class Mockable(object):
"""Helper test class intended for having values mocked out."""
TO_BE_MOCKED = 0
TO_BE_MOCKED2 = 10
TO_BE_MOCKED3 = 20
def GetPatcher(self, attr, val):
return mock.patch('%s.MockTestCaseTest.Mockable.%s' % (__name__, attr),
new=val)
def testPatchRemovalError(self):
"""Verify that patch removal during tearDown is robust to Exceptions."""
tc = self.MyMockTestCase('testIt')
patcher = self.GetPatcher('TO_BE_MOCKED', -100)
patcher2 = self.GetPatcher('TO_BE_MOCKED2', -200)
patcher3 = self.GetPatcher('TO_BE_MOCKED3', -300)
patcher3.start()
tc.setUp()
tc.StartPatcher(patcher)
tc.StartPatcher(patcher2)
patcher.stop()
self.assertEqual(self.Mockable.TO_BE_MOCKED2, -200)
self.assertEqual(self.Mockable.TO_BE_MOCKED3, -300)
self.assertRaises(RuntimeError, tc.tearDown)
# Make sure that even though an exception is raised while stopping 'patcher',
# we continue to stop 'patcher2' and run patcher.stopall().
self.assertEqual(self.Mockable.TO_BE_MOCKED2, 10)
self.assertEqual(self.Mockable.TO_BE_MOCKED3, 20)
class TestCaseTest(unittest.TestCase):
"""Tests TestCase functionality."""
def testTimeout(self):
"""Test that test cases are interrupted when they are hanging."""
class TimeoutTestCase(cros_test_lib.TestCase):
"""Test case that raises a TimeoutError because it takes too long."""
TEST_CASE_TIMEOUT = 1
def testSleeping(self):
"""Sleep for 2 minutes. This should raise a TimeoutError."""
time.sleep(2 * 60)
raise AssertionError('Test case should have timed out.')
# Run the test case, verifying it raises a TimeoutError.
test = TimeoutTestCase(methodName='testSleeping')
self.assertRaises(timeout_util.TimeoutError, test.testSleeping)
class OutputTestCaseTest(cros_test_lib.OutputTestCase,
cros_test_lib.TempDirTestCase):
"""Tests OutputTestCase functionality."""
def testStdoutAndStderr(self):
"""Check capturing stdout and stderr."""
with self.OutputCapturer():
print('foo')
print('bar', file=sys.stderr)
self.AssertOutputContainsLine('foo')
self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)
def testStdoutReadDuringCapture(self):
"""Check reading stdout mid-capture."""
with self.OutputCapturer():
print('foo')
self.AssertOutputContainsLine('foo')
print('bar')
self.AssertOutputContainsLine('bar')
self.AssertOutputContainsLine('foo')
self.AssertOutputContainsLine('bar')
def testClearCaptured(self):
"""Check writing data, clearing it, then writing more data."""
with self.OutputCapturer() as cap:
print('foo')
self.AssertOutputContainsLine('foo')
cap.ClearCaptured()
self.AssertOutputContainsLine('foo', invert=True)
print('bar')
self.AssertOutputContainsLine('bar')
@cros_test_lib.pytestmark_skip
def testRunCommandCapture(self):
"""Check capturing run() subprocess output."""
with self.OutputCapturer():
cros_build_lib.run(['sh', '-c', 'echo foo; echo bar >&2'])
self.AssertOutputContainsLine('foo')
self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)
def testCapturingStdoutAndStderrToFile(self):
"""Check that OutputCapturer captures to a named file."""
stdout_path = os.path.join(self.tempdir, 'stdout')
stderr_path = os.path.join(self.tempdir, 'stderr')
with self.OutputCapturer(stdout_path=stdout_path, stderr_path=stderr_path):
print('foo')
print('bar', file=sys.stderr)
# Check that output can be read by OutputCapturer.
self.AssertOutputContainsLine('foo')
self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)
# Verify that output is actually written to the correct files.
self.assertEqual('foo\n', osutils.ReadFile(stdout_path))
self.assertEqual('bar\n', osutils.ReadFile(stderr_path))
class RunCommandTestCase(cros_test_lib.RunCommandTestCase):
"""Verify the test case behavior."""
def testPopenMockEncodingEmptyStrings(self):
"""Verify our automatic encoding in PopenMock works with default output."""
self.rc.AddCmdResult(['/x'])
result = cros_build_lib.run(['/x'], capture_output=True)
self.assertEqual(b'', result.stdout)
self.assertEqual(b'', result.stderr)
result = cros_build_lib.run(['/x'], capture_output=True, encoding='utf-8')
self.assertEqual('', result.stdout)
self.assertEqual('', result.stderr)
def testPopenMockBinaryData(self):
"""Verify our automatic encoding in PopenMock works with bytes."""
self.rc.AddCmdResult(['/x'], error=b'\xff')
result = cros_build_lib.run(['/x'], capture_output=True)
self.assertEqual(b'', result.stdout)
self.assertEqual(b'\xff', result.stderr)
with self.assertRaises(UnicodeDecodeError):
cros_build_lib.run(['/x'], capture_output=True, encoding='utf-8')
def testPopenMockMixedData(self):
"""Verify our automatic encoding in PopenMock works with mixed data."""
self.rc.AddCmdResult(['/x'], error=b'abc\x00', output=u'Yes\u20a0')
result = cros_build_lib.run(['/x'], capture_output=True)
self.assertEqual(b'Yes\xe2\x82\xa0', result.stdout)
self.assertEqual(b'abc\x00', result.stderr)
result = cros_build_lib.run(['/x'], capture_output=True, encoding='utf-8')
self.assertEqual(u'Yes\u20a0', result.stdout)
self.assertEqual(u'abc\x00', result.stderr)
def testPopenMockCombiningStderr(self):
"""Verify combining stderr into stdout works."""
self.rc.AddCmdResult(['/x'], stderr='err', stdout='out')
result = cros_build_lib.run(['/x'], stdout=True, stderr=True)
self.assertEqual(b'err', result.stderr)
self.assertEqual(b'out', result.stdout)
result = cros_build_lib.run(['/x'], stdout=True, stderr=subprocess.STDOUT)
self.assertEqual(None, result.stderr)
self.assertEqual(b'outerr', result.stdout)
|
import os
import time
import socket
from mmdet.apis import init_detector, inference_detector, show_result_pyplot, show_result_ins
import mmcv
# mAP
# config_file = '../configs/solo/decoupled_solo_r50_fpn_8gpu_3x.py'
# # download the checkpoint from model zoo and put it in `checkpoints/`
# checkpoint_file = '../checkpoints/DECOUPLED_SOLO_R50_3x.pth'
# config_file = '../configs/solo/solo_r50_fpn_8gpu_1x.py'
# checkpoint_file = '../checkpoints/SOLO_R50_1x.pth'
#
# config_file = '../configs/solo/solo_r50_fpn_8gpu_3x.py'
# checkpoint_file = '../checkpoints/SOLO_R50_3x.pth'
## AP
#
# config_file = './configs/solo/solo_r101_fpn_8gpu_3x.py'
# checkpoint_file = './checkpoints/SOLO_R101_3x.pth'
# config_file = '../configs/solo/decoupled_solo_r101_fpn_8gpu_3x.py'
# checkpoint_file = '../checkpoints/DECOUPLED_SOLO_R101_3x.pth'
# config_file = './configs/solov2/solov2_r101_fpn_8gpu_3x.py'
# checkpoint_file = './checkpoints/SOLOv2_R101_3x.pth'
# config_file = './configs/solov2/solov2_r101_dcn_fpn_8gpu_3x.py'
# checkpoint_file = './checkpoints/SOLOv2_R101_DCN_3x.pth'
# config_file = './configs/solov2/solov2_x101_dcn_fpn_8gpu_3x.py'
# checkpoint_file = './checkpoints/SOLOv2_X101_DCN_3x.pth'
## speed
# config_file = '../configs/solo/decoupled_solo_light_dcn_r50_fpn_8gpu_3x.py'
# checkpoint_file = '../checkpoints/DECOUPLED_SOLO_LIGHT_DCN_R50_3x.pth'
# config_file = './configs/solov2/solov2_light_512_dcn_r50_fpn_8gpu_3x.py'
# checkpoint_file = './checkpoints/SOLOv2_LIGHT_512_DCN_R50_3x.pth'
config_file = 'configs/solov2/solov2_light_448_r18_fpn_8gpu_3x.py'
checkpoint_file = './work_dir/0602/ps-X10DRG/solov2_light_448_r18_fpn_8gpu_3x/epoch_36.pth'
print(config_file)
# build the model from a config file and a checkpoint file
cuda_n = 0
print('gpu:', cuda_n)
os.environ['CUDA_VISIBLE_DEVICES'] = f'{cuda_n}'
model = init_detector(config_file, checkpoint_file, device='cuda')
#
# # test a single image
#
#
# for video_name in ['1', '2', '3']:
score_thr = 0.25
# for video_name in ['coco_72']:
# for video_name in ['Yotube-vos-3rd']:
# for video_name in ['transformed']:
save_dir = f'result/{socket.gethostname()}0530/'
# for video_name in ['cityscape_100', 'GTA5_99']:
for video_name in ['coco_72']:
# for video_name in ['Yotube-vos-3rd_rotate180']:
data_dir = f'data/{video_name}/'
out_img_dir = f"{save_dir}{config_file.split('/')[-1].split('.')[0]}/{video_name}_score_thr_{score_thr}/"
if not os.path.exists(out_img_dir):
os.makedirs(out_img_dir)
print('save', save_dir, os.path.abspath(save_dir), out_img_dir)
n = len(os.listdir(data_dir))
start = time.time()
# for i in range(1, 141):
for img in os.listdir(data_dir):
# img = f'{i}.jpg'
result = inference_detector(model, f'{data_dir}{img}')
show_result_ins(f'{data_dir}{img}', result, model.CLASSES, score_thr=score_thr, out_file=f"./{out_img_dir}{img}")
# print('save', os.path.abspath(f"../{out_img_dir}{img}"))
end = time.time()
# print()
# for img in os.listdir(directory):
# # print(f'{directory}{img}')
# # result = inference_detector(model, f'{directory}{img}')
# # show_result_ins(f'{directory}{img}', result, model.CLASSES, score_thr=0.25, out_file=f"../data/out/{img}")
# break
print('fps:', n/(end - start), 'n:', n)
|
from argparse import ArgumentParser
from api import State, util, engine
import random, csv, os
from rich import print
def run_tournament(options):
'''
NOTES FOR THE CSV FILENAME:
The first bot is the tracked one, the other is the opponent.
For example, in T_Dataset_ml-rdeep.csv,
ml is the tracked player
and rdeep is the opponent.
'''
botnames = options.players.split(",")
bots = [util.load_player(botname) for botname in botnames]
n = len(bots)
wins = [0] * n
matches = [(p1, p2) for p1 in range(n) for p2 in range(n) if p1 < p2]
totalgames = (n*n - n)/2 * options.repeats
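    # e.g. the default 3 bots give 3 pairs, so --repeats 10 yields 30 scored games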
playedgames, scoredgames, games_count, seeds = 0, 0, 0, []
filename = "T_Dataset_{}-{}.csv".format(botnames[options.indexed - 1], botnames[options.indexed - 2])
with os.scandir() as entries:
for entry in entries:
if filename == entry.name:
with open(filename, "r", newline="") as t_data:
hist_data = list(csv.reader(t_data))
if not hist_data == []:
seeds = [int(item[1]) for item in hist_data]
games_count = [int(item[0]) for item in hist_data][-1]
else:
games_count, seeds = 0, []
# load existing seeds
if options.existing:
seeds_file = "T_Dataset_{}.csv".format(options.existing)
with open(seeds_file, "r", newline="") as seeds_data:
seeds = [int(seed.split(",")[1]) for seed in list(seeds_data.readlines())]
else:
seeds = []
print('Playing {} scored games:'.format(int(totalgames)))
with open(filename, "a", newline="") as t_data:
t_writer = csv.writer(t_data)
if seeds:
for a, b in matches:
for seed in seeds:
p = [a, b] if random.choice([True, False]) else [b, a]
state = State.generate(id=seed, phase=int(options.phase))
winner, score = engine.play(bots[p[0]], bots[p[1]], state, options.max_time*1000, verbose=options.verbose, fast=options.fast)
if winner is not None:
winner = p[winner - 1]
wins[winner] += score
#if winner == options.indexed - 1 and score > 1:
# t_writer.writerow([games_count + scoredgames, seed])
if score > 0:
scoredgames += 1
playedgames += 1
print('Played {} games, {:.0f} scored out of {:.0f} ([yellow]{:.0f}%[/yellow]): [italic green]{}[/italic green] won, seed [red]{}[/red], [black]{}[/black] \r'
.format(playedgames, scoredgames, len(seeds), scoredgames/float(len(seeds)) * 100, botnames[winner], seed, wins))
else:
for a, b in matches:
while not scoredgames == options.repeats:
p = [a, b] if random.choice([True, False]) else [b, a]
# Generate a state with a random seed
seed = random.randint(1000000, 9999999)
while seed in seeds:
seed = random.randint(1000000, 9999999)
seeds.append(seed)
state = State.generate(id=seed, phase=int(options.phase))
winner, score = engine.play(bots[p[0]], bots[p[1]], state, options.max_time*1000, verbose=options.verbose, fast=options.fast)
if winner is not None:
winner = p[winner - 1]
wins[winner] += score
if winner == options.indexed - 1 and score > 1:
t_writer.writerow([int(totalgames), seed])
if score > 0:
scoredgames += 1
playedgames += 1
print('Played {} games, {:.0f} scored out of {:.0f} ([yellow]{:.0f}%[/yellow]): [italic green]{}[/italic green] won, seed [red]{}[/red], [black]{}[/black] \r'
.format(playedgames, scoredgames, totalgames, scoredgames/float(totalgames) * 100, botnames[winner], seed, wins))
print('Results:')
for i, bot in enumerate(bots):
games_2 = int(wins[i] / 100000)
games_3 = int(wins[i] % 100000)
print(' '*4 + 'bot {}: {} points, won {} [purple]2[/purple] point games, {} [purple]3[/purple] point games, {} total'.format(bot, wins[i], games_2, games_3, games_2 + games_3))
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("-s", "--starting-phase",
dest="phase",
help="Which phase the game should start at.",
default=1)
parser.add_argument("-p", "--players",
dest="players",
help="Comma-separated list of player names (enclose with quotes).",
default="rand,bully,rdeep")
parser.add_argument("-r", "--repeats",
dest="repeats",
help="How many matches to play for each pair of bots",
type=int, default=10)
parser.add_argument("-t", "--max-time",
dest="max_time",
help="maximum amount of time allowed per turn in seconds (default: 5)",
type=int, default=5)
parser.add_argument("-f", "--fast",
dest="fast",
action="store_true",
help="This option forgoes the engine's check of whether a bot is able to make a decision in the allotted time, so only use this option if you are sure that your bot is stable.")
parser.add_argument("-v", "--verbose",
dest="verbose",
action="store_true",
help="Print verbose information")
parser.add_argument("-i", "--indexed",
dest="indexed",
help="Chose the wins of which player should be tracked (player 1 / 2)",
type=int, default=1)
parser.add_argument("-e", "--existing",
dest="existing",
help="Choose which dataset to load seeds from",
type=str, default=None)
run_tournament(parser.parse_args())
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This code is based on https://github.com/Zhongdao/Towards-Realtime-MOT/blob/master/tracker/multitracker.py
"""
import numpy as np
from collections import defaultdict
from ..matching import jde_matching as matching
from ..motion import KalmanFilter
from .base_jde_tracker import TrackState, STrack
from .base_jde_tracker import joint_stracks, sub_stracks, remove_duplicate_stracks
__all__ = ['JDETracker']
class JDETracker(object):
__shared__ = ['num_classes']
"""
JDE tracker, support single class and multi classes
Args:
num_classes (int): the number of classes
det_thresh (float): threshold of detection score
track_buffer (int): buffer for tracker
min_box_area (int): min box area to filter out low quality boxes
vertical_ratio (float): w/h, the vertical ratio of the bbox used to filter
bad results. If set to a value < 0, no bboxes are filtered; 1.6 is
a common setting for pedestrian tracking.
tracked_thresh (float): linear assignment threshold of tracked
stracks and detections
r_tracked_thresh (float): linear assignment threshold of
tracked stracks and unmatched detections
unconfirmed_thresh (float): linear assignment threshold of
unconfirmed stracks and unmatched detections
motion (str): motion model, KalmanFilter as default
conf_thres (float): confidence threshold for tracking
metric_type (str): either "euclidean" or "cosine", the distance metric
used for measurement to track association.
"""
def __init__(self,
use_byte=False,
num_classes=1,
det_thresh=0.3,
track_buffer=30,
min_box_area=200,
vertical_ratio=1.6,
tracked_thresh=0.7,
r_tracked_thresh=0.5,
unconfirmed_thresh=0.7,
conf_thres=0,
match_thres=0.8,
low_conf_thres=0.2,
motion='KalmanFilter',
metric_type='euclidean'):
self.use_byte = use_byte
self.num_classes = num_classes
self.det_thresh = det_thresh if not use_byte else conf_thres + 0.1
self.track_buffer = track_buffer
self.min_box_area = min_box_area
self.vertical_ratio = vertical_ratio
self.tracked_thresh = tracked_thresh
self.r_tracked_thresh = r_tracked_thresh
self.unconfirmed_thresh = unconfirmed_thresh
self.conf_thres = conf_thres
self.match_thres = match_thres
self.low_conf_thres = low_conf_thres
if motion == 'KalmanFilter':
self.motion = KalmanFilter()
self.metric_type = metric_type
self.frame_id = 0
self.tracked_tracks_dict = defaultdict(list) # dict(list[STrack])
self.lost_tracks_dict = defaultdict(list) # dict(list[STrack])
self.removed_tracks_dict = defaultdict(list) # dict(list[STrack])
self.max_time_lost = 0
# max_time_lost will be calculated: int(frame_rate / 30.0 * track_buffer)
def update(self, pred_dets, pred_embs=None):
"""
Processes the image frame and finds bounding boxes (detections).
Associates the detections with corresponding tracklets and also handles
lost, removed, refound and active tracklets.
Args:
pred_dets (np.array): Detection results of the image, the shape is
[N, 6], where each row is 'cls_id, score, x0, y0, x1, y1'.
pred_embs (np.array): Embedding results of the image, the shape is
[N, 128] or [N, 512].
Return:
output_tracks_dict (dict(list)): A dict mapping each cls_id to the list
of online tracklets for the received image tensor.
"""
self.frame_id += 1
if self.frame_id == 1:
STrack.init_count(self.num_classes)
activated_tracks_dict = defaultdict(list)
refined_tracks_dict = defaultdict(list)
lost_tracks_dict = defaultdict(list)
removed_tracks_dict = defaultdict(list)
output_tracks_dict = defaultdict(list)
pred_dets_dict = defaultdict(list)
pred_embs_dict = defaultdict(list)
# unify single and multi classes detection and embedding results
for cls_id in range(self.num_classes):
cls_idx = (pred_dets[:, 0:1] == cls_id).squeeze(-1)
pred_dets_dict[cls_id] = pred_dets[cls_idx]
if pred_embs is not None:
pred_embs_dict[cls_id] = pred_embs[cls_idx]
else:
pred_embs_dict[cls_id] = None
for cls_id in range(self.num_classes):
""" Step 1: Get detections by class"""
pred_dets_cls = pred_dets_dict[cls_id]
pred_embs_cls = pred_embs_dict[cls_id]
remain_inds = (pred_dets_cls[:, 1:2] > self.conf_thres).squeeze(-1)
if remain_inds.sum() > 0:
pred_dets_cls = pred_dets_cls[remain_inds]
if self.use_byte:
detections = [
STrack(
STrack.tlbr_to_tlwh(tlbrs[2:6]),
tlbrs[1],
cls_id,
30,
temp_feat=None) for tlbrs in pred_dets_cls
]
else:
pred_embs_cls = pred_embs_cls[remain_inds]
detections = [
STrack(
STrack.tlbr_to_tlwh(tlbrs[2:6]), tlbrs[1], cls_id,
30, temp_feat)
for (tlbrs, temp_feat
) in zip(pred_dets_cls, pred_embs_cls)
]
else:
detections = []
''' Add newly detected tracklets to tracked_stracks'''
unconfirmed_dict = defaultdict(list)
tracked_tracks_dict = defaultdict(list)
for track in self.tracked_tracks_dict[cls_id]:
if not track.is_activated:
# previous tracks which are not active in the current frame are added to the unconfirmed list
unconfirmed_dict[cls_id].append(track)
else:
# Active tracks are added to the local list 'tracked_stracks'
tracked_tracks_dict[cls_id].append(track)
""" Step 2: First association, with embedding"""
# build the tracking pool for the current frame
track_pool_dict = defaultdict(list)
track_pool_dict[cls_id] = joint_stracks(
tracked_tracks_dict[cls_id], self.lost_tracks_dict[cls_id])
# Predict the current location with KalmanFilter
STrack.multi_predict(track_pool_dict[cls_id], self.motion)
if self.use_byte:
dists = matching.iou_distance(track_pool_dict[cls_id],
detections)
matches, u_track, u_detection = matching.linear_assignment(
dists, thresh=self.match_thres) # not self.tracked_thresh
else:
dists = matching.embedding_distance(
track_pool_dict[cls_id],
detections,
metric=self.metric_type)
dists = matching.fuse_motion(
self.motion, dists, track_pool_dict[cls_id], detections)
matches, u_track, u_detection = matching.linear_assignment(
dists, thresh=self.tracked_thresh)
for i_tracked, idet in matches:
# i_tracked is the id of the track and idet is the detection
track = track_pool_dict[cls_id][i_tracked]
det = detections[idet]
if track.state == TrackState.Tracked:
# If the track is active, add the detection to the track
track.update(detections[idet], self.frame_id)
activated_tracks_dict[cls_id].append(track)
else:
# We have obtained a detection from a track which is not active,
# hence put the track in refind_stracks list
track.re_activate(det, self.frame_id, new_id=False)
refined_tracks_dict[cls_id].append(track)
# None of the steps below happen if there are no undetected tracks.
""" Step 3: Second association, with IOU"""
if self.use_byte:
inds_low = pred_dets_dict[cls_id][:, 1:2] > self.low_conf_thres
inds_high = pred_dets_dict[cls_id][:, 1:2] < self.conf_thres
inds_second = np.logical_and(inds_low, inds_high).squeeze(-1)
pred_dets_cls_second = pred_dets_dict[cls_id][inds_second]
# associate the unmatched tracks with the low-score detections
if len(pred_dets_cls_second) > 0:
detections_second = [
STrack(
STrack.tlbr_to_tlwh(tlbrs[:4]),
tlbrs[4],
cls_id,
30,
temp_feat=None)
for tlbrs in pred_dets_cls_second[:, :5]
]
else:
detections_second = []
r_tracked_stracks = [
track_pool_dict[cls_id][i] for i in u_track
if track_pool_dict[cls_id][i].state == TrackState.Tracked
]
dists = matching.iou_distance(r_tracked_stracks,
detections_second)
matches, u_track, u_detection_second = matching.linear_assignment(
dists, thresh=0.4) # not r_tracked_thresh
else:
detections = [detections[i] for i in u_detection]
r_tracked_stracks = []
for i in u_track:
if track_pool_dict[cls_id][i].state == TrackState.Tracked:
r_tracked_stracks.append(track_pool_dict[cls_id][i])
dists = matching.iou_distance(r_tracked_stracks, detections)
matches, u_track, u_detection = matching.linear_assignment(
dists, thresh=self.r_tracked_thresh)
for i_tracked, idet in matches:
track = r_tracked_stracks[i_tracked]
det = detections[
idet] if not self.use_byte else detections_second[idet]
if track.state == TrackState.Tracked:
track.update(det, self.frame_id)
activated_tracks_dict[cls_id].append(track)
else:
track.re_activate(det, self.frame_id, new_id=False)
refined_tracks_dict[cls_id].append(track)
for it in u_track:
track = r_tracked_stracks[it]
if not track.state == TrackState.Lost:
track.mark_lost()
lost_tracks_dict[cls_id].append(track)
'''Deal with unconfirmed tracks, usually tracks with only one beginning frame'''
detections = [detections[i] for i in u_detection]
dists = matching.iou_distance(unconfirmed_dict[cls_id], detections)
matches, u_unconfirmed, u_detection = matching.linear_assignment(
dists, thresh=self.unconfirmed_thresh)
for i_tracked, idet in matches:
unconfirmed_dict[cls_id][i_tracked].update(detections[idet],
self.frame_id)
activated_tracks_dict[cls_id].append(unconfirmed_dict[cls_id][
i_tracked])
for it in u_unconfirmed:
track = unconfirmed_dict[cls_id][it]
track.mark_removed()
removed_tracks_dict[cls_id].append(track)
""" Step 4: Init new stracks"""
for inew in u_detection:
track = detections[inew]
if track.score < self.det_thresh:
continue
track.activate(self.motion, self.frame_id)
activated_tracks_dict[cls_id].append(track)
""" Step 5: Update state"""
for track in self.lost_tracks_dict[cls_id]:
if self.frame_id - track.end_frame > self.max_time_lost:
track.mark_removed()
removed_tracks_dict[cls_id].append(track)
self.tracked_tracks_dict[cls_id] = [
t for t in self.tracked_tracks_dict[cls_id]
if t.state == TrackState.Tracked
]
self.tracked_tracks_dict[cls_id] = joint_stracks(
self.tracked_tracks_dict[cls_id], activated_tracks_dict[cls_id])
self.tracked_tracks_dict[cls_id] = joint_stracks(
self.tracked_tracks_dict[cls_id], refined_tracks_dict[cls_id])
self.lost_tracks_dict[cls_id] = sub_stracks(
self.lost_tracks_dict[cls_id], self.tracked_tracks_dict[cls_id])
self.lost_tracks_dict[cls_id].extend(lost_tracks_dict[cls_id])
self.lost_tracks_dict[cls_id] = sub_stracks(
self.lost_tracks_dict[cls_id], self.removed_tracks_dict[cls_id])
self.removed_tracks_dict[cls_id].extend(removed_tracks_dict[cls_id])
self.tracked_tracks_dict[cls_id], self.lost_tracks_dict[
cls_id] = remove_duplicate_stracks(
self.tracked_tracks_dict[cls_id],
self.lost_tracks_dict[cls_id])
# get scores of lost tracks
output_tracks_dict[cls_id] = [
track for track in self.tracked_tracks_dict[cls_id]
if track.is_activated
]
return output_tracks_dict
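# A minimal usage sketch (illustrative only; the detection values are made up
# and the surrounding ppdet package must be importable):
#   import numpy as np
#   tracker = JDETracker(use_byte=True, num_classes=1, conf_thres=0.4)
#   # each pred_dets row is [cls_id, score, x0, y0, x1, y1]
#   pred_dets = np.array([[0., 0.9, 10., 20., 50., 80.],
#                         [0., 0.3, 60., 20., 90., 70.]])
#   online_targets = tracker.update(pred_dets)  # dict: cls_id -> [STrack, ...]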
|
import unittest
from cmpcodesize.compare import listFunctionSizes
class ListFunctionSizesTestCase(unittest.TestCase):
def test_when_size_array_is_none_raises(self):
with self.assertRaises(TypeError):
listFunctionSizes(None)
def test_when_size_array_is_empty_returns_none(self):
self.assertIsNone(listFunctionSizes([]))
if __name__ == '__main__':
unittest.main()
|
"""
flickr.py
Copyright 2004-2006 James Clarke <james@jamesclarke.info>
Portions Copyright 2007-2008 Joshua Henderson <joshhendo@gmail.com>
THIS SOFTWARE IS SUPPLIED WITHOUT WARRANTY OF ANY KIND, AND MAY BE
COPIED, MODIFIED OR DISTRIBUTED IN ANY WAY, AS LONG AS THIS NOTICE
AND ACKNOWLEDGEMENT OF AUTHORSHIP REMAIN.
2007-12-17
For an up-to-date TODO list, please see:
http://code.google.com/p/flickrpy/wiki/TodoList
For information on how to use the Authentication
module, please see:
http://code.google.com/p/flickrpy/wiki/UserAuthentication
2006-12-19
Applied patches from Berco Beute and Wolfram Kriesing.
"""
__author__ = "James Clarke <james@jamesclarke.info>"
__version__ = "$Rev$"
__date__ = "$Date$"
__copyright__ = "Copyright: 2004-2010 James Clarke; Portions: 2007-2008 Joshua Henderson; Portions: 2011 Andrei Vlad Vacariu"
from urllib import urlencode, urlopen
from xml.dom import minidom
import hashlib
import os
HOST = 'http://flickr.com'
API = '/services/rest'
# set these here or using flickr.API_KEY in your application
API_KEY = None
API_SECRET = None
email = None
password = None
AUTH = False
debug = False
# The next 2 variables are only important if authentication is used
# this can be set here or using flickr.tokenPath in your application
# this is the path to the folder containing tokenFile (default: token.txt)
tokenPath = ''
# this can be set here or using flickr.tokenFile in your application
# this is the name of the file containing the stored token.
tokenFile = 'token.txt'
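# Example setup (a sketch; the values below are placeholders, not real keys):
#   import flickr
#   flickr.API_KEY = '0123456789abcdef'
#   flickr.API_SECRET = 'fedcba98'
#   flickr.tokenPath = '/home/me/.flickr'  # only needed when AUTH is used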
class FlickrError(Exception): pass
class Photo(object):
"""Represents a Flickr Photo."""
__readonly = ['id', 'secret', 'server', 'farm', 'isfavorite', 'license', 'rotation',
'owner', 'dateposted', 'datetaken', 'takengranularity',
'title', 'description', 'ispublic', 'isfriend', 'isfamily',
'cancomment', 'canaddmeta', 'comments', 'tags', 'permcomment',
'permaddmeta', 'url', 'views']
#XXX: Hopefully None won't cause problems
def __init__(self, id, owner=None, dateuploaded=None, \
title=None, description=None, ispublic=None, \
isfriend=None, isfamily=None, cancomment=None, \
canaddmeta=None, comments=None, tags=None, secret=None, \
isfavorite=None, server=None, farm=None, license=None, \
rotation=None, url=None, views=None):
"""Must specify id, rest is optional."""
self.__loaded = False
self.__cancomment = cancomment
self.__canaddmeta = canaddmeta
self.__comments = comments
self.__dateuploaded = dateuploaded
self.__description = description
self.__id = id
self.__license = license
self.__isfamily = isfamily
self.__isfavorite = isfavorite
self.__isfriend = isfriend
self.__ispublic = ispublic
self.__owner = owner
self.__rotation = rotation
self.__secret = secret
self.__server = server
self.__farm = farm
self.__tags = tags
self.__title = title
self.__dateposted = None
self.__datetaken = None
self.__takengranularity = None
self.__permcomment = None
self.__permaddmeta = None
self.__url = None
self.__views = None
def __setattr__(self, key, value):
if key in self.__class__.__readonly:
raise AttributeError("The attribute %s is read-only." % key)
else:
super(Photo, self).__setattr__(key, value)
def _val(self, key):
if key in self.__class__.__readonly:
return super(Photo, self).__getattribute__("_%s__%s" % (self.__class__.__name__, key))
else:
return super(Photo, self).__getattribute__(key)
def __getattr__(self, key):
val = self._val(key)
if val is None and not self.__loaded:
self._load_properties()
val = self._val(key)
return val
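    # Attributes still None are fetched lazily: e.g. Photo('1234').title
    # triggers a single flickr.photos.getInfo call on first access
    # ('1234' is a placeholder id).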
def _load_properties(self):
"""Loads the properties from Flickr."""
self.__loaded = True
method = 'flickr.photos.getInfo'
data = _doget(method, photo_id=self.id)
photo = data.rsp.photo
self.__secret = photo.secret
self.__server = photo.server
self.__farm = photo.farm
self.__isfavorite = photo.isfavorite
self.__license = photo.license
self.__rotation = photo.rotation
owner = photo.owner
self.__owner = User(owner.nsid, username=owner.username,\
realname=owner.realname,\
location=owner.location)
self.__title = photo.title.text
self.__description = photo.description.text
self.__ispublic = photo.visibility.ispublic
self.__isfriend = photo.visibility.isfriend
self.__isfamily = photo.visibility.isfamily
self.__dateposted = photo.dates.posted
self.__datetaken = photo.dates.taken
self.__takengranularity = photo.dates.takengranularity
self.__cancomment = photo.editability.cancomment
self.__canaddmeta = photo.editability.canaddmeta
self.__comments = photo.comments.text
self.__url = photo.urls.url.text
self.__views = photo.views
try:
self.__permcomment = photo.permissions.permcomment
self.__permaddmeta = photo.permissions.permaddmeta
except AttributeError:
self.__permcomment = None
self.__permaddmeta = None
#TODO: Implement Notes?
if hasattr(photo.tags, "tag"):
if isinstance(photo.tags.tag, list):
self.__tags = [Tag(tag.id, User(tag.author), tag.raw, tag.text) \
for tag in photo.tags.tag]
else:
tag = photo.tags.tag
self.__tags = [Tag(tag.id, User(tag.author), tag.raw, tag.text)]
def __str__(self):
return '<Flickr Photo %s>' % self.id
def setTags(self, tags):
"""Set the tags for current photo to list tags.
(flickr.photos.settags)
"""
method = 'flickr.photos.setTags'
tags = uniq(tags)
_dopost(method, auth=True, photo_id=self.id, tags=tags)
self._load_properties()
def addTags(self, tags):
"""Adds the list of tags to current tags. (flickr.photos.addtags)
"""
method = 'flickr.photos.addTags'
if isinstance(tags, list):
tags = uniq(tags)
_dopost(method, auth=True, photo_id=self.id, tags=tags)
#load properties again
self._load_properties()
def removeTag(self, tag):
"""Remove the tag from the photo must be a Tag object.
(flickr.photos.removeTag)
"""
method = 'flickr.photos.removeTag'
tag_id = ''
try:
tag_id = tag.id
except AttributeError:
raise FlickrError, "Tag object expected"
_dopost(method, auth=True, photo_id=self.id, tag_id=tag_id)
self._load_properties()
def setMeta(self, title=None, description=None):
"""Set metadata for photo. (flickr.photos.setMeta)"""
method = 'flickr.photos.setMeta'
if title is None:
title = self.title
if description is None:
description = self.description
_dopost(method, auth=True, title=title, \
description=description, photo_id=self.id)
self.__title = title
self.__description = description
def getAllContexts(self):
"""Retrieves lists of the pools/sets the photo is in"""
method = 'flickr.photos.getAllContexts'
data = _doget(method, photo_id=self.id)
d = {'pools': [], 'sets': []}
if hasattr(data.rsp, "pool"):
if isinstance(data.rsp.pool, list):
for pool in data.rsp.pool:
d["pools"].append({"id": pool.id, "title": pool.title})
else:
d["pools"].append({"id": data.rsp.pool.id, "title": data.rsp.pool.title})
if hasattr(data.rsp, "set"):
if isinstance(data.rsp.set, list):
for theset in data.rsp.set:
d["sets"].append({"id": theset.id, "title": theset.title})
else:
d["sets"].append({"id": data.rsp.set.id, "title": data.rsp.set.title})
return d
def getPoolCount(self):
"""Retrieves a count of the pools the photo is in"""
d = self.getAllContexts()
return len( d["pools"] )
def getSetCount(self):
"""Retrieves a count of the pools the photo is in"""
d = self.getAllContexts()
return len( d["sets"] )
def getURL(self, size='Medium', urlType='url'):
"""Retrieves a url for the photo. (flickr.photos.getSizes)
urlType - 'url' or 'source'
'url' - flickr page of photo
'source' - image file
"""
method = 'flickr.photos.getSizes'
data = _doget(method, photo_id=self.id)
for psize in data.rsp.sizes.size:
if psize.label == size:
return getattr(psize, urlType)
raise FlickrError, "No URL found"
def getSizes(self):
"""
Get all the available sizes of the current image, and all available
data about them.
Returns: A list of dicts with the size data.
"""
method = 'flickr.photos.getSizes'
data = _doget(method, photo_id=self.id)
ret = []
# The props below are the ones we return, mapped to their target types;
# returning width and height as strings would make "75" > "100" evaluate
# True, which is just error prone.
props = {'url':str,'width':int,'height':int,'label':str,'source':str,'text':str}
for psize in data.rsp.sizes.size:
d = {}
for prop,convert_to_type in props.items():
d[prop] = convert_to_type(getattr(psize, prop))
ret.append(d)
return ret
def getExif(self):
"""Retrieves EXIF metadata for the photo.
Example usage:
>>> exif = photo.getExif()
>>> print exif.camera
>>> for t in exif.tags:
... print '%s: %s' % (t.label, t.raw)
"""
return Exif.getExif(self.id)
def getLocation(self):
"""
Return the latitude and longitude of the picture.
Returns None if no location given for this pic.
"""
method = 'flickr.photos.geo.getLocation'
try:
data = _doget(method, photo_id=self.id)
except FlickrError: # Some other error might have occurred too!?
return None
loc = data.rsp.photo.location
return [loc.latitude, loc.longitude]
def getComments(self):
""""
get list of comments for photo
returns a list of comment objects
comment text is in return [item].text
"""
method = "flickr.photos.comments.getList"
try:
data = _doget(method, photo_id=self.id)
except FlickrError: # ???? what errors might there be????
return None
return data.rsp.comments
def _getDirectURL(self, size):
return "http://farm%s.static.flickr.com/%s/%s_%s_%s.jpg" % \
(self.farm, self.server, self.id, self.secret, size)
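    # The size suffixes used by the helpers below ('t' thumbnail, 's' small
    # square, 'm' small, 'z' medium, 'b' large) follow Flickr's static photo
    # URL naming convention.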
def getThumbnail(self):
"""
Return a string representation of the URL to the thumbnail
image (not the thumbnail image page).
"""
return self._getDirectURL('t')
def getSmallSquare(self):
"""
Return a string representation of the URL to the small square
image (not the small square image page).
"""
return self._getDirectURL('s')
def getSmall(self):
"""
Return a string representation of the URL to the small
image (not the small image page).
"""
return self._getDirectURL('m')
def getMedium(self):
"""
Return a string representation of the URL to the medium
image (not the medium image page).
"""
return self._getDirectURL('z')
def getLarge(self):
"""
Return a string representation of the URL to the large
image (not the large image page).
"""
return self._getDirectURL('b')
def getGalleryList(self, per_page='', page=''):
"""
Get the list of galleries which contain the photo.
Galleries are sorted by the date the photo was
added to them.
"""
if per_page and int(per_page) > 500: # API max is 500
    per_page = 500
method = "flickr.galleries.getListForPhoto"
try:
data = _doget(method, photo_id=self.id, per_page=per_page, \
page=page)
except FlickrError:
return None
return data.rsp.galleries.gallery
def getFavoriteCount(self):
"""
Return the number of favorites to the specific photo
"""
method = 'flickr.photos.getFavorites'
data = _doget(method, photo_id=self.id)
return data.rsp.photo.total
def getFavoriteUsers(self):
"""
Return the list of users who marked the specific photo as favorite
return format: { userid, username, date of marking favorite}
"""
method = 'flickr.photos.getFavorites'
data = _doget(method, photo_id=self.id)
u = []
try:
users = data.rsp.photo.person
except AttributeError:
return u # there are no favorites of this photo
try:
iter(users)
except TypeError:
users = [users] # there is only one favorite, so make it a list
for user in users:
u.append({"id": user.nsid, "username": user.username, "favedate": user.favedate})
return u
class Photoset(object):
"""A Flickr photoset.
If constructed with just an ID, the rest of the data about the Photoset is
fetched from the API.
"""
def __init__(self, id, title=None, primary=None, photos=0, description='', \
secret='', server=''):
self.__id = id
if not title and not primary:
method = 'flickr.photosets.getInfo'
data = _doget(method, photoset_id=self.id)
title = data.rsp.photoset.title.text
primary = Photo(data.rsp.photoset.primary)
description = data.rsp.photoset.description.text
photos = data.rsp.photoset.photos
self.__title = title
self.__primary = primary
self.__description = description
self.__count = photos
self.__secret = secret
self.__server = server
id = property(lambda self: self.__id)
title = property(lambda self: self.__title)
description = property(lambda self: self.__description)
primary = property(lambda self: self.__primary)
def __len__(self):
return self.__count
def __str__(self):
return '<Flickr Photoset %s>' % self.id
def getPhotos(self):
"""Returns list of Photos."""
method = 'flickr.photosets.getPhotos'
data = _doget(method, photoset_id=self.id)
photos = data.rsp.photoset.photo
p = []
# If there's only one photo in the set, the API returns a single photo,
# not a list
try:
iter(photos)
except TypeError:
photos = [photos]
for photo in photos:
p.append(Photo(photo.id, title=photo.title, secret=photo.secret, \
server=photo.server))
return p
def editPhotos(self, photos, primary=None):
"""Edit the photos in this set.
photos - photos for set
primary - primary photo (if None, the current primary is used)
"""
method = 'flickr.photosets.editPhotos'
if primary is None:
primary = self.primary
ids = [photo.id for photo in photos]
if primary.id not in ids:
ids.append(primary.id)
_dopost(method, auth=True, photoset_id=self.id,\
primary_photo_id=primary.id,
photo_ids=ids)
self.__count = len(ids)
return True
def addPhoto(self, photo):
"""Add a photo to this set.
photo - the photo
"""
method = 'flickr.photosets.addPhoto'
_dopost(method, auth=True, photoset_id=self.id, photo_id=photo.id)
self.__count += 1
return True
def removePhoto(self, photo):
"""Remove the photo from this set.
photo - the photo
"""
method = 'flickr.photosets.removePhoto'
_dopost(method, auth=True, photoset_id=self.id, photo_id=photo.id)
self.__count = self.__count - 1
return True
def editMeta(self, title=None, description=None):
"""Set metadata for photo. (flickr.photos.setMeta)"""
method = 'flickr.photosets.editMeta'
if title is None:
title = self.title
if description is None:
description = self.description
_dopost(method, auth=True, title=title, \
description=description, photoset_id=self.id)
self.__title = title
self.__description = description
return True
#XXX: Delete isn't handled well as the python object will still exist
def delete(self):
"""Deletes the photoset.
"""
method = 'flickr.photosets.delete'
_dopost(method, auth=True, photoset_id=self.id)
return True
def create(cls, photo, title, description=''):
"""Create a new photoset.
photo - primary photo
"""
if not isinstance(photo, Photo):
raise TypeError, "Photo expected"
method = 'flickr.photosets.create'
data = _dopost(method, auth=True, title=title,\
description=description,\
primary_photo_id=photo.id)
set = Photoset(data.rsp.photoset.id, title, Photo(photo.id),
photos=1, description=description)
return set
create = classmethod(create)
class User(object):
"""A Flickr user."""
def __init__(self, id, username=None, isadmin=None, ispro=None, \
realname=None, location=None, firstdate=None, count=None):
"""id required, rest optional."""
self.__loaded = False #so we don't keep loading data
self.__id = id
self.__username = username
self.__isadmin = isadmin
self.__ispro = ispro
self.__realname = realname
self.__location = location
self.__photos_firstdate = firstdate
self.__photos_count = count
#property fu
id = property(lambda self: self._general_getattr('id'))
username = property(lambda self: self._general_getattr('username'))
isadmin = property(lambda self: self._general_getattr('isadmin'))
ispro = property(lambda self: self._general_getattr('ispro'))
realname = property(lambda self: self._general_getattr('realname'))
location = property(lambda self: self._general_getattr('location'))
photos_firstdate = property(lambda self: \
self._general_getattr('photos_firstdate'))
photos_firstdatetaken = property(lambda self: \
self._general_getattr\
('photos_firstdatetaken'))
photos_count = property(lambda self: \
self._general_getattr('photos_count'))
icon_server= property(lambda self: self._general_getattr('icon_server'))
icon_url= property(lambda self: self._general_getattr('icon_url'))
def _general_getattr(self, var):
"""Generic get attribute function."""
if getattr(self, "_%s__%s" % (self.__class__.__name__, var)) is None \
and not self.__loaded:
self._load_properties()
return getattr(self, "_%s__%s" % (self.__class__.__name__, var))
def _load_properties(self):
"""Load User properties from Flickr."""
method = 'flickr.people.getInfo'
data = _doget(method, user_id=self.__id)
self.__loaded = True
person = data.rsp.person
self.__isadmin = person.isadmin
self.__ispro = person.ispro
self.__icon_server = person.iconserver
if int(person.iconserver) > 0:
self.__icon_url = 'http://photos%s.flickr.com/buddyicons/%s.jpg' \
% (person.iconserver, self.__id)
else:
self.__icon_url = 'http://www.flickr.com/images/buddyicon.jpg'
self.__username = person.username.text
self.__realname = getattr((getattr(person, 'realname', u'')), 'text', u'')
self.__location = getattr((getattr(person, 'location', u'')), 'text', u'')
self.__photos_count = getattr((getattr(getattr(person, 'photos', None), 'count', u'')), 'text', u'')
if self.__photos_count:
self.__photos_firstdate = person.photos.firstdate.text
self.__photos_firstdatetaken = person.photos.firstdatetaken.text
else:
self.__photos_firstdate = None
self.__photos_firstdatetaken = None
def __str__(self):
return '<Flickr User %s>' % self.id
def getPhotosets(self):
"""Returns a list of Photosets."""
method = 'flickr.photosets.getList'
data = _doget(method, user_id=self.id)
sets = []
if not getattr(data.rsp.photosets, 'photoset',None):
return sets #N.B. returns an empty list
if isinstance(data.rsp.photosets.photoset, list):
for photoset in data.rsp.photosets.photoset:
sets.append(Photoset(photoset.id, photoset.title.text,\
Photo(photoset.primary),\
secret=photoset.secret, \
server=photoset.server, \
description=photoset.description.text,
photos=photoset.photos))
else:
photoset = data.rsp.photosets.photoset
sets.append(Photoset(photoset.id, photoset.title.text,\
Photo(photoset.primary),\
secret=photoset.secret, \
server=photoset.server, \
description=photoset.description.text,
photos=photoset.photos))
return sets
def getPublicFavorites(self, per_page='', page=''):
return favorites_getPublicList(user_id=self.id, per_page=per_page, \
page=page)
def getFavorites(self, per_page='', page=''):
return favorites_getList(user_id=self.id, per_page=per_page, \
page=page)
def getGalleries(self, per_page='', page=''):
return galleries_getList(user_id=self.id, per_page=per_page, \
page=page)
class Group(object):
"""Flickr Group Pool"""
def __init__(self, id, name=None, members=None, online=None,\
privacy=None, chatid=None, chatcount=None):
self.__loaded = False
self.__id = id
self.__name = name
self.__members = members
self.__online = online
self.__privacy = privacy
self.__chatid = chatid
self.__chatcount = chatcount
self.__url = None
id = property(lambda self: self._general_getattr('id'))
name = property(lambda self: self._general_getattr('name'))
members = property(lambda self: self._general_getattr('members'))
online = property(lambda self: self._general_getattr('online'))
privacy = property(lambda self: self._general_getattr('privacy'))
chatid = property(lambda self: self._general_getattr('chatid'))
chatcount = property(lambda self: self._general_getattr('chatcount'))
def _general_getattr(self, var):
"""Generic get attribute function."""
if getattr(self, "_%s__%s" % (self.__class__.__name__, var)) is None \
and not self.__loaded:
self._load_properties()
return getattr(self, "_%s__%s" % (self.__class__.__name__, var))
def _load_properties(self):
"""Loads the properties from Flickr."""
method = 'flickr.groups.getInfo'
data = _doget(method, group_id=self.id)
self.__loaded = True
group = data.rsp.group
self.__name = group.name.text
self.__description = group.description.text
self.__members = group.members.text
self.__privacy = group.privacy.text
def __str__(self):
return '<Flickr Group %s>' % self.id
def getPhotos(self, tags='', per_page='', page=''):
"""Get a list of photo objects for this group"""
method = 'flickr.groups.pools.getPhotos'
data = _doget(method, group_id=self.id, tags=tags,\
per_page=per_page, page=page)
photos = []
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
return photos
def add(self, photo):
"""Adds a Photo to the group"""
method = 'flickr.groups.pools.add'
_dopost(method, auth=True, photo_id=photo.id, group_id=self.id)
return True
def remove(self, photo):
"""Remove a Photo from the group"""
method = 'flickr.groups.pools.remove'
_dopost(method, auth=True, photo_id=photo.id, group_id=self.id)
return True
class Tag(object):
def __init__(self, id, author, raw, text):
self.id = id
self.author = author
self.raw = raw
self.text = text
def __str__(self):
return '<Flickr Tag %s (%s)>' % (self.id, self.text)
class Exif(object):
def __init__(self, camera, tags):
self.camera = camera
self.tags = tags
def __str__(self):
return '<Flickr Exif>'
@staticmethod
def getExif(photo_id_):
method = 'flickr.photos.getExif'
data = _doget(method, photo_id=photo_id_)
return Exif.parse(data.rsp.photo)
@staticmethod
def parse(photo):
camera = getattr(photo, 'camera', '')
tags = []
if hasattr(photo, 'exif'):
if isinstance(photo.exif, list):
tags = [ExifTag.parse(e) for e in photo.exif]
else:
tags = [ExifTag.parse(photo.exif)]
return Exif(camera, tags)
class ExifTag(object):
def __init__(self, tagspace, tagspaceid, tag, label, raw, clean):
self.tagspace = tagspace
self.tagspaceid = tagspaceid
self.tag = tag
self.label = label
self.raw = raw
self.clean = clean
def __str__(self):
return '<Flickr ExifTag %s (%s)>' % (self.tag, self.label)
@staticmethod
def parse(exif):
raw = ''
if hasattr(exif, 'raw'):
raw = exif.raw.text
clean = ''
if hasattr(exif, 'clean'):
clean = exif.clean.text
return ExifTag(exif.tagspace, exif.tagspaceid, exif.tag, exif.label,
raw, clean)
class Gallery(object):
"""Represents a Flickr Gallery.
Takes gallery_id as argument.
"""
# There are other attributes a Gallery could have, but defining them here
# might create errors. It might still be useful to define them here, though,
# if the user wants to set them when creating an instance.
def __init__(self, id, owner=None, title=None, description=None, \
date_create=None, date_update=None, count_photos=None, \
count_videos=None, primary_photo_id=None, \
primary_photo_server=None, primary_photo_farm=None, \
primary_photo_secret=None):
self.__loaded = False
self.__url = None
self.__id = id
self.__owner = owner
self.__title = title
self.__description = description
self.__date_create = date_create
self.__date_update = date_update
self.__count_photos = count_photos
self.__count_videos = count_videos
self.__primary_photo_id = primary_photo_id
self.__primary_photo_server = primary_photo_server
self.__primary_photo_farm = primary_photo_farm
self.__primary_photo_secret = primary_photo_secret
id = property(lambda self: self._general_getattr('id'))
url = property(lambda self: self._general_getattr('url'))
owner = property(lambda self: self._general_getattr('owner'))
title = property(lambda self: self._general_getattr('title'))
description = property(lambda self: self._general_getattr('description'))
date_create = property(lambda self: self._general_getattr('date_create'))
date_update = property(lambda self: self._general_getattr('date_update'))
count_photos = property(lambda self: self._general_getattr('count_photos'))
count_videos = property(lambda self: self._general_getattr('count_videos'))
primary_photo_id = property(lambda self: self._general_getattr('primary_photo_id'))
primary_photo_server = property(lambda self: self._general_getattr('primary_photo_server'))
primary_photo_farm = property(lambda self: self._general_getattr('primary_photo_farm'))
primary_photo_secret = property(lambda self: self._general_getattr('primary_photo_secret'))
def _general_getattr(self, var):
"""Generic get attribute function."""
if getattr(self, "_%s__%s" % (self.__class__.__name__, var)) is None \
and not self.__loaded:
self._load_properties()
return getattr(self, "_%s__%s" % (self.__class__.__name__, var))
def _load_properties(self):
"""Loads the properties from Flickr."""
method = 'flickr.galleries.getInfo'
data = _doget(method, gallery_id=self.id)
self.__loaded = True
gallery = data.rsp.gallery
self.__url = gallery.url
self.__owner = gallery.owner
self.__title = gallery.title.text
self.__description = gallery.description.text
self.__date_create = gallery.date_create
self.__date_update = gallery.date_update
self.__count_photos = gallery.count_photos
self.__count_videos = gallery.count_videos
self.__primary_photo_id = gallery.primary_photo_id
self.__primary_photo_server = gallery.primary_photo_server
self.__primary_photo_farm = gallery.primary_photo_farm
self.__primary_photo_secret = gallery.primary_photo_secret
def __str__(self):
return '<Flickr Gallery %s>' % self.id
def addPhoto(self, photo, comment=''):
"""Add a new Photo to the Gallery."""
method = 'flickr.galleries.addPhoto'
_dopost(method, auth=True, photo_id=photo.id, gallery_id=self.id, \
comment=comment)
return True
def editMeta(self, title='', description=''):
"""Modify the meta-data for a gallery.
In original API, title is required, but here, if not
specified, it will use the current title. (So it's optional)
Calling this function without any parameters will blank out the description.
"""
method = 'flickr.galleries.editMeta'
if title == '':
title = self.title
_dopost(method, auth=True, gallery_id=self.id, title=title, \
description=description)
return True
def editPhoto(self, photo, comment):
"""Change the comment for the given Photo."""
method = 'flickr.galleries.editPhoto'
_dopost(method, auth=True, gallery_id=self.id, photo_id=photo.id, \
comment=comment)
return True
def editPhotos(self, primary_photo, *photos):
"""Modify the photos in a gallery. Use this method to add,
remove and re-order photos."""
method = 'flickr.galleries.editPhotos'
photo_ids = ','.join([photo.id for photo in photos])
_dopost(method, auth=True, gallery_id=self.id, \
primary_photo_id=primary_photo.id, photo_ids=photo_ids)
return True
def getPhotos(self, per_page='', page='', **extras):
"""Return the list of photos for a gallery.
*extras (optional): A comma-delimited list of extra information
to fetch for each returned record. Currently supported fields are:
description, license, date_upload, date_taken, owner_name,
icon_server, original_format, last_update, geo, tags, machine_tags,
o_dims, views, media, path_alias, url_sq, url_t, url_s, url_m, url_o
"""
method = 'flickr.galleries.getPhotos'
extras = ','.join('%s=%s' % (i, v) for i, v in dict(extras).items())
data = _doget(method, gallery_id=self.id, per_page=per_page, \
page=page, extras=extras)
photos = {} # dict with photo instance as key and comment as value.
# if there's no comment, '' will be assigned.
for photo in data.rsp.photos.photo:
if photo.has_comment == '1':
photos[_parse_photo(photo)] = photo.comment.text
elif photo.has_comment == '0':
photos[_parse_photo(photo)] = ''
else: # Shouldn't EVER get here
raise FlickrError
return photos
#Flickr API methods
#see api docs http://www.flickr.com/services/api/
#for details of each param
#XXX: Could be Photo.search(cls)
def photos_search(user_id='', auth=False, tags='', tag_mode='', text='',\
min_upload_date='', max_upload_date='',\
min_taken_date='', max_taken_date='', \
license='', per_page='', page='', sort='',\
safe_search='', content_type='', **kwargs):
"""Returns a list of Photo objects.
If auth=True then will auth the user. Can see private etc
"""
method = 'flickr.photos.search'
data = _doget(method, auth=auth, user_id=user_id, tags=tags, text=text,\
min_upload_date=min_upload_date,\
max_upload_date=max_upload_date, \
min_taken_date=min_taken_date, \
max_taken_date=max_taken_date, \
license=license, per_page=per_page,\
page=page, sort=sort, safe_search=safe_search, \
content_type=content_type, \
tag_mode=tag_mode, **kwargs)
photos = []
if data.rsp.photos.__dict__.has_key('photo'):
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
def photos_search_pages(user_id='', auth=False, tags='', tag_mode='', text='',\
min_upload_date='', max_upload_date='',\
min_taken_date='', max_taken_date='', \
license='', per_page='', page='', sort=''):
"""Returns the number of pages for the previous function (photos_search())
"""
method = 'flickr.photos.search'
data = _doget(method, auth=auth, user_id=user_id, tags=tags, text=text,\
min_upload_date=min_upload_date,\
max_upload_date=max_upload_date, \
min_taken_date=min_taken_date, \
max_taken_date=max_taken_date, \
license=license, per_page=per_page,\
page=page, sort=sort)
return data.rsp.photos.pages
def photos_get_recent(extras='', per_page='', page=''):
"""http://www.flickr.com/services/api/flickr.photos.getRecent.html
"""
method = 'flickr.photos.getRecent'
data = _doget(method, extras=extras, per_page=per_page, page=page)
photos = []
if data.rsp.photos.__dict__.has_key('photo'):
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
#XXX: Could be class method in User
def people_findByEmail(email):
"""Returns User object."""
method = 'flickr.people.findByEmail'
data = _doget(method, find_email=email)
user = User(data.rsp.user.id, username=data.rsp.user.username.text)
return user
def people_findByUsername(username):
"""Returns User object."""
method = 'flickr.people.findByUsername'
data = _doget(method, username=username)
user = User(data.rsp.user.id, username=data.rsp.user.username.text)
return user
#XXX: Should probably be in User as a list User.public
def people_getPublicPhotos(user_id, per_page='', page=''):
"""Returns list of Photo objects."""
method = 'flickr.people.getPublicPhotos'
data = _doget(method, user_id=user_id, per_page=per_page, page=page)
photos = []
if hasattr(data.rsp.photos, "photo"): # Check if there are photos at all (may be been paging too far).
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
#XXX: These are also called from User
def favorites_getList(user_id='', per_page='', page=''):
"""Returns list of Photo objects."""
method = 'flickr.favorites.getList'
data = _doget(method, auth=True, user_id=user_id, per_page=per_page,\
page=page)
photos = []
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
def favorites_getPublicList(user_id, per_page='', page=''):
"""Returns list of Photo objects."""
method = 'flickr.favorites.getPublicList'
data = _doget(method, auth=False, user_id=user_id, per_page=per_page,\
page=page)
photos = []
if isinstance(data.rsp.photos.photo, list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
def favorites_add(photo_id):
"""Add a photo to the user's favorites."""
method = 'flickr.favorites.add'
_dopost(method, auth=True, photo_id=photo_id)
return True
def favorites_remove(photo_id):
"""Remove a photo from the user's favorites."""
method = 'flickr.favorites.remove'
_dopost(method, auth=True, photo_id=photo_id)
return True
def groups_getPublicGroups():
"""Get a list of groups the auth'd user is a member of."""
method = 'flickr.groups.getPublicGroups'
data = _doget(method, auth=True)
groups = []
if isinstance(data.rsp.groups.group, list):
for group in data.rsp.groups.group:
groups.append(Group(group.id, name=group.name))
else:
group = data.rsp.groups.group
groups = [Group(group.id, name=group.name)]
return groups
def groups_pools_getGroups():
"""Get a list of groups the auth'd user can post photos to."""
method = 'flickr.groups.pools.getGroups'
data = _doget(method, auth=True)
groups = []
if isinstance(data.rsp.groups.group, list):
for group in data.rsp.groups.group:
groups.append(Group(group.id, name=group.name, \
privacy=group.privacy))
else:
group = data.rsp.groups.group
groups = [Group(group.id, name=group.name, privacy=group.privacy)]
return groups
def tags_getListUser(user_id=''):
"""Returns a list of tags for the given user (in string format)"""
method = 'flickr.tags.getListUser'
auth = user_id == ''
data = _doget(method, auth=auth, user_id=user_id)
if isinstance(data.rsp.tags.tag, list):
return [tag.text for tag in data.rsp.tags.tag]
else:
return [data.rsp.tags.tag.text]
def tags_getListUserPopular(user_id='', count=''):
"""Gets the popular tags for a user in dictionary form tag=>count"""
method = 'flickr.tags.getListUserPopular'
auth = user_id == ''
data = _doget(method, auth=auth, user_id=user_id)
result = {}
if isinstance(data.rsp.tags.tag, list):
for tag in data.rsp.tags.tag:
result[tag.text] = tag.count
else:
result[data.rsp.tags.tag.text] = data.rsp.tags.tag.count
return result
def tags_getrelated(tag):
"""Gets the related tags for given tag."""
method = 'flickr.tags.getRelated'
data = _doget(method, auth=False, tag=tag)
if isinstance(data.rsp.tags.tag, list):
return [tag.text for tag in data.rsp.tags.tag]
else:
return [data.rsp.tags.tag.text]
def contacts_getPublicList(user_id):
"""Gets the contacts (Users) for the user_id"""
method = 'flickr.contacts.getPublicList'
data = _doget(method, auth=False, user_id=user_id)
    try:
        if isinstance(data.rsp.contacts.contact, list):
            return [User(user.nsid, username=user.username) \
                    for user in data.rsp.contacts.contact]
        else:
            # a single contact is returned as a bare object, not a list
            user = data.rsp.contacts.contact
            return [User(user.nsid, username=user.username)]
    except AttributeError:
        return "No users in the list"
    except:
        return "Unknown error"
def interestingness():
method = 'flickr.interestingness.getList'
data = _doget(method)
photos = []
if isinstance(data.rsp.photos.photo , list):
for photo in data.rsp.photos.photo:
photos.append(_parse_photo(photo))
else:
photos = [_parse_photo(data.rsp.photos.photo)]
return photos
def galleries_create(title, description, primary_photo_id=None):
    """Create a new gallery."""
    method = 'flickr.galleries.create'
    # the original branches were inverted: primary_photo_id was only sent
    # when it was None
    if primary_photo_id is not None:
        _dopost(method, auth=True, title=title, description=description,
                primary_photo_id=primary_photo_id)
    else:
        _dopost(method, auth=True, title=title, description=description)
def galleries_getList(user_id='', per_page='', page=''):
"""Returns list of Gallery objects."""
method = 'flickr.galleries.getList'
data = _doget(method, auth=False, user_id=user_id, per_page=per_page, \
page=page)
galleries = []
if isinstance(data.rsp.galleries.gallery, list):
for gallery in data.rsp.galleries.gallery:
galleries.append(_parse_gallery(gallery))
else:
galleries = [_parse_gallery(data.rsp.galleries.gallery)]
return galleries
def test_login():
method = 'flickr.test.login'
data = _doget(method, auth=True)
user = User(data.rsp.user.id, username=data.rsp.user.username.text)
return user
def test_echo():
method = 'flickr.test.echo'
data = _doget(method)
return data.rsp.stat
#useful methods
def _doget(method, auth=False, **params):
#uncomment to check you aren't killing the flickr server
#print "***** do get %s" % method
params = _prepare_params(params)
url = '%s%s/?api_key=%s&method=%s&%s%s'% \
(HOST, API, API_KEY, method, urlencode(params),
_get_auth_url_suffix(method, auth, params))
#another useful debug print statement
if debug:
print "_doget", url
return _get_data(minidom.parse(urlopen(url)))
def _dopost(method, auth=False, **params):
#uncomment to check you aren't killing the flickr server
#print "***** do post %s" % method
params = _prepare_params(params)
url = '%s%s/?api_key=%s%s'% \
(HOST, API, API_KEY, _get_auth_url_suffix(method, auth, params))
# There's no reason this can't be str(urlencode(params)). I just wanted to
# have it the same as the rest.
payload = '%s' % (urlencode(params))
#another useful debug print statement
if debug:
print "_dopost url", url
print "_dopost payload", payload
return _get_data(minidom.parse(urlopen(url, payload)))
def _prepare_params(params):
"""Convert lists to strings with ',' between items."""
for (key, value) in params.items():
if isinstance(value, list):
params[key] = ','.join([item for item in value])
return params
def _get_data(xml):
"""Given a bunch of XML back from Flickr, we turn it into a data structure
we can deal with (after checking for errors)."""
data = unmarshal(xml)
if not data.rsp.stat == 'ok':
msg = "ERROR [%s]: %s" % (data.rsp.err.code, data.rsp.err.msg)
raise FlickrError, msg
return data
def _get_api_sig(params):
"""Generate API signature."""
token = userToken()
parameters = ['api_key', 'auth_token']
for item in params.items():
parameters.append(item[0])
parameters.sort()
api_string = [API_SECRET]
    for item in parameters:
        for (param_key, param_value) in params.items():
            if item == param_key:
                api_string.append(item)
                api_string.append(str(param_value))
if item == 'api_key':
api_string.append('api_key')
api_string.append(API_KEY)
if item == 'auth_token':
api_string.append('auth_token')
api_string.append(token)
api_signature = hashlib.md5(''.join(api_string)).hexdigest()
return api_signature
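# e.g. for params {'method': 'flickr.test.login'}, the sorted key order is
# api_key, auth_token, method, so the string hashed above is:
#   API_SECRET + 'api_key' + API_KEY + 'auth_token' + token
#              + 'method' + 'flickr.test.login'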
def _get_auth_url_suffix(method, auth, params):
"""Figure out whether we want to authorize, and if so, construct a suitable
URL suffix to pass to the Flickr API."""
authentication = False
# auth may be passed in via the API, AUTH may be set globally (in the same
# manner as API_KEY, etc). We do a few more checks than may seem necessary
# because we allow the 'auth' parameter to actually contain the
# authentication token, not just True/False.
if auth or AUTH:
token = userToken()
authentication = True
elif auth != False:
token = auth
authentication = True
elif AUTH != False:
token = AUTH
authentication = True
# If we're not authenticating, no suffix is required.
if not authentication:
return ''
full_params = params
full_params['method'] = method
return '&auth_token=%s&api_sig=%s' % (token, _get_api_sig(full_params))
def _parse_photo(photo):
"""Create a Photo object from photo data."""
owner = User(photo.owner)
title = photo.title
ispublic = photo.ispublic
isfriend = photo.isfriend
isfamily = photo.isfamily
secret = photo.secret
server = photo.server
farm = photo.farm
p = Photo(photo.id, owner=owner, title=title, ispublic=ispublic,\
isfriend=isfriend, isfamily=isfamily, secret=secret, \
server=server, farm=farm)
return p
def _parse_gallery(gallery):
"""Create a Gallery object from gallery data."""
# This might not work!! NEEDS TESTING
url = gallery.url
owner = User(gallery.owner)
title = gallery.title.text
description = gallery.description.text
date_create = gallery.date_create
date_update = gallery.date_update
count_photos = gallery.count_photos
count_videos = gallery.count_videos
primary_photo_id = gallery.primary_photo_id
primary_photo_server = gallery.primary_photo_server
primary_photo_farm = gallery.primary_photo_farm
primary_photo_secret = gallery.primary_photo_secret
g = Gallery(gallery.id, owner=owner, title=title, description=description, \
date_create=date_create, date_update=date_update, \
count_photos=count_photos, count_videos=count_videos, \
primary_photo_id=primary_photo_id, \
primary_photo_server=primary_photo_server, \
primary_photo_farm=primary_photo_farm, \
primary_photo_secret=primary_photo_secret)
return g
#stolen methods
class Bag: pass
#unmarshal taken and modified from pyamazon.py
#makes the xml easy to work with
def unmarshal(element):
rc = Bag()
if isinstance(element, minidom.Element):
for key in element.attributes.keys():
setattr(rc, key, element.attributes[key].value)
childElements = [e for e in element.childNodes \
if isinstance(e, minidom.Element)]
if childElements:
for child in childElements:
key = child.tagName
if hasattr(rc, key):
                if type(getattr(rc, key)) != type([]):
setattr(rc, key, [getattr(rc, key)])
setattr(rc, key, getattr(rc, key) + [unmarshal(child)])
elif isinstance(child, minidom.Element) and \
(child.tagName == 'Details'):
# make the first Details element a key
setattr(rc,key,[unmarshal(child)])
                #dbg: because otherwise 'hasattr' only tests true on the
                #dbg: second occurrence: if there's a single return to a
                #dbg: query, it's not a list. This module should always
                #dbg: return a list of Details objects.
else:
setattr(rc, key, unmarshal(child))
else:
#jec: we'll have the main part of the element stored in .text
#jec: will break if tag <text> is also present
text = "".join([e.data for e in element.childNodes \
if isinstance(e, minidom.Text)])
setattr(rc, 'text', text)
return rc
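# Illustrative sketch (not part of the original code): given a response like
#   <rsp stat="ok"><user id="1"><username>alice</username></user></rsp>
# unmarshal() yields nested Bag objects addressable as attributes:
#   data = unmarshal(minidom.parseString(xml_text))
#   data.rsp.stat               # -> 'ok'
#   data.rsp.user.id            # -> '1'
#   data.rsp.user.username.text # -> 'alice'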
#unique items from a list from the cookbook
def uniq(alist):    # Fastest without order preserving
    d = {}          # plain dict, avoids shadowing the built-in `set`
    map(d.__setitem__, alist, [])
    return d.keys()
## Only the "getList" module is complete.
## Work in Progress; Nearly Finished
class Blogs():
def getList(self,auth=True):
"""blogs.getList requires READ authentication"""
# please read documentation on how to use this
method = 'flickr.blogs.getList'
        data = _doget(method, auth=auth)
bID = []
bName = []
bNeedsPword = []
bURL = []
try:
for plog in data.rsp.blogs.blog:
bID.append(plog.id)
bName.append(plog.name)
bNeedsPword.append(plog.needspassword)
bURL.append(plog.url)
except TypeError:
try:
bID.append(data.rsp.blogs.blog.id)
bName.append(data.rsp.blogs.blog.name)
bNeedsPword.append(data.rsp.blogs.blog.needspassword)
bURL.append(data.rsp.blogs.blog.url)
except AttributeError:
return "AttributeError, unexplained!"
except:
return "Unknown error!"
except AttributeError:
return "There are no blogs!"
myReturn = [bID,bName,bNeedsPword,bURL]
return myReturn
def postPhoto(self, blogID, photoID, title, description, bpassword):
"""blogs.postPhoto requires WRITE authentication"""
method = 'flickr.blogs.postPhoto'
return None
class Urls():
    def getUserPhotosURL(self, userid):
"""Returns user URL in an array (to access, use array[1])"""
method = 'flickr.urls.getUserPhotos'
data = _doget(method, user_id=userid)
return [data.rsp.user.nsid,data.rsp.user.url]
class Auth():
def getFrob(self):
"""Returns a frob that is used in authentication"""
method = 'flickr.auth.getFrob'
sig_str = API_SECRET + 'api_key' + API_KEY + 'method' + method
signature_hash = hashlib.md5(sig_str).hexdigest()
data = _doget(method, auth=False, api_sig=signature_hash)
return data.rsp.frob.text
def loginLink(self, permission, frob):
"""Generates a link that the user should be sent to"""
myAuth = Auth()
sig_str = API_SECRET + 'api_key' + API_KEY + 'frob' + frob + 'perms' + permission
signature_hash = hashlib.md5(sig_str).hexdigest()
perms = permission
link = "http://flickr.com/services/auth/?api_key=%s&perms=%s&frob=%s&api_sig=%s" % (API_KEY, perms, frob, signature_hash)
return link
def getToken(self, frob):
"""This token is what needs to be used in future API calls"""
method = 'flickr.auth.getToken'
sig_str = API_SECRET + 'api_key' + API_KEY + 'frob' + frob + 'method' + method
signature_hash = hashlib.md5(sig_str).hexdigest()
data = _doget(method, auth=False, api_sig=signature_hash,
api_key=API_KEY, frob=frob)
return data.rsp.auth.token.text
def userToken():
    # Lets flickr.py retrieve the saved token: once a token has been issued
    # for a program it cannot be fetched from Flickr again, so flickr.py
    # stores it in a file (token.txt by default).
if not tokenPath == '':
f = file(os.path.join(tokenPath,tokenFile),'r')
else:
f = file(tokenFile,'r')
token = f.read()
f.close()
return token
def getUserPhotosURL(userid):
"""Returns user URL in an array (to access, use array[1])"""
# This addition has been added upon request of
# nsteinmetz. It will be "cleaned up" at another
# time.
method = 'flickr.urls.getUserPhotos'
data = _doget(method, user_id=userid)
userurl = [data.rsp.user.nsid,data.rsp.user.url]
return userurl
if __name__ == '__main__':
print test_echo()
|
'''
Numeric types:
int      integer
float    floating-point
complex  complex number
'''
|
import twint
c = twint.Config()
c.Since = "2021-02-01"
c.Until = "2021-03-14"
c.Search = "(mulher OR mulheres OR garotinha OR garotas OR menina OR garotas) AND \
((engenheira OR cientista OR arquiteta OR programação OR biologa) OR \
(engenharia OR ciência OR stem)) OR \
(matemática) OR \
    (#WomenInSTEM OR #WomenInTech OR #MulheresemTI OR #MulheresEmSTEM OR #GirlsInTech OR #MulheresnaCiencia)"
c.Lang = "pt"
c.Store_csv = True
c.Output = "./Query1_2021_pt.csv"
twint.run.Search(c)
|
import argparse
import pprint
import sys
import torch
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
from mtcnn.config import cfg
from mtcnn.datasets.iteration_based_batch_sampler import build_batch_sampler
from mtcnn.datasets.roidb import get_roidb
from mtcnn.engine.trainer import do_train
from mtcnn.modeling.model_builder import build_model
from mtcnn.utils.logger import setup_logging
from mtcnn.utils.lr_scheduler import make_optimizer
from mtcnn.utils.lr_scheduler import make_scheduler
logger = setup_logging(__name__)
def train():
model = build_model(cfg.MODEL.TYPE)
device = torch.device(cfg.MODEL.DEVICE)
model.to(device)
optimizer = make_optimizer(cfg, model)
scheduler = make_scheduler(cfg, optimizer)
transform = transforms.ToTensor()
roidb = get_roidb(transform=transform)
batch_sampler = build_batch_sampler(
roidb,
cfg.TRAIN.BATCH_SIZE,
shuffle=True
)
data_loader = DataLoader(roidb, batch_sampler=batch_sampler)
do_train(model, data_loader, optimizer, scheduler, device)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--cfg',
dest='cfg_file',
default=None,
type=str
)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
return parser.parse_args()
def main():
args = parse_args()
logger.info('Called with args:')
logger.info(pprint.pformat(args))
if args.cfg_file:
cfg.merge_from_file(args.cfg_file)
logger.info('Using configs:')
logger.info(pprint.pformat(cfg))
train()
if __name__ == '__main__':
main()
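# Example invocation (hypothetical config path):
#   python train.py --cfg configs/mtcnn_pnet.yaml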
|
import abc
import decimal
import io
from typing import (
Any,
)
from eth_utils import (
big_endian_to_int,
to_normalized_address,
to_tuple,
)
from eth_abi.base import (
BaseCoder,
parse_tuple_type_str,
parse_type_str,
)
from eth_abi.exceptions import (
DecodingError,
InsufficientDataBytes,
NonEmptyPaddingBytes,
)
from eth_abi.utils.numeric import (
TEN,
abi_decimal_context,
ceil32,
)
class ContextFramesBytesIO(io.BytesIO):
"""
A byte stream which can track a series of contextual frames in a stack. This
data structure is necessary to perform nested decodings using the
:py:class:``HeadTailDecoder`` since offsets present in head sections are
relative only to a particular encoded object. These offsets can only be
used to locate a position in a decoding stream if they are paired with a
contextual offset that establishes the position of the object in which they
are found.
For example, consider the encoding of a value for the following type::
type: (int,(int,int[]))
value: (1,(2,[3,3]))
There are two tuples in this type: one inner and one outer. The inner tuple
type contains a dynamic type ``int[]`` and, therefore, is itself dynamic.
This means that its value encoding will be placed in the tail section of the
outer tuple's encoding. Furthermore, the inner tuple's encoding will,
itself, contain a tail section with the encoding for ``[3,3]``. All
together, the encoded value of ``(1,(2,[3,3]))`` would look like this (the
data values are normally 32 bytes wide but have been truncated to remove the
redundant zeros at the beginnings of their encodings)::
offset data
--------------------------
^ 0 0x01
| 32 0x40 <-- Offset of object A in global frame (64)
-----|--------------------
Global frame ^ 64 0x02 <-- Beginning of object A (64 w/offset 0 = 64)
| | 96 0x40 <-- Offset of object B in frame of object A (64)
-----|-Object A's frame---
| | 128 0x02 <-- Beginning of object B (64 w/offset 64 = 128)
| | 160 0x03
v v 192 0x03
--------------------------
Note that the offset of object B is encoded as 64 which only specifies the
beginning of its encoded value relative to the beginning of object A's
encoding. Globally, object B is located at offset 128. In order to make
sense out of object B's offset, it needs to be positioned in the context of
its enclosing object's frame (object A).
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._frames = []
self._total_offset = 0
def seek_in_frame(self, pos, *args, **kwargs):
"""
Seeks relative to the total offset of the current contextual frames.
"""
self.seek(self._total_offset + pos, *args, **kwargs)
def push_frame(self, offset):
"""
Pushes a new contextual frame onto the stack with the given offset and a
return position at the current cursor position then seeks to the new
total offset.
"""
self._frames.append((offset, self.tell()))
self._total_offset += offset
self.seek_in_frame(0)
def pop_frame(self):
"""
Pops the current contextual frame off of the stack and returns the
cursor to the frame's return position.
"""
try:
offset, return_pos = self._frames.pop()
except IndexError:
raise IndexError('no frames to pop')
self._total_offset -= offset
self.seek(return_pos)
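# --- illustrative sketch, not part of the original module -------------------
# Shows how the frame stack resolves the nested offsets from the docstring
# example above: object B's offset (64) is relative to object A's frame.
def _demo_context_frames():
    stream = ContextFramesBytesIO(bytes(256))
    stream.push_frame(64)       # enter object A's frame (global offset 64)
    stream.seek_in_frame(64)    # B's offset within A -> global position 128
    assert stream.tell() == 128
    stream.pop_frame()          # cursor returns to A's entry position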
class BaseDecoder(BaseCoder, metaclass=abc.ABCMeta):
"""
Base class for all decoder classes. Subclass this if you want to define a
custom decoder class. Subclasses must also implement
:any:`BaseCoder.from_type_str`.
"""
@abc.abstractmethod
def decode(self, stream: ContextFramesBytesIO) -> Any: # pragma: no cover
"""
Decodes the given stream of bytes into a python value. Should raise
:any:`exceptions.DecodingError` if a python value cannot be decoded
from the given byte stream.
"""
pass
def __call__(self, stream: ContextFramesBytesIO) -> Any:
return self.decode(stream)
class HeadTailDecoder(BaseDecoder):
is_dynamic = True
tail_decoder = None
def validate(self):
super().validate()
if self.tail_decoder is None:
raise ValueError("No `tail_decoder` set")
def decode(self, stream):
start_pos = decode_uint_256(stream)
stream.push_frame(start_pos)
value = self.tail_decoder(stream)
stream.pop_frame()
return value
class TupleDecoder(BaseDecoder):
decoders = None
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.decoders = tuple(
HeadTailDecoder(tail_decoder=d) if getattr(d, 'is_dynamic', False) else d
for d in self.decoders
)
self.is_dynamic = any(getattr(d, 'is_dynamic', False) for d in self.decoders)
def validate(self):
super().validate()
if self.decoders is None:
raise ValueError("No `decoders` set")
@to_tuple
def decode(self, stream):
for decoder in self.decoders:
yield decoder(stream)
@parse_tuple_type_str
def from_type_str(cls, abi_type, registry):
decoders = tuple(
registry.get_decoder(c.to_type_str())
for c in abi_type.components
)
return cls(decoders=decoders)
class SingleDecoder(BaseDecoder):
decoder_fn = None
def validate(self):
super().validate()
if self.decoder_fn is None:
raise ValueError("No `decoder_fn` set")
def validate_padding_bytes(self, value, padding_bytes):
raise NotImplementedError("Must be implemented by subclasses")
def decode(self, stream):
raw_data = self.read_data_from_stream(stream)
data, padding_bytes = self.split_data_and_padding(raw_data)
value = self.decoder_fn(data)
self.validate_padding_bytes(value, padding_bytes)
return value
def read_data_from_stream(self, stream):
raise NotImplementedError("Must be implemented by subclasses")
def split_data_and_padding(self, raw_data):
return raw_data, b''
class BaseArrayDecoder(BaseDecoder):
item_decoder = None
def __init__(self, **kwargs):
super().__init__(**kwargs)
# Use a head-tail decoder to decode dynamic elements
if self.item_decoder.is_dynamic:
self.item_decoder = HeadTailDecoder(
tail_decoder=self.item_decoder,
)
def validate(self):
super().validate()
if self.item_decoder is None:
raise ValueError("No `item_decoder` set")
@parse_type_str(with_arrlist=True)
def from_type_str(cls, abi_type, registry):
item_decoder = registry.get_decoder(abi_type.item_type.to_type_str())
array_spec = abi_type.arrlist[-1]
if len(array_spec) == 1:
# If array dimension is fixed
return SizedArrayDecoder(
array_size=array_spec[0],
item_decoder=item_decoder,
)
else:
# If array dimension is dynamic
return DynamicArrayDecoder(item_decoder=item_decoder)
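# e.g. (assumed eth_abi type grammar): 'uint256[3]' parses with an arrlist
# ending in (3,) and dispatches to SizedArrayDecoder, while 'uint256[]' ends
# in () and dispatches to DynamicArrayDecoder.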
class SizedArrayDecoder(BaseArrayDecoder):
array_size = None
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.is_dynamic = self.item_decoder.is_dynamic
@to_tuple
def decode(self, stream):
for _ in range(self.array_size):
yield self.item_decoder(stream)
class DynamicArrayDecoder(BaseArrayDecoder):
# Dynamic arrays are always dynamic, regardless of their elements
is_dynamic = True
@to_tuple
def decode(self, stream):
array_size = decode_uint_256(stream)
stream.push_frame(32)
for _ in range(array_size):
yield self.item_decoder(stream)
stream.pop_frame()
class FixedByteSizeDecoder(SingleDecoder):
decoder_fn = None
value_bit_size = None
data_byte_size = None
is_big_endian = None
def validate(self):
super().validate()
if self.value_bit_size is None:
raise ValueError("`value_bit_size` may not be None")
if self.data_byte_size is None:
raise ValueError("`data_byte_size` may not be None")
if self.decoder_fn is None:
raise ValueError("`decoder_fn` may not be None")
if self.is_big_endian is None:
raise ValueError("`is_big_endian` may not be None")
if self.value_bit_size % 8 != 0:
raise ValueError(
"Invalid value bit size: {0}. Must be a multiple of 8".format(
self.value_bit_size,
)
)
if self.value_bit_size > self.data_byte_size * 8:
raise ValueError("Value byte size exceeds data size")
def read_data_from_stream(self, stream):
data = stream.read(self.data_byte_size)
if len(data) != self.data_byte_size:
raise InsufficientDataBytes(
"Tried to read {0} bytes. Only got {1} bytes".format(
self.data_byte_size,
len(data),
)
)
return data
def split_data_and_padding(self, raw_data):
value_byte_size = self._get_value_byte_size()
padding_size = self.data_byte_size - value_byte_size
if self.is_big_endian:
padding_bytes = raw_data[:padding_size]
data = raw_data[padding_size:]
else:
data = raw_data[:value_byte_size]
padding_bytes = raw_data[value_byte_size:]
return data, padding_bytes
def validate_padding_bytes(self, value, padding_bytes):
value_byte_size = self._get_value_byte_size()
padding_size = self.data_byte_size - value_byte_size
if padding_bytes != b'\x00' * padding_size:
raise NonEmptyPaddingBytes(
"Padding bytes were not empty: {0}".format(repr(padding_bytes))
)
def _get_value_byte_size(self):
value_byte_size = self.value_bit_size // 8
return value_byte_size
class Fixed32ByteSizeDecoder(FixedByteSizeDecoder):
data_byte_size = 32
class BooleanDecoder(Fixed32ByteSizeDecoder):
value_bit_size = 8
is_big_endian = True
@staticmethod
def decoder_fn(data):
if data == b'\x00':
return False
elif data == b'\x01':
return True
else:
raise NonEmptyPaddingBytes(
"Boolean must be either 0x0 or 0x1. Got: {0}".format(repr(data))
)
@parse_type_str('bool')
def from_type_str(cls, abi_type, registry):
return cls()
class AddressDecoder(Fixed32ByteSizeDecoder):
value_bit_size = 20 * 8
is_big_endian = True
decoder_fn = staticmethod(to_normalized_address)
@parse_type_str('address')
def from_type_str(cls, abi_type, registry):
return cls()
#
# Unsigned Integer Decoders
#
class UnsignedIntegerDecoder(Fixed32ByteSizeDecoder):
decoder_fn = staticmethod(big_endian_to_int)
is_big_endian = True
@parse_type_str('uint')
def from_type_str(cls, abi_type, registry):
return cls(value_bit_size=abi_type.sub)
decode_uint_256 = UnsignedIntegerDecoder(value_bit_size=256)
#
# Signed Integer Decoders
#
class SignedIntegerDecoder(Fixed32ByteSizeDecoder):
is_big_endian = True
def decoder_fn(self, data):
value = big_endian_to_int(data)
if value >= 2 ** (self.value_bit_size - 1):
return value - 2 ** self.value_bit_size
else:
return value
def validate_padding_bytes(self, value, padding_bytes):
value_byte_size = self._get_value_byte_size()
padding_size = self.data_byte_size - value_byte_size
if value >= 0:
expected_padding_bytes = b'\x00' * padding_size
else:
expected_padding_bytes = b'\xff' * padding_size
if padding_bytes != expected_padding_bytes:
raise NonEmptyPaddingBytes(
"Padding bytes were not empty: {0}".format(repr(padding_bytes))
)
@parse_type_str('int')
def from_type_str(cls, abi_type, registry):
return cls(value_bit_size=abi_type.sub)
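# e.g. for int8 the data byte 0xff decodes to -1: big_endian_to_int gives 255,
# and since 255 >= 2**7 the decoder returns 255 - 2**8 == -1.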
#
# Bytes1..32
#
class BytesDecoder(Fixed32ByteSizeDecoder):
is_big_endian = False
@staticmethod
def decoder_fn(data):
return data
@parse_type_str('bytes')
def from_type_str(cls, abi_type, registry):
return cls(value_bit_size=abi_type.sub * 8)
class BaseFixedDecoder(Fixed32ByteSizeDecoder):
frac_places = None
is_big_endian = True
def validate(self):
super().validate()
if self.frac_places is None:
raise ValueError("must specify `frac_places`")
if self.frac_places <= 0 or self.frac_places > 80:
raise ValueError("`frac_places` must be in range (0, 80]")
class UnsignedFixedDecoder(BaseFixedDecoder):
def decoder_fn(self, data):
value = big_endian_to_int(data)
with decimal.localcontext(abi_decimal_context):
decimal_value = decimal.Decimal(value) / TEN ** self.frac_places
return decimal_value
@parse_type_str('ufixed')
def from_type_str(cls, abi_type, registry):
value_bit_size, frac_places = abi_type.sub
return cls(value_bit_size=value_bit_size, frac_places=frac_places)
class SignedFixedDecoder(BaseFixedDecoder):
def decoder_fn(self, data):
value = big_endian_to_int(data)
if value >= 2 ** (self.value_bit_size - 1):
signed_value = value - 2 ** self.value_bit_size
else:
signed_value = value
with decimal.localcontext(abi_decimal_context):
decimal_value = decimal.Decimal(signed_value) / TEN ** self.frac_places
return decimal_value
def validate_padding_bytes(self, value, padding_bytes):
value_byte_size = self._get_value_byte_size()
padding_size = self.data_byte_size - value_byte_size
if value >= 0:
expected_padding_bytes = b'\x00' * padding_size
else:
expected_padding_bytes = b'\xff' * padding_size
if padding_bytes != expected_padding_bytes:
raise NonEmptyPaddingBytes(
"Padding bytes were not empty: {0}".format(repr(padding_bytes))
)
@parse_type_str('fixed')
def from_type_str(cls, abi_type, registry):
value_bit_size, frac_places = abi_type.sub
return cls(value_bit_size=value_bit_size, frac_places=frac_places)
#
# String and Bytes
#
class ByteStringDecoder(SingleDecoder):
is_dynamic = True
@staticmethod
def decoder_fn(data):
return data
@staticmethod
def read_data_from_stream(stream):
data_length = decode_uint_256(stream)
padded_length = ceil32(data_length)
data = stream.read(padded_length)
if len(data) < padded_length:
raise InsufficientDataBytes(
"Tried to read {0} bytes. Only got {1} bytes".format(
padded_length,
len(data),
)
)
padding_bytes = data[data_length:]
if padding_bytes != b'\x00' * (padded_length - data_length):
raise NonEmptyPaddingBytes(
"Padding bytes were not empty: {0}".format(repr(padding_bytes))
)
return data[:data_length]
def validate_padding_bytes(self, value, padding_bytes):
pass
@parse_type_str('bytes')
def from_type_str(cls, abi_type, registry):
return cls()
class StringDecoder(ByteStringDecoder):
@parse_type_str('string')
def from_type_str(cls, abi_type, registry):
return cls()
@staticmethod
def decoder_fn(data):
try:
value = data.decode("utf-8")
except UnicodeDecodeError as e:
raise DecodingError(
e.encoding,
e.object,
e.start,
e.end,
"The returned type for this function is string which is "
"expected to be a UTF8 encoded string of text. The returned "
"value could not be decoded as valid UTF8. This is indicative "
"of a broken application which is using incorrect return types for "
"binary data.") from e
return value
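# Illustrative sketch (not part of the original module): the module-level
# decode_uint_256 instance defined above can be applied directly to a stream.
#   stream = ContextFramesBytesIO((42).to_bytes(32, 'big'))
#   decode_uint_256(stream)  # -> 42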
|
import pytest
from blacksheep.common.files.pathsutils import (
get_file_extension_from_name,
get_mime_type_from_name,
)
@pytest.mark.parametrize(
"full_path,expected_result",
[
("hello.txt", ".txt"),
(".gitignore", ".gitignore"),
("ØØ Void.album", ".album"),
("", ""),
],
)
def test_get_file_extension_from_name(full_path, expected_result):
assert get_file_extension_from_name(full_path) == expected_result
@pytest.mark.parametrize(
"full_path,expected_result",
[
("example.ogg", "audio/ogg"),
("example.jpg", "image/jpeg"),
("example.jpeg", "image/jpeg"),
("example.png", "image/png"),
("example.js", "application/javascript"),
("example.json", "application/json"),
("example.woff2", "font/woff2"),
("hello.txt", "text/plain"),
(".gitignore", "application/octet-stream"),
("ØØ Void.album", "application/octet-stream"),
("", "application/octet-stream"),
],
)
def test_get_mime_type(full_path, expected_result):
assert get_mime_type_from_name(full_path) == expected_result
|
# tipo = collects whatever the user types and reports its type
# (alphanumeric, alphabetic, etc.)
# ========================================================================
# title and data collection
print("\033[33m============[ EX 004 ]============")
print(34 * "=", "\033[m")
tipo = input("type \033[33msomething\033[m: ")
print(34 * "\033[33m=", "\033[m")
# ========================================================================
# show information about the variable "tipo"
# (labels match the str methods: isalnum -> alphanumeric, isalpha -> alphabetic)
print(f"({tipo}) is of type: \033[33m{type(tipo)}\033[m")
print(f"({tipo}) is alphanumeric? \033[33m{tipo.isalnum()}\033[m")
print(f"({tipo}) is alphabetic? \033[33m{tipo.isalpha()}\033[m")
print(f"({tipo}) is lowercase? \033[33m{tipo.islower()}\033[m")
print(f"({tipo}) is uppercase? \033[33m{tipo.isupper()}\033[m")
print(f"({tipo}) is only whitespace? \033[33m{tipo.isspace()}\033[m")
# ========================================================================
|
# -*- coding: utf-8 -*-
"""Nexus 3 CLI
Usage:
nexus3 --help, -h
nexus3 login
nexus3 (list|ls) <repository_path>
nexus3 (upload|up) <from_src> <to_repository>
nexus3 repo create hosted maven <repo_name>
[--blob=<store_name>] [--version=<v_policy>]
[--layout=<l_policy>] [--strict-content]
[--write=<w_policy>]
nexus3 repo create hosted (bower|npm|nuget|pypi|raw|rubygems) <repo_name>
[--blob=<store_name>] [--write=<w_policy>] [--strict-content]
nexus3 repo create hosted yum <repo_name>
[--blob=<store_name>] [--write=<w_policy>]
[--depth=<repo_depth>] [--strict-content]
nexus3 repo create proxy maven <repo_name> <remote_url>
[--blob=<store_name>] [--version=<v_policy>]
[--layout=<l_policy>] [--strict-content]
nexus3 repo create proxy (bower|npm|nuget|pypi|raw|rubygems|yum)
<repo_name> <remote_url>
[--blob=<store_name>] [--strict-content]
nexus3 repo list
nexus3 repo rm <repo_name> [--force]
nexus3 script create <script.json>
nexus3 script list
nexus3 script (rm|run) <script_name>
Options:
-h --help This screen
--blob=<store_name> Use this blob with new repository [default: default]
--depth=<repo_depth> Depth (0-5) where repodata folder(s) exist [default: 0]
--force, -f Execute action without confirmation
--write=<w_policy> Accepted: allow, allow_once, deny [default: allow_once]
--layout=<l_policy> Accepted: strict, permissive [default: strict]
--version=<v_policy> Accepted: release, snapshot, mixed [default: release]
--strict-content Enable strict content type validation
Commands:
login Test login and save credentials to ~/.nexus-cli
list List all files within a path in the repository
repo create Create a repository using the format and options provided
repo list List all repositories available on the server
  repo rm            Remove <repo_name>; prompts for confirmation unless --force is given
script create Create or update a script using the <script.json> file
script list List all scripts available on the server
script rm Remove existing <script_name>
script run Run the existing <script_name>
"""
import getpass
import inflect
import json
import sys
import types
from docopt import docopt
from nexuscli.exception import NexusClientConfigurationNotFound
from nexuscli.nexus_client import NexusClient
from nexuscli import repository
PLURAL = inflect.engine().plural
def _input(prompt, default=None):
"""
:return: raw_input for Python 2.x and input for Python 3.x
:rtype: function
"""
if sys.version_info < (3, 0):
real_input = raw_input # noqa - Python2
else:
real_input = input
value = real_input('{prompt} ({default}):'.format(**locals()))
if value:
return value
return default
def do_login():
nexus_url = _input('Nexus OSS URL', NexusClient.DEFAULT_URL)
nexus_user = _input('Nexus admin username', NexusClient.DEFAULT_USER)
nexus_pass = getpass.getpass(
prompt='Nexus admin password ({}):'.format(
NexusClient.DEFAULT_PASS))
if not nexus_pass:
nexus_pass = NexusClient.DEFAULT_PASS
client = NexusClient(url=nexus_url, user=nexus_user, password=nexus_pass)
client.write_config()
sys.stderr.write('\nConfiguration saved to {}\n'.format(
NexusClient.CONFIG_PATH))
def get_client():
client = NexusClient()
try:
client.read_config()
return client
except NexusClientConfigurationNotFound:
sys.stderr.write(
'Configuration not found; please run nexus-cli.py login\n')
sys.exit(1)
def cmd_script_do_list(nexus_client):
json_response = nexus_client.scripts.list()
sys.stderr.write('Name (type)\n')
for script in json_response:
sys.stdout.write('{script[name]} ({script[type]})\n'.format(
script=script))
def cmd_script_do_create(nexus_client, script_path):
    with open(script_path) as script_file:
        script_content = json.load(script_file, strict=False)
    nexus_client.scripts.create(script_content)
def cmd_script(args):
nexus_client = get_client()
if args.get('list'):
cmd_script_do_list(nexus_client)
elif args.get('rm'):
nexus_client.scripts.delete(args.get('<script_name>'))
elif args.get('run'):
nexus_client.scripts.run(args.get('<script_name>'))
elif args.get('create'):
cmd_script_do_create(nexus_client, args.get('<script.json>'))
else:
raise NotImplementedError
def cmd_repo_do_list(nexus_client):
json_response = nexus_client.repo_list()
output_format = '{0:40} {1:7} {2:7} {3}\n'
sys.stderr.write(output_format.format('Name', 'Format', 'Type', 'URL'))
sys.stderr.write(output_format.format('----', '------', '----', '---'))
for repo in json_response:
sys.stdout.write(output_format.format(
repo['name'], repo['format'], repo['type'], repo['url']))
def args_to_repo_format(args):
# docopt guarantees only one is True
for format_name in repository.validations.KNOWN_FORMATS:
if args.get(format_name) is True:
return format_name
def args_to_repo_type(args):
# docopt guarantees only one is True
for type_name in repository.validations.KNOWN_TYPES:
if args.get(type_name) is True:
return type_name
def cmd_repo_create(nexus_client, args):
"""Performs ``rekt repo create *`` commands"""
r = repository.Repository(
args_to_repo_type(args),
ignore_extra_kwargs=True,
name=args.get('<repo_name>'),
format=args_to_repo_format(args),
blob_store_name=args.get('--blob'),
depth=int(args.get('--depth')),
remote_url=args.get('<remote_url>'),
strict_content_type_validation=args.get('--strict-content'),
version_policy=args.get('--version'),
write_policy=args.get('--write'),
layout_policy=args.get('--layout'),
)
nexus_client.repositories.create(r)
def cmd_repo(args):
"""Performs ``nexus3 repo *`` commands"""
nexus_client = get_client()
if args.get('list'):
cmd_repo_do_list(nexus_client)
elif args.get('create'):
cmd_repo_create(nexus_client, args)
elif args.get('rm'):
if not args.get('--force'):
_input('Press ENTER to confirm deletion', 'ctrl+c to cancel')
nexus_client.repositories.delete(args.get('<repo_name>'))
else:
raise NotImplementedError
def cmd_list(args):
"""Performs ``nexus3 list``"""
nexus_client = get_client()
repository_path = args['<repository_path>']
artefact_list = nexus_client.list(repository_path)
# FIXME: is types.GeneratorType still used?
if isinstance(artefact_list, (list, types.GeneratorType)):
for artefact in iter(artefact_list):
sys.stdout.write('{}\n'.format(artefact))
return 0
else:
return 1
def _cmd_up_down_errors(count, action):
"""Print and exit with error if upload/download didn't succeed"""
if count == 0:
# FIXME: inflex the action verb to past participle
sys.stderr.write('WARNING: no files were {}\'ed.'.format(action))
sys.exit(1)
if count == -1:
sys.stderr.write('ERROR during {} operation.'.format(action))
sys.exit(2)
def cmd_upload(args):
"""Performs ``nexus3 upload``"""
nexus_client = get_client()
source = args['<from_src>']
destination = args['<to_repository>']
sys.stderr.write(
'Uploading {source} to {destination}\n'.format(**locals()))
upload_count = nexus_client.upload(source, destination)
_cmd_up_down_errors(upload_count, 'upload')
file = PLURAL('file', upload_count)
sys.stderr.write(
'Uploaded {upload_count} {file} to {destination}\n'.format(**locals()))
return 0
def main(argv=None):
arguments = docopt(__doc__, argv=argv)
if arguments.get('login'):
do_login()
NexusClient()
elif arguments.get('script'):
cmd_script(arguments)
elif arguments.get('repo'):
cmd_repo(arguments)
elif arguments.get('list') or arguments.get('ls'):
cmd_list(arguments)
elif arguments.get('upload') or arguments.get('up'):
cmd_upload(arguments)
else:
raise NotImplementedError
|
#! /usr/bin/python3
# Author: Maximilian Muth <mail@maxi-muth.de>
# https://github.com/mammuth/bing-wallpaper
# Version: 1.0
# License: GPL-2.0
# Description: Downloads the Bing picture of the Day and sets it as wallpaper (Linux / Windows).
import datetime
from urllib.request import urlopen, urlretrieve
from xml.dom import minidom
import os
import sys
def join_path(*args):
    # Takes a list of values, or multiple values, and returns a valid path.
if isinstance(args[0], list):
path_list = args[0]
else:
path_list = args
val = [str(v).strip(' ') for v in path_list]
return os.path.normpath('/'.join(val))
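# e.g. join_path(['a', ' b', 'c']) and join_path('a', ' b', 'c') both
# normalize to 'a/b/c' on POSIX (os.path.normpath applied, surrounding
# spaces stripped from each component)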
dir_path = os.path.dirname(os.path.realpath(__file__))
save_dir = join_path(dir_path, 'images')
if not os.path.exists(save_dir):
os.makedirs(save_dir)
def set_wallpaper(pic_path):
if sys.platform.startswith('win32'):
cmd = 'REG ADD \"HKCU\Control Panel\Desktop\" /v Wallpaper /t REG_SZ /d \"%s\" /f' %pic_path
os.system(cmd)
os.system('rundll32.exe user32.dll, UpdatePerUserSystemParameters')
print('Wallpaper is set.')
    # sys.platform is 'linux' on Python 3 (it was 'linux2' on Python 2)
    elif sys.platform.startswith('linux'):
os.system(''.join(['gsettings set org.gnome.desktop.background picture-uri file://', pic_path]))
print('Wallpaper is set.')
else:
print('OS not supported.')
return
return
def download_old_wallpapers(minus_days=False):
"""Uses download_wallpaper(set_wallpaper=False) to download the last 20 wallpapers.
If minus_days is given an integer a specific day in the past will be downloaded.
"""
if minus_days:
download_wallpaper(idx=minus_days, use_wallpaper=False)
return
for i in range(0, 20): # max 20
download_wallpaper(idx=i, use_wallpaper=False)
def download_wallpaper(idx=0, use_wallpaper=False):
# Getting the XML File
try:
usock = urlopen(''.join(['https://www.bing.com/HPImageArchive.aspx?format=xml&idx=',
str(idx), '&n=10&mkt=ru-RU'])) # ru-RU, because they always have 1920x1200 resolution
except Exception as e:
print('Error while downloading #', idx, e)
return
try:
xmldoc = minidom.parse(usock)
print(xmldoc)
# This is raised when there is trouble finding the image url.
except Exception as e:
print('Error while processing XML index #', idx, e)
return
# Parsing the XML File
print(len(xmldoc.getElementsByTagName('image')))
for image in xmldoc.getElementsByTagName('image'):
element = image.childNodes[3]
startdate = image.childNodes[0].firstChild.nodeValue
#print(element, fullstartdate)
url = 'https://www.bing.com' + element.firstChild.nodeValue
# Get Current Date as fileName for the downloaded Picture
now = datetime.datetime.now()
date = now - datetime.timedelta(days=int(idx))
#pic_path = join_path(save_dir, ''.join([date.strftime('bing_wp_%d-%m-%Y'), '.jpg']))
pic_path = join_path(save_dir, ''.join([startdate, '_', url.split("/")[-1] ]))
if os.path.isfile(pic_path):
print('Image of', date.strftime('%d-%m-%Y'), 'already downloaded.')
if use_wallpaper:
set_wallpaper(pic_path)
continue
print('Downloading: ', date.strftime('%d-%m-%Y'), 'index #', idx)
# Download and Save the Picture
# Get a higher resolution by replacing the file name
try:
urlretrieve(url.replace('_1366x768', '_1920x1200'), pic_path)
except Exception as e:
print('Error while downloading #', idx, e)
urlretrieve(url, pic_path)
# Set Wallpaper if wanted by user
if use_wallpaper:
set_wallpaper(pic_path)
if __name__ == "__main__":
download_wallpaper()
download_old_wallpapers(minus_days=False)
|
import abc
import tempfile
import os
import typing
from core.data_block import DataBlock
Symbol = typing.Any
class DataStream(abc.ABC):
"""abstract class to represent a Data Stream
The DataStream facilitates the block interface.
From the interface standpoint, the two functions which are useful are:
- get_block(block_size) -> returns a DataBlock of the given block_size from the stream
- write_block(block) -> writes the block of data to the stream
The DataStream can act as a stream object for both writing and reading blocks
The two more useful sub-classes of the abstract class are FileDataStream and ListDataStream.
(see their description for more details)
"""
@abc.abstractmethod
def seek(self, pos: int):
"""seek a particular position in the data stream"""
pass
@abc.abstractmethod
def get_symbol(self):
"""returns a symbol from the data stream, returns None if the stream is finished
This is an abstract method, and hence needs to be implemented by the subclasses
"""
pass
def get_block(self, block_size: int) -> DataBlock:
"""returns a block of data (of the given max size) from the stream
get_block function tries to return a block of size `block_size`.
In case the remaining stream is shorter, a smaller block will be returned
Args:
block_size (int): the (max) size of the block of data to be returned.
Returns:
DataBlock:
"""
# NOTE: we implement get_block as a loop over get_symbol function
        # this is not the most optimal way of implementing get_block (as reading a block of data at once might be faster)
# TODO: investigate faster ways of directly reading a block
data_list = []
for _ in range(block_size):
# get next symbol
s = self.get_symbol()
if s is None:
break
data_list.append(s)
# if data_list is empty, return None to signal the stream is over
if not data_list:
return None
return DataBlock(data_list)
@abc.abstractmethod
def write_symbol(self, s):
"""writes the given symbol to the stream
The symbol can be appropriately converted to a particular format before writing.
This is an abstract method and so, the subclass will have to implement it
Args:
s (Any): symbol to be written to the stream
"""
pass
def write_block(self, data_block: DataBlock):
"""write the input block to the stream
Args:
data_block (DataBlock): block to be written to the stream
"""
# NOTE: we implement write_block as a loop over write_symbol function
        # this is not the most optimal way of implementing write_block (as writing a block of data at once might be faster)
# TODO: investigate faster ways of directly writing a block
for s in data_block.data_list:
self.write_symbol(s)
def __enter__(self):
"""function executed while opening the context
See: https://realpython.com/python-with-statement/. More details in FileDataStream.__enter__ docstring
"""
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
"""Function executed which exiting the context
Note that the arguments exc_type, exc_value, exc_traceback are as required by python for a context
"""
pass
class ListDataStream(DataStream):
"""
ListDataStream is a wrapper around a list of symbols.
It is useful to:
- extract data from the list block by block
- write data to the list block by block
In practice, this class might be used mainly for testing
(as usually you would read data from a file.. see FileDataStream for that)
"""
def __init__(self, input_list: typing.List):
"""initialize with input_list and reset the stream
Args:
input_list (List): the list of symbols, around which the class is a wrapper
Usage:
with ListDataStream(input_list) as ds:
block = ds.get_block(block_size=5)
# do something with the block
"""
# assert whether the input_list is indeed a list
assert isinstance(input_list, list)
self.input_list = input_list
# set the position counter
self.current_ind = 0
def seek(self, pos: int):
"""set the current_ind to a particular pos"""
assert pos <= len(self.input_list)
self.current_ind = pos
def get_symbol(self) -> Symbol:
"""returns the next symbol from the self.input_list"""
# retrieve the next symbol
if self.current_ind >= len(self.input_list):
return None
s = self.input_list[self.current_ind]
# increment the current_ind counter
self.current_ind += 1
return s
def write_symbol(self, s: Symbol):
"""write a symbol to the stream"""
assert self.current_ind <= len(self.input_list)
# the case where we modify a symbol
if self.current_ind < len(self.input_list):
self.input_list[self.current_ind] = s
else:
# case where we append a symbol
self.input_list.append(s)
class FileDataStream(DataStream):
"""Abstract class to create a data stream from a File
The FileDataStream defines __exit__, __enter__ methods on top of DataStream.
These methods handle file obj opening/closing
    Subclasses (eg: TextDataStream) need to implement methods get_symbol, write_symbol
to get a functional object.
"""
def __init__(self, file_path: str, permissions="r"):
"""Initialize the FileDataStream object
Args:
file_path (str): path of the file to read from/write to
permissions (str, optional): Permissions to open the file obj. Use "r" to read, "w" to write to
            (other python file obj permissions can also be used). Defaults to "r".
"""
self.file_path = file_path
self.permissions = permissions
def __enter__(self):
"""open the file object context based on the permissions specified
NOTE: One way of cleanly managing resources in python is using the with statement
as shown in the example below. This ensures the resource is released when exiting the context.
        One way to allow using the with statement is to define __enter__ and __exit__ methods,
which allow for executing functions while entering or exiting the context.
Reference: https://realpython.com/python-with-statement/
Example:
with TextFileDataStream(path, "w") as fds:
# get a text block
block = fds.get_block(5)
"""
self.file_obj = open(self.file_path, self.permissions)
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
"""close the file object at the end of context
please take a look __enter__ docstring for more info.
Reference: https://realpython.com/python-with-statement/
"""
self.file_obj.close()
def seek(self, pos: int):
"""resets the file object to the beginning"""
self.file_obj.seek(pos)
class TextFileDataStream(FileDataStream):
"""FileDataStream to read/write text data"""
def get_symbol(self):
"""get the next character from the text file
        Since we read character data from the file by default, get_symbol does
        not need to perform any special conversions.
Returns:
(str, None): the next character, None if we reached the end of stream
"""
s = self.file_obj.read(1)
if not s:
return None
return s
def write_symbol(self, s):
"""write a character to the text file"""
self.file_obj.write(s)
class Uint8FileDataStream(FileDataStream):
"""reads Uint8 numbers written to a file
    FIXME: needs to be implemented
"""
pass
#################################
def test_list_data_stream():
"""simple testing function to check if list data stream is getting generated correctly"""
input_list = list(range(10))
with ListDataStream(input_list) as ds:
for i in range(3):
block = ds.get_block(block_size=3)
assert block.size == 3
block = ds.get_block(block_size=2)
assert block.size == 1
block = ds.get_block(block_size=2)
assert block is None
# try seeking and reading
ds.seek(7)
block = ds.get_block(block_size=5)
assert block.size == 3
assert block.data_list[0] == 7
# try seeking and writing
ds.seek(5)
ds.write_symbol(-1)
block = ds.get_block(block_size=5)
assert block.size == 5
assert block.data_list[0] == -1
def test_file_data_stream():
"""function to test file data stream"""
# create a temporary file
with tempfile.TemporaryDirectory() as tmpdirname:
temp_file_path = os.path.join(tmpdirname, "tmp_file.txt")
# write data to the file
data_gt = DataBlock(list("This-is_a_test_file"))
with TextFileDataStream(temp_file_path, "w") as fds:
fds.write_block(data_gt)
# try seeking to correct symbol at pos 4
fds.seek(4)
fds.write_symbol("_")
# read data from the file
with TextFileDataStream(temp_file_path, "r") as fds:
block = fds.get_block(block_size=4)
assert block.size == 4
# try seeking and reading
fds.seek(4)
block = fds.get_block(block_size=4)
assert block.data_list[0] == "_"
|
def reverse_string(a_string: str):
"""Take the input a_string and return it reversed (e.g. "hello" becomes
"olleh"."""
reversed_string = ""
for i in range(len(a_string)):
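        # ~i == -(i + 1), so a_string[~i] walks the string from the end:
        # last character first, then second-last, and so on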
reversed_string += a_string[~i]
return reversed_string
|
#
# This file contains the Python code from Program 16.10 of
# "Data Structures and Algorithms
# with Object-Oriented Design Patterns in Python"
# by Bruno R. Preiss.
#
# Copyright (c) 2003 by Bruno R. Preiss, P.Eng. All rights reserved.
#
# http://www.brpreiss.com/books/opus7/programs/pgm16_10.txt
#
class Graph(Container):
def breadthFirstTraversal(self, visitor, start):
assert isinstance(visitor, Visitor)
enqueued = Array(self._numberOfVertices)
for v in xrange(self._numberOfVertices):
enqueued[v] = False
queue = QueueAsLinkedList()
queue.enqueue(self[start])
enqueued[start] = True
while not queue.isEmpty and not visitor.isDone:
v = queue.dequeue()
visitor.visit(v)
for to in v.successors:
if not enqueued[to.number]:
queue.enqueue(to)
enqueued[to.number] = True
# ...
|
from os import path, system, mkdir
from shutil import rmtree
from jinja2 import Template
from . import config
from .log import log
print(__file__)
def get_template(name):
template_path = path.join(path.dirname(__file__), 'templates/', name + ".jinja2")
with open(template_path) as file_:
template = Template(file_.read())
return template
def reload_nginx():
log.debug("Reloading nginx...")
system('service nginx reload')
log.debug("Reload complete.")
def install_config(fname, data):
outpath = path.join(config.SITES_ENABLED_DIR, fname)
log.debug(f"saving config to {outpath}")
with open(outpath, "w") as outfile:
outfile.write(data)
def install_ssl(domain):
log.info(f"Install TLS Certification for {domain}")
system(f"certbot --nginx -d {domain}")
def init_root(root):
    log.debug(f"Creating document root: {root}")
    return mkdir_confirm(root)
def mkdir_confirm(root):
# p = path.join(config.WEB_ROOT, domain)
if not path.exists(root):
mkdir(root)
return True
    else:
        confirm = input(f"{root} already exists. Overwrite? (y/n): ")
        if confirm == "y":
            rmtree(root)
            mkdir(root)
            return True
        else:
            log.warn("Aborting.")
            return False
|
# prefix where servers are kept
prefix = '/var/lib/mcp/servers'
# whether or not to allow server creation
creation = True
# whether to put servers and scripts in a container
container = False
# directory where the sources are kept; ignored if creation is disabled
sources = '/var/lib/mcp/sources'
# temporary directory to build under; ignored if creation is disabled
tmp = '/tmp/mcp'
# directory where default configuration is kept; ignored if creation is disabled
config = '/var/lib/mcp/config'
# directory where the scripting libraries are kept; None to disable scripting libraries
scripting = '/var/lib/mcp/scripting'
# directory where the databases are kept
database = '/var/db/mcp'
# max size of server log files in kB before they are rotated; None to disable server log rotation
maxlogsize = 100
# range to automatically choose server ports
portrange = (4534, 4634)
# path to manager log; None to disable logging
log = '/var/log/mcp/manager.log'
# path to command output log; None to disable logging
cmdlog = '/var/log/mcp/command.log'
# path to HTTP log; None to disable logging
httpdlog = '/var/log/mcp/httpd.log'
# path to HTTP access log; None to disable logging
accesslog = '/var/log/mcp/access.log'
# template directory to use
import os.path
template = os.path.join(os.path.dirname(__file__), 'page', 'html')
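# directory where the static page resources are kept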
resource = os.path.join(os.path.dirname(__file__), 'page', 'res')
# address and port of the web interface
addr = ('', 8000)
# address and port of the sftp server
sftpaddr = ('', 2222)
# sftp host key
sftpkey = '/var/lib/mcp/sftp/ssh_host_rsa_key'
# path to TLS/SSL key and certificate files; None to disable TLS encryption
tlskey = None
tlscert = None
# user to drop privileges to if run as root
user = 'mcp'
# how long to wait between server polls
poll_interval = 0.5
|
#!/usr/bin/env python
#
# Electrum - lightweight UraniumX client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import copy
import datetime
import traceback
import time
from typing import TYPE_CHECKING, Callable, Optional, List, Union
from functools import partial
from decimal import Decimal
from PyQt5.QtCore import QSize, Qt
from PyQt5.QtGui import QTextCharFormat, QBrush, QFont, QPixmap
from PyQt5.QtWidgets import (QDialog, QLabel, QPushButton, QHBoxLayout, QVBoxLayout, QWidget, QGridLayout,
QTextEdit, QFrame, QAction, QToolButton, QMenu, QCheckBox)
import qrcode
from qrcode import exceptions
from electrum.simple_config import SimpleConfig
from electrum.util import quantize_feerate
from electrum.bitcoin import base_encode, NLOCKTIME_BLOCKHEIGHT_MAX
from electrum.i18n import _
from electrum.plugin import run_hook
from electrum import simple_config
from electrum.transaction import SerializationError, Transaction, PartialTransaction, PartialTxInput
from electrum.logging import get_logger
from .util import (MessageBoxMixin, read_QIcon, Buttons, icon_path,
MONOSPACE_FONT, ColorScheme, ButtonsLineEdit, text_dialog,
char_width_in_lineedit, TRANSACTION_FILE_EXTENSION_FILTER_SEPARATE,
TRANSACTION_FILE_EXTENSION_FILTER_ONLY_COMPLETE_TX,
TRANSACTION_FILE_EXTENSION_FILTER_ONLY_PARTIAL_TX,
BlockingWaitingDialog, getSaveFileName, ColorSchemeItem)
from .fee_slider import FeeSlider, FeeComboBox
from .confirm_tx_dialog import TxEditor
from .amountedit import FeerateEdit, BTCAmountEdit
from .locktimeedit import LockTimeEdit
if TYPE_CHECKING:
from .main_window import ElectrumWindow
class TxSizeLabel(QLabel):
def setAmount(self, byte_size):
self.setText(('x %s bytes =' % byte_size) if byte_size else '')
class TxFiatLabel(QLabel):
def setAmount(self, fiat_fee):
self.setText(('≈ %s' % fiat_fee) if fiat_fee else '')
class QTextEditWithDefaultSize(QTextEdit):
def sizeHint(self):
return QSize(0, 100)
_logger = get_logger(__name__)
dialogs = [] # Otherwise python randomly garbage collects the dialogs...
def show_transaction(tx: Transaction, *, parent: 'ElectrumWindow', desc=None, prompt_if_unsaved=False):
try:
d = TxDialog(tx, parent=parent, desc=desc, prompt_if_unsaved=prompt_if_unsaved)
except SerializationError as e:
_logger.exception('unable to deserialize the transaction')
parent.show_critical(_("Electrum was unable to deserialize the transaction:") + "\n" + str(e))
else:
d.show()
class BaseTxDialog(QDialog, MessageBoxMixin):
def __init__(self, *, parent: 'ElectrumWindow', desc, prompt_if_unsaved, finalized: bool, external_keypairs=None):
'''Transactions in the wallet will show their description.
Pass desc to give a description for txs not yet in the wallet.
'''
# We want to be a top-level window
QDialog.__init__(self, parent=None)
self.tx = None # type: Optional[Transaction]
self.external_keypairs = external_keypairs
self.finalized = finalized
self.main_window = parent
self.config = parent.config
self.wallet = parent.wallet
self.prompt_if_unsaved = prompt_if_unsaved
self.saved = False
self.desc = desc
self.setMinimumWidth(640)
self.resize(1200,600)
self.set_title()
self.psbt_only_widgets = [] # type: List[QWidget]
vbox = QVBoxLayout()
self.setLayout(vbox)
vbox.addWidget(QLabel(_("Transaction ID:")))
self.tx_hash_e = ButtonsLineEdit()
qr_show = lambda: parent.show_qrcode(str(self.tx_hash_e.text()), 'Transaction ID', parent=self)
qr_icon = "qrcode_white.png" if ColorScheme.dark_scheme else "qrcode.png"
self.tx_hash_e.addButton(qr_icon, qr_show, _("Show as QR code"))
self.tx_hash_e.setReadOnly(True)
vbox.addWidget(self.tx_hash_e)
self.add_tx_stats(vbox)
vbox.addSpacing(10)
self.inputs_header = QLabel()
vbox.addWidget(self.inputs_header)
self.inputs_textedit = QTextEditWithDefaultSize()
vbox.addWidget(self.inputs_textedit)
self.txo_color_recv = TxOutputColoring(
legend=_("Receiving Address"), color=ColorScheme.GREEN, tooltip=_("Wallet receive address"))
self.txo_color_change = TxOutputColoring(
legend=_("Change Address"), color=ColorScheme.YELLOW, tooltip=_("Wallet change address"))
self.txo_color_2fa = TxOutputColoring(
legend=_("TrustedCoin (2FA) batch fee"), color=ColorScheme.BLUE, tooltip=_("TrustedCoin (2FA) fee for the next batch of transactions"))
outheader_hbox = QHBoxLayout()
outheader_hbox.setContentsMargins(0, 0, 0, 0)
vbox.addLayout(outheader_hbox)
self.outputs_header = QLabel()
outheader_hbox.addWidget(self.outputs_header)
outheader_hbox.addStretch(2)
outheader_hbox.addWidget(self.txo_color_recv.legend_label)
outheader_hbox.addWidget(self.txo_color_change.legend_label)
outheader_hbox.addWidget(self.txo_color_2fa.legend_label)
self.outputs_textedit = QTextEditWithDefaultSize()
vbox.addWidget(self.outputs_textedit)
self.sign_button = b = QPushButton(_("Sign"))
b.clicked.connect(self.sign)
self.broadcast_button = b = QPushButton(_("Broadcast"))
b.clicked.connect(self.do_broadcast)
self.save_button = b = QPushButton(_("Save"))
b.clicked.connect(self.save)
self.cancel_button = b = QPushButton(_("Close"))
b.clicked.connect(self.close)
b.setDefault(True)
self.export_actions_menu = export_actions_menu = QMenu()
self.add_export_actions_to_menu(export_actions_menu)
export_actions_menu.addSeparator()
export_submenu = export_actions_menu.addMenu(_("For CoinJoin; strip privates"))
self.add_export_actions_to_menu(export_submenu, gettx=self._gettx_for_coinjoin)
self.psbt_only_widgets.append(export_submenu)
export_submenu = export_actions_menu.addMenu(_("For hardware device; include xpubs"))
self.add_export_actions_to_menu(export_submenu, gettx=self._gettx_for_hardware_device)
self.psbt_only_widgets.append(export_submenu)
self.export_actions_button = QToolButton()
self.export_actions_button.setText(_("Export"))
self.export_actions_button.setMenu(export_actions_menu)
self.export_actions_button.setPopupMode(QToolButton.InstantPopup)
self.finalize_button = QPushButton(_('Finalize'))
self.finalize_button.clicked.connect(self.on_finalize)
partial_tx_actions_menu = QMenu()
ptx_merge_sigs_action = QAction(_("Merge signatures from"), self)
ptx_merge_sigs_action.triggered.connect(self.merge_sigs)
partial_tx_actions_menu.addAction(ptx_merge_sigs_action)
self._ptx_join_txs_action = QAction(_("Join inputs/outputs"), self)
self._ptx_join_txs_action.triggered.connect(self.join_tx_with_another)
partial_tx_actions_menu.addAction(self._ptx_join_txs_action)
self.partial_tx_actions_button = QToolButton()
self.partial_tx_actions_button.setText(_("Combine"))
self.partial_tx_actions_button.setMenu(partial_tx_actions_menu)
self.partial_tx_actions_button.setPopupMode(QToolButton.InstantPopup)
self.psbt_only_widgets.append(self.partial_tx_actions_button)
# Action buttons
self.buttons = [self.partial_tx_actions_button, self.sign_button, self.broadcast_button, self.cancel_button]
# Transaction sharing buttons
self.sharing_buttons = [self.finalize_button, self.export_actions_button, self.save_button]
run_hook('transaction_dialog', self)
if not self.finalized:
self.create_fee_controls()
vbox.addWidget(self.feecontrol_fields)
self.hbox = hbox = QHBoxLayout()
hbox.addLayout(Buttons(*self.sharing_buttons))
hbox.addStretch(1)
hbox.addLayout(Buttons(*self.buttons))
vbox.addLayout(hbox)
self.set_buttons_visibility()
dialogs.append(self)
def set_buttons_visibility(self):
for b in [self.export_actions_button, self.save_button, self.sign_button, self.broadcast_button, self.partial_tx_actions_button]:
b.setVisible(self.finalized)
for b in [self.finalize_button]:
b.setVisible(not self.finalized)
def set_tx(self, tx: 'Transaction'):
# Take a copy; it might get updated in the main window by
# e.g. the FX plugin. If this happens during or after a long
# sign operation the signatures are lost.
self.tx = tx = copy.deepcopy(tx)
try:
self.tx.deserialize()
except BaseException as e:
raise SerializationError(e)
# If the wallet can populate the inputs with more info, do it now.
# As a result, e.g. we might learn an imported address tx is segwit,
# or that a beyond-gap-limit address is is_mine.
# note: this might fetch prev txs over the network.
BlockingWaitingDialog(
self,
_("Adding info to tx, from wallet and network..."),
lambda: tx.add_info_from_wallet(self.wallet),
)
def do_broadcast(self):
self.main_window.push_top_level_window(self)
self.main_window.save_pending_invoice()
try:
self.main_window.broadcast_transaction(self.tx)
finally:
self.main_window.pop_top_level_window(self)
self.saved = True
self.update()
def closeEvent(self, event):
if (self.prompt_if_unsaved and not self.saved
and not self.question(_('This transaction is not saved. Close anyway?'), title=_("Warning"))):
event.ignore()
else:
event.accept()
try:
dialogs.remove(self)
except ValueError:
pass # was not in list already
def reject(self):
# Override escape-key to close normally (and invoke closeEvent)
self.close()
def add_export_actions_to_menu(self, menu: QMenu, *, gettx: Callable[[], Transaction] = None) -> None:
if gettx is None:
gettx = lambda: None
action = QAction(_("Copy to clipboard"), self)
action.triggered.connect(lambda: self.copy_to_clipboard(tx=gettx()))
menu.addAction(action)
qr_icon = "qrcode_white.png" if ColorScheme.dark_scheme else "qrcode.png"
action = QAction(read_QIcon(qr_icon), _("Show as QR code"), self)
action.triggered.connect(lambda: self.show_qr(tx=gettx()))
menu.addAction(action)
action = QAction(_("Export to file"), self)
action.triggered.connect(lambda: self.export_to_file(tx=gettx()))
menu.addAction(action)
def _gettx_for_coinjoin(self) -> PartialTransaction:
if not isinstance(self.tx, PartialTransaction):
raise Exception("Can only export partial transactions for coinjoins.")
tx = copy.deepcopy(self.tx)
tx.prepare_for_export_for_coinjoin()
return tx
def _gettx_for_hardware_device(self) -> PartialTransaction:
if not isinstance(self.tx, PartialTransaction):
raise Exception("Can only export partial transactions for hardware device.")
tx = copy.deepcopy(self.tx)
tx.add_info_from_wallet(self.wallet, include_xpubs=True)
# log warning if PSBT_*_BIP32_DERIVATION fields cannot be filled with full path due to missing info
from electrum.keystore import Xpub
def is_ks_missing_info(ks):
return (isinstance(ks, Xpub) and (ks.get_root_fingerprint() is None
or ks.get_derivation_prefix() is None))
if any([is_ks_missing_info(ks) for ks in self.wallet.get_keystores()]):
_logger.warning('PSBT was requested to be filled with full bip32 paths but '
'some keystores lacked either the derivation prefix or the root fingerprint')
return tx
def copy_to_clipboard(self, *, tx: Transaction = None):
if tx is None:
tx = self.tx
self.main_window.do_copy(str(tx), title=_("Transaction"))
def show_qr(self, *, tx: Transaction = None):
if tx is None:
tx = self.tx
qr_data = tx.to_qr_data()
try:
self.main_window.show_qrcode(qr_data, 'Transaction', parent=self)
except qrcode.exceptions.DataOverflowError:
self.show_error(_('Failed to display QR code.') + '\n' +
_('Transaction is too large in size.'))
except Exception as e:
self.show_error(_('Failed to display QR code.') + '\n' + repr(e))
def sign(self):
def sign_done(success):
if self.tx.is_complete():
self.prompt_if_unsaved = True
self.saved = False
self.update()
self.main_window.pop_top_level_window(self)
self.sign_button.setDisabled(True)
self.main_window.push_top_level_window(self)
self.main_window.sign_tx(self.tx, callback=sign_done, external_keypairs=self.external_keypairs)
def save(self):
self.main_window.push_top_level_window(self)
if self.main_window.save_transaction_into_wallet(self.tx):
self.save_button.setDisabled(True)
self.saved = True
self.main_window.pop_top_level_window(self)
def export_to_file(self, *, tx: Transaction = None):
if tx is None:
tx = self.tx
if isinstance(tx, PartialTransaction):
tx.finalize_psbt()
txid = tx.txid()
suffix = txid[0:8] if txid is not None else time.strftime('%Y%m%d-%H%M')
if tx.is_complete():
extension = 'txn'
default_filter = TRANSACTION_FILE_EXTENSION_FILTER_ONLY_COMPLETE_TX
else:
extension = 'psbt'
default_filter = TRANSACTION_FILE_EXTENSION_FILTER_ONLY_PARTIAL_TX
name = f'{self.wallet.basename()}-{suffix}.{extension}'
fileName = getSaveFileName(
parent=self,
title=_("Select where to save your transaction"),
filename=name,
filter=TRANSACTION_FILE_EXTENSION_FILTER_SEPARATE,
default_extension=extension,
default_filter=default_filter,
config=self.config,
)
if not fileName:
return
if tx.is_complete(): # network tx hex
with open(fileName, "w+") as f:
network_tx_hex = tx.serialize_to_network()
f.write(network_tx_hex + '\n')
else: # if partial: PSBT bytes
assert isinstance(tx, PartialTransaction)
with open(fileName, "wb+") as f:
f.write(tx.serialize_as_bytes())
self.show_message(_("Transaction exported successfully"))
self.saved = True
def merge_sigs(self):
if not isinstance(self.tx, PartialTransaction):
return
text = text_dialog(
parent=self,
title=_('Input raw transaction'),
header_layout=_("Transaction to merge signatures from") + ":",
ok_label=_("Load transaction"),
config=self.config,
)
if not text:
return
tx = self.main_window.tx_from_text(text)
if not tx:
return
try:
self.tx.combine_with_other_psbt(tx)
except Exception as e:
self.show_error(_("Error combining partial transactions") + ":\n" + repr(e))
return
self.update()
def join_tx_with_another(self):
if not isinstance(self.tx, PartialTransaction):
return
text = text_dialog(
parent=self,
title=_('Input raw transaction'),
header_layout=_("Transaction to join with") + " (" + _("add inputs and outputs") + "):",
ok_label=_("Load transaction"),
config=self.config,
)
if not text:
return
tx = self.main_window.tx_from_text(text)
if not tx:
return
try:
self.tx.join_with_other_psbt(tx)
except Exception as e:
self.show_error(_("Error joining partial transactions") + ":\n" + repr(e))
return
self.update()
def update(self):
if not self.finalized:
self.update_fee_fields()
self.finalize_button.setEnabled(self.can_finalize())
if self.tx is None:
return
self.update_io()
desc = self.desc
base_unit = self.main_window.base_unit()
format_amount = self.main_window.format_amount
format_fiat_and_units = self.main_window.format_fiat_and_units
tx_details = self.wallet.get_tx_info(self.tx)
tx_mined_status = tx_details.tx_mined_status
exp_n = tx_details.mempool_depth_bytes
amount, fee = tx_details.amount, tx_details.fee
size = self.tx.estimated_size()
txid = self.tx.txid()
fx = self.main_window.fx
tx_item_fiat = None
if (self.finalized # ensures we don't use historical rates for tx being constructed *now*
and txid is not None and fx.is_enabled() and amount is not None):
tx_item_fiat = self.wallet.get_tx_item_fiat(
tx_hash=txid, amount_sat=abs(amount), fx=fx, tx_fee=fee)
lnworker_history = self.wallet.lnworker.get_onchain_history() if self.wallet.lnworker else {}
if txid in lnworker_history:
item = lnworker_history[txid]
ln_amount = item['amount_msat'] / 1000
if amount is None:
tx_mined_status = self.wallet.lnworker.lnwatcher.get_tx_height(txid)
else:
ln_amount = None
self.broadcast_button.setEnabled(tx_details.can_broadcast)
can_sign = not self.tx.is_complete() and \
(self.wallet.can_sign(self.tx) or bool(self.external_keypairs))
self.sign_button.setEnabled(can_sign)
if self.finalized and tx_details.txid:
self.tx_hash_e.setText(tx_details.txid)
else:
# note: when not finalized, RBF and locktime changes do not trigger
# a make_tx, so the txid is unreliable, hence:
self.tx_hash_e.setText(_('Unknown'))
if not desc:
self.tx_desc.hide()
else:
self.tx_desc.setText(_("Description") + ': ' + desc)
self.tx_desc.show()
self.status_label.setText(_('Status:') + ' ' + tx_details.status)
if tx_mined_status.timestamp:
time_str = datetime.datetime.fromtimestamp(tx_mined_status.timestamp).isoformat(' ')[:-3]
self.date_label.setText(_("Date: {}").format(time_str))
self.date_label.show()
elif exp_n is not None:
text = '%.2f MB'%(exp_n/1000000)
self.date_label.setText(_('Position in mempool: {} from tip').format(text))
self.date_label.show()
else:
self.date_label.hide()
if self.tx.locktime <= NLOCKTIME_BLOCKHEIGHT_MAX:
locktime_final_str = f"LockTime: {self.tx.locktime} (height)"
else:
locktime_final_str = f"LockTime: {self.tx.locktime} ({datetime.datetime.fromtimestamp(self.tx.locktime)})"
self.locktime_final_label.setText(locktime_final_str)
if self.locktime_e.get_locktime() is None:
self.locktime_e.set_locktime(self.tx.locktime)
self.rbf_label.setText(_('Replace by fee') + f": {not self.tx.is_final()}")
if tx_mined_status.header_hash:
self.block_hash_label.setText(_("Included in block: {}")
.format(tx_mined_status.header_hash))
self.block_height_label.setText(_("At block height: {}")
.format(tx_mined_status.height))
else:
self.block_hash_label.hide()
self.block_height_label.hide()
if amount is None and ln_amount is None:
amount_str = _("Transaction unrelated to your wallet")
elif amount is None:
amount_str = ''
else:
if amount > 0:
amount_str = _("Amount received:") + ' %s'% format_amount(amount) + ' ' + base_unit
else:
amount_str = _("Amount sent:") + ' %s' % format_amount(-amount) + ' ' + base_unit
if fx.is_enabled():
if tx_item_fiat:
amount_str += ' (%s)' % tx_item_fiat['fiat_value'].to_ui_string()
else:
amount_str += ' (%s)' % format_fiat_and_units(abs(amount))
if amount_str:
self.amount_label.setText(amount_str)
else:
self.amount_label.hide()
size_str = _("Size:") + ' %d bytes'% size
if fee is None:
fee_str = _("Fee") + ': ' + _("unknown")
else:
fee_str = _("Fee") + f': {format_amount(fee)} {base_unit}'
if fx.is_enabled():
if tx_item_fiat:
fiat_fee_str = tx_item_fiat['fiat_fee'].to_ui_string()
else:
fiat_fee_str = format_fiat_and_units(fee)
fee_str += f' ({fiat_fee_str})'
if fee is not None:
fee_rate = Decimal(fee) / size # sat/byte
fee_str += ' ( %s ) ' % self.main_window.format_fee_rate(fee_rate * 1000)
if isinstance(self.tx, PartialTransaction):
if isinstance(self, PreviewTxDialog):
invoice_amt = self.tx.output_value() if self.output_value == '!' else self.output_value
else:
invoice_amt = amount
fee_warning_tuple = self.wallet.get_tx_fee_warning(
invoice_amt=invoice_amt, tx_size=size, fee=fee)
if fee_warning_tuple:
allow_send, long_warning, short_warning = fee_warning_tuple
fee_str += " - <font color={color}>{header}: {body}</font>".format(
header=_('Warning'),
body=short_warning,
color=ColorScheme.RED.as_color().name(),
)
if isinstance(self.tx, PartialTransaction):
risk_of_burning_coins = (can_sign and fee is not None
and self.wallet.get_warning_for_risk_of_burning_coins_as_fees(self.tx))
self.fee_warning_icon.setToolTip(str(risk_of_burning_coins))
self.fee_warning_icon.setVisible(bool(risk_of_burning_coins))
self.fee_label.setText(fee_str)
self.size_label.setText(size_str)
if ln_amount is None or ln_amount == 0:
ln_amount_str = ''
elif ln_amount > 0:
ln_amount_str = _('Amount received in channels') + ': ' + format_amount(ln_amount) + ' ' + base_unit
else:
assert ln_amount < 0, f"{ln_amount!r}"
ln_amount_str = _('Amount withdrawn from channels') + ': ' + format_amount(-ln_amount) + ' ' + base_unit
if ln_amount_str:
self.ln_amount_label.setText(ln_amount_str)
else:
self.ln_amount_label.hide()
show_psbt_only_widgets = self.finalized and isinstance(self.tx, PartialTransaction)
for widget in self.psbt_only_widgets:
if isinstance(widget, QMenu):
widget.menuAction().setVisible(show_psbt_only_widgets)
else:
widget.setVisible(show_psbt_only_widgets)
if tx_details.is_lightning_funding_tx:
self._ptx_join_txs_action.setEnabled(False) # would change txid
self.save_button.setEnabled(tx_details.can_save_as_local)
if tx_details.can_save_as_local:
self.save_button.setToolTip(_("Save transaction offline"))
else:
self.save_button.setToolTip(_("Transaction already saved or not yet signed."))
run_hook('transaction_dialog_update', self)
def update_io(self):
inputs_header_text = _("Inputs") + ' (%d)'%len(self.tx.inputs())
if not self.finalized:
selected_coins = self.main_window.get_manually_selected_coins()
if selected_coins is not None:
inputs_header_text += f" - " + _("Coin selection active ({} UTXOs selected)").format(len(selected_coins))
self.inputs_header.setText(inputs_header_text)
ext = QTextCharFormat()
tf_used_recv, tf_used_change, tf_used_2fa = False, False, False
def text_format(addr):
nonlocal tf_used_recv, tf_used_change, tf_used_2fa
if self.wallet.is_mine(addr):
if self.wallet.is_change(addr):
tf_used_change = True
return self.txo_color_change.text_char_format
else:
tf_used_recv = True
return self.txo_color_recv.text_char_format
elif self.wallet.is_billing_address(addr):
tf_used_2fa = True
return self.txo_color_2fa.text_char_format
return ext
def format_amount(amt):
return self.main_window.format_amount(amt, whitespaces=True)
i_text = self.inputs_textedit
i_text.clear()
i_text.setFont(QFont(MONOSPACE_FONT))
i_text.setReadOnly(True)
cursor = i_text.textCursor()
for txin in self.tx.inputs():
if txin.is_coinbase_input():
cursor.insertText('coinbase')
else:
prevout_hash = txin.prevout.txid.hex()
prevout_n = txin.prevout.out_idx
cursor.insertText(prevout_hash + ":%-4d " % prevout_n, ext)
addr = self.wallet.get_txin_address(txin)
if addr is None:
addr = ''
cursor.insertText(addr, text_format(addr))
txin_value = self.wallet.get_txin_value(txin)
if txin_value is not None:
cursor.insertText(format_amount(txin_value), ext)
cursor.insertBlock()
self.outputs_header.setText(_("Outputs") + ' (%d)'%len(self.tx.outputs()))
o_text = self.outputs_textedit
o_text.clear()
o_text.setFont(QFont(MONOSPACE_FONT))
o_text.setReadOnly(True)
cursor = o_text.textCursor()
for o in self.tx.outputs():
addr, v = o.get_ui_address_str(), o.value
cursor.insertText(addr, text_format(addr))
if v is not None:
cursor.insertText('\t', ext)
cursor.insertText(format_amount(v), ext)
cursor.insertBlock()
self.txo_color_recv.legend_label.setVisible(tf_used_recv)
self.txo_color_change.legend_label.setVisible(tf_used_change)
self.txo_color_2fa.legend_label.setVisible(tf_used_2fa)
def add_tx_stats(self, vbox):
hbox_stats = QHBoxLayout()
# left column
vbox_left = QVBoxLayout()
self.tx_desc = TxDetailLabel(word_wrap=True)
vbox_left.addWidget(self.tx_desc)
self.status_label = TxDetailLabel()
vbox_left.addWidget(self.status_label)
self.date_label = TxDetailLabel()
vbox_left.addWidget(self.date_label)
self.amount_label = TxDetailLabel()
vbox_left.addWidget(self.amount_label)
self.ln_amount_label = TxDetailLabel()
vbox_left.addWidget(self.ln_amount_label)
fee_hbox = QHBoxLayout()
self.fee_label = TxDetailLabel()
fee_hbox.addWidget(self.fee_label)
self.fee_warning_icon = QLabel()
pixmap = QPixmap(icon_path("warning"))
pixmap_size = round(2 * char_width_in_lineedit())
pixmap = pixmap.scaled(pixmap_size, pixmap_size, Qt.KeepAspectRatio, Qt.SmoothTransformation)
self.fee_warning_icon.setPixmap(pixmap)
self.fee_warning_icon.setVisible(False)
fee_hbox.addWidget(self.fee_warning_icon)
fee_hbox.addStretch(1)
vbox_left.addLayout(fee_hbox)
vbox_left.addStretch(1)
hbox_stats.addLayout(vbox_left, 50)
# vertical line separator
line_separator = QFrame()
line_separator.setFrameShape(QFrame.VLine)
line_separator.setFrameShadow(QFrame.Sunken)
line_separator.setLineWidth(1)
hbox_stats.addWidget(line_separator)
# right column
vbox_right = QVBoxLayout()
self.size_label = TxDetailLabel()
vbox_right.addWidget(self.size_label)
self.rbf_label = TxDetailLabel()
vbox_right.addWidget(self.rbf_label)
self.rbf_cb = QCheckBox(_('Replace by fee'))
self.rbf_cb.setChecked(bool(self.config.get('use_rbf', True)))
vbox_right.addWidget(self.rbf_cb)
self.locktime_final_label = TxDetailLabel()
vbox_right.addWidget(self.locktime_final_label)
locktime_setter_hbox = QHBoxLayout()
locktime_setter_hbox.setContentsMargins(0, 0, 0, 0)
locktime_setter_hbox.setSpacing(0)
locktime_setter_label = TxDetailLabel()
locktime_setter_label.setText("LockTime: ")
self.locktime_e = LockTimeEdit(self)
locktime_setter_hbox.addWidget(locktime_setter_label)
locktime_setter_hbox.addWidget(self.locktime_e)
locktime_setter_hbox.addStretch(1)
self.locktime_setter_widget = QWidget()
self.locktime_setter_widget.setLayout(locktime_setter_hbox)
vbox_right.addWidget(self.locktime_setter_widget)
self.block_height_label = TxDetailLabel()
vbox_right.addWidget(self.block_height_label)
vbox_right.addStretch(1)
hbox_stats.addLayout(vbox_right, 50)
vbox.addLayout(hbox_stats)
# below columns
self.block_hash_label = TxDetailLabel(word_wrap=True)
vbox.addWidget(self.block_hash_label)
# set visibility after parenting can be determined by Qt
self.rbf_label.setVisible(self.finalized)
self.rbf_cb.setVisible(not self.finalized)
self.locktime_final_label.setVisible(self.finalized)
self.locktime_setter_widget.setVisible(not self.finalized)
def set_title(self):
self.setWindowTitle(_("Create transaction") if not self.finalized else _("Transaction"))
def can_finalize(self) -> bool:
return False
def on_finalize(self):
pass # overridden in subclass
def update_fee_fields(self):
pass # overridden in subclass
class TxDetailLabel(QLabel):
def __init__(self, *, word_wrap=None):
super().__init__()
self.setTextInteractionFlags(Qt.TextSelectableByMouse)
if word_wrap is not None:
self.setWordWrap(word_wrap)
class TxOutputColoring:
# used for both inputs and outputs
def __init__(
self,
*,
legend: str,
color: ColorSchemeItem,
tooltip: str,
):
self.color = color.as_color(background=True)
self.legend_label = QLabel("<font color={color}>{box_char}</font> = {label}".format(
color=self.color.name(),
box_char="█",
label=legend,
))
font = self.legend_label.font()
font.setPointSize(font.pointSize() - 1)
self.legend_label.setFont(font)
self.legend_label.setVisible(False)
self.text_char_format = QTextCharFormat()
self.text_char_format.setBackground(QBrush(self.color))
self.text_char_format.setToolTip(tooltip)
class TxDialog(BaseTxDialog):
def __init__(self, tx: Transaction, *, parent: 'ElectrumWindow', desc, prompt_if_unsaved):
BaseTxDialog.__init__(self, parent=parent, desc=desc, prompt_if_unsaved=prompt_if_unsaved, finalized=True)
self.set_tx(tx)
self.update()
class PreviewTxDialog(BaseTxDialog, TxEditor):
def __init__(
self,
*,
make_tx,
external_keypairs,
window: 'ElectrumWindow',
output_value: Union[int, str],
):
TxEditor.__init__(
self,
window=window,
make_tx=make_tx,
is_sweep=bool(external_keypairs),
output_value=output_value,
)
BaseTxDialog.__init__(self, parent=window, desc='', prompt_if_unsaved=False,
finalized=False, external_keypairs=external_keypairs)
BlockingWaitingDialog(window, _("Preparing transaction..."),
lambda: self.update_tx(fallback_to_zero_fee=True))
self.update()
def create_fee_controls(self):
self.size_e = TxSizeLabel()
self.size_e.setAlignment(Qt.AlignCenter)
self.size_e.setAmount(0)
self.size_e.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
self.fiat_fee_label = TxFiatLabel()
self.fiat_fee_label.setAlignment(Qt.AlignCenter)
self.fiat_fee_label.setAmount(0)
self.fiat_fee_label.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
self.feerate_e = FeerateEdit(lambda: 0)
self.feerate_e.setAmount(self.config.fee_per_byte())
self.feerate_e.textEdited.connect(partial(self.on_fee_or_feerate, self.feerate_e, False))
self.feerate_e.editingFinished.connect(partial(self.on_fee_or_feerate, self.feerate_e, True))
self.fee_e = BTCAmountEdit(self.main_window.get_decimal_point)
self.fee_e.textEdited.connect(partial(self.on_fee_or_feerate, self.fee_e, False))
self.fee_e.editingFinished.connect(partial(self.on_fee_or_feerate, self.fee_e, True))
self.fee_e.textChanged.connect(self.entry_changed)
self.feerate_e.textChanged.connect(self.entry_changed)
self.fee_slider = FeeSlider(self, self.config, self.fee_slider_callback)
self.fee_combo = FeeComboBox(self.fee_slider)
self.fee_slider.setFixedWidth(self.fee_e.width())
def feerounding_onclick():
text = (self.feerounding_text + '\n\n' +
_('To somewhat protect your privacy, Electrum tries to create change with similar precision to other outputs.') + ' ' +
_('At most 100 satoshis might be lost due to this rounding.') + ' ' +
_("You can disable this setting in '{}'.").format(_('Preferences')) + '\n' +
_('Also, dust is not kept as change, but added to the fee.') + '\n' +
_('Also, when batching RBF transactions, BIP 125 imposes a lower bound on the fee.'))
self.show_message(title=_('Fee rounding'), msg=text)
self.feerounding_icon = QToolButton()
self.feerounding_icon.setIcon(read_QIcon('info.png'))
self.feerounding_icon.setAutoRaise(True)
self.feerounding_icon.clicked.connect(feerounding_onclick)
self.feerounding_icon.setVisible(False)
self.feecontrol_fields = QWidget()
hbox = QHBoxLayout(self.feecontrol_fields)
hbox.setContentsMargins(0, 0, 0, 0)
grid = QGridLayout()
grid.addWidget(QLabel(_("Target fee:")), 0, 0)
grid.addWidget(self.feerate_e, 0, 1)
grid.addWidget(self.size_e, 0, 2)
grid.addWidget(self.fee_e, 0, 3)
grid.addWidget(self.feerounding_icon, 0, 4)
grid.addWidget(self.fiat_fee_label, 0, 5)
grid.addWidget(self.fee_slider, 1, 1)
grid.addWidget(self.fee_combo, 1, 2)
hbox.addLayout(grid)
hbox.addStretch(1)
def fee_slider_callback(self, dyn, pos, fee_rate):
super().fee_slider_callback(dyn, pos, fee_rate)
self.fee_slider.activate()
if fee_rate:
fee_rate = Decimal(fee_rate)
self.feerate_e.setAmount(quantize_feerate(fee_rate / 1000))
else:
self.feerate_e.setAmount(None)
self.fee_e.setModified(False)
def on_fee_or_feerate(self, edit_changed, editing_finished):
edit_other = self.feerate_e if edit_changed == self.fee_e else self.fee_e
if editing_finished:
if edit_changed.get_amount() is None:
# This is so that when the user blanks the fee and moves on,
# we go back to auto-calculate mode and put a fee back.
edit_changed.setModified(False)
else:
# edit_changed was edited just now, so make sure we will
# freeze the correct fee setting (this)
edit_other.setModified(False)
self.fee_slider.deactivate()
self.update()
def is_send_fee_frozen(self):
return self.fee_e.isVisible() and self.fee_e.isModified() \
and (self.fee_e.text() or self.fee_e.hasFocus())
def is_send_feerate_frozen(self):
return self.feerate_e.isVisible() and self.feerate_e.isModified() \
and (self.feerate_e.text() or self.feerate_e.hasFocus())
def set_feerounding_text(self, num_satoshis_added):
self.feerounding_text = (_('Additional {} satoshis are going to be added.')
.format(num_satoshis_added))
def get_fee_estimator(self):
if self.is_send_fee_frozen() and self.fee_e.get_amount() is not None:
fee_estimator = self.fee_e.get_amount()
elif self.is_send_feerate_frozen() and self.feerate_e.get_amount() is not None:
amount = self.feerate_e.get_amount() # sat/byte feerate
amount = 0 if amount is None else amount * 1000 # sat/kilobyte feerate
fee_estimator = partial(
SimpleConfig.estimate_fee_for_feerate, amount)
else:
fee_estimator = None
return fee_estimator
def entry_changed(self):
# blue color denotes auto-filled values
text = ""
fee_color = ColorScheme.DEFAULT
feerate_color = ColorScheme.DEFAULT
if self.not_enough_funds:
fee_color = ColorScheme.RED
feerate_color = ColorScheme.RED
elif self.fee_e.isModified():
feerate_color = ColorScheme.BLUE
elif self.feerate_e.isModified():
fee_color = ColorScheme.BLUE
else:
fee_color = ColorScheme.BLUE
feerate_color = ColorScheme.BLUE
self.fee_e.setStyleSheet(fee_color.as_stylesheet())
self.feerate_e.setStyleSheet(feerate_color.as_stylesheet())
#
self.needs_update = True
def update_fee_fields(self):
freeze_fee = self.is_send_fee_frozen()
freeze_feerate = self.is_send_feerate_frozen()
tx = self.tx
if self.no_dynfee_estimates and tx:
size = tx.estimated_size()
self.size_e.setAmount(size)
if self.not_enough_funds or self.no_dynfee_estimates:
if not freeze_fee:
self.fee_e.setAmount(None)
if not freeze_feerate:
self.feerate_e.setAmount(None)
self.feerounding_icon.setVisible(False)
return
assert tx is not None
size = tx.estimated_size()
fee = tx.get_fee()
self.size_e.setAmount(size)
fiat_fee = self.main_window.format_fiat_and_units(fee)
self.fiat_fee_label.setAmount(fiat_fee)
# Displayed fee/fee_rate values are set according to user input.
# Due to rounding or dropping dust in CoinChooser,
# actual fees often differ somewhat.
if freeze_feerate or self.fee_slider.is_active():
displayed_feerate = self.feerate_e.get_amount()
if displayed_feerate is not None:
displayed_feerate = quantize_feerate(displayed_feerate)
elif self.fee_slider.is_active():
# fallback to actual fee
displayed_feerate = quantize_feerate(fee / size) if fee is not None else None
self.feerate_e.setAmount(displayed_feerate)
displayed_fee = round(displayed_feerate * size) if displayed_feerate is not None else None
self.fee_e.setAmount(displayed_fee)
else:
if freeze_fee:
displayed_fee = self.fee_e.get_amount()
else:
# fallback to actual fee if nothing is frozen
displayed_fee = fee
self.fee_e.setAmount(displayed_fee)
displayed_fee = displayed_fee if displayed_fee else 0
displayed_feerate = quantize_feerate(displayed_fee / size) if displayed_fee is not None else None
self.feerate_e.setAmount(displayed_feerate)
# show/hide fee rounding icon
feerounding = (fee - displayed_fee) if (fee and displayed_fee is not None) else 0
self.set_feerounding_text(int(feerounding))
self.feerounding_icon.setToolTip(self.feerounding_text)
self.feerounding_icon.setVisible(abs(feerounding) >= 1)
def can_finalize(self):
return (self.tx is not None
and not self.not_enough_funds)
def on_finalize(self):
if not self.can_finalize():
return
assert self.tx
self.finalized = True
self.tx.set_rbf(self.rbf_cb.isChecked())
locktime = self.locktime_e.get_locktime()
if locktime is not None:
self.tx.locktime = locktime
for widget in [self.fee_slider, self.fee_combo, self.feecontrol_fields, self.rbf_cb,
self.locktime_setter_widget, self.locktime_e]:
widget.setEnabled(False)
widget.setVisible(False)
for widget in [self.rbf_label, self.locktime_final_label]:
widget.setVisible(True)
self.set_title()
self.set_buttons_visibility()
self.update()
|
from collections import defaultdict
import json
import mmap
import operator
import os
import socket
from struct import Struct
MMDB_META_DATA_START = '\xAB\xCD\xEFMaxMind.com'
MMDB_META_DATA_BLOCK_MAX_SIZE = 131072
MMDB_DATA_SECTION_SEPARATOR = 16
unpack_int = Struct('>I').unpack
unpack_long = Struct('>Q').unpack
unpack_short = Struct('>H').unpack
class GeoIP(object):
"""Container for a GEOIP address"""
__slots__ = ('ip', 'data')
def __init__(self, ip, data):
self.ip = ip
self.data = data
@property
def country(self):
if 'country' in self.data:
return self.data['country']['iso_code']
@property
def country_en(self):
if 'country' in self.data:
return self.data['country']['names']['en']
@property
def continent(self):
if 'continent' in self.data:
return self.data['continent']['code']
@property
def state(self):
return ', '.join([x['iso_code'] for x in self.data.get('subdivisions') or ()
if 'iso_code' in x])
@property
def postal(self):
if 'postal' in self.data:
return self.data['postal'].get('code')
@property
def city(self):
if 'city' in self.data:
return self.data['city']['names']['en']
@property
def timezone(self):
if 'location' in self.data:
return self.data['location'].get('time_zone')
@property
def location(self):
if 'location' in self.data:
lat = self.data['location'].get('latitude')
long = self.data['location'].get('longitude')
if lat is not None and long is not None:
return lat, long
def to_dict(self):
return {
'ip': self.ip,
'country': self.country,
'continent': self.continent,
'state': self.state,
'city': self.city,
'postal': self.postal,
'timezone': self.timezone,
'location': self.location,
}
def pack_ip(ip):
for fmly in socket.AF_INET, socket.AF_INET6:
try:
return socket.inet_pton(fmly, ip)
except socket.error:
continue
raise ValueError('Malformed IP address')
class MMDB(object):
"""Context manager to query MaxMind database"""
def __init__(self, filename, buffer, meta_data):
self.closed = False
self.filename = filename
self.is_ipv6 = meta_data['ip_version'] == 6
self.nodes = meta_data['node_count']
self.record_size = meta_data['record_size']
self.node_size = self.record_size / 4
self.db_size = self.nodes * self.node_size
self.buffer = buffer
self.meta_data = meta_data
self.reader = MMDBParser(buffer, self.db_size)
self.ipv4_start = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
self.close()
def close(self):
self.closed = True
self.buffer.close()
def lookup(self, ip_addr):
if self.closed:
raise RuntimeError('Database is closed.')
packed_addr = pack_ip(ip_addr)
bits = len(packed_addr) * 8
node = self.find_start_node(bits)
seen = set()
for i in xrange(bits):
if node >= self.nodes:
break
bit = (ord(packed_addr[i >> 3]) >> (7 - (i % 8))) & 1
node = self.parse_node(node, bit)
if node in seen:
raise LookupError('Circle in tree detected')
seen.add(node)
if node > self.nodes:
offset = node - self.nodes + self.db_size
return GeoIP(ip_addr, self.reader.read(offset)[0])
def find_start_node(self, bits):
if bits == 128 or not self.is_ipv6:
return 0
if self.ipv4_start is not None:
return self.ipv4_start
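        # in an IPv6 tree the IPv4 subtree starts at the node reached by
        # following 96 zero bits from the root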
node = 0
for netmask in xrange(96):
if node >= self.nodes:
break
            node = self.parse_node(node, 0)
self.ipv4_start = node
return node
def parse_node(self, node, index):
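        # Each node packs two records (left/right child). 24- and 32-bit
        # records are contiguous bytes; 28-bit records share a middle byte
        # whose high nibble extends the left record and low nibble the right.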
offset = node * self.node_size
if self.record_size == 24:
offset += index * 3
bytes = '\x00' + self.buffer[offset:offset + 3]
elif self.record_size == 28:
b = ord(self.buffer[offset + 3:offset + 4])
if index:
b &= 0x0F
else:
b = (0xF0 & b) >> 4
offset += index * 4
bytes = chr(b) + self.buffer[offset:offset + 3]
elif self.record_size == 32:
offset += index * 4
bytes = self.buffer[offset:offset + 4]
else:
raise LookupError('Invalid record size')
return unpack_int(bytes)[0]
def make_struct_parser(code):
"""Helper to create struct unpack methods."""
struct = Struct('>' + code)
def unpack_func(self, size, offset):
new_offset = offset + struct.size
bytes = self.buffer[offset:new_offset].rjust(struct.size, '\x00')
value = struct.unpack(bytes)[0]
return value, new_offset
return unpack_func
class MMDBParser(object):
"""
Parser for MaxMind MMDB binary format.
Reference: https://maxmind.github.io/MaxMind-DB/
"""
def __init__(self, buffer, offset=0):
self.buffer = buffer
self.offset = offset
def parse_ptr(self, size, offset):
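        # Pointer control: bits 3-4 of the size field select a 1-4 byte
        # pointer; 1-3 byte pointers borrow bits 0-2 as extra high bits, and
        # each width adds a fixed bias (0, 2048, 526336, 0) per the MMDB spec.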
ptr_size = ((size >> 3) & 0x3) + 1
bytes = self.buffer[offset:offset + ptr_size]
if ptr_size != 4:
bytes = chr(size & 0x7) + bytes
ptr = (
unpack_int(bytes.rjust(4, '\x00'))[0] +
self.offset +
MMDB_DATA_SECTION_SEPARATOR +
(0, 2048, 526336, 0)[ptr_size - 1]
)
return self.read(ptr)[0], offset + ptr_size
def parse_str(self, size, offset):
bytes = self.buffer[offset:offset + size]
return bytes.decode('utf-8', 'replace'), offset + size
parse_double = make_struct_parser('d')
def parse_bytes(self, size, offset):
return self.buffer[offset:offset + size], offset + size
def parse_uint(self, size, offset):
bytes = self.buffer[offset:offset + size]
return unpack_long(bytes.rjust(8, '\x00'))[0], offset + size
def parse_dict(self, size, offset):
container = {}
for _ in xrange(size):
key, offset = self.read(offset)
value, offset = self.read(offset)
container[key] = value
return container, offset
parse_int32 = make_struct_parser('i')
def parse_list(self, size, offset):
rv = [None] * size
for idx in xrange(size):
rv[idx], offset = self.read(offset)
return rv, offset
def parse_error(self, size, offset):
raise AssertionError('Read invalid type code')
def parse_bool(self, size, offset):
return size != 0, offset
parse_float = make_struct_parser('f')
callbacks = (
parse_error,
parse_ptr,
parse_str,
parse_double,
parse_bytes,
parse_uint,
parse_uint,
parse_dict,
parse_int32,
parse_uint,
parse_uint,
parse_list,
parse_error,
parse_error,
parse_bool,
parse_float,
)
def read(self, offset):
new_offset = offset + 1
byte = ord(self.buffer[offset:new_offset])
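        # Control byte: top 3 bits = field type, low 5 bits = size. Type 0 is
        # "extended" (real type = 7 + next byte); sizes 29/30/31 mean the true
        # length follows in the next 1/2/3 bytes.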
size = byte & 0x1f
ty = byte >> 5
if ty == 0:
byte = ord(self.buffer[new_offset:new_offset + 1])
ty = byte + 7
new_offset += 1
if ty != 1 and size >= 29:
to_read = size - 28
bytes = self.buffer[new_offset:new_offset + to_read]
new_offset += to_read
if size == 29:
size = 29 + ord(bytes)
elif size == 30:
size = 285 + unpack_short(bytes)[0]
elif size > 30:
size = 65821 + unpack_int(bytes.rjust(4, '\x00'))[0]
return self.callbacks[ty](self, size, new_offset)
def read_mmdb_meta_data(buffer):
offset = buffer.rfind(MMDB_META_DATA_START,
buffer.size() - MMDB_META_DATA_BLOCK_MAX_SIZE)
if offset < 0:
raise ValueError('Could not find meta data')
offset += len(MMDB_META_DATA_START)
return MMDBParser(buffer, offset).read(offset)[0]
def open_mmdb(filename):
"""Open memory mapped buffer of MMDB"""
with open(filename, 'rb') as f:
mmap_buffer = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
meta_data = read_mmdb_meta_data(mmap_buffer)
return MMDB(filename, mmap_buffer, meta_data)
def geoip_lookup(mmdb_path, cache_path):
"""Performs GeoIP lookups for IPs stored in cache"""
if not os.path.exists(cache_path):
return None
with open(cache_path, 'rb') as f:
cache = json.loads(f.read())
result = defaultdict(lambda: 0)
with open_mmdb(mmdb_path) as db:
for i, ip_data in enumerate(cache):
if 'geoip' not in ip_data:
geoip = db.lookup(ip_data['ip'])
if geoip:
cache[i].update(geoip=True, **geoip.to_dict())
result[geoip.country_en] += 1
with open(cache_path, 'wb') as f:
f.write(json.dumps(cache))
return sorted(result.items(), key=operator.itemgetter(1), reverse=True)
|
# -*- coding: UTF-8 -*-
#! /usr/bin/python
__author__="ARA"
__all__ = ['norm']
__date__ ="$Feb 14, 2012 11:40:06 AM$"
from . import common_obj as _com
from . import constants as _cst
import numpy as _np
from .pigasusObject import *
class norm(pigasusObject):
def __init__ ( self, field = None, type = None, func = None, paramevalfunc = False, exact = None ):
pigasusObject.__init__(self)
self.id = self.com.nnorms
self.nparam = 0
self.paramevalfunc = paramevalfunc
if field is not None:
self.field = field
self.space = field.space
self.loc_id = self.space.grids.add_norm_id(self)
else:
raise("You must give a field for the current norm")
if type is not None:
self.type = type
else:
self.type = _cst.NORM_L2
self._set_nparam()
from .utils import function
if func is not None:
self.func = function(func, space=self.space)
else:
self.defaultFuncParam()
if exact is not None:
self.exact = function(exact, space=self.space)
else:
self.defaultFuncExact()
# this must be the last thing to do
self.com.nnorms += 1
self.com.norms.append(self)
def setInfoData(self):
"""
prints informations about the current norm
"""
self.infoData['id'] = str(self.id)
self.infoData['field'] = str(self.field.id)
self.infoData['space'] = str(self.space.id)
self.infoData['loc_id'] = str(self.loc_id)
self.infoData['nparam'] = str(self.nparam)
self.infoData['paramevalfunc'] = str(self.paramevalfunc)
self.infoData['type'] = str(self.type)
def _getGlobalNorm(self):
return self.com.pyfem.getglobalnorm ( self.id )
def _getPatchNorm(self):
li_npatchs = self.space.grids.npatchs
return self.com.pyfem._getPatchNorm ( self.id, li_npatchs )
def _getElementNorm(self, ai_patch):
li_nel = self.space.grids.list_grid[ai_patch].nel
return self.com.pyfem._getElementNorm ( self.id, ai_patch, li_nel)
def get(self, type=0, ai_patch=None):
"""
returns values for a given type of norm
type = 0 : for a global computation
type = 1 : for a patch computation
type = 2 : for an element computation
"""
if (type == 0) :
return self._getGlobalNorm()
if (type == 1) :
return self._getPatchNorm()
if (type == 2) and (ai_patch is not None):
return self._getElementNorm(ai_patch)
def setEvalNorm(self, ai_patch=0, fields=[], funcs=[]):
"""
fields is a list of fields
funcs is a list of functions
"""
lpr_pts = self.space.get_points(ai_patch)
list_pts = []
for i in range(0, self.space.dim):
list_pts.append(lpr_pts[i,0,:])
lpr_pts = list_pts
li_dim = self.space.dim
if li_dim not in [2]:
print("setEvalNorm: Not yet implemetend for the desired dimension")
lpi_shape = lpr_pts.shape[0:-1]
lpr_val = _np.zeros((1,lpi_shape[0],lpi_shape[1]))
        elts = []  # `elts` was undefined here; assume an empty list means "all elements"
        for F in fields:
            lpr_f = F.eval(ai_patch, elts)[ai_patch,:,:]
lpr_val[0,:,:] += lpr_f[:,:]
for func in funcs:
lpr_f = _np.zeros(lpr_pts.shape[0:-1])
for (i,list_p) in enumerate(lpr_pts):
for (j,p) in enumerate(list_p):
                lpr_f[i,j] = func(p[0], p[1])[0]
lpr_val[0,:,:] += lpr_f[:,:]
self.com.pyfem.set_field_on_grids(self.field.id, ai_patch, lpr_val)
def _set_nparam(self):
if ( self.type in [ _cst.NORM_L2 ] ):
self.nparam = 1
return
if ( self.type in [ _cst.NORM_H1 ] ):
li_dim = self.space.dim
self.nparam = li_dim**2
return
        else:
            print("NORM-_set_nparam : type not implemented yet")
            import sys; sys.exit(1)
def evalfunc(self, ai_patch, apr_points, elts=None, type="param"):
"""
Evaluation of the param-function over a given list of points
"""
if not self.paramevalfunc :
lpr_val = self._evalfunc_std(ai_patch, apr_points, elts, type)
else:
lpr_parampts = self.space.get_parametricPoints(ai_patch_id=ai_patch)
lpr_val = self._evalfunc_std(ai_patch, lpr_parampts, elts, type)
return lpr_val
def _evalfunc_std(self, ai_patch, apr_points, elts, type):
"""
sequential version of the evaluation
"""
if type == "param":
# print "==== param evaluation"
return self.func(apr_points)
if type == "exact":
# print "==== exact evaluation"
return self.exact(apr_points)
def defaultFuncParam(self):
li_dim = self.space.dim
if ( self.type in [ _cst.NORM_L2 ] ):
if li_dim == 1:
func = lambda x : [1.0]
if li_dim == 2:
func = lambda x,y : [1.0]
if li_dim == 3:
func = lambda x,y,z : [1.0]
elif ( self.type in [ _cst.NORM_H1 ] ):
if li_dim == 1:
func = lambda x : [1.0]
if li_dim == 2:
func = lambda x,y : [1.0, 0.0, 0.0, 1.0]
if li_dim == 3:
func = lambda x,y,z : [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
        else:
print("NORM-defaultFuncParam : type not implemented yet")
import sys; sys.exit(1)
from .utils import function
self.func = function(func, space=self.space)
def defaultFuncExact(self):
li_dim = self.space.dim
if li_dim == 1:
func = lambda x : [0.0] * self.field.ndof
elif li_dim == 2:
func = lambda x,y : [0.0] * self.field.ndof
elif li_dim == 3:
func = lambda x,y,z : [0.0] * self.field.ndof
        else:
            raise ValueError("dimension not implemented yet")
from .utils import function
        self.exact = function(func, space=self.space)
def set_func(self, exact):
"""
this sets the param-function of the current field
"""
from .utils import function
self.exact = function(exact, space=self.space)
|
# -*- coding: utf-8 -*-
"""Climate indices computation package based on Xarray."""
from importlib.resources import contents, path
from xclim.core import units # noqa
from xclim.core.indicator import build_indicator_module_from_yaml
from xclim.core.locales import load_locale
from xclim.core.options import set_options # noqa
from xclim.indicators import atmos, land, seaIce # noqa
__author__ = """Travis Logan"""
__email__ = "logan.travis@ouranos.ca"
__version__ = "0.28.0"
# Load official locales
for filename in contents("xclim.data"):
# Only select <locale>.json and not <module>.<locale>.json
if filename.endswith(".json") and filename.count(".") == 1:
locale = filename.split(".")[0]
with path("xclim.data", filename) as f:
load_locale(f, locale)
# Virtual modules creation:
with path("xclim.data", "icclim.yml") as f:
build_indicator_module_from_yaml(f.with_suffix(""), mode="raise")
with path("xclim.data", "anuclim.yml") as f:
build_indicator_module_from_yaml(f.with_suffix(""), mode="raise")
with path("xclim.data", "cf.yml") as f:
    # ignore because some generic functions are missing.
build_indicator_module_from_yaml(f.with_suffix(""), mode="ignore")
|
"""Class for storing SRP password verifiers."""
from utils.cryptomath import *
from utils.compat import *
import mathtls
from BaseDB import BaseDB
class VerifierDB(BaseDB):
"""This class represent an in-memory or on-disk database of SRP
password verifiers.
A VerifierDB can be passed to a server handshake to authenticate
a client based on one of the verifiers.
This class is thread-safe.
"""
def __init__(self, filename=None):
"""Create a new VerifierDB instance.
@type filename: str
@param filename: Filename for an on-disk database, or None for
an in-memory database. If the filename already exists, follow
this with a call to open(). To create a new on-disk database,
follow this with a call to create().
"""
BaseDB.__init__(self, filename, "verifier")
def _getItem(self, username, valueStr):
(N, g, salt, verifier) = valueStr.split(" ")
N = base64ToNumber(N)
g = base64ToNumber(g)
salt = base64ToString(salt)
verifier = base64ToNumber(verifier)
return (N, g, salt, verifier)
def __setitem__(self, username, verifierEntry):
"""Add a verifier entry to the database.
@type username: str
@param username: The username to associate the verifier with.
Must be less than 256 characters in length. Must not already
be in the database.
@type verifierEntry: tuple
@param verifierEntry: The verifier entry to add. Use
L{tlslite.VerifierDB.VerifierDB.makeVerifier} to create a
verifier entry.
"""
BaseDB.__setitem__(self, username, verifierEntry)
def _setItem(self, username, value):
if len(username)>=256:
raise ValueError("username too long")
N, g, salt, verifier = value
N = numberToBase64(N)
g = numberToBase64(g)
salt = stringToBase64(salt)
verifier = numberToBase64(verifier)
valueStr = " ".join( (N, g, salt, verifier) )
return valueStr
def _checkItem(self, value, username, param):
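        # recompute the SRP verifier from the candidate password:
        # x = H(salt, username, password), v = g^x mod N, then compare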
(N, g, salt, verifier) = value
x = mathtls.makeX(salt, username, param)
v = powMod(g, x, N)
return (verifier == v)
def makeVerifier(username, password, bits):
"""Create a verifier entry which can be stored in a VerifierDB.
@type username: str
@param username: The username for this verifier. Must be less
than 256 characters in length.
@type password: str
@param password: The password for this verifier.
@type bits: int
        @param bits: This value specifies which SRP group parameters
to use. It must be one of (1024, 1536, 2048, 3072, 4096, 6144,
8192). Larger values are more secure but slower. 2048 is a
good compromise between safety and speed.
@rtype: tuple
@return: A tuple which may be stored in a VerifierDB.
"""
return mathtls.makeVerifier(username, password, bits)
makeVerifier = staticmethod(makeVerifier)
|
#!/usr/bin/env python3
# _*_ coding:utf-8 _*_
'''
____ _ _ _ _ __ __ _
| _ \ __ _| |__ | |__ (_) |_| \/ | __ _ ___| | __
| |_) / _` | '_ \| '_ \| | __| |\/| |/ _` / __| |/ /
| _ < (_| | |_) | |_) | | |_| | | | (_| \__ \ <
|_| \_\__,_|_.__/|_.__/|_|\__|_| |_|\__,_|___/_|\_\
'''
# Default port dictionary configuration per protocol
HTTP_PORT=['80']
HTTPS_PORT=['443','8443']
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.3 on 2016-04-02 16:27
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("poll", "0007_poll_content_type"),
]
operations = [
migrations.RemoveField(
model_name="poll",
name="content_type",
),
]
|
from cmdbus import cmdbus, Command
class AddCommand(Command):
def __init__(self, v1: int, v2: int):
self.v1 = v1
self.v2 = v2
def handle(self):
return self.v1 + self.v2
def test_dispatch():
cmd = AddCommand(3, 5)
result = cmdbus.dispatch(cmd)
    assert result == 8
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class OctConv(nn.Module):
def __init__(self, ch_in, ch_out, kernel_size, stride=1, alphas=(0.5, 0.5)):
super(OctConv, self).__init__()
self.alpha_in, self.alpha_out = alphas
        assert 0 <= self.alpha_in <= 1 and 0 <= self.alpha_out <= 1, "Alphas must be in interval [0, 1]"
# CH IN
self.ch_in_hf = int((1 - self.alpha_in) * ch_in)
self.ch_in_lf = ch_in - self.ch_in_hf
# CH OUT
self.ch_out_hf = int((1 - self.alpha_out) * ch_out)
self.ch_out_lf = ch_out - self.ch_out_hf
# FILTERS
self.wHtoH = nn.Parameter(torch.randn(self.ch_out_hf, self.ch_in_hf, kernel_size, kernel_size))
self.wHtoL = nn.Parameter(torch.randn(self.ch_out_lf, self.ch_in_hf, kernel_size, kernel_size))
self.wLtoH = nn.Parameter(torch.randn(self.ch_out_hf, self.ch_in_lf, kernel_size, kernel_size))
self.wLtoL = nn.Parameter(torch.randn(self.ch_out_lf, self.ch_in_lf, kernel_size, kernel_size))
# PADDING: (H - F + 2P)/S + 1 = 2 * [(0.5 H - F + 2P)/S +1] -> P = (F-S)/2
self.padding = (kernel_size - stride) // 2
def forward(self, input):
# logic to handle input tensors:
# if alpha_in = 0., we assume to be at the first layer, with only high freq repr
if self.alpha_in == 0:
hf_input = input
lf_input = torch.Tensor([]).reshape(1, 0)
else:
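            # high-freq channels arrive packed as 4x channels at half spatial
            # size; unpack them back to full resolution before convolving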
fmap_size = input.shape[-1]
hf_input = input[:, :self.ch_in_hf * 4, ...].reshape(-1, self.ch_in_hf, fmap_size * 2, fmap_size * 2)
lf_input = input[:, self.ch_in_hf * 4:, ...]
HtoH = HtoL = LtoL = LtoH = 0.
if self.alpha_in < 1:
# if alpha < 1 there is high freq component
if self.ch_out_hf > 0:
HtoH = F.conv2d(hf_input, self.wHtoH, padding=self.padding)
if self.ch_out_lf > 0:
HtoL = F.conv2d(F.avg_pool2d(hf_input, 2), self.wHtoL, padding=self.padding)
if self.alpha_in > 0:
# if alpha > 0 there is low freq component
if self.ch_out_hf > 0:
LtoH = F.interpolate(F.conv2d(lf_input, self.wLtoH, padding=self.padding),
scale_factor=2, mode='nearest')
if self.ch_out_lf > 0:
LtoL = F.conv2d(lf_input, self.wLtoL, padding=self.padding)
hf_output = HtoH + LtoH
lf_output = LtoL + HtoL
if 0 < self.alpha_out < 1:
# if alpha in (0, 1)
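            # repack the high-freq output as 4x channels at half resolution so
            # both frequency parts fit in a single output tensor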
fmap_size = hf_output.shape[-1] // 2
hf_output = hf_output.reshape(-1, 4 * self.ch_out_hf, fmap_size, fmap_size)
output = torch.cat([hf_output, lf_output], dim=1) # cat over channel dim
elif np.isclose(self.alpha_out, 1., atol=1e-8):
# if only low req (alpha_out = 1.)
output = lf_output
elif np.isclose(self.alpha_out, 0., atol=1e-8):
# if only high freq (alpha_out = 0.)
output = hf_output
return output
oc = OctConv(ch_in=3, ch_out=3, kernel_size=3, alphas=(0., 0.5))
oc1 = OctConv(ch_in=3, ch_out=10, kernel_size=7, alphas=(0.5, 0.8))
oc2 = OctConv(ch_in=10, ch_out=1, kernel_size=3, alphas=(0.8, 0.))
out = oc2(oc1(oc(torch.randn(2, 3, 32, 32))))
print(out.shape)
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import unittest
from tvcm import parse_html_deps
from tvcm import module as module_module
from tvcm import html_generation_controller
class ParseTests(unittest.TestCase):
def test_parse_empty(self):
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse("")
self.assertEquals([], module.scripts_external)
self.assertEquals([], module.inline_scripts)
self.assertEquals([], module.stylesheets)
self.assertEquals([], module.imports)
def test_parse_none(self):
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(None)
self.assertEquals([], module.scripts_external)
self.assertEquals([], module.inline_scripts)
self.assertEquals([], module.stylesheets)
self.assertEquals([], module.imports)
def test_parse_script_src(self):
html = """<!DOCTYPE html>
<html>
<head>
<script src="polymer.min.js"></script>
<script src="foo.js"></script>
</head>
<body>
</body>
</html>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
    self.assertEquals(['polymer.min.js', 'foo.js'], module.scripts_external)
self.assertEquals([], module.inline_scripts)
self.assertEquals([], module.stylesheets)
self.assertEquals([], module.imports)
self.assertTrue(module.has_decl)
self.assertTrue('DOCTYPE html' not in module.html_contents_without_links_and_script)
class Ctl(html_generation_controller.HTMLGenerationController):
def GetHTMLForScriptHRef(self, href):
if href == "polymer.min.js":
return "<script>POLYMER</script>"
elif href == "foo.js":
return "<script>FOO</script>"
return None
def GetHTMLForStylesheetHRef(self, href):
return None
gen_html = module.GenerateHTML(Ctl())
ghtm = """
<html>
<head>
<script>POLYMER</script>
<script>FOO</script>
</head>
<body>
</body>
</html>"""
self.assertEquals(ghtm, gen_html)
def test_parse_link_rel_import(self):
html = """<!DOCTYPE html>
<html>
<head>
<link rel="import" href="x-foo.html">
</head>
<body>
</body>
</html>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
    self.assertEquals([], module.scripts_external)
self.assertEquals([], module.inline_scripts)
self.assertEquals([], module.stylesheets)
self.assertEquals(['x-foo.html'], module.imports)
self.assertTrue(module.has_decl)
def test_parse_script_inline(self):
html = """<polymer-element name="tk-element-proto">
<template>
</template>
<script>
tvcm.require("foo");
tvcm.require('bar');
</script>
</polymer-element>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
    self.assertEquals([], module.scripts_external)
self.assertEquals(1, len(module.inline_scripts))
self.assertEquals([], module.stylesheets)
self.assertEquals([], module.imports)
self.assertFalse(module.has_decl)
script0 = module.inline_scripts[0]
val = re.sub(r"\s+", '', script0.contents)
inner_script = """tvcm.require("foo");tvcm.require('bar');"""
self.assertEquals(inner_script, val)
self.assertEquals(1, len(script0.open_tags))
self.assertEquals('polymer-element', script0.open_tags[0].tag)
assert 'tvcm.require("foo");' not in module.html_contents_without_links_and_script
  def test_parse_script_src_stripping(self):
html = """
<script src="blah.js"></script>
"""
module = parse_html_deps.HTMLModuleParser().Parse(html)
self.assertEquals('\n\n', module.html_contents_without_links_and_script)
def test_parse_link_rel_stylesheet(self):
html = """<polymer-element name="hi">
<template>
<link rel="stylesheet" href="frameworkstyles.css">
</template>
</polymer-element>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
    self.assertEquals([], module.scripts_external)
self.assertEquals([], module.inline_scripts)
self.assertEquals(['frameworkstyles.css'], module.stylesheets)
self.assertEquals([], module.imports)
self.assertFalse(module.has_decl)
class Ctl(html_generation_controller.HTMLGenerationController):
def GetHTMLForScriptHRef(self, href):
return None
def GetHTMLForStylesheetHRef(self, href):
if href == "frameworkstyles.css":
return "<style>FRAMEWORK</style>"
return None
gen_html = module.GenerateHTML(Ctl())
ghtm = """<polymer-element name="hi">
<template>
<style>FRAMEWORK</style>
</template>
</polymer-element>"""
self.assertEquals(ghtm, gen_html)
def test_parse_inline_style(self):
html = """
<style>
hello
</style>"""
module = parse_html_deps.HTMLModuleParser().Parse(html)
self.assertEquals(html, module.html_contents_without_links_and_script)
class Ctl(html_generation_controller.HTMLGenerationController):
def GetHTMLForInlineStylesheet(self, contents):
if contents == '\n hello\n':
return '\n HELLO\n'
return None
gen_html = module.GenerateHTML(Ctl())
ghtm = """
<style>
HELLO
</style>"""
self.assertEquals(ghtm, gen_html)
def test_parse_style_import(self):
html = """<polymer-element name="x-blink">
<template>
<style>
@import url(awesome.css);
</style>
</template>
</polymer-element>"""
parser = parse_html_deps.HTMLModuleParser()
    def DoIt():
      parser.Parse(html)
    self.assertRaises(Exception, DoIt)
def test_html_contents_basic(self):
html = """<a b="c">d</a>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
self.assertEquals(html, module.html_contents_without_links_and_script)
def test_html_contents_with_entity(self):
    html = """<a>&rarr;</a>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
self.assertEquals(html, module.html_contents_without_links_and_script)
def test_html_content_with_charref(self):
    html = """<a>&#62;</a>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
self.assertEquals(html, module.html_contents_without_links_and_script)
def test_html_content_start_end_br(self):
html = """<a><br /></a>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
self.assertEquals(html, module.html_contents_without_links_and_script)
def test_html_content_start_end_img(self):
html = """<a><img src="foo.png" id="bar" /></a>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
self.assertEquals(html, module.html_contents_without_links_and_script)
def test_html_contents_with_link_stripping(self):
html = """<a b="c">d</a>
<link rel="import" href="x-foo.html">"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
self.assertEquals("""<a b="c">d</a>""",
module.html_contents_without_links_and_script.strip())
def test_html_contents_with_style_link_stripping(self):
html = """<a b="c">d</a>
<link rel="stylesheet" href="frameworkstyles.css">"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
self.assertEquals("""<a b="c">d</a>""",
module.html_contents_without_links_and_script.strip())
def test_malformed_script_raises(self):
html = """<script src="x"/>"""
parser = parse_html_deps.HTMLModuleParser()
def DoIt():
module = parser.Parse(html)
self.assertRaises(Exception, DoIt)
def test_malformed_br_raises(self):
html = """<br>"""
parser = parse_html_deps.HTMLModuleParser()
def DoIt():
module = parser.Parse(html)
self.assertRaises(Exception, DoIt)
def test_br_does_not_raise(self):
html = """<div><br/></div>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
  def test_p_does_not_raise(self):
html = """<div></p></div>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
def test_link_endlink_does_not_raise(self):
html = """<link rel="stylesheet" href="foo.css"></link>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
def test_link_script_does_not_raise(self):
html = """<link rel="stylesheet" href="foo.css">
<script>
</script>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
  def test_malformed_script_unclosed_raises(self):
html = """<script src="/jszip-inflate.js"</script>"""
parser = parse_html_deps.HTMLModuleParser()
def DoIt():
module = parser.Parse(html)
self.assertRaises(Exception, DoIt)
def test_script_with_script_inside_as_js(self):
html = """<script>
var html_lines = [
'<script>',
'<\/script>',
];
</script>"""
parser = parse_html_deps.HTMLModuleParser()
module = parser.Parse(html)
def test_invalid_script_escaping_raises(self):
html = """<script>
var html_lines = [
'<script>',
'< /script>',
];
</script>"""
parser = parse_html_deps.HTMLModuleParser()
def DoIt():
module = parser.Parse(html)
self.assertRaises(Exception, DoIt)
if __name__ == '__main__':
unittest.main()
|