"""An implementation of matrix capsules with EM routing.
"""
import tensorflow as tf
from core import _conv2d_wrapper, capsules_init, capsules_conv, capsules_fc
slim = tf.contrib.slim
# ------------------------------------------------------------------------------#
# -------------------------------- capsules net --------------------------------#
# ------------------------------------------------------------------------------#
def capsules_v0(inputs, num_classes, iterations, name='CapsuleEM-V0'):
"""Replicate the network in `Matrix Capsules with EM Routing.`
"""
with tf.variable_scope(name) as scope:
# inputs [N, H, W, C] -> conv2d, 5x5, strides 2, channels 32 -> nets [N, OH, OW, 32]
nets = _conv2d_wrapper(
inputs, shape=[5, 5, 1, 32], strides=[1, 2, 2, 1], padding='SAME', add_bias=True, activation_fn=tf.nn.relu, name='conv1'
)
    # nets [N, OH, OW, 32] -> conv2d, 1x1, strides 1, channels 32x(4x4+1) -> (poses, activations)
nets = capsules_init(
nets, shape=[1, 1, 32, 32], strides=[1, 1, 1, 1], padding='VALID', pose_shape=[4, 4], name='capsule_init'
)
# inputs: (poses, activations) -> capsule-conv 3x3x32x32x4x4, strides 2 -> (poses, activations)
nets = capsules_conv(
nets, shape=[3, 3, 32, 32], strides=[1, 2, 2, 1], iterations=iterations, name='capsule_conv1'
)
# inputs: (poses, activations) -> capsule-conv 3x3x32x32x4x4, strides 1 -> (poses, activations)
nets = capsules_conv(
nets, shape=[3, 3, 32, 32], strides=[1, 1, 1, 1], iterations=iterations, name='capsule_conv2'
)
# inputs: (poses, activations) -> capsule-fc 1x1x32x10x4x4 shared view transform matrix within each channel -> (poses, activations)
nets = capsules_fc(
nets, num_classes, iterations=iterations, name='capsule_fc'
)
poses, activations = nets
return poses, activations
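# A minimal usage sketch (shapes illustrative, placeholder tensors hypothetical,
# not from this file): build the network on a batch of 28x28 grayscale images.
#
#   images = tf.placeholder(tf.float32, [64, 28, 28, 1])
#   poses, activations = capsules_v0(images, num_classes=10, iterations=3)
#   # poses: [N, num_classes, 4, 4], activations: [N, num_classes]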
# ------------------------------------------------------------------------------#
# ------------------------------------ loss ------------------------------------#
# ------------------------------------------------------------------------------#
def spread_loss(labels, activations, margin, name):
  """Adds the spread loss to the total loss.
  :param labels: [N, O] one-hot labels, tf.uint8, where O is the number of output classes.
  :param activations: [N, O] output class activations.
  :param margin: the margin m, raised on a fixed schedule from 0.2 to 0.9 during training.
  :return: spread loss
  """
activations_shape = activations.get_shape().as_list()
with tf.variable_scope(name) as scope:
mask_t = tf.equal(labels, 1)
mask_i = tf.equal(labels, 0)
activations_t = tf.reshape(
tf.boolean_mask(activations, mask_t), [activations_shape[0], 1]
)
activations_i = tf.reshape(
tf.boolean_mask(activations, mask_i), [activations_shape[0], activations_shape[1] - 1]
)
gap_mit = tf.reduce_sum(
tf.square(
tf.nn.relu(
margin - (activations_t - activations_i)
)
)
)
tf.losses.add_loss(gap_mit)
return gap_mit
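# A hedged sketch of wiring up the margin schedule from the paper (names such as
# `global_step` and the 50k-step ramp are assumptions, not from this file):
# anneal the margin linearly from 0.2 to 0.9 during training.
#
#   global_step = tf.train.get_or_create_global_step()
#   margin = tf.minimum(0.9, 0.2 + 0.7 * tf.cast(global_step, tf.float32) / 50000.0)
#   loss = spread_loss(labels, activations, margin, name='spread_loss')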
# ------------------------------------------------------------------------------#
import re
from textwrap import dedent
from unittest import mock
from unittest.mock import PropertyMock
import pytest
from get_pr_info import extract_target_section, get_pr_number_from_commit_message, get_pr_summary
from github import BadCredentialsException, UnknownObjectException
class TestGetPrNumberFromCommitMessage:
@pytest.fixture(autouse=True)
def setUp(self):
self.pattern = re.compile(r"#(\d*)")
def test_not_match(self):
actual = get_pr_number_from_commit_message("abcdefg", self.pattern)
expected = 0
assert actual == expected
def test_match(self):
actual = get_pr_number_from_commit_message("ab#123cd", self.pattern)
expected = 123
assert actual == expected
def test_multi_match(self):
actual = get_pr_number_from_commit_message("ab#123cd#456ef", self.pattern)
expected = 123
assert actual == expected
def test_second_line_match(self):
        # When the PR number is on the second line (only the first line is searched)
actual = get_pr_number_from_commit_message("abc\ncd#123ef", self.pattern)
expected = 0
assert actual == expected
def test_blank_str(self):
actual = get_pr_number_from_commit_message("", self.pattern)
expected = 0
assert actual == expected
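# A minimal sketch of an implementation consistent with the tests above (the
# real get_pr_info.get_pr_number_from_commit_message may differ): only the
# first line of the message is searched, and the first match wins.
#
#   def get_pr_number_from_commit_message(message: str, pattern) -> int:
#       match = pattern.search(message.split("\n")[0])
#       return int(match.group(1)) if match and match.group(1) else 0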
class TestGetPrInfo:
@pytest.fixture(autouse=True)
def setUp(self):
mock_client = mock.Mock()
mock_repo = mock.Mock()
mock_pull = mock.Mock()
with mock.patch("get_pr_info.Github") as mock_github:
mock_github.return_value = mock_client
mock_client.get_repo.return_value = mock_repo
mock_repo.get_pull.return_value = mock_pull
self.mock_github = mock_github
self.mock_client = mock_client
self.mock_repo = mock_repo
self.mock_pull = mock_pull
yield
    def test_normal(self):
mock_body = PropertyMock(return_value="PR_INFO")
type(self.mock_pull).body = mock_body
actual = get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
expected = "PR_INFO"
assert actual == expected
self.mock_github.assert_called_once_with("GITHUB_TOKEN")
self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
self.mock_repo.get_pull.assert_called_once_with(999)
assert mock_body.call_count == 1
def test_bad_credential(self):
        # When the token is invalid.
        # The exception is raised from a mock because we do not want to hit the real API.
        # Not meaningful as a unit test, but kept here to document the expected behaviour.
        # NOTE: an invalid token does not raise when the Github instance is created;
        # the error occurs once a GitHub operation is attempted.
self.mock_client.get_repo.side_effect = BadCredentialsException(
401,
data={
"message": "Bad credentials",
"documentation_url": "https://docs.github.com/rest",
},
)
with pytest.raises(BadCredentialsException):
get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
self.mock_github.assert_called_once_with("GITHUB_TOKEN")
self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
self.mock_repo.get_pull.assert_not_called()
def test_not_exists_repository(self):
        # When the repository does not exist.
        # The exception is raised from a mock because we do not want to hit the real API.
        # Not meaningful as a unit test, but kept here to document the expected behaviour.
self.mock_client.get_repo.side_effect = UnknownObjectException(
404,
data={
"message": "Not Found",
"documentation_url": "https://docs.github.com/rest/reference/repos#get-a-repository", # noqa
},
)
with pytest.raises(UnknownObjectException):
get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
self.mock_github.assert_called_once_with("GITHUB_TOKEN")
self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
self.mock_repo.get_pull.assert_not_called()
def test_not_exists_pr_number(self):
        # When the PR number does not exist.
        # The exception is raised from a mock because we do not want to hit the real API.
        # Not meaningful as a unit test, but kept here to document the expected behaviour.
self.mock_repo.get_pull.side_effect = UnknownObjectException(
404,
data={
"message": "Not Found",
"documentation_url": "https://docs.github.com/rest/reference/pulls#get-a-pull-request", # noqa
},
)
with pytest.raises(UnknownObjectException):
get_pr_summary(999, "GITHUB_TOKEN", "REPOSITORY_NAME")
self.mock_github.assert_called_once_with("GITHUB_TOKEN")
self.mock_client.get_repo.assert_called_once_with("REPOSITORY_NAME")
self.mock_repo.get_pull.assert_called_once_with(999)
class TestGetIntegrationTestPoint:
    def test_normal(self):
summary = dedent(
"""\
## 概要
- 現状(As is)
- こうなんです
- 理想(To be)
- こうなりたい
- 問題(Problem)
- こまってる
- 解決・やったこと(Action)
- これをやった
## 結合テスト観点
- 対応概要
- こうやった
- 観点
- こういうこと1
- 条件: こうしてほしい2
- こういうこと2
- 条件: こうしてほしい2
- 担当
- API yamap55
"""
)
actual = extract_target_section(summary, "## 結合テスト観点")
expected = dedent(
"""\
## 結合テスト観点
- 対応概要
- こうやった
- 観点
- こういうこと1
- 条件: こうしてほしい2
- こういうこと2
- 条件: こうしてほしい2
- 担当
- API yamap55"""
)
assert actual == expected
def test_not_exists_target_section(self):
summary = ""
actual = extract_target_section(summary, "## 結合テスト観点")
expected = ""
assert actual == expected
def test_another_section_at_the_end(self):
        # When another section exists after the target section
summary = dedent(
"""\
## 概要
- 現状(As is)
- こうなんです
- 理想(To be)
- こうなりたい
- 問題(Problem)
- こまってる
- 解決・やったこと(Action)
- これをやった
## 結合テスト観点
- 対応概要
- こうやった
- 観点
- こういうこと1
- 条件: こうしてほしい2
- こういうこと2
- 条件: こうしてほしい2
- 担当
- API yamap55
## 対象外セクション
- 対象外です
"""
)
actual = extract_target_section(summary, "## 結合テスト観点")
expected = dedent(
"""\
## 結合テスト観点
- 対応概要
- こうやった
- 観点
- こういうこと1
- 条件: こうしてほしい2
- こういうこと2
- 条件: こうしてほしい2
- 担当
- API yamap55
"""
)
assert actual == expected
@pytest.mark.parametrize("line_separator", ["\n", "\r\n", "\r"])
def test_various_line_separator(self, line_separator):
summary = f"## HOGE{line_separator}## TARGET_ROW{line_separator}## HUGA"
actual = extract_target_section(summary, "## TARGET_ROW")
expected = "## TARGET_ROW"
assert actual == expected
#!/usr/bin/env python
# coding: utf-8
# ### - Ensemble/Blend the 4 model predictions into a single prediction
# In[1]:
import os
import datetime
from time import time
import pathlib
import pandas as pd
import numpy as np
from collections import defaultdict
from collections import Counter
# In[2]:
from sklearn.metrics import precision_recall_curve,average_precision_score
from sklearn.metrics import log_loss, roc_curve
from sklearn.metrics import auc,roc_auc_score
# In[3]:
from numba import njit
from scipy.optimize import minimize, fsolve
# In[4]:
# The two options here are "" and "_subsample"
file_indicator = ""
data_dir = pathlib.Path("../2.data_split/model_data")
# In[5]:
cp_test = pathlib.Path(f"{data_dir}/cp/test_lvl4_data{file_indicator}.csv.gz")
L1000_test = pathlib.Path(f"{data_dir}/L1/test_lvl4_data.csv.gz")
cp_L1000_test = pathlib.Path(f"{data_dir}/merged/test_lvl4_data.csv.gz")
# In[6]:
model_preds_dir = '../L1000_CP_model_predictions/'
# In[7]:
df_cp_test = pd.read_csv(cp_test, compression='gzip',low_memory = False)
df_L1000_test = pd.read_csv(L1000_test, compression='gzip',low_memory = False)
df_cp_L1000_test = pd.read_csv(cp_L1000_test, compression='gzip',low_memory = False)
# In[8]:
df_cp_L1000_test.shape
# In[9]:
##resnet
df_cp_resnet_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_resnet{file_indicator}.csv'))
df_L1000_resnet_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_resnet.csv'))
df_cp_L1000_resnet_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_resnet.csv'))
# In[10]:
print(df_cp_L1000_resnet_test.shape)
df_cp_L1000_resnet_test.head()
# In[11]:
##1-d cnn
df_cp_cnn_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_1dcnn{file_indicator}.csv'))
df_L1000_cnn_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_1dcnn.csv'))
df_cp_L1000_cnn_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_1dcnn.csv'))
# In[12]:
print(df_cp_L1000_cnn_test.shape)
df_cp_L1000_cnn_test.head()
# In[13]:
##tabnet
df_cp_tabnet_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_tabnet{file_indicator}.csv'))
df_L1000_tabnet_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_tabnet.csv'))
df_cp_L1000_tabnet_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_tabnet.csv'))
# In[14]:
df_cp_L1000_tabnet_test.shape
# In[15]:
##stagedNN
df_cp_simplenn_test = pd.read_csv(os.path.join(model_preds_dir, f'cp_test_preds_simplenn{file_indicator}.csv'))
df_L1000_simplenn_test = pd.read_csv(os.path.join(model_preds_dir, 'L1000_test_preds_simplenn.csv'))
df_cp_L1000_simplenn_test = pd.read_csv(os.path.join(model_preds_dir, 'cp_L1000_test_preds_simplenn.csv'))
# In[16]:
df_cp_L1000_simplenn_test.shape
# In[17]:
df_cp_tst_targets = df_cp_test[df_cp_cnn_test.columns]
df_L1000_tst_targets = df_L1000_test[df_L1000_cnn_test.columns]
df_cp_L1000_tst_targets = df_cp_L1000_test[df_cp_L1000_cnn_test.columns]
# In[18]:
df_cp_tst_targets.shape
# In[19]:
df_L1000_tst_targets.shape
# In[20]:
df_cp_L1000_tst_targets.shape
# #### - Resnet, 1d-cnn, Tabnet, Simplenn --> 4 model predictions
# In[21]:
# CPMP's logloss from https://www.kaggle.com/c/lish-moa/discussion/183010
def log_loss_numpy(y_true, y_pred):
y_true_ravel = np.asarray(y_true).ravel()
y_pred = np.asarray(y_pred).ravel()
y_pred = np.clip(y_pred, 1e-15, 1 - 1e-15)
loss = np.where(y_true_ravel == 1, - np.log(y_pred), - np.log(1 - y_pred))
return loss.mean()
def func_numpy_metric(weights, oof, y_true):
oof_blend = np.tensordot(weights, oof, axes = ((0), (0)))
return log_loss_numpy(y_true, oof_blend)
def grad_func(weights, oof, y_true):
oof_clip = np.clip(oof, 1e-15, 1 - 1e-15)
gradients = np.zeros(oof.shape[0])
for i in range(oof.shape[0]):
a, b, c = y_true, oof_clip[i], np.zeros((oof.shape[1], oof.shape[2]))
for j in range(oof.shape[0]):
if j != i:
c += weights[j] * oof_clip[j]
gradients[i] = -np.mean((-a*b+(b**2)*weights[i]+b*c)/((b**2)*(weights[i]**2)+2*b*c*weights[i]-b*weights[i]+(c**2)-c))
return gradients
@njit
def grad_func_jit(weights, oof, y_true):
oof_clip = np.minimum(1 - 1e-15, np.maximum(oof, 1e-15))
gradients = np.zeros(oof.shape[0])
for i in range(oof.shape[0]):
a, b, c = y_true, oof_clip[i], np.zeros((oof.shape[1], oof.shape[2]))
for j in range(oof.shape[0]):
if j != i:
c += weights[j] * oof_clip[j]
gradients[i] = -np.mean((-a*b+(b**2)*weights[i]+b*c)/((b**2)*(weights[i]**2)+2*b*c*weights[i]-b*weights[i]+(c**2)-c))
return gradients
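# A quick illustrative sanity check (made-up values, not from the data above):
# log_loss_numpy should agree with sklearn's log_loss on the flattened binary
# labels, which is how it is applied to the multi-label targets below.
_y_demo = np.array([[1, 0], [0, 1]])
_p_demo = np.array([[0.9, 0.2], [0.3, 0.8]])
print('numpy  :', log_loss_numpy(_y_demo, _p_demo))
print('sklearn:', log_loss(np.ravel(_y_demo), np.ravel(_p_demo)))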
# In[22]:
cp_model_preds = [df_cp_cnn_test, df_cp_resnet_test, df_cp_tabnet_test, df_cp_simplenn_test]
L1000_model_preds = [df_L1000_cnn_test, df_L1000_resnet_test, df_L1000_tabnet_test, df_L1000_simplenn_test]
cp_L1000_model_preds = [df_cp_L1000_cnn_test, df_cp_L1000_resnet_test, df_cp_L1000_tabnet_test, df_cp_L1000_simplenn_test]
# In[23]:
models_name = ['1d-Cnn', 'Resnet', 'Tabnet', 'SimpleNN']
def get_optimized_blended_weights(model_oofs, df_targets, num_of_models = 4, models_name = models_name):
    """
    This function assigns a weight to each of the models used in predicting MOAs, based on
    the log-loss obtained when comparing each model's predictions with the actual MOA
    (mechanism of action) test labels.
    For more info: https://www.kaggle.com/gogo827jz/optimise-blending-weights-with-bonus-0/notebook
    """
model_oof_preds = np.zeros((num_of_models, df_targets.shape[0], df_targets.shape[1]))
for idx in range(num_of_models):
model_oof_preds[idx] = model_oofs[idx].values
score_oof = log_loss_numpy(df_targets, model_oof_preds[idx])
print(f'{idx} {models_name[idx]}, Test loss:\t', score_oof)
tol = 1e-10
init_guess = [1 / model_oof_preds.shape[0]] * model_oof_preds.shape[0]
bnds = [(0, 1) for _ in range(model_oof_preds.shape[0])]
cons = {
'type': 'eq',
'fun': lambda x: np.sum(x) - 1,
'jac': lambda x: [1] * len(x)
}
    print('Initial Blend OOF:', func_numpy_metric(init_guess, model_oof_preds, df_targets.values))
start_time = time()
res_scipy = minimize(fun = func_numpy_metric, x0 = init_guess,
args=(model_oof_preds, df_targets.values),
method = 'SLSQP', ##L-BFGS-B ##SLSQP
jac = grad_func_jit, # grad_func
bounds = bnds, constraints = cons, tol = tol)
print(f'[{str(datetime.timedelta(seconds = time() - start_time))[2:7]}] Optimised Blend OOF:', res_scipy.fun)
print('Optimised Weights:', res_scipy.x)
return model_oof_preds, res_scipy.x
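# A sketch (not executed here) of applying the optimised weights directly:
# blend the stacked predictions with np.tensordot, exactly as
# func_numpy_metric does internally.
#
#   model_preds, weights = get_optimized_blended_weights(L1000_model_preds, df_L1000_tst_targets)
#   blended = np.tensordot(weights, model_preds, axes=((0), (0)))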
# In[24]:
_, L1000_model_weights = get_optimized_blended_weights(L1000_model_preds, df_L1000_tst_targets)
# In[25]:
_, cp_model_weights = get_optimized_blended_weights(cp_model_preds, df_cp_tst_targets)
# In[26]:
_, cp_L1000_model_weights = get_optimized_blended_weights(cp_L1000_model_preds, df_cp_L1000_tst_targets)
# In[27]:
def model_eval_results(df_tst, df_tst_y, df_preds):
"""
    This function prints out the model evaluation results for the test predictions.
    The evaluation metrics used in assessing the performance of the models are:
    log loss, ROC AUC score and Precision-Recall AUC (average precision) score.
"""
eval_metrics = ['log loss', 'ROC AUC score', 'PR-AUC/Average_precision_score',]
moa_class_list = df_tst['moa'].unique()
val_moas = [moa for moa_list in moa_class_list for moa in moa_list.split('|')]
print('-' * 10, 'Test data prediction results', '-' * 10)
print(f'{eval_metrics[0]}:', log_loss(np.ravel(df_tst_y), np.ravel(df_preds)))
print(f'{eval_metrics[1]}:', roc_auc_score(df_tst_y[val_moas],df_preds[val_moas], average='macro'))
print(f'{eval_metrics[2]}:', average_precision_score(df_tst_y[val_moas], df_preds[val_moas], average="micro"))
# In[28]:
##optimised weights for reference: [1.57502187e-01, 1.15142271e-16, 0.00000000e+00, 8.42497813e-01]; the blend below uses hand-tuned weights instead
df_L1000_blend = pd.DataFrame(np.zeros(df_L1000_cnn_test.shape), columns = df_L1000_cnn_test.columns)
df_L1000_blend = df_L1000_cnn_test*0.45 + df_L1000_resnet_test*0.05 + df_L1000_tabnet_test*0.05 + df_L1000_simplenn_test*0.45
# In[29]:
0.45+(0.05*2)+0.45  # sanity check: the hand-tuned blend weights sum to 1
# In[30]:
model_eval_results(df_L1000_test, df_L1000_tst_targets, df_L1000_blend)
# In[31]:
##optimised weights for reference: [4.29598527e-01, 3.27312317e-01, 2.43089156e-01, 5.42101086e-18]; the blend below uses hand-tuned weights instead
df_cp_blend = pd.DataFrame(np.zeros(df_cp_cnn_test.shape), columns = df_cp_cnn_test.columns)
df_cp_blend = df_cp_cnn_test*0.35 + df_cp_resnet_test*0.35 + df_cp_tabnet_test*0.25 + df_cp_simplenn_test*0.05
# In[32]:
0.35+0.35+0.25+0.05  # sanity check: the hand-tuned blend weights sum to 1
# In[33]:
model_eval_results(df_cp_test, df_cp_tst_targets, df_cp_blend)
# In[34]:
##optimised weights for reference: [0.28574384, 0.09796798, 0.06528908, 0.5509991]; the blend below uses hand-tuned weights instead
df_cp_L1000_blend = pd.DataFrame(np.zeros(df_cp_L1000_cnn_test.shape), columns = df_cp_L1000_cnn_test.columns)
df_cp_L1000_blend = df_cp_L1000_cnn_test*0.30 + df_cp_L1000_resnet_test*0.20 + df_cp_L1000_tabnet_test*0.15 + df_cp_L1000_simplenn_test*0.35
# In[35]:
0.30+0.20+0.15+0.35  # sanity check: the hand-tuned blend weights sum to 1
# In[36]:
model_eval_results(df_cp_L1000_test, df_cp_L1000_tst_targets, df_cp_L1000_blend)
# In[37]:
def save_to_csv(df, path, file_name, compress=None):
"""save dataframes to csv"""
if not os.path.exists(path):
os.mkdir(path)
df.to_csv(os.path.join(path, file_name), index=False, compression=compress)
# In[38]:
save_to_csv(df_cp_blend, model_preds_dir, f'cp_test_preds_blend{file_indicator}.csv')
save_to_csv(df_L1000_blend, model_preds_dir, 'L1000_test_preds_blend.csv')
save_to_csv(df_cp_L1000_blend, model_preds_dir, 'cp_L1000_test_preds_blend.csv')
"""
Base module for pyltr.
We import all packages since it's not particularly expensive.
"""
from . import data
from . import metrics
from . import models
from . import util
# -*- coding: utf-8 -*-
""" Sahana Eden Evacuees Registry Model
@copyright: 2015-2017 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3EVRCaseModel",
"evr_rheader",
"evr_AddGroupMembers",
)
from gluon import *
from ..s3 import *
# =============================================================================
class S3EVRCaseModel(S3Model):
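    """ Case registry model for evacuees: case data, medical details
        and socio-economic background
    """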
names = ("evr_case",
"evr_medical_details",
)
def model(self):
T = current.T
settings = current.deployment_settings
define_table = self.define_table
person_id = self.pr_person_id
# ---------------------------------------------------------------------
# Case Data
#
enable_evr_organisation = settings.get_evr_link_to_organisation()
organisation_label = settings.get_hrm_organisation_label()
org_organisation_represent = self.org_OrganisationRepresent()
org_widget = S3HierarchyWidget(lookup="org_organisation",
represent=org_organisation_represent,
multiple=False,
leafonly=False,)
tablename = "evr_case"
define_table(tablename,
person_id(ondelete = "CASCADE"),
self.org_organisation_id(
empty = not settings.get_hrm_org_required(),
label = organisation_label,
requires = self.org_organisation_requires(required=True),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Designed Organisation"),
T("Organisation designed to take care of evacuee"))),
widget = org_widget,
readable = enable_evr_organisation,
writable = enable_evr_organisation,
),
Field("fiscal_code", "string", length=16,
label = T("Fiscal Code"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Fiscal Code"),
T("Insert the fiscal code with no spaces")
)
),
),
s3_comments(),
*s3_meta_fields())
# If fiscal code is present, it's unique
# fiscal_code = db.evr_case.fiscal_code
# fiscal_code.requires = IS_EMPTY_OR(
# IS_NOT_IN_DB(db(db.evr_case.deleted != True),
# fiscal_code),
# null=''
# )
self.configure(tablename,
onaccept = self.evr_case_onaccept,
)
# ---------------------------------------------------------------------
# Medical Details
#
# @todo: use string-codes for option fields for better
# maintainability/interoperability
#
evr_therapy_opts = {1: T("Vital Long-Term Medication"),
2: T("Dialysis"),
3: T("Chronic Oxygen Supply"),
4: T("Intermittend Ventilator Support"),
5: T("Ventilator Dependend"),
6: T("Cardiac Assist Device"),
}
evr_allergy_opts = {1: T("Drug"),
2: T("Food"),
3: T("Olive Tree"),
4: T("Grass"),
5: T("Dust"),
6: T("Other"),
}
evr_disability_opts = {1: T("Visually Impaired"),
2: T("Blind"),
3: T("Hearing-Impaired"),
4: T("Deaf"),
5: T("Deaf-Mute"),
6: T("Deaf-Blind"),
7: T("Aphasic"),
8: T("Mobility-Impaired"),
9: T("Paralysed"),
10: T("Amputated"),
11: T("Other Physical Disability"),
12: T("Mentally Disabled"),
}
evr_aids_appliances_opts = {1: ("Guide Dog"),
2: ("Wheelchair"),
3: ("Walking stick"),
4: ("Crutch"),
5: ("Tripod"),
6: ("Artificial limb"),
7: ("Catheter"),
8: ("Sanity Napkin"),
}
def med_multiopt_field(fieldname, options, label=None):
""" Simple generator for option fields """
return Field(fieldname, "list:integer",
label = label,
represent = S3Represent(options = options,
multiple = True),
requires = IS_IN_SET(options, multiple = True),
widget = S3MultiSelectWidget(filter = False,
selectedList = 3,
noneSelectedText = "Select",
)
)
evr_source_opts = {1: "Self",
2: "Mother",
3: "Father",
4: "Uncle",
5: "Grandfather",
6: "Grandmother",
7: "Official",
8: "Attendant",
9: "Neighbour",
10: "Teacher",
11: "Priest",
12: "Other",
}
tablename = "evr_medical_details"
define_table(tablename,
person_id(),
med_multiopt_field("therapy",
evr_therapy_opts,
label = T("Therapy"),
),
Field("therapy_comment"),
Field("pregnancy", "boolean",
label = T("Pregnancy"),
),
med_multiopt_field("allergy",
evr_allergy_opts,
label = T("Allergies"),
),
Field("diet",
label = T("Food intolerance"),
),
med_multiopt_field("disability",
evr_disability_opts,
label = T("Disabilities"),
),
Field("self_sufficient", "boolean",
label = T("Self-Sufficient"),
),
med_multiopt_field("aids_appliances",
evr_aids_appliances_opts,
label = T("Aids and Appliances"),
),
Field("declared_by_name",
label = T("Declared by (Name)"),
),
Field("declared_by_relationship", "integer",
label = T("Declared by (Relationship)"),
represent=S3Represent(options=evr_source_opts),
requires = IS_IN_SET(evr_source_opts,
zero=None),
),
Field("declared_by_phone",
label = T("Declared by (Phone)"),
requires = IS_NULL_OR(IS_PHONE_NUMBER()),
),
Field("declared_by_email",
label = T("Declared by (Email)"),
requires = IS_NULL_OR(IS_EMAIL()),
),
Field("has_attendant", "boolean",
label = T("Has Attendant"),
),
Field("attendant_name",
label = T("Attendant (Name)"),
),
Field("attendant_phone",
label = T("Attendant (Phone)"),
requires = IS_NULL_OR(IS_PHONE_NUMBER()),
),
Field("attendant_email",
label = T("Attendant (Email)"),
requires = IS_NULL_OR(IS_EMAIL()),
),
s3_comments(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Socio-economic Background
#
tablename = "evr_background"
define_table(tablename,
person_id(),
Field("legal_measure",
label = T("Legal measure / Home warrant"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Legal measure / Home warrant"),
T("Evacuee subject to special or legal measures/penalities")
)
),
),
Field("diet_restrictions",
label = T("Food Restrictions")
),
Field("social_welfare",
label = T("Social Welfare"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Social Welfare"),
T("Evacuee subject to Social Welfare")
)
),
),
Field("interpreter",
label = T("Interpreter / Cultural Mediator Required"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Interpreter / Cultural Mediator"),
T("Specific language interpreter and/or cultural mediator required")
)
),
),
Field("home_help", "boolean",
label = T("Home Help"),
comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Home Help"),
T("Evacuee requiring dedicated assistance at home")
)
),
),
Field("distance_from_shelter", "integer",
label = T("Working Distance from Shelter (km)")
),
Field("job_lost_by_event", "boolean",
label = T("Job lost by event")
),
Field("domestic_animal", "boolean",
label = T("With Domestic Animals")
),
Field("car_available", "boolean",
label = T("Car available")
),
s3_comments(),
*s3_meta_fields())
# -------------------------------------------------------------------------
@staticmethod
def evr_case_onaccept(form):
"""
After DB I/O, check the correctness of fiscal code (ITALY)
@ToDo: The function should be made a deployment_setting when anyone else wishes to use this module
"""
# Initialization
fiscal_code = form.vars.fiscal_code
if fiscal_code == "" or fiscal_code == None:
return
fiscal_code = fiscal_code.upper()
MALE = 3
CONSONANTS = "BCDFGHJKLMNPQRSTVWXYZ"
VOWELS = "AEIOU"
MONTHS = "ABCDEHLMPRST"
T = current.T
ptable = current.s3db.pr_person
query = (form.vars.person_id == ptable.id)
row = current.db(query).select(ptable.first_name,
ptable.last_name,
ptable.date_of_birth,
ptable.gender,
limitby = (0, 1)
).first()
name = row.first_name.upper()
surname = row.last_name.upper()
date_of_birth = row.date_of_birth
year = date_of_birth.year
month = date_of_birth.month
day = date_of_birth.day
gender = row.gender
# Check surname
cons = ""
for c in surname:
if c in CONSONANTS:
cons += c
vow = ""
for c in surname:
if c in VOWELS:
vow += c
chars = cons + vow
        if len(chars) < 3:
            chars += "X" * (3 - len(chars))
if fiscal_code[:3] != chars[0:3].upper():
current.response.warning = T("Warning: fiscal code isn't \
consistent with personal data")
return
# Check name
cons = ""
for c in name:
if c in CONSONANTS:
cons += c
if len(cons) > 3:
chars = cons[0] + cons[2] + cons[3]
else:
vow = ""
for c in name:
if c in VOWELS:
vow += c
chars = cons + vow
            if len(chars) < 3:
                chars += "X" * (3 - len(chars))
if fiscal_code[3:6] != chars[0:3].upper():
            current.response.warning = T("Warning: fiscal code isn't consistent with personal data")
return
# Check date of birth and gender
        year = str(year)[2:4] # Convert to string and keep only the last two digits
if fiscal_code[6:8] != year or \
fiscal_code[8] != MONTHS[month - 1]:
            current.response.warning = T("Warning: fiscal code isn't consistent with personal data")
return
if gender == MALE:
birthday_in_cf = fiscal_code[9:11]
if not birthday_in_cf.isdigit():
                current.response.warning = T("Warning: fiscal code isn't consistent with personal data")
return
else:
birthday_in_cf = int(birthday_in_cf)
if birthday_in_cf != day:
                    current.response.warning = T("Warning: fiscal code isn't consistent with personal data")
return
else: # if gender == FEMALE
if fiscal_code[9:11] != str(day + 40):
                current.response.warning = T("Warning: fiscal code isn't consistent with personal data")
return
return
# =============================================================================
def evr_rheader(r):
"""
EVR Resource Headers
@param r: the S3Request
"""
T = current.T
settings = current.deployment_settings
if r.representation != "html" or not r.record:
return None
resourcename = r.name
rheader_fields = None
if resourcename == "person":
tabs = [(T("Person"), None),
(T("Addresses"), "address"),
(T("Contact Data"), "contacts"),
(T("Groups"), "group_membership"),
                # these can be hidden since they are inline in the main form,
                # but can be enabled to verify the functionality:
#(T("Identity Documents"), "identity"),
#(T("Case Details"), "case"),
(T("Images"), "image"),
(T("Medical Information"), "medical_details"),
(T("Socio-Economic Background"), "background"),
]
if settings.get_evr_show_physical_description():
tabs.append((T("Physical Description"), "physical_description"))
if settings.has_module("cr"):
tabs.append((T("Shelter Registration"), "shelter_registration"))
rheader_fields = [["first_name", "last_name"],
["date_of_birth"],
]
# Show profile picture in rheader
itable = current.s3db.pr_image
query = (itable.pe_id == r.record.pe_id) & \
(itable.profile == True)
image = current.db(query).select(itable.image,
limitby=(0, 1)).first()
if image:
image = itable.image.represent(image.image)
else:
image = A(IMG(_src=URL(c="static", f="img", args="blank-user.gif"),
_height=60,
_title=T("No image available")),
_class="th",
_href=URL(f="person", args=[r.id, "image", "create"]),
)
return DIV(DIV(image, _style="float:left"),
S3ResourceHeader(rheader_fields, tabs)(r))
elif resourcename == "group":
tabs = [("Group Details", None),
(T("Contact Data"), "contact"),
(T("Members"), "group_membership"),
]
# Show "Add Members" tab only when we action it explicitly
# (=> from action-button in the group members list)
if r.method == "add_members":
tabs.append((T("Add Members"), "add_members"))
rheader_fields = [["name"],
["description"],
]
return S3ResourceHeader(rheader_fields, tabs)(r)
return None
# =============================================================================
class evr_AddGroupMembers(S3Method):
"""
Custom method to select multiple persons from a filtered list
and add them to a group
"""
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
"""
Entry point for REST controller
@param r: the S3Request
@param attr: dictionary of parameters for the method handler
@return: output object to send to the view
"""
# Add button "Add Members" to members tab
if r.http in ("GET", "POST"):
if r.representation == "html" and r.id or \
r.representation == "aadata":
return self.add_members(r, **attr)
else:
r.error(415, current.ERROR.BAD_FORMAT)
else:
r.error(405, current.ERROR.BAD_METHOD)
# -------------------------------------------------------------------------
def add_members(self, r, **attr):
"""
Add-members action: renders a filtered multi-select datatable
form, and creates group_memberships on POST
@param r: the S3Request
@param attr: dictionary of parameters for the method handler
@return: output object to send to the view
"""
T = current.T
db = current.db
s3db = current.s3db
unaffiliated = ((S3FieldSelector("group_membership.id") == None) & \
(S3FieldSelector("case.id") != None))
if r.http == "POST":
# Form submission
group_id = r.id
added = 0
post_vars = r.post_vars
if all([name in post_vars
for name in ("add", "selected", "mode")]):
# Get selection
selected = post_vars.selected
if selected:
selected = selected.split(",")
else:
selected = []
# Handle exclusion filter
if post_vars.mode == "Exclusive":
if "filterURL" in post_vars:
filters = S3URLQuery.parse_url(post_vars.filterURL)
else:
filters = None
query = unaffiliated & \
(~(S3FieldSelector("id").belongs(selected)))
resource = s3db.resource("pr_person",
filter=query,
vars=filters)
rows = resource.select(["id"], as_rows=True)
selected = [str(row.id) for row in rows]
# Avoid duplicates
gtable = s3db.pr_group_membership
query = (gtable.group_id == group_id) & \
(gtable.person_id.belongs(selected)) & \
(gtable.deleted != True)
rows = db(query).select(gtable.person_id)
skip = set(row.person_id for row in rows)
# Add new group members
for record_id in selected:
try:
person_id = int(record_id.strip())
except ValueError:
continue
if person_id in skip:
continue
gtable.insert(group_id = group_id,
person_id = person_id,
)
added += 1
# Confirmation message (in session because we redirect)
session = current.session
if not selected:
session.warning = T("No Persons Selected!")
else:
session.confirmation = T("%(number)s Members added to Group") % \
dict(number=added)
# Go back to list of existing group members
redirect(r.url(method = "",
id = group_id,
component = "group_membership"))
else:
resource = s3db.resource("pr_person", vars=r.get_vars)
resource.add_filter(unaffiliated)
get_config = resource.get_config
# Filter widgets
filter_widgets = get_config("filter_widgets", [])
filter_widgets.append(S3DateFilter("created_on",
label = T("Registered on"),
)
)
# List fields
list_fields = ["id",
"first_name",
"last_name",
"gender",
"date_of_birth",
]
response = current.response
# Data table boundaries
get_vars = self.request.get_vars
if "displayStart" in get_vars:
start = int(get_vars["displayStart"])
else:
start = None
if "pageLength" in get_vars:
display_length = int(get_vars["pageLength"])
else:
display_length = response.s3.ROWSPERPAGE
limit = 4 * display_length
# Apply datatable filter and sorting
totalrows = resource.count()
filter, orderby, left = resource.datatable_filter(list_fields, get_vars)
if not orderby:
# Most recently created records on top
orderby = "pr_person.created_on desc"
resource.add_filter(filter)
# Retrieve the data
data = resource.select(list_fields,
start=start,
limit=limit,
orderby=orderby,
left=left,
count=True,
represent=True)
filteredrows = data["numrows"]
# Generate the datatable
dt = S3DataTable(data["rfields"], data["rows"])
dt_id = "datatable"
# Bulk Action
dt_bulk_actions = [(T("Add as Group Members"), "add")]
if r.representation == "html":
# Page load
# Custom open-button, no delete-option
resource.configure(deletable = False)
open_url = URL(f = "person", args = ["[id]"])
S3CRUD.action_buttons(r,
deletable = False,
read_url = open_url,
update_url = open_url)
# Need no export formats (as this is a form)
response.s3.no_formats = True
# Data table (items)
items = dt.html(totalrows,
filteredrows,
dt_id,
dt_ajax_url=URL(c="evr",
f="group",
args=["add_members"],
vars={},
extension="aadata",
),
dt_bulk_actions=dt_bulk_actions,
dt_pageLength=display_length,
dt_pagination="true",
dt_searching="false",
)
resource.configure(deletable = False)
# Filter form
if filter_widgets:
# Where to retrieve filtered data from:
_vars = resource.crud._remove_filters(r.get_vars)
filter_submit_url = r.url(vars=_vars)
# Where to retrieve updated filter options from:
filter_ajax_url = URL(f="person",
args=["filter.options"],
vars={},
)
# Define filter form
filter_clear = get_config("filter_clear", True)
filter_submit = get_config("filter_submit", True)
filter_form = S3FilterForm(filter_widgets,
clear=filter_clear,
submit=filter_submit,
ajax=True,
url=filter_submit_url,
ajaxurl=filter_ajax_url,
_class="filter-form",
_id="datatable-filter-form",
)
# Render filter form
fresource = s3db.resource(resource.tablename)
alias = resource.alias if r.component else None
ff = filter_form.html(fresource,
r.get_vars,
target="datatable",
alias=alias)
else:
ff = ""
output = dict(items = items,
title = T("Add Members to Group"),
addheader = "%s:" % T("Select People to add them to the Group"),
list_filter_form = ff,
)
response.view = "list_filter.html"
return output
else:
# Ajax refresh
if "draw" in get_vars:
echo = int(get_vars.draw)
else:
echo = None
items = dt.json(totalrows,
filteredrows,
dt_id,
echo,
dt_bulk_actions=dt_bulk_actions,
)
response.headers["Content-Type"] = "application/json"
return items
# END =========================================================================
"""
Django settings for rindr project.
Generated by 'django-admin startproject' using Django 3.2.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
from decouple import config
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'type',
'ticket',
'django_bootstrap5',
'jquery',
'dashboard',
'mathfilters',
'BI'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'rindr.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'rindr.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
#}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'rindr',
'USER': 'rindr',
'PASSWORD': 'freya',
'HOST': '10.100.102.161',
'PORT': ''
}
}
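# The database credentials could also come from the environment via
# python-decouple, mirroring SECRET_KEY above (key name illustrative):
#   'PASSWORD': config('DB_PASSWORD', default=''),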
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
LOGIN_URL="/login"
"""The helper module for the flip interfaces.
"""
from enum import Enum
from apysc._type.boolean import Boolean
class Axis(Enum):
X = 'x'
Y = 'y'
def make_flip_update_expression(
*, before_value: Boolean, after_value: Boolean,
axis: Axis, interface_variable_name: str) -> str:
"""
Make a flipping value updating expression.
Parameters
----------
before_value : Boolean
Before updating flipping value.
after_value : Boolean
After updating flipping value.
axis : Axis
X or y axis value.
interface_variable_name : str
Interface instance variable name.
Returns
-------
expression : str
Made expression string.
"""
from apysc._type import value_util
before_value_str: str = value_util.get_value_str_for_expression(
value=before_value)
after_value_str: str = value_util.get_value_str_for_expression(
value=after_value)
expression: str = (
f'if ({before_value_str}) {{'
f'\n {interface_variable_name}.flip("{axis.value}");'
'\n}'
f'\nif ({after_value_str}) {{'
f'\n {interface_variable_name}.flip("{axis.value}");'
'\n}'
f'\n{before_value_str} = {after_value_str};'
)
return expression
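# For illustration, with hypothetical compiled names -- a display object
# variable "sp_1" and Boolean variables "b_1" (before) and "b_2" (after) --
# the expression produced for axis=Axis.X looks like:
#
#   if (b_1) {
#     sp_1.flip("x");
#   }
#   if (b_2) {
#     sp_1.flip("x");
#   }
#   b_1 = b_2;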
from django.contrib import admin
from .models import Meeting, MeetingMinutes, Resource, Event
# Register your models here.
admin.site.register(Meeting)
admin.site.register(MeetingMinutes)
admin.site.register(Resource)
admin.site.register(Event)
|
# flake8: noqa E501
from hmda.models.hmda_data_file import HmdaDataFile
# Access this using LOAN_FILE_METADATA[geo][field_descriptions][records][year]
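# e.g. LOAN_FILE_METADATA['va']['codes']['originated-records']['2016'] returns
# the HmdaDataFile for that slice (its attribute names depend on HmdaDataFile's
# definition, which is not shown here).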
LOAN_FILE_METADATA = {
'nationwide': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7036352', '482.83 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7201366', '453.04 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5986659', '369.82 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '6113423', '485.63 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '4832425', '323.43 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5526941', '330.65 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7783986', '467.08 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '5946435', '399.41 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '6764902', '455.07 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '7126202', '492.02 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_first-lien-owner-occupied-1-4-family-records_labels.zip', '8298882', '573.78 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_all-records_labels.zip', '16332987', '1.2 GB'),
'2007': HmdaDataFile('hmda_2007_nationwide_all-records_labels.zip', '26605695', '1.72 GB'),
'2017': HmdaDataFile('hmda_2017_nationwide_all-records_labels.zip', '14285496', '986 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_all-records_labels.zip', '14374184', '1.21 GB'),
'2014': HmdaDataFile('hmda_2014_nationwide_all-records_labels.zip', '12049341', '862.92 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_all-records_labels.zip', '17391570', '1.06 GB'),
'2009': HmdaDataFile('hmda_2009_nationwide_all-records_labels.zip', '19493491', '1.29 GB'),
'2011': HmdaDataFile('hmda_2011_nationwide_all-records_labels.zip', '14873415', '1.08 GB'),
'2010': HmdaDataFile('hmda_2010_nationwide_all-records_labels.zip', '16348557', '1.19 GB'),
'2013': HmdaDataFile('hmda_2013_nationwide_all-records_labels.zip', '17016159', '1.27 GB'),
'2012': HmdaDataFile('hmda_2012_nationwide_all-records_labels.zip', '18691551', '1.4 GB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_originated-records_labels.zip', '8377907', '457.12 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_originated-records_labels.zip', '10441545', '528.7 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_originated-records_labels.zip', '7339057', '247.2 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_originated-records_labels.zip', '7404258', '461.08 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_originated-records_labels.zip', '6039826', '331.36 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_originated-records_labels.zip', '7177262', '360.36 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_originated-records_labels.zip', '8950936', '416.67 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_originated-records_labels.zip', '7095262', '381.6 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_originated-records_labels.zip', '7863337', '419.27 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_originated-records_labels.zip', '8706657', '476.47 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_originated-records_labels.zip', '9783966', '529.5 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7036352', '165.84 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7201366', '141.73 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5986659', '77.47 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '6113423', '144.37 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '4787867', '113.54 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5526941', '107.3 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7783986', '140.57 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '5946435', '132 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '6764902', '149.36 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '7126202', '166.47 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_first-lien-owner-occupied-1-4-family-records_codes.zip', '8298882', '189.65 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_all-records_codes.zip', '16332987', '384.11 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_all-records_codes.zip', '26605695', '461.15 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_all-records_codes.zip', '14285496', '182.02 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_all-records_codes.zip', '14374184', '337.27 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_all-records_codes.zip', '11875464', '278.4 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_all-records_codes.zip', '17391570', '309.22 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_all-records_codes.zip', '19493491', '331.31 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_all-records_codes.zip', '14873415', '335.22 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_all-records_codes.zip', '16348557', '367.78 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_all-records_codes.zip', '17016159', '400.19 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_all-records_codes.zip', '18691551', '434.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nationwide_originated-records_codes.zip', '8377907', '196.62 MB'),
'2007': HmdaDataFile('hmda_2007_nationwide_originated-records_codes.zip', '10441545', '199.55 MB'),
'2017': HmdaDataFile('hmda_2017_nationwide_originated-records_codes.zip', '7339057', '94.95 MB'),
'2015': HmdaDataFile('hmda_2015_nationwide_originated-records_codes.zip', '7404258', '173.96 MB'),
'2014': HmdaDataFile('hmda_2014_nationwide_originated-records_codes.zip', '5979766', '140.82 MB'),
'2008': HmdaDataFile('hmda_2008_nationwide_originated-records_codes.zip', '7177262', '137.98 MB'),
'2009': HmdaDataFile('hmda_2009_nationwide_originated-records_codes.zip', '8950936', '162.04 MB'),
'2011': HmdaDataFile('hmda_2011_nationwide_originated-records_codes.zip', '7095262', '157.4 MB'),
'2010': HmdaDataFile('hmda_2010_nationwide_originated-records_codes.zip', '7863337', '173.45 MB'),
'2013': HmdaDataFile('hmda_2013_nationwide_originated-records_codes.zip', '8706657', '203.33 MB'),
'2012': HmdaDataFile('hmda_2012_nationwide_originated-records_codes.zip', '9783966', '224.11 MB')
}
}
},
'va': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '216152', '11.27 MB'),
'2007': HmdaDataFile('hmda_2007_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '228323', '11.15 MB'),
'2017': HmdaDataFile('hmda_2017_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '175737', '5.58 MB'),
'2015': HmdaDataFile('hmda_2015_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '191048', '11.13 MB'),
'2014': HmdaDataFile('hmda_2014_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '147744', '7.63 MB'),
'2008': HmdaDataFile('hmda_2008_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '186185', '8.91 MB'),
'2009': HmdaDataFile('hmda_2009_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '273787', '12.14 MB'),
'2011': HmdaDataFile('hmda_2011_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '205670', '10.53 MB'),
'2010': HmdaDataFile('hmda_2010_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '228664', '11.48 MB'),
'2013': HmdaDataFile('hmda_2013_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '244599', '12.77 MB'),
'2012': HmdaDataFile('hmda_2012_va_first-lien-owner-occupied-1-4-family-records_labels.zip', '288436', '14.9 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_va_all-records_labels.zip', '494057', '27.35 MB'),
'2007': HmdaDataFile('hmda_2007_va_all-records_labels.zip', '784919', '38.17 MB'),
'2017': HmdaDataFile('hmda_2017_va_all-records_labels.zip', '411507', '14.48 MB'),
'2015': HmdaDataFile('hmda_2015_va_all-records_labels.zip', '445447', '27.97 MB'),
'2014': HmdaDataFile('hmda_2014_va_all-records_labels.zip', '365572', '20.34 MB'),
'2008': HmdaDataFile('hmda_2008_va_all-records_labels.zip', '539572', '26.49 MB'),
'2009': HmdaDataFile('hmda_2009_va_all-records_labels.zip', '637212', '29.34 MB'),
'2011': HmdaDataFile('hmda_2011_va_all-records_labels.zip', '482943', '27.06 MB'),
'2010': HmdaDataFile('hmda_2010_va_all-records_labels.zip', '517819', '28.54 MB'),
'2013': HmdaDataFile('hmda_2013_va_all-records_labels.zip', '563167', '31.8 MB'),
'2012': HmdaDataFile('hmda_2012_va_all-records_labels.zip', '634102', '35.59 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_va_originated-records_labels.zip', '252237', '13.42 MB'),
'2007': HmdaDataFile('hmda_2007_va_originated-records_labels.zip', '327766', '16.15 MB'),
'2017': HmdaDataFile('hmda_2017_va_originated-records_labels.zip', '211218', '6.89 MB'),
'2015': HmdaDataFile('hmda_2015_va_originated-records_labels.zip', '227837', '13.49 MB'),
'2014': HmdaDataFile('hmda_2014_va_originated-records_labels.zip', '183729', '9.73 MB'),
'2008': HmdaDataFile('hmda_2008_va_originated-records_labels.zip', '234813', '11.47 MB'),
'2009': HmdaDataFile('hmda_2009_va_originated-records_labels.zip', '308658', '13.99 MB'),
'2011': HmdaDataFile('hmda_2011_va_originated-records_labels.zip', '239310', '12.46 MB'),
'2010': HmdaDataFile('hmda_2010_va_originated-records_labels.zip', '260214', '13.38 MB'),
'2013': HmdaDataFile('hmda_2013_va_originated-records_labels.zip', '294145', '15.54 MB'),
'2012': HmdaDataFile('hmda_2012_va_originated-records_labels.zip', '334770', '17.51 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '216152', '7.74 MB'),
'2007': HmdaDataFile('hmda_2007_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '228323', '7.71 MB'),
'2017': HmdaDataFile('hmda_2017_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '175737', '3.89 MB'),
'2015': HmdaDataFile('hmda_2015_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '191048', '7.65 MB'),
'2014': HmdaDataFile('hmda_2014_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '147744', '5.26 MB'),
'2008': HmdaDataFile('hmda_2008_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '186185', '6.21 MB'),
'2009': HmdaDataFile('hmda_2009_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '273787', '8.61 MB'),
'2011': HmdaDataFile('hmda_2011_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '205670', '7.08 MB'),
'2010': HmdaDataFile('hmda_2010_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '228664', '7.72 MB'),
'2013': HmdaDataFile('hmda_2013_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '244599', '8.81 MB'),
'2012': HmdaDataFile('hmda_2012_va_first-lien-owner-occupied-1-4-family-records_codes.zip', '288436', '10.21 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_va_all-records_codes.zip', '494057', '18.2 MB'),
'2007': HmdaDataFile('hmda_2007_va_all-records_codes.zip', '784919', '25.76 MB'),
'2017': HmdaDataFile('hmda_2017_va_all-records_codes.zip', '411507', '9.31 MB'),
'2015': HmdaDataFile('hmda_2015_va_all-records_codes.zip', '445447', '18.51 MB'),
'2014': HmdaDataFile('hmda_2014_va_all-records_codes.zip', '365572', '13.57 MB'),
'2008': HmdaDataFile('hmda_2008_va_all-records_codes.zip', '539572', '17.99 MB'),
'2009': HmdaDataFile('hmda_2009_va_all-records_codes.zip', '637212', '20.26 MB'),
'2011': HmdaDataFile('hmda_2011_va_all-records_codes.zip', '482943', '17.83 MB'),
'2010': HmdaDataFile('hmda_2010_va_all-records_codes.zip', '517819', '18.81 MB'),
'2013': HmdaDataFile('hmda_2013_va_all-records_codes.zip', '563167', '21.36 MB'),
'2012': HmdaDataFile('hmda_2012_va_all-records_codes.zip', '634102', '23.86 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_va_originated-records_codes.zip', '252237', '9.19 MB'),
'2007': HmdaDataFile('hmda_2007_va_originated-records_codes.zip', '327766', '11.21 MB'),
'2017': HmdaDataFile('hmda_2017_va_originated-records_codes.zip', '211218', '4.75 MB'),
'2015': HmdaDataFile('hmda_2015_va_originated-records_codes.zip', '227837', '9.22 MB'),
'2014': HmdaDataFile('hmda_2014_va_originated-records_codes.zip', '183729', '6.67 MB'),
'2008': HmdaDataFile('hmda_2008_va_originated-records_codes.zip', '234813', '7.98 MB'),
'2009': HmdaDataFile('hmda_2009_va_originated-records_codes.zip', '308658', '9.9 MB'),
'2011': HmdaDataFile('hmda_2011_va_originated-records_codes.zip', '239310', '8.34 MB'),
'2010': HmdaDataFile('hmda_2010_va_originated-records_codes.zip', '260214', '8.98 MB'),
'2013': HmdaDataFile('hmda_2013_va_originated-records_codes.zip', '294145', '10.63 MB'),
'2012': HmdaDataFile('hmda_2012_va_originated-records_codes.zip', '334770', '11.93 MB')
}
}
},
'co': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '228866', '11.12 MB'),
'2007': HmdaDataFile('hmda_2007_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '144805', '6.71 MB'),
'2017': HmdaDataFile('hmda_2017_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '182654', '5.82 MB'),
'2015': HmdaDataFile('hmda_2015_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '194123', '10.51 MB'),
'2014': HmdaDataFile('hmda_2014_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '139220', '6.92 MB'),
'2008': HmdaDataFile('hmda_2008_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '128542', '5.81 MB'),
'2009': HmdaDataFile('hmda_2009_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '209511', '8.79 MB'),
'2011': HmdaDataFile('hmda_2011_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '149880', '7.07 MB'),
'2010': HmdaDataFile('hmda_2010_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '180911', '8.7 MB'),
'2013': HmdaDataFile('hmda_2013_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '192627', '9.59 MB'),
'2012': HmdaDataFile('hmda_2012_co_first-lien-owner-occupied-1-4-family-records_labels.zip', '222498', '10.92 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_co_all-records_labels.zip', '483436', '25.25 MB'),
'2007': HmdaDataFile('hmda_2007_co_all-records_labels.zip', '537363', '25.41 MB'),
'2017': HmdaDataFile('hmda_2017_co_all-records_labels.zip', '404517', '14.76 MB'),
'2015': HmdaDataFile('hmda_2015_co_all-records_labels.zip', '409511', '23.35 MB'),
'2014': HmdaDataFile('hmda_2014_co_all-records_labels.zip', '313445', '16.62 MB'),
'2008': HmdaDataFile('hmda_2008_co_all-records_labels.zip', '370468', '17.59 MB'),
'2009': HmdaDataFile('hmda_2009_co_all-records_labels.zip', '492317', '21.47 MB'),
'2011': HmdaDataFile('hmda_2011_co_all-records_labels.zip', '366969', '18.87 MB'),
'2010': HmdaDataFile('hmda_2010_co_all-records_labels.zip', '413027', '21.58 MB'),
'2013': HmdaDataFile('hmda_2013_co_all-records_labels.zip', '427952', '22.81 MB'),
'2012': HmdaDataFile('hmda_2012_co_all-records_labels.zip', '474846', '24.96 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_co_originated-records_labels.zip', '263402', '13.07 MB'),
'2007': HmdaDataFile('hmda_2007_co_originated-records_labels.zip', '218842', '10.18 MB'),
'2017': HmdaDataFile('hmda_2017_co_originated-records_labels.zip', '216848', '7.17 MB'),
'2015': HmdaDataFile('hmda_2015_co_originated-records_labels.zip', '227578', '12.54 MB'),
'2014': HmdaDataFile('hmda_2014_co_originated-records_labels.zip', '169959', '8.6 MB'),
'2008': HmdaDataFile('hmda_2008_co_originated-records_labels.zip', '162244', '7.43 MB'),
'2009': HmdaDataFile('hmda_2009_co_originated-records_labels.zip', '236219', '9.99 MB'),
'2011': HmdaDataFile('hmda_2011_co_originated-records_labels.zip', '179323', '8.69 MB'),
'2010': HmdaDataFile('hmda_2010_co_originated-records_labels.zip', '207951', '10.35 MB'),
'2013': HmdaDataFile('hmda_2013_co_originated-records_labels.zip', '235157', '11.95 MB'),
'2012': HmdaDataFile('hmda_2012_co_originated-records_labels.zip', '263229', '13.2 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '228866', '7.81 MB'),
'2007': HmdaDataFile('hmda_2007_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '144805', '4.75 MB'),
'2017': HmdaDataFile('hmda_2017_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '182654', '4.07 MB'),
'2015': HmdaDataFile('hmda_2015_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '194123', '7.52 MB'),
'2014': HmdaDataFile('hmda_2014_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '139220', '4.9 MB'),
'2008': HmdaDataFile('hmda_2008_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '128542', '4.11 MB'),
'2009': HmdaDataFile('hmda_2009_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '209511', '6.34 MB'),
'2011': HmdaDataFile('hmda_2011_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '149880', '4.88 MB'),
'2010': HmdaDataFile('hmda_2010_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '180911', '6.03 MB'),
'2013': HmdaDataFile('hmda_2013_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '192627', '6.74 MB'),
'2012': HmdaDataFile('hmda_2012_co_first-lien-owner-occupied-1-4-family-records_codes.zip', '222498', '7.6 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_co_all-records_codes.zip', '483436', '17.2 MB'),
'2007': HmdaDataFile('hmda_2007_co_all-records_codes.zip', '537363', '17.72 MB'),
'2017': HmdaDataFile('hmda_2017_co_all-records_codes.zip', '404517', '9.9 MB'),
'2015': HmdaDataFile('hmda_2015_co_all-records_codes.zip', '409511', '16.01 MB'),
'2014': HmdaDataFile('hmda_2014_co_all-records_codes.zip', '313445', '11.43 MB'),
'2008': HmdaDataFile('hmda_2008_co_all-records_codes.zip', '370468', '12.3 MB'),
'2009': HmdaDataFile('hmda_2009_co_all-records_codes.zip', '492317', '15.15 MB'),
'2011': HmdaDataFile('hmda_2011_co_all-records_codes.zip', '366969', '12.63 MB'),
'2010': HmdaDataFile('hmda_2010_co_all-records_codes.zip', '413027', '14.47 MB'),
'2013': HmdaDataFile('hmda_2013_co_all-records_codes.zip', '427952', '15.58 MB'),
'2012': HmdaDataFile('hmda_2012_co_all-records_codes.zip', '474846', '16.87 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_co_originated-records_codes.zip', '263402', '9.15 MB'),
'2007': HmdaDataFile('hmda_2007_co_originated-records_codes.zip', '218842', '7.18 MB'),
'2017': HmdaDataFile('hmda_2017_co_originated-records_codes.zip', '216848', '4.99 MB'),
'2015': HmdaDataFile('hmda_2015_co_originated-records_codes.zip', '227578', '8.91 MB'),
'2014': HmdaDataFile('hmda_2014_co_originated-records_codes.zip', '169959', '6.05 MB'),
'2008': HmdaDataFile('hmda_2008_co_originated-records_codes.zip', '162244', '5.23 MB'),
'2009': HmdaDataFile('hmda_2009_co_originated-records_codes.zip', '236219', '7.16 MB'),
'2011': HmdaDataFile('hmda_2011_co_originated-records_codes.zip', '179323', '5.95 MB'),
'2010': HmdaDataFile('hmda_2010_co_originated-records_codes.zip', '207951', '7.15 MB'),
'2013': HmdaDataFile('hmda_2013_co_originated-records_codes.zip', '235157', '8.37 MB'),
'2012': HmdaDataFile('hmda_2012_co_originated-records_codes.zip', '263229', '9.16 MB')
}
}
},
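    # U.S. Virgin Islands (note: most years have zero records; the small sizes
    # reflect header-only zip files).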
'vi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '15', '1.55 KB'),
'2015': HmdaDataFile('hmda_2015_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_first-lien-owner-occupied-1-4-family-records_labels.zip', '0', '647 bytes')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vi_all-records_labels.zip', '0', '581 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_all-records_labels.zip', '0', '581 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_all-records_labels.zip', '47', '2.39 KB'),
'2015': HmdaDataFile('hmda_2015_vi_all-records_labels.zip', '0', '581 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_all-records_labels.zip', '0', '581 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_all-records_labels.zip', '0', '581 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_all-records_labels.zip', '0', '581 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_all-records_labels.zip', '0', '581 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_all-records_labels.zip', '0', '581 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_all-records_labels.zip', '0', '581 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_all-records_labels.zip', '0', '581 bytes')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vi_originated-records_labels.zip', '0', '595 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_originated-records_labels.zip', '0', '595 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_originated-records_labels.zip', '23', '1.73 KB'),
'2015': HmdaDataFile('hmda_2015_vi_originated-records_labels.zip', '0', '595 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_originated-records_labels.zip', '0', '595 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_originated-records_labels.zip', '0', '595 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_originated-records_labels.zip', '0', '595 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_originated-records_labels.zip', '0', '595 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_originated-records_labels.zip', '0', '595 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_originated-records_labels.zip', '0', '595 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_originated-records_labels.zip', '0', '595 bytes')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '15', '940 bytes'),
'2015': HmdaDataFile('hmda_2015_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_first-lien-owner-occupied-1-4-family-records_codes.zip', '0', '540 bytes')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vi_all-records_codes.zip', '0', '474 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_all-records_codes.zip', '0', '474 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_all-records_codes.zip', '47', '1.37 KB'),
'2015': HmdaDataFile('hmda_2015_vi_all-records_codes.zip', '0', '474 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_all-records_codes.zip', '0', '474 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_all-records_codes.zip', '0', '474 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_all-records_codes.zip', '0', '474 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_all-records_codes.zip', '0', '474 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_all-records_codes.zip', '0', '474 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_all-records_codes.zip', '0', '474 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_all-records_codes.zip', '0', '474 bytes')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vi_originated-records_codes.zip', '0', '488 bytes'),
'2007': HmdaDataFile('hmda_2007_vi_originated-records_codes.zip', '0', '488 bytes'),
'2017': HmdaDataFile('hmda_2017_vi_originated-records_codes.zip', '23', '1.03 KB'),
'2015': HmdaDataFile('hmda_2015_vi_originated-records_codes.zip', '0', '488 bytes'),
'2014': HmdaDataFile('hmda_2014_vi_originated-records_codes.zip', '0', '488 bytes'),
'2008': HmdaDataFile('hmda_2008_vi_originated-records_codes.zip', '0', '488 bytes'),
'2009': HmdaDataFile('hmda_2009_vi_originated-records_codes.zip', '0', '488 bytes'),
'2011': HmdaDataFile('hmda_2011_vi_originated-records_codes.zip', '0', '488 bytes'),
'2010': HmdaDataFile('hmda_2010_vi_originated-records_codes.zip', '0', '488 bytes'),
'2013': HmdaDataFile('hmda_2013_vi_originated-records_codes.zip', '0', '488 bytes'),
'2012': HmdaDataFile('hmda_2012_vi_originated-records_codes.zip', '0', '488 bytes')
}
}
},
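    # Alaska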
'ak': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '15356', '685.22 KB'),
'2007': HmdaDataFile('hmda_2007_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '16758', '627.93 KB'),
'2017': HmdaDataFile('hmda_2017_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '12579', '350.04 KB'),
'2015': HmdaDataFile('hmda_2015_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '14511', '637.91 KB'),
'2014': HmdaDataFile('hmda_2014_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '12147', '530.18 KB'),
'2008': HmdaDataFile('hmda_2008_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '15576', '536.13 KB'),
'2009': HmdaDataFile('hmda_2009_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '23301', '812.74 KB'),
'2011': HmdaDataFile('hmda_2011_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '16974', '682.85 KB'),
'2010': HmdaDataFile('hmda_2010_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '18636', '750.13 KB'),
'2013': HmdaDataFile('hmda_2013_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '17337', '684.52 KB'),
'2012': HmdaDataFile('hmda_2012_ak_first-lien-owner-occupied-1-4-family-records_labels.zip', '22064', '872.42 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ak_all-records_labels.zip', '36105', '1.77 MB'),
'2007': HmdaDataFile('hmda_2007_ak_all-records_labels.zip', '48143', '2.05 MB'),
'2017': HmdaDataFile('hmda_2017_ak_all-records_labels.zip', '28632', '904.87 KB'),
'2015': HmdaDataFile('hmda_2015_ak_all-records_labels.zip', '33421', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_ak_all-records_labels.zip', '26499', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_ak_all-records_labels.zip', '36410', '1.47 MB'),
'2009': HmdaDataFile('hmda_2009_ak_all-records_labels.zip', '51821', '2.02 MB'),
'2011': HmdaDataFile('hmda_2011_ak_all-records_labels.zip', '36900', '1.62 MB'),
'2010': HmdaDataFile('hmda_2010_ak_all-records_labels.zip', '41203', '1.81 MB'),
'2013': HmdaDataFile('hmda_2013_ak_all-records_labels.zip', '39394', '1.81 MB'),
'2012': HmdaDataFile('hmda_2012_ak_all-records_labels.zip', '46691', '2.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ak_originated-records_labels.zip', '17503', '792.19 KB'),
'2007': HmdaDataFile('hmda_2007_ak_originated-records_labels.zip', '21167', '801.83 KB'),
'2017': HmdaDataFile('hmda_2017_ak_originated-records_labels.zip', '14430', '400.6 KB'),
'2015': HmdaDataFile('hmda_2015_ak_originated-records_labels.zip', '16680', '749.38 KB'),
'2014': HmdaDataFile('hmda_2014_ak_originated-records_labels.zip', '14272', '633.57 KB'),
'2008': HmdaDataFile('hmda_2008_ak_originated-records_labels.zip', '17485', '614.3 KB'),
'2009': HmdaDataFile('hmda_2009_ak_originated-records_labels.zip', '24987', '878.94 KB'),
'2011': HmdaDataFile('hmda_2011_ak_originated-records_labels.zip', '19236', '785.43 KB'),
'2010': HmdaDataFile('hmda_2010_ak_originated-records_labels.zip', '20697', '847.87 KB'),
'2013': HmdaDataFile('hmda_2013_ak_originated-records_labels.zip', '20363', '840.47 KB'),
'2012': HmdaDataFile('hmda_2012_ak_originated-records_labels.zip', '24887', '1.02 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '15356', '426.06 KB'),
'2007': HmdaDataFile('hmda_2007_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '16758', '411.75 KB'),
'2017': HmdaDataFile('hmda_2017_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '12579', '240.69 KB'),
'2015': HmdaDataFile('hmda_2015_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '14511', '394.05 KB'),
'2014': HmdaDataFile('hmda_2014_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '12147', '329.2 KB'),
'2008': HmdaDataFile('hmda_2008_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '15576', '364.56 KB'),
'2009': HmdaDataFile('hmda_2009_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '23301', '544.64 KB'),
'2011': HmdaDataFile('hmda_2011_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '16974', '426.2 KB'),
'2010': HmdaDataFile('hmda_2010_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '18636', '464.07 KB'),
'2013': HmdaDataFile('hmda_2013_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '17337', '417.9 KB'),
'2012': HmdaDataFile('hmda_2012_ak_first-lien-owner-occupied-1-4-family-records_codes.zip', '22064', '530.98 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ak_all-records_codes.zip', '36105', '1.07 MB'),
'2007': HmdaDataFile('hmda_2007_ak_all-records_codes.zip', '48143', '1.24 MB'),
'2017': HmdaDataFile('hmda_2017_ak_all-records_codes.zip', '28632', '571.83 KB'),
'2015': HmdaDataFile('hmda_2015_ak_all-records_codes.zip', '33421', '951.9 KB'),
'2014': HmdaDataFile('hmda_2014_ak_all-records_codes.zip', '26499', '778.42 KB'),
'2008': HmdaDataFile('hmda_2008_ak_all-records_codes.zip', '36410', '899.28 KB'),
'2009': HmdaDataFile('hmda_2009_ak_all-records_codes.zip', '51821', '1.29 MB'),
'2011': HmdaDataFile('hmda_2011_ak_all-records_codes.zip', '36900', '992.55 KB'),
'2010': HmdaDataFile('hmda_2010_ak_all-records_codes.zip', '41203', '1.1 MB'),
'2013': HmdaDataFile('hmda_2013_ak_all-records_codes.zip', '39394', '1.01 MB'),
'2012': HmdaDataFile('hmda_2012_ak_all-records_codes.zip', '46691', '1.17 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ak_originated-records_codes.zip', '17503', '490.92 KB'),
'2007': HmdaDataFile('hmda_2007_ak_originated-records_codes.zip', '21167', '527.65 KB'),
'2017': HmdaDataFile('hmda_2017_ak_originated-records_codes.zip', '14430', '275.46 KB'),
'2015': HmdaDataFile('hmda_2015_ak_originated-records_codes.zip', '16680', '458.98 KB'),
'2014': HmdaDataFile('hmda_2014_ak_originated-records_codes.zip', '14272', '392.19 KB'),
'2008': HmdaDataFile('hmda_2008_ak_originated-records_codes.zip', '17485', '413.46 KB'),
'2009': HmdaDataFile('hmda_2009_ak_originated-records_codes.zip', '24987', '583.23 KB'),
'2011': HmdaDataFile('hmda_2011_ak_originated-records_codes.zip', '19236', '488.12 KB'),
'2010': HmdaDataFile('hmda_2010_ak_originated-records_codes.zip', '20697', '518.32 KB'),
'2013': HmdaDataFile('hmda_2013_ak_originated-records_codes.zip', '20363', '514.87 KB'),
'2012': HmdaDataFile('hmda_2012_ak_originated-records_codes.zip', '24887', '617.44 KB')
}
}
},
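    # Alabama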
'al': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '86522', '4.16 MB'),
'2007': HmdaDataFile('hmda_2007_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '106195', '4.88 MB'),
'2017': HmdaDataFile('hmda_2017_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '80115', '2.33 MB'),
'2015': HmdaDataFile('hmda_2015_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '75252', '4.13 MB'),
'2014': HmdaDataFile('hmda_2014_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '63808', '3.06 MB'),
'2008': HmdaDataFile('hmda_2008_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '89932', '4.1 MB'),
'2009': HmdaDataFile('hmda_2009_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '103427', '4.47 MB'),
'2011': HmdaDataFile('hmda_2011_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '75467', '3.54 MB'),
'2010': HmdaDataFile('hmda_2010_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '87005', '3.98 MB'),
'2013': HmdaDataFile('hmda_2013_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '93236', '4.39 MB'),
'2012': HmdaDataFile('hmda_2012_al_first-lien-owner-occupied-1-4-family-records_labels.zip', '99531', '4.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_al_all-records_labels.zip', '226918', '11.95 MB'),
'2007': HmdaDataFile('hmda_2007_al_all-records_labels.zip', '367991', '17.47 MB'),
'2017': HmdaDataFile('hmda_2017_al_all-records_labels.zip', '209002', '7.08 MB'),
'2015': HmdaDataFile('hmda_2015_al_all-records_labels.zip', '205039', '11.99 MB'),
'2014': HmdaDataFile('hmda_2014_al_all-records_labels.zip', '182825', '9.57 MB'),
'2008': HmdaDataFile('hmda_2008_al_all-records_labels.zip', '286567', '13.72 MB'),
'2009': HmdaDataFile('hmda_2009_al_all-records_labels.zip', '294820', '13.87 MB'),
'2011': HmdaDataFile('hmda_2011_al_all-records_labels.zip', '228420', '11.89 MB'),
'2010': HmdaDataFile('hmda_2010_al_all-records_labels.zip', '249347', '12.77 MB'),
'2013': HmdaDataFile('hmda_2013_al_all-records_labels.zip', '253915', '13.16 MB'),
'2012': HmdaDataFile('hmda_2012_al_all-records_labels.zip', '264313', '13.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_al_originated-records_labels.zip', '109870', '5.39 MB'),
'2007': HmdaDataFile('hmda_2007_al_originated-records_labels.zip', '153334', '7.1 MB'),
'2017': HmdaDataFile('hmda_2017_al_originated-records_labels.zip', '103096', '3.05 MB'),
'2015': HmdaDataFile('hmda_2015_al_originated-records_labels.zip', '98097', '5.48 MB'),
'2014': HmdaDataFile('hmda_2014_al_originated-records_labels.zip', '85899', '4.21 MB'),
'2008': HmdaDataFile('hmda_2008_al_originated-records_labels.zip', '119306', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_al_originated-records_labels.zip', '126063', '5.57 MB'),
'2011': HmdaDataFile('hmda_2011_al_originated-records_labels.zip', '97761', '4.65 MB'),
'2010': HmdaDataFile('hmda_2010_al_originated-records_labels.zip', '106706', '4.95 MB'),
'2013': HmdaDataFile('hmda_2013_al_originated-records_labels.zip', '118638', '5.68 MB'),
'2012': HmdaDataFile('hmda_2012_al_originated-records_labels.zip', '123170', '5.93 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '86522', '2.77 MB'),
'2007': HmdaDataFile('hmda_2007_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '106195', '3.34 MB'),
'2017': HmdaDataFile('hmda_2017_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '80115', '1.67 MB'),
'2015': HmdaDataFile('hmda_2015_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '75252', '2.81 MB'),
'2014': HmdaDataFile('hmda_2014_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '63808', '2.06 MB'),
'2008': HmdaDataFile('hmda_2008_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '89932', '2.83 MB'),
'2009': HmdaDataFile('hmda_2009_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '103427', '3.06 MB'),
'2011': HmdaDataFile('hmda_2011_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '75467', '2.35 MB'),
'2010': HmdaDataFile('hmda_2010_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '87005', '2.64 MB'),
'2013': HmdaDataFile('hmda_2013_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '93236', '2.92 MB'),
'2012': HmdaDataFile('hmda_2012_al_first-lien-owner-occupied-1-4-family-records_codes.zip', '99531', '3.15 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_al_all-records_codes.zip', '226918', '7.69 MB'),
'2007': HmdaDataFile('hmda_2007_al_all-records_codes.zip', '367991', '11.49 MB'),
'2017': HmdaDataFile('hmda_2017_al_all-records_codes.zip', '209002', '4.77 MB'),
'2015': HmdaDataFile('hmda_2015_al_all-records_codes.zip', '205039', '7.82 MB'),
'2014': HmdaDataFile('hmda_2014_al_all-records_codes.zip', '182825', '6.25 MB'),
'2008': HmdaDataFile('hmda_2008_al_all-records_codes.zip', '286567', '9.04 MB'),
'2009': HmdaDataFile('hmda_2009_al_all-records_codes.zip', '294820', '9.23 MB'),
'2011': HmdaDataFile('hmda_2011_al_all-records_codes.zip', '228420', '7.78 MB'),
'2010': HmdaDataFile('hmda_2010_al_all-records_codes.zip', '249347', '8.38 MB'),
'2013': HmdaDataFile('hmda_2013_al_all-records_codes.zip', '253915', '8.52 MB'),
'2012': HmdaDataFile('hmda_2012_al_all-records_codes.zip', '264313', '9.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_al_originated-records_codes.zip', '109870', '3.56 MB'),
'2007': HmdaDataFile('hmda_2007_al_originated-records_codes.zip', '153334', '4.85 MB'),
'2017': HmdaDataFile('hmda_2017_al_originated-records_codes.zip', '103096', '2.14 MB'),
'2015': HmdaDataFile('hmda_2015_al_originated-records_codes.zip', '98097', '3.71 MB'),
'2014': HmdaDataFile('hmda_2014_al_originated-records_codes.zip', '85899', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_al_originated-records_codes.zip', '119306', '3.78 MB'),
'2009': HmdaDataFile('hmda_2009_al_originated-records_codes.zip', '126063', '3.8 MB'),
'2011': HmdaDataFile('hmda_2011_al_originated-records_codes.zip', '97761', '3.07 MB'),
'2010': HmdaDataFile('hmda_2010_al_originated-records_codes.zip', '106706', '3.27 MB'),
'2013': HmdaDataFile('hmda_2013_al_originated-records_codes.zip', '118638', '3.76 MB'),
'2012': HmdaDataFile('hmda_2012_al_originated-records_codes.zip', '123170', '3.94 MB')
}
}
},
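    # Arkansas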
'ar': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '49025', '2.38 MB'),
'2007': HmdaDataFile('hmda_2007_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '55026', '2.46 MB'),
'2017': HmdaDataFile('hmda_2017_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '45327', '1.49 MB'),
'2015': HmdaDataFile('hmda_2015_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '42845', '2.26 MB'),
'2014': HmdaDataFile('hmda_2014_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '37601', '1.77 MB'),
'2008': HmdaDataFile('hmda_2008_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '49034', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '61531', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '46801', '2.1 MB'),
'2010': HmdaDataFile('hmda_2010_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '54493', '2.37 MB'),
'2013': HmdaDataFile('hmda_2013_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '54893', '2.56 MB'),
'2012': HmdaDataFile('hmda_2012_ar_first-lien-owner-occupied-1-4-family-records_labels.zip', '61834', '2.8 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ar_all-records_labels.zip', '131352', '6.97 MB'),
'2007': HmdaDataFile('hmda_2007_ar_all-records_labels.zip', '179146', '8.6 MB'),
'2017': HmdaDataFile('hmda_2017_ar_all-records_labels.zip', '125392', '4.6 MB'),
'2015': HmdaDataFile('hmda_2015_ar_all-records_labels.zip', '118384', '6.72 MB'),
'2014': HmdaDataFile('hmda_2014_ar_all-records_labels.zip', '108526', '5.63 MB'),
'2008': HmdaDataFile('hmda_2008_ar_all-records_labels.zip', '141191', '6.71 MB'),
'2009': HmdaDataFile('hmda_2009_ar_all-records_labels.zip', '159208', '7.41 MB'),
'2011': HmdaDataFile('hmda_2011_ar_all-records_labels.zip', '127757', '6.44 MB'),
'2010': HmdaDataFile('hmda_2010_ar_all-records_labels.zip', '142441', '7.13 MB'),
'2013': HmdaDataFile('hmda_2013_ar_all-records_labels.zip', '146285', '7.61 MB'),
'2012': HmdaDataFile('hmda_2012_ar_all-records_labels.zip', '154830', '7.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ar_originated-records_labels.zip', '65762', '3.27 MB'),
'2007': HmdaDataFile('hmda_2007_ar_originated-records_labels.zip', '83327', '3.71 MB'),
'2017': HmdaDataFile('hmda_2017_ar_originated-records_labels.zip', '62259', '2.05 MB'),
'2015': HmdaDataFile('hmda_2015_ar_originated-records_labels.zip', '59384', '3.2 MB'),
'2014': HmdaDataFile('hmda_2014_ar_originated-records_labels.zip', '52994', '2.56 MB'),
'2008': HmdaDataFile('hmda_2008_ar_originated-records_labels.zip', '71395', '3.11 MB'),
'2009': HmdaDataFile('hmda_2009_ar_originated-records_labels.zip', '78016', '3.4 MB'),
'2011': HmdaDataFile('hmda_2011_ar_originated-records_labels.zip', '62549', '2.86 MB'),
'2010': HmdaDataFile('hmda_2010_ar_originated-records_labels.zip', '69315', '3.08 MB'),
'2013': HmdaDataFile('hmda_2013_ar_originated-records_labels.zip', '73125', '3.48 MB'),
'2012': HmdaDataFile('hmda_2012_ar_originated-records_labels.zip', '79283', '3.68 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '49025', '1.56 MB'),
'2007': HmdaDataFile('hmda_2007_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '55026', '1.62 MB'),
'2017': HmdaDataFile('hmda_2017_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '45327', '1.04 MB'),
'2015': HmdaDataFile('hmda_2015_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '42845', '1.51 MB'),
'2014': HmdaDataFile('hmda_2014_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '37601', '1.16 MB'),
'2008': HmdaDataFile('hmda_2008_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '49034', '1.41 MB'),
'2009': HmdaDataFile('hmda_2009_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '61531', '1.81 MB'),
'2011': HmdaDataFile('hmda_2011_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '46801', '1.35 MB'),
'2010': HmdaDataFile('hmda_2010_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '54493', '1.53 MB'),
'2013': HmdaDataFile('hmda_2013_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '54893', '1.65 MB'),
'2012': HmdaDataFile('hmda_2012_ar_first-lien-owner-occupied-1-4-family-records_codes.zip', '61834', '1.81 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ar_all-records_codes.zip', '131352', '4.44 MB'),
'2007': HmdaDataFile('hmda_2007_ar_all-records_codes.zip', '179146', '5.43 MB'),
'2017': HmdaDataFile('hmda_2017_ar_all-records_codes.zip', '125392', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_ar_all-records_codes.zip', '118384', '4.33 MB'),
'2014': HmdaDataFile('hmda_2014_ar_all-records_codes.zip', '108526', '3.58 MB'),
'2008': HmdaDataFile('hmda_2008_ar_all-records_codes.zip', '141191', '4.14 MB'),
'2009': HmdaDataFile('hmda_2009_ar_all-records_codes.zip', '159208', '4.87 MB'),
'2011': HmdaDataFile('hmda_2011_ar_all-records_codes.zip', '127757', '4.13 MB'),
'2010': HmdaDataFile('hmda_2010_ar_all-records_codes.zip', '142441', '4.51 MB'),
'2013': HmdaDataFile('hmda_2013_ar_all-records_codes.zip', '146285', '4.87 MB'),
'2012': HmdaDataFile('hmda_2012_ar_all-records_codes.zip', '154830', '5.04 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ar_originated-records_codes.zip', '65762', '2.1 MB'),
'2007': HmdaDataFile('hmda_2007_ar_originated-records_codes.zip', '83327', '2.43 MB'),
'2017': HmdaDataFile('hmda_2017_ar_originated-records_codes.zip', '62259', '1.41 MB'),
'2015': HmdaDataFile('hmda_2015_ar_originated-records_codes.zip', '59384', '2.11 MB'),
'2014': HmdaDataFile('hmda_2014_ar_originated-records_codes.zip', '52994', '1.65 MB'),
'2008': HmdaDataFile('hmda_2008_ar_originated-records_codes.zip', '71395', '2.03 MB'),
'2009': HmdaDataFile('hmda_2009_ar_originated-records_codes.zip', '78016', '2.3 MB'),
'2011': HmdaDataFile('hmda_2011_ar_originated-records_codes.zip', '62549', '1.82 MB'),
'2010': HmdaDataFile('hmda_2010_ar_originated-records_codes.zip', '69315', '1.96 MB'),
'2013': HmdaDataFile('hmda_2013_ar_originated-records_codes.zip', '73125', '2.21 MB'),
'2012': HmdaDataFile('hmda_2012_ar_originated-records_codes.zip', '79283', '2.34 MB')
}
}
},
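    # Vermont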
'vt': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '11252', '465.67 KB'),
'2007': HmdaDataFile('hmda_2007_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '13430', '571.04 KB'),
'2017': HmdaDataFile('hmda_2017_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '9250', '291.43 KB'),
'2015': HmdaDataFile('hmda_2015_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '10215', '455.27 KB'),
'2014': HmdaDataFile('hmda_2014_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '8361', '346.05 KB'),
'2008': HmdaDataFile('hmda_2008_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '12079', '491.28 KB'),
'2009': HmdaDataFile('hmda_2009_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21026', '782.79 KB'),
'2011': HmdaDataFile('hmda_2011_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '13512', '578.18 KB'),
'2010': HmdaDataFile('hmda_2010_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '15651', '620.42 KB'),
'2013': HmdaDataFile('hmda_2013_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '14400', '594.85 KB'),
'2012': HmdaDataFile('hmda_2012_vt_first-lien-owner-occupied-1-4-family-records_labels.zip', '17914', '703.52 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vt_all-records_labels.zip', '25971', '1.19 MB'),
'2007': HmdaDataFile('hmda_2007_vt_all-records_labels.zip', '43664', '1.95 MB'),
'2017': HmdaDataFile('hmda_2017_vt_all-records_labels.zip', '22335', '765.5 KB'),
'2015': HmdaDataFile('hmda_2015_vt_all-records_labels.zip', '24028', '1.19 MB'),
'2014': HmdaDataFile('hmda_2014_vt_all-records_labels.zip', '20816', '953.84 KB'),
'2008': HmdaDataFile('hmda_2008_vt_all-records_labels.zip', '33040', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_vt_all-records_labels.zip', '46532', '1.87 MB'),
'2011': HmdaDataFile('hmda_2011_vt_all-records_labels.zip', '32687', '1.56 MB'),
'2010': HmdaDataFile('hmda_2010_vt_all-records_labels.zip', '36637', '1.61 MB'),
'2013': HmdaDataFile('hmda_2013_vt_all-records_labels.zip', '32790', '1.49 MB'),
'2012': HmdaDataFile('hmda_2012_vt_all-records_labels.zip', '37869', '1.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vt_originated-records_labels.zip', '15176', '645.84 KB'),
'2007': HmdaDataFile('hmda_2007_vt_originated-records_labels.zip', '20755', '882.15 KB'),
'2017': HmdaDataFile('hmda_2017_vt_originated-records_labels.zip', '13325', '425.68 KB'),
'2015': HmdaDataFile('hmda_2015_vt_originated-records_labels.zip', '14053', '645.96 KB'),
'2014': HmdaDataFile('hmda_2014_vt_originated-records_labels.zip', '11893', '504.52 KB'),
'2008': HmdaDataFile('hmda_2008_vt_originated-records_labels.zip', '17432', '722.72 KB'),
'2009': HmdaDataFile('hmda_2009_vt_originated-records_labels.zip', '25699', '980.48 KB'),
'2011': HmdaDataFile('hmda_2011_vt_originated-records_labels.zip', '17791', '786.1 KB'),
'2010': HmdaDataFile('hmda_2010_vt_originated-records_labels.zip', '19808', '804.9 KB'),
'2013': HmdaDataFile('hmda_2013_vt_originated-records_labels.zip', '19293', '814.7 KB'),
'2012': HmdaDataFile('hmda_2012_vt_originated-records_labels.zip', '22745', '914.05 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '11252', '290.62 KB'),
'2007': HmdaDataFile('hmda_2007_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '13430', '365.86 KB'),
'2017': HmdaDataFile('hmda_2017_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '9250', '198.27 KB'),
'2015': HmdaDataFile('hmda_2015_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '10215', '285.97 KB'),
'2014': HmdaDataFile('hmda_2014_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '8361', '216.87 KB'),
'2008': HmdaDataFile('hmda_2008_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '12079', '314.55 KB'),
'2009': HmdaDataFile('hmda_2009_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21026', '510.88 KB'),
'2011': HmdaDataFile('hmda_2011_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '13512', '361.01 KB'),
'2010': HmdaDataFile('hmda_2010_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '15651', '386.89 KB'),
'2013': HmdaDataFile('hmda_2013_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '14400', '367.68 KB'),
'2012': HmdaDataFile('hmda_2012_vt_first-lien-owner-occupied-1-4-family-records_codes.zip', '17914', '433.81 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_vt_all-records_codes.zip', '25971', '716.41 KB'),
'2007': HmdaDataFile('hmda_2007_vt_all-records_codes.zip', '43664', '1.22 MB'),
'2017': HmdaDataFile('hmda_2017_vt_all-records_codes.zip', '22335', '486.85 KB'),
'2015': HmdaDataFile('hmda_2015_vt_all-records_codes.zip', '24028', '719.81 KB'),
'2014': HmdaDataFile('hmda_2014_vt_all-records_codes.zip', '20816', '576.12 KB'),
'2008': HmdaDataFile('hmda_2008_vt_all-records_codes.zip', '33040', '900.48 KB'),
'2009': HmdaDataFile('hmda_2009_vt_all-records_codes.zip', '46532', '1.2 MB'),
'2011': HmdaDataFile('hmda_2011_vt_all-records_codes.zip', '32687', '942.34 KB'),
'2010': HmdaDataFile('hmda_2010_vt_all-records_codes.zip', '36637', '971.16 KB'),
'2013': HmdaDataFile('hmda_2013_vt_all-records_codes.zip', '32790', '897.96 KB'),
'2012': HmdaDataFile('hmda_2012_vt_all-records_codes.zip', '37869', '998.22 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_vt_originated-records_codes.zip', '15176', '399.34 KB'),
'2007': HmdaDataFile('hmda_2007_vt_originated-records_codes.zip', '20755', '567.25 KB'),
'2017': HmdaDataFile('hmda_2017_vt_originated-records_codes.zip', '13325', '283.84 KB'),
'2015': HmdaDataFile('hmda_2015_vt_originated-records_codes.zip', '14053', '403.29 KB'),
'2014': HmdaDataFile('hmda_2014_vt_originated-records_codes.zip', '11893', '311.95 KB'),
'2008': HmdaDataFile('hmda_2008_vt_originated-records_codes.zip', '17432', '462.77 KB'),
'2009': HmdaDataFile('hmda_2009_vt_originated-records_codes.zip', '25699', '638.61 KB'),
'2011': HmdaDataFile('hmda_2011_vt_originated-records_codes.zip', '17791', '486.47 KB'),
'2010': HmdaDataFile('hmda_2010_vt_originated-records_codes.zip', '19808', '497.91 KB'),
'2013': HmdaDataFile('hmda_2013_vt_originated-records_codes.zip', '19293', '501.93 KB'),
'2012': HmdaDataFile('hmda_2012_vt_originated-records_codes.zip', '22745', '561.59 KB')
}
}
},
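    # Illinois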
'il': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '278035', '14.02 MB'),
'2007': HmdaDataFile('hmda_2007_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '346807', '16.76 MB'),
'2017': HmdaDataFile('hmda_2017_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '225445', '7.03 MB'),
'2015': HmdaDataFile('hmda_2015_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '245110', '13.66 MB'),
'2014': HmdaDataFile('hmda_2014_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '193991', '9.89 MB'),
'2008': HmdaDataFile('hmda_2008_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '278003', '13.04 MB'),
'2009': HmdaDataFile('hmda_2009_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '396141', '17.05 MB'),
'2011': HmdaDataFile('hmda_2011_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '282861', '13.82 MB'),
'2010': HmdaDataFile('hmda_2010_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '343779', '16.76 MB'),
'2013': HmdaDataFile('hmda_2013_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '295909', '14.97 MB'),
'2012': HmdaDataFile('hmda_2012_il_first-lien-owner-occupied-1-4-family-records_labels.zip', '378550', '18.82 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_il_all-records_labels.zip', '583019', '31.48 MB'),
'2007': HmdaDataFile('hmda_2007_il_all-records_labels.zip', '1117310', '53.26 MB'),
'2017': HmdaDataFile('hmda_2017_il_all-records_labels.zip', '502511', '18.63 MB'),
'2015': HmdaDataFile('hmda_2015_il_all-records_labels.zip', '517360', '30.86 MB'),
'2014': HmdaDataFile('hmda_2014_il_all-records_labels.zip', '437239', '23.79 MB'),
'2008': HmdaDataFile('hmda_2008_il_all-records_labels.zip', '761632', '36.35 MB'),
'2009': HmdaDataFile('hmda_2009_il_all-records_labels.zip', '849782', '37.88 MB'),
'2011': HmdaDataFile('hmda_2011_il_all-records_labels.zip', '620832', '33.1 MB'),
'2010': HmdaDataFile('hmda_2010_il_all-records_labels.zip', '716356', '37.81 MB'),
'2013': HmdaDataFile('hmda_2013_il_all-records_labels.zip', '637258', '34.91 MB'),
'2012': HmdaDataFile('hmda_2012_il_all-records_labels.zip', '754118', '40.71 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_il_originated-records_labels.zip', '317597', '16.17 MB'),
'2007': HmdaDataFile('hmda_2007_il_originated-records_labels.zip', '470592', '22.68 MB'),
'2017': HmdaDataFile('hmda_2017_il_originated-records_labels.zip', '265490', '8.51 MB'),
'2015': HmdaDataFile('hmda_2015_il_originated-records_labels.zip', '284551', '16.06 MB'),
'2014': HmdaDataFile('hmda_2014_il_originated-records_labels.zip', '232557', '11.98 MB'),
'2008': HmdaDataFile('hmda_2008_il_originated-records_labels.zip', '339543', '16.11 MB'),
'2009': HmdaDataFile('hmda_2009_il_originated-records_labels.zip', '432707', '18.86 MB'),
'2011': HmdaDataFile('hmda_2011_il_originated-records_labels.zip', '319004', '15.78 MB'),
'2010': HmdaDataFile('hmda_2010_il_originated-records_labels.zip', '378335', '18.65 MB'),
'2013': HmdaDataFile('hmda_2013_il_originated-records_labels.zip', '344172', '17.61 MB'),
'2012': HmdaDataFile('hmda_2012_il_originated-records_labels.zip', '424748', '21.38 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '278035', '10.15 MB'),
'2007': HmdaDataFile('hmda_2007_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '346807', '12.31 MB'),
'2017': HmdaDataFile('hmda_2017_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '225445', '5.06 MB'),
'2015': HmdaDataFile('hmda_2015_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '245110', '9.83 MB'),
'2014': HmdaDataFile('hmda_2014_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '193991', '7.12 MB'),
'2008': HmdaDataFile('hmda_2008_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '278003', '9.54 MB'),
'2009': HmdaDataFile('hmda_2009_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '396141', '12.55 MB'),
'2011': HmdaDataFile('hmda_2011_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '282861', '9.82 MB'),
'2010': HmdaDataFile('hmda_2010_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '343779', '11.91 MB'),
'2013': HmdaDataFile('hmda_2013_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '295909', '10.83 MB'),
'2012': HmdaDataFile('hmda_2012_il_first-lien-owner-occupied-1-4-family-records_codes.zip', '378550', '13.52 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_il_all-records_codes.zip', '583019', '22.25 MB'),
'2007': HmdaDataFile('hmda_2007_il_all-records_codes.zip', '1117310', '37.59 MB'),
'2017': HmdaDataFile('hmda_2017_il_all-records_codes.zip', '502511', '12.69 MB'),
'2015': HmdaDataFile('hmda_2015_il_all-records_codes.zip', '517360', '21.34 MB'),
'2014': HmdaDataFile('hmda_2014_il_all-records_codes.zip', '437239', '16.64 MB'),
'2008': HmdaDataFile('hmda_2008_il_all-records_codes.zip', '761632', '25.64 MB'),
'2009': HmdaDataFile('hmda_2009_il_all-records_codes.zip', '849782', '26.91 MB'),
'2011': HmdaDataFile('hmda_2011_il_all-records_codes.zip', '620832', '22.91 MB'),
'2010': HmdaDataFile('hmda_2010_il_all-records_codes.zip', '716356', '26.12 MB'),
'2013': HmdaDataFile('hmda_2013_il_all-records_codes.zip', '637258', '24.68 MB'),
'2012': HmdaDataFile('hmda_2012_il_all-records_codes.zip', '754118', '28.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_il_originated-records_codes.zip', '317597', '11.62 MB'),
'2007': HmdaDataFile('hmda_2007_il_originated-records_codes.zip', '470592', '16.53 MB'),
'2017': HmdaDataFile('hmda_2017_il_originated-records_codes.zip', '265490', '6.03 MB'),
'2015': HmdaDataFile('hmda_2015_il_originated-records_codes.zip', '284551', '11.44 MB'),
'2014': HmdaDataFile('hmda_2014_il_originated-records_codes.zip', '232557', '8.54 MB'),
'2008': HmdaDataFile('hmda_2008_il_originated-records_codes.zip', '339543', '11.68 MB'),
'2009': HmdaDataFile('hmda_2009_il_originated-records_codes.zip', '432707', '13.79 MB'),
'2011': HmdaDataFile('hmda_2011_il_originated-records_codes.zip', '319004', '11.14 MB'),
'2010': HmdaDataFile('hmda_2010_il_originated-records_codes.zip', '378335', '13.17 MB'),
'2013': HmdaDataFile('hmda_2013_il_originated-records_codes.zip', '344172', '12.64 MB'),
'2012': HmdaDataFile('hmda_2012_il_originated-records_codes.zip', '424748', '15.26 MB')
}
}
},
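    # Georgia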
'ga': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '225258', '11.78 MB'),
'2007': HmdaDataFile('hmda_2007_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '236346', '11.17 MB'),
'2017': HmdaDataFile('hmda_2017_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '203948', '6.37 MB'),
'2015': HmdaDataFile('hmda_2015_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '193285', '11.11 MB'),
'2014': HmdaDataFile('hmda_2014_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '147432', '7.81 MB'),
'2008': HmdaDataFile('hmda_2008_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '179658', '8.42 MB'),
'2009': HmdaDataFile('hmda_2009_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '220141', '9.84 MB'),
'2011': HmdaDataFile('hmda_2011_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '165891', '8.32 MB'),
'2010': HmdaDataFile('hmda_2010_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '171421', '8.49 MB'),
'2013': HmdaDataFile('hmda_2013_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '210048', '10.77 MB'),
'2012': HmdaDataFile('hmda_2012_ga_first-lien-owner-occupied-1-4-family-records_labels.zip', '229259', '11.67 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ga_all-records_labels.zip', '547637', '30.4 MB'),
'2007': HmdaDataFile('hmda_2007_ga_all-records_labels.zip', '899812', '43.49 MB'),
'2017': HmdaDataFile('hmda_2017_ga_all-records_labels.zip', '501310', '18.01 MB'),
'2015': HmdaDataFile('hmda_2015_ga_all-records_labels.zip', '478359', '29.23 MB'),
'2014': HmdaDataFile('hmda_2014_ga_all-records_labels.zip', '391231', '22.06 MB'),
'2008': HmdaDataFile('hmda_2008_ga_all-records_labels.zip', '583802', '28.24 MB'),
'2009': HmdaDataFile('hmda_2009_ga_all-records_labels.zip', '612188', '28.44 MB'),
'2011': HmdaDataFile('hmda_2011_ga_all-records_labels.zip', '444258', '24.51 MB'),
'2010': HmdaDataFile('hmda_2010_ga_all-records_labels.zip', '466839', '25.16 MB'),
'2013': HmdaDataFile('hmda_2013_ga_all-records_labels.zip', '537898', '29.53 MB'),
'2012': HmdaDataFile('hmda_2012_ga_all-records_labels.zip', '559464', '30.51 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ga_originated-records_labels.zip', '264802', '14.07 MB'),
'2007': HmdaDataFile('hmda_2007_ga_originated-records_labels.zip', '352181', '16.55 MB'),
'2017': HmdaDataFile('hmda_2017_ga_originated-records_labels.zip', '244731', '7.97 MB'),
'2015': HmdaDataFile('hmda_2015_ga_originated-records_labels.zip', '232822', '13.6 MB'),
'2014': HmdaDataFile('hmda_2014_ga_originated-records_labels.zip', '185375', '10 MB'),
'2008': HmdaDataFile('hmda_2008_ga_originated-records_labels.zip', '244230', '11.53 MB'),
'2009': HmdaDataFile('hmda_2009_ga_originated-records_labels.zip', '261989', '11.88 MB'),
'2011': HmdaDataFile('hmda_2011_ga_originated-records_labels.zip', '204582', '10.57 MB'),
'2010': HmdaDataFile('hmda_2010_ga_originated-records_labels.zip', '208728', '10.42 MB'),
'2013': HmdaDataFile('hmda_2013_ga_originated-records_labels.zip', '262544', '13.64 MB'),
'2012': HmdaDataFile('hmda_2012_ga_originated-records_labels.zip', '277607', '14.27 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '225258', '8.23 MB'),
'2007': HmdaDataFile('hmda_2007_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '236346', '7.88 MB'),
'2017': HmdaDataFile('hmda_2017_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '203948', '4.43 MB'),
'2015': HmdaDataFile('hmda_2015_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '193285', '7.71 MB'),
'2014': HmdaDataFile('hmda_2014_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '147432', '5.46 MB'),
'2008': HmdaDataFile('hmda_2008_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '179658', '5.94 MB'),
'2009': HmdaDataFile('hmda_2009_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '220141', '7.02 MB'),
'2011': HmdaDataFile('hmda_2011_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '165891', '5.63 MB'),
'2010': HmdaDataFile('hmda_2010_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '171421', '5.76 MB'),
'2013': HmdaDataFile('hmda_2013_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '210048', '7.54 MB'),
'2012': HmdaDataFile('hmda_2012_ga_first-lien-owner-occupied-1-4-family-records_codes.zip', '229259', '8.16 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ga_all-records_codes.zip', '547637', '20.69 MB'),
'2007': HmdaDataFile('hmda_2007_ga_all-records_codes.zip', '899812', '29.71 MB'),
'2017': HmdaDataFile('hmda_2017_ga_all-records_codes.zip', '501310', '11.97 MB'),
'2015': HmdaDataFile('hmda_2015_ga_all-records_codes.zip', '478359', '19.47 MB'),
'2014': HmdaDataFile('hmda_2014_ga_all-records_codes.zip', '391231', '14.94 MB'),
'2008': HmdaDataFile('hmda_2008_ga_all-records_codes.zip', '583802', '19.31 MB'),
'2009': HmdaDataFile('hmda_2009_ga_all-records_codes.zip', '612188', '19.6 MB'),
'2011': HmdaDataFile('hmda_2011_ga_all-records_codes.zip', '444258', '16.2 MB'),
'2010': HmdaDataFile('hmda_2010_ga_all-records_codes.zip', '466839', '16.61 MB'),
'2013': HmdaDataFile('hmda_2013_ga_all-records_codes.zip', '537898', '20.11 MB'),
'2012': HmdaDataFile('hmda_2012_ga_all-records_codes.zip', '559464', '20.8 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ga_originated-records_codes.zip', '264802', '9.77 MB'),
'2007': HmdaDataFile('hmda_2007_ga_originated-records_codes.zip', '352181', '11.59 MB'),
'2017': HmdaDataFile('hmda_2017_ga_originated-records_codes.zip', '244731', '5.49 MB'),
'2015': HmdaDataFile('hmda_2015_ga_originated-records_codes.zip', '232822', '9.35 MB'),
'2014': HmdaDataFile('hmda_2014_ga_originated-records_codes.zip', '185375', '6.91 MB'),
'2008': HmdaDataFile('hmda_2008_ga_originated-records_codes.zip', '244230', '8.04 MB'),
'2009': HmdaDataFile('hmda_2009_ga_originated-records_codes.zip', '261989', '8.4 MB'),
'2011': HmdaDataFile('hmda_2011_ga_originated-records_codes.zip', '204582', '7.11 MB'),
'2010': HmdaDataFile('hmda_2010_ga_originated-records_codes.zip', '208728', '7.02 MB'),
'2013': HmdaDataFile('hmda_2013_ga_originated-records_codes.zip', '262544', '9.46 MB'),
'2012': HmdaDataFile('hmda_2012_ga_originated-records_codes.zip', '277607', '9.89 MB')
}
}
},
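    # Indiana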
'in': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '149979', '7.55 MB'),
'2007': HmdaDataFile('hmda_2007_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '145394', '6.8 MB'),
'2017': HmdaDataFile('hmda_2017_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '135246', '4.03 MB'),
'2015': HmdaDataFile('hmda_2015_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '134522', '7.59 MB'),
'2014': HmdaDataFile('hmda_2014_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '108789', '5.36 MB'),
'2008': HmdaDataFile('hmda_2008_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '124555', '5.73 MB'),
'2009': HmdaDataFile('hmda_2009_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '187145', '8.03 MB'),
'2011': HmdaDataFile('hmda_2011_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '141707', '6.73 MB'),
'2010': HmdaDataFile('hmda_2010_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '161225', '7.29 MB'),
'2013': HmdaDataFile('hmda_2013_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '159649', '7.66 MB'),
'2012': HmdaDataFile('hmda_2012_in_first-lien-owner-occupied-1-4-family-records_labels.zip', '188614', '8.83 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_in_all-records_labels.zip', '319123', '17.64 MB'),
'2007': HmdaDataFile('hmda_2007_in_all-records_labels.zip', '474561', '24.38 MB'),
'2017': HmdaDataFile('hmda_2017_in_all-records_labels.zip', '292152', '10.14 MB'),
'2015': HmdaDataFile('hmda_2015_in_all-records_labels.zip', '288746', '17.61 MB'),
'2014': HmdaDataFile('hmda_2014_in_all-records_labels.zip', '248347', '13.51 MB'),
'2008': HmdaDataFile('hmda_2008_in_all-records_labels.zip', '348681', '17.43 MB'),
'2009': HmdaDataFile('hmda_2009_in_all-records_labels.zip', '421392', '19.89 MB'),
'2011': HmdaDataFile('hmda_2011_in_all-records_labels.zip', '322061', '17.24 MB'),
'2010': HmdaDataFile('hmda_2010_in_all-records_labels.zip', '359860', '18.65 MB'),
'2013': HmdaDataFile('hmda_2013_in_all-records_labels.zip', '344116', '18.48 MB'),
'2012': HmdaDataFile('hmda_2012_in_all-records_labels.zip', '385267', '20.36 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_in_originated-records_labels.zip', '172307', '8.79 MB'),
'2007': HmdaDataFile('hmda_2007_in_originated-records_labels.zip', '199213', '9.38 MB'),
'2017': HmdaDataFile('hmda_2017_in_originated-records_labels.zip', '158693', '4.9 MB'),
'2015': HmdaDataFile('hmda_2015_in_originated-records_labels.zip', '156109', '8.92 MB'),
'2014': HmdaDataFile('hmda_2014_in_originated-records_labels.zip', '130131', '6.52 MB'),
'2008': HmdaDataFile('hmda_2008_in_originated-records_labels.zip', '155308', '7.25 MB'),
'2009': HmdaDataFile('hmda_2009_in_originated-records_labels.zip', '207593', '9.02 MB'),
'2011': HmdaDataFile('hmda_2011_in_originated-records_labels.zip', '160424', '7.77 MB'),
'2010': HmdaDataFile('hmda_2010_in_originated-records_labels.zip', '179820', '8.27 MB'),
'2013': HmdaDataFile('hmda_2013_in_originated-records_labels.zip', '184428', '8.97 MB'),
'2012': HmdaDataFile('hmda_2012_in_originated-records_labels.zip', '210891', '10.05 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '149979', '5.05 MB'),
'2007': HmdaDataFile('hmda_2007_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '145394', '4.63 MB'),
'2017': HmdaDataFile('hmda_2017_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '135246', '2.85 MB'),
'2015': HmdaDataFile('hmda_2015_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '134522', '5.11 MB'),
'2014': HmdaDataFile('hmda_2014_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '108789', '3.62 MB'),
'2008': HmdaDataFile('hmda_2008_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '124555', '3.9 MB'),
'2009': HmdaDataFile('hmda_2009_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '187145', '5.67 MB'),
'2011': HmdaDataFile('hmda_2011_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '141707', '4.43 MB'),
'2010': HmdaDataFile('hmda_2010_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '161225', '4.82 MB'),
'2013': HmdaDataFile('hmda_2013_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '159649', '5.11 MB'),
'2012': HmdaDataFile('hmda_2012_in_first-lien-owner-occupied-1-4-family-records_codes.zip', '188614', '5.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_in_all-records_codes.zip', '319123', '11.54 MB'),
'2007': HmdaDataFile('hmda_2007_in_all-records_codes.zip', '474561', '16.44 MB'),
'2017': HmdaDataFile('hmda_2017_in_all-records_codes.zip', '292152', '6.68 MB'),
'2015': HmdaDataFile('hmda_2015_in_all-records_codes.zip', '288746', '11.46 MB'),
'2014': HmdaDataFile('hmda_2014_in_all-records_codes.zip', '248347', '8.9 MB'),
'2008': HmdaDataFile('hmda_2008_in_all-records_codes.zip', '348681', '11.68 MB'),
'2009': HmdaDataFile('hmda_2009_in_all-records_codes.zip', '421392', '13.64 MB'),
'2011': HmdaDataFile('hmda_2011_in_all-records_codes.zip', '322061', '11.1 MB'),
'2010': HmdaDataFile('hmda_2010_in_all-records_codes.zip', '359860', '12.06 MB'),
'2013': HmdaDataFile('hmda_2013_in_all-records_codes.zip', '344116', '12.12 MB'),
'2012': HmdaDataFile('hmda_2012_in_all-records_codes.zip', '385267', '13.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_in_originated-records_codes.zip', '172307', '5.84 MB'),
'2007': HmdaDataFile('hmda_2007_in_originated-records_codes.zip', '199213', '6.61 MB'),
'2017': HmdaDataFile('hmda_2017_in_originated-records_codes.zip', '158693', '3.43 MB'),
'2015': HmdaDataFile('hmda_2015_in_originated-records_codes.zip', '156109', '5.98 MB'),
'2014': HmdaDataFile('hmda_2014_in_originated-records_codes.zip', '130131', '4.37 MB'),
'2008': HmdaDataFile('hmda_2008_in_originated-records_codes.zip', '155308', '4.94 MB'),
'2009': HmdaDataFile('hmda_2009_in_originated-records_codes.zip', '207593', '6.34 MB'),
'2011': HmdaDataFile('hmda_2011_in_originated-records_codes.zip', '160424', '5.09 MB'),
'2010': HmdaDataFile('hmda_2010_in_originated-records_codes.zip', '179820', '5.44 MB'),
'2013': HmdaDataFile('hmda_2013_in_originated-records_codes.zip', '184428', '5.94 MB'),
'2012': HmdaDataFile('hmda_2012_in_originated-records_codes.zip', '210891', '6.64 MB')
}
}
},
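    # Iowa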
'ia': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '71680', '3.32 MB'),
'2007': HmdaDataFile('hmda_2007_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '65703', '3.02 MB'),
'2017': HmdaDataFile('hmda_2017_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '60347', '1.83 MB'),
'2015': HmdaDataFile('hmda_2015_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '63773', '3.32 MB'),
'2014': HmdaDataFile('hmda_2014_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '52784', '2.39 MB'),
'2008': HmdaDataFile('hmda_2008_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '60097', '2.72 MB'),
'2009': HmdaDataFile('hmda_2009_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '96858', '3.85 MB'),
'2011': HmdaDataFile('hmda_2011_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '72565', '3.17 MB'),
'2010': HmdaDataFile('hmda_2010_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '84923', '3.79 MB'),
'2013': HmdaDataFile('hmda_2013_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '76061', '3.38 MB'),
'2012': HmdaDataFile('hmda_2012_ia_first-lien-owner-occupied-1-4-family-records_labels.zip', '93906', '4.12 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ia_all-records_labels.zip', '149227', '7.39 MB'),
'2007': HmdaDataFile('hmda_2007_ia_all-records_labels.zip', '197991', '9.31 MB'),
'2017': HmdaDataFile('hmda_2017_ia_all-records_labels.zip', '127446', '4.12 MB'),
'2015': HmdaDataFile('hmda_2015_ia_all-records_labels.zip', '136795', '7.68 MB'),
'2014': HmdaDataFile('hmda_2014_ia_all-records_labels.zip', '115594', '5.62 MB'),
'2008': HmdaDataFile('hmda_2008_ia_all-records_labels.zip', '157339', '7.52 MB'),
'2009': HmdaDataFile('hmda_2009_ia_all-records_labels.zip', '200497', '8.54 MB'),
'2011': HmdaDataFile('hmda_2011_ia_all-records_labels.zip', '150683', '7.18 MB'),
'2010': HmdaDataFile('hmda_2010_ia_all-records_labels.zip', '172100', '8.41 MB'),
'2013': HmdaDataFile('hmda_2013_ia_all-records_labels.zip', '160707', '7.67 MB'),
'2012': HmdaDataFile('hmda_2012_ia_all-records_labels.zip', '181237', '8.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ia_originated-records_labels.zip', '90932', '4.27 MB'),
'2007': HmdaDataFile('hmda_2007_ia_originated-records_labels.zip', '93343', '4.31 MB'),
'2017': HmdaDataFile('hmda_2017_ia_originated-records_labels.zip', '79977', '2.43 MB'),
'2015': HmdaDataFile('hmda_2015_ia_originated-records_labels.zip', '83214', '4.37 MB'),
'2014': HmdaDataFile('hmda_2014_ia_originated-records_labels.zip', '71413', '3.26 MB'),
'2008': HmdaDataFile('hmda_2008_ia_originated-records_labels.zip', '78965', '3.63 MB'),
'2009': HmdaDataFile('hmda_2009_ia_originated-records_labels.zip', '112522', '4.57 MB'),
'2011': HmdaDataFile('hmda_2011_ia_originated-records_labels.zip', '87178', '3.88 MB'),
'2010': HmdaDataFile('hmda_2010_ia_originated-records_labels.zip', '100132', '4.56 MB'),
'2013': HmdaDataFile('hmda_2013_ia_originated-records_labels.zip', '95886', '4.34 MB'),
'2012': HmdaDataFile('hmda_2012_ia_originated-records_labels.zip', '111610', '4.98 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '71680', '2.17 MB'),
'2007': HmdaDataFile('hmda_2007_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '65703', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '60347', '1.29 MB'),
'2015': HmdaDataFile('hmda_2015_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '63773', '2.2 MB'),
'2014': HmdaDataFile('hmda_2014_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '52784', '1.56 MB'),
'2008': HmdaDataFile('hmda_2008_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '60097', '1.82 MB'),
'2009': HmdaDataFile('hmda_2009_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '96858', '2.64 MB'),
'2011': HmdaDataFile('hmda_2011_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '72565', '2.05 MB'),
'2010': HmdaDataFile('hmda_2010_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '84923', '2.46 MB'),
'2013': HmdaDataFile('hmda_2013_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '76061', '2.18 MB'),
'2012': HmdaDataFile('hmda_2012_ia_first-lien-owner-occupied-1-4-family-records_codes.zip', '93906', '2.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ia_all-records_codes.zip', '149227', '4.65 MB'),
'2007': HmdaDataFile('hmda_2007_ia_all-records_codes.zip', '197991', '6.1 MB'),
'2017': HmdaDataFile('hmda_2017_ia_all-records_codes.zip', '127446', '2.7 MB'),
'2015': HmdaDataFile('hmda_2015_ia_all-records_codes.zip', '136795', '4.9 MB'),
'2014': HmdaDataFile('hmda_2014_ia_all-records_codes.zip', '115594', '3.51 MB'),
'2008': HmdaDataFile('hmda_2008_ia_all-records_codes.zip', '157339', '4.92 MB'),
'2009': HmdaDataFile('hmda_2009_ia_all-records_codes.zip', '200497', '5.67 MB'),
'2011': HmdaDataFile('hmda_2011_ia_all-records_codes.zip', '150683', '4.49 MB'),
'2010': HmdaDataFile('hmda_2010_ia_all-records_codes.zip', '172100', '5.28 MB'),
'2013': HmdaDataFile('hmda_2013_ia_all-records_codes.zip', '160707', '4.75 MB'),
'2012': HmdaDataFile('hmda_2012_ia_all-records_codes.zip', '181237', '5.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ia_originated-records_codes.zip', '90932', '2.74 MB'),
'2007': HmdaDataFile('hmda_2007_ia_originated-records_codes.zip', '93343', '2.86 MB'),
'2017': HmdaDataFile('hmda_2017_ia_originated-records_codes.zip', '79977', '1.67 MB'),
'2015': HmdaDataFile('hmda_2015_ia_originated-records_codes.zip', '83214', '2.85 MB'),
'2014': HmdaDataFile('hmda_2014_ia_originated-records_codes.zip', '71413', '2.08 MB'),
'2008': HmdaDataFile('hmda_2008_ia_originated-records_codes.zip', '78965', '2.41 MB'),
'2009': HmdaDataFile('hmda_2009_ia_originated-records_codes.zip', '112522', '3.11 MB'),
'2011': HmdaDataFile('hmda_2011_ia_originated-records_codes.zip', '87178', '2.48 MB'),
'2010': HmdaDataFile('hmda_2010_ia_originated-records_codes.zip', '100132', '2.93 MB'),
'2013': HmdaDataFile('hmda_2013_ia_originated-records_codes.zip', '95886', '2.76 MB'),
'2012': HmdaDataFile('hmda_2012_ia_originated-records_codes.zip', '111610', '3.18 MB')
}
}
},
'az': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '201543', '9.98 MB'),
'2007': HmdaDataFile('hmda_2007_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '198791', '8.9 MB'),
'2017': HmdaDataFile('hmda_2017_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '177849', '5.45 MB'),
'2015': HmdaDataFile('hmda_2015_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '166410', '8.87 MB'),
'2014': HmdaDataFile('hmda_2014_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '124580', '6.3 MB'),
'2008': HmdaDataFile('hmda_2008_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '129343', '5.73 MB'),
'2009': HmdaDataFile('hmda_2009_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '161393', '6.71 MB'),
'2011': HmdaDataFile('hmda_2011_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '114583', '5.41 MB'),
'2010': HmdaDataFile('hmda_2010_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '132138', '6.21 MB'),
'2013': HmdaDataFile('hmda_2013_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '176268', '8.6 MB'),
'2012': HmdaDataFile('hmda_2012_az_first-lien-owner-occupied-1-4-family-records_labels.zip', '197491', '9.59 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_az_all-records_labels.zip', '478386', '24.75 MB'),
'2007': HmdaDataFile('hmda_2007_az_all-records_labels.zip', '803675', '35.11 MB'),
'2017': HmdaDataFile('hmda_2017_az_all-records_labels.zip', '428411', '14.46 MB'),
'2015': HmdaDataFile('hmda_2015_az_all-records_labels.zip', '391879', '22 MB'),
'2014': HmdaDataFile('hmda_2014_az_all-records_labels.zip', '317345', '16.74 MB'),
'2008': HmdaDataFile('hmda_2008_az_all-records_labels.zip', '425680', '19.23 MB'),
'2009': HmdaDataFile('hmda_2009_az_all-records_labels.zip', '441291', '18.88 MB'),
'2011': HmdaDataFile('hmda_2011_az_all-records_labels.zip', '313348', '16.05 MB'),
'2010': HmdaDataFile('hmda_2010_az_all-records_labels.zip', '350571', '17.93 MB'),
'2013': HmdaDataFile('hmda_2013_az_all-records_labels.zip', '428383', '22.29 MB'),
'2012': HmdaDataFile('hmda_2012_az_all-records_labels.zip', '458365', '23.72 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_az_originated-records_labels.zip', '236688', '11.85 MB'),
'2007': HmdaDataFile('hmda_2007_az_originated-records_labels.zip', '292272', '13.05 MB'),
'2017': HmdaDataFile('hmda_2017_az_originated-records_labels.zip', '213547', '6.63 MB'),
'2015': HmdaDataFile('hmda_2015_az_originated-records_labels.zip', '198908', '10.8 MB'),
'2014': HmdaDataFile('hmda_2014_az_originated-records_labels.zip', '155001', '7.95 MB'),
'2008': HmdaDataFile('hmda_2008_az_originated-records_labels.zip', '165011', '7.37 MB'),
'2009': HmdaDataFile('hmda_2009_az_originated-records_labels.zip', '190609', '7.98 MB'),
'2011': HmdaDataFile('hmda_2011_az_originated-records_labels.zip', '146231', '7.09 MB'),
'2010': HmdaDataFile('hmda_2010_az_originated-records_labels.zip', '160055', '7.73 MB'),
'2013': HmdaDataFile('hmda_2013_az_originated-records_labels.zip', '224986', '11.14 MB'),
'2012': HmdaDataFile('hmda_2012_az_originated-records_labels.zip', '247572', '12.19 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '201543', '7.26 MB'),
'2007': HmdaDataFile('hmda_2007_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '198791', '6.42 MB'),
'2017': HmdaDataFile('hmda_2017_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '177849', '3.82 MB'),
'2015': HmdaDataFile('hmda_2015_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '166410', '6.44 MB'),
'2014': HmdaDataFile('hmda_2014_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '124580', '4.59 MB'),
'2008': HmdaDataFile('hmda_2008_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '129343', '4.15 MB'),
'2009': HmdaDataFile('hmda_2009_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '161393', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '114583', '3.79 MB'),
'2010': HmdaDataFile('hmda_2010_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '132138', '4.33 MB'),
'2013': HmdaDataFile('hmda_2013_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '176268', '6.24 MB'),
'2012': HmdaDataFile('hmda_2012_az_first-lien-owner-occupied-1-4-family-records_codes.zip', '197491', '6.92 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_az_all-records_codes.zip', '478386', '17.45 MB'),
'2007': HmdaDataFile('hmda_2007_az_all-records_codes.zip', '803675', '24.35 MB'),
'2017': HmdaDataFile('hmda_2017_az_all-records_codes.zip', '428411', '9.68 MB'),
'2015': HmdaDataFile('hmda_2015_az_all-records_codes.zip', '391879', '15.26 MB'),
'2014': HmdaDataFile('hmda_2014_az_all-records_codes.zip', '317345', '11.78 MB'),
'2008': HmdaDataFile('hmda_2008_az_all-records_codes.zip', '425680', '13.42 MB'),
'2009': HmdaDataFile('hmda_2009_az_all-records_codes.zip', '441291', '13.49 MB'),
'2011': HmdaDataFile('hmda_2011_az_all-records_codes.zip', '313348', '10.91 MB'),
'2010': HmdaDataFile('hmda_2010_az_all-records_codes.zip', '350571', '12.17 MB'),
'2013': HmdaDataFile('hmda_2013_az_all-records_codes.zip', '428383', '15.68 MB'),
'2012': HmdaDataFile('hmda_2012_az_all-records_codes.zip', '458365', '16.67 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_az_originated-records_codes.zip', '236688', '8.56 MB'),
'2007': HmdaDataFile('hmda_2007_az_originated-records_codes.zip', '292272', '9.39 MB'),
'2017': HmdaDataFile('hmda_2017_az_originated-records_codes.zip', '213547', '4.61 MB'),
'2015': HmdaDataFile('hmda_2015_az_originated-records_codes.zip', '198908', '7.79 MB'),
'2014': HmdaDataFile('hmda_2014_az_originated-records_codes.zip', '155001', '5.74 MB'),
'2008': HmdaDataFile('hmda_2008_az_originated-records_codes.zip', '165011', '5.32 MB'),
'2009': HmdaDataFile('hmda_2009_az_originated-records_codes.zip', '190609', '5.81 MB'),
'2011': HmdaDataFile('hmda_2011_az_originated-records_codes.zip', '146231', '4.94 MB'),
'2010': HmdaDataFile('hmda_2010_az_originated-records_codes.zip', '160055', '5.38 MB'),
'2013': HmdaDataFile('hmda_2013_az_originated-records_codes.zip', '224986', '8.02 MB'),
'2012': HmdaDataFile('hmda_2012_az_originated-records_codes.zip', '247572', '8.75 MB')
}
}
},
'id': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '49270', '2.03 MB'),
'2007': HmdaDataFile('hmda_2007_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '45546', '1.84 MB'),
'2017': HmdaDataFile('hmda_2017_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '43374', '1.27 MB'),
'2015': HmdaDataFile('hmda_2015_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '41252', '1.97 MB'),
'2014': HmdaDataFile('hmda_2014_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '30539', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '36308', '1.49 MB'),
'2009': HmdaDataFile('hmda_2009_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '49056', '1.87 MB'),
'2011': HmdaDataFile('hmda_2011_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '30665', '1.26 MB'),
'2010': HmdaDataFile('hmda_2010_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '37182', '1.47 MB'),
'2013': HmdaDataFile('hmda_2013_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '41770', '1.75 MB'),
'2012': HmdaDataFile('hmda_2012_id_first-lien-owner-occupied-1-4-family-records_labels.zip', '46607', '1.91 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_id_all-records_labels.zip', '103880', '4.67 MB'),
'2007': HmdaDataFile('hmda_2007_id_all-records_labels.zip', '156706', '6.54 MB'),
'2017': HmdaDataFile('hmda_2017_id_all-records_labels.zip', '92755', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_id_all-records_labels.zip', '89063', '4.72 MB'),
'2014': HmdaDataFile('hmda_2014_id_all-records_labels.zip', '70222', '3.27 MB'),
'2008': HmdaDataFile('hmda_2008_id_all-records_labels.zip', '108689', '4.65 MB'),
'2009': HmdaDataFile('hmda_2009_id_all-records_labels.zip', '125244', '5.05 MB'),
'2011': HmdaDataFile('hmda_2011_id_all-records_labels.zip', '77672', '3.5 MB'),
'2010': HmdaDataFile('hmda_2010_id_all-records_labels.zip', '94170', '4.16 MB'),
'2013': HmdaDataFile('hmda_2013_id_all-records_labels.zip', '97051', '4.49 MB'),
'2012': HmdaDataFile('hmda_2012_id_all-records_labels.zip', '103766', '4.64 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_id_originated-records_labels.zip', '59151', '2.48 MB'),
'2007': HmdaDataFile('hmda_2007_id_originated-records_labels.zip', '67396', '2.74 MB'),
'2017': HmdaDataFile('hmda_2017_id_originated-records_labels.zip', '54102', '1.62 MB'),
'2015': HmdaDataFile('hmda_2015_id_originated-records_labels.zip', '50554', '2.46 MB'),
'2014': HmdaDataFile('hmda_2014_id_originated-records_labels.zip', '38605', '1.67 MB'),
'2008': HmdaDataFile('hmda_2008_id_originated-records_labels.zip', '46531', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_id_originated-records_labels.zip', '56985', '2.2 MB'),
'2011': HmdaDataFile('hmda_2011_id_originated-records_labels.zip', '37943', '1.6 MB'),
'2010': HmdaDataFile('hmda_2010_id_originated-records_labels.zip', '44663', '1.79 MB'),
'2013': HmdaDataFile('hmda_2013_id_originated-records_labels.zip', '53109', '2.27 MB'),
'2012': HmdaDataFile('hmda_2012_id_originated-records_labels.zip', '57188', '2.39 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '49270', '1.31 MB'),
'2007': HmdaDataFile('hmda_2007_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '45546', '1.22 MB'),
'2017': HmdaDataFile('hmda_2017_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '43374', '882.08 KB'),
'2015': HmdaDataFile('hmda_2015_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '41252', '1.28 MB'),
'2014': HmdaDataFile('hmda_2014_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '30539', '834.4 KB'),
'2008': HmdaDataFile('hmda_2008_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '36308', '1 MB'),
'2009': HmdaDataFile('hmda_2009_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '49056', '1.27 MB'),
'2011': HmdaDataFile('hmda_2011_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '30665', '802.44 KB'),
'2010': HmdaDataFile('hmda_2010_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '37182', '944.45 KB'),
'2013': HmdaDataFile('hmda_2013_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '41770', '1.12 MB'),
'2012': HmdaDataFile('hmda_2012_id_first-lien-owner-occupied-1-4-family-records_codes.zip', '46607', '1.21 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_id_all-records_codes.zip', '103880', '2.88 MB'),
'2007': HmdaDataFile('hmda_2007_id_all-records_codes.zip', '156706', '4.26 MB'),
'2017': HmdaDataFile('hmda_2017_id_all-records_codes.zip', '92755', '1.96 MB'),
'2015': HmdaDataFile('hmda_2015_id_all-records_codes.zip', '89063', '2.95 MB'),
'2014': HmdaDataFile('hmda_2014_id_all-records_codes.zip', '70222', '2.02 MB'),
'2008': HmdaDataFile('hmda_2008_id_all-records_codes.zip', '108689', '3.03 MB'),
'2009': HmdaDataFile('hmda_2009_id_all-records_codes.zip', '125244', '3.33 MB'),
'2011': HmdaDataFile('hmda_2011_id_all-records_codes.zip', '77672', '2.17 MB'),
'2010': HmdaDataFile('hmda_2010_id_all-records_codes.zip', '94170', '2.57 MB'),
'2013': HmdaDataFile('hmda_2013_id_all-records_codes.zip', '97051', '2.76 MB'),
'2012': HmdaDataFile('hmda_2012_id_all-records_codes.zip', '103766', '2.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_id_originated-records_codes.zip', '59151', '1.59 MB'),
'2007': HmdaDataFile('hmda_2007_id_originated-records_codes.zip', '67396', '1.83 MB'),
'2017': HmdaDataFile('hmda_2017_id_originated-records_codes.zip', '54102', '1.11 MB'),
'2015': HmdaDataFile('hmda_2015_id_originated-records_codes.zip', '50554', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_id_originated-records_codes.zip', '38605', '1.07 MB'),
'2008': HmdaDataFile('hmda_2008_id_originated-records_codes.zip', '46531', '1.29 MB'),
'2009': HmdaDataFile('hmda_2009_id_originated-records_codes.zip', '56985', '1.49 MB'),
'2011': HmdaDataFile('hmda_2011_id_originated-records_codes.zip', '37943', '1.02 MB'),
'2010': HmdaDataFile('hmda_2010_id_originated-records_codes.zip', '44663', '1.14 MB'),
'2013': HmdaDataFile('hmda_2013_id_originated-records_codes.zip', '53109', '1.45 MB'),
'2012': HmdaDataFile('hmda_2012_id_originated-records_codes.zip', '57188', '1.51 MB')
}
}
},
'ct': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '66025', '3.2 MB'),
'2007': HmdaDataFile('hmda_2007_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '89152', '4.26 MB'),
'2017': HmdaDataFile('hmda_2017_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '57398', '1.83 MB'),
'2015': HmdaDataFile('hmda_2015_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '59656', '3.12 MB'),
'2014': HmdaDataFile('hmda_2014_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '49553', '2.54 MB'),
'2008': HmdaDataFile('hmda_2008_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '68658', '3.22 MB'),
'2009': HmdaDataFile('hmda_2009_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '104310', '4.56 MB'),
'2011': HmdaDataFile('hmda_2011_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '78978', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '93797', '4.54 MB'),
'2013': HmdaDataFile('hmda_2013_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '87390', '4.18 MB'),
'2012': HmdaDataFile('hmda_2012_ct_first-lien-owner-occupied-1-4-family-records_labels.zip', '105049', '4.95 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ct_all-records_labels.zip', '146885', '7.73 MB'),
'2007': HmdaDataFile('hmda_2007_ct_all-records_labels.zip', '301760', '15.1 MB'),
'2017': HmdaDataFile('hmda_2017_ct_all-records_labels.zip', '129401', '4.77 MB'),
'2015': HmdaDataFile('hmda_2015_ct_all-records_labels.zip', '132491', '7.44 MB'),
'2014': HmdaDataFile('hmda_2014_ct_all-records_labels.zip', '114931', '6.31 MB'),
'2008': HmdaDataFile('hmda_2008_ct_all-records_labels.zip', '193168', '9.76 MB'),
'2009': HmdaDataFile('hmda_2009_ct_all-records_labels.zip', '235997', '11.17 MB'),
'2011': HmdaDataFile('hmda_2011_ct_all-records_labels.zip', '177460', '9.5 MB'),
'2010': HmdaDataFile('hmda_2010_ct_all-records_labels.zip', '204936', '10.79 MB'),
'2013': HmdaDataFile('hmda_2013_ct_all-records_labels.zip', '187158', '9.86 MB'),
'2012': HmdaDataFile('hmda_2012_ct_all-records_labels.zip', '214191', '11.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ct_originated-records_labels.zip', '75239', '3.73 MB'),
'2007': HmdaDataFile('hmda_2007_ct_originated-records_labels.zip', '126259', '6.09 MB'),
'2017': HmdaDataFile('hmda_2017_ct_originated-records_labels.zip', '67000', '2.19 MB'),
'2015': HmdaDataFile('hmda_2015_ct_originated-records_labels.zip', '68865', '3.65 MB'),
'2014': HmdaDataFile('hmda_2014_ct_originated-records_labels.zip', '58456', '3.04 MB'),
'2008': HmdaDataFile('hmda_2008_ct_originated-records_labels.zip', '84484', '4.01 MB'),
'2009': HmdaDataFile('hmda_2009_ct_originated-records_labels.zip', '113317', '5.04 MB'),
'2011': HmdaDataFile('hmda_2011_ct_originated-records_labels.zip', '87795', '4.47 MB'),
'2010': HmdaDataFile('hmda_2010_ct_originated-records_labels.zip', '102545', '5.08 MB'),
'2013': HmdaDataFile('hmda_2013_ct_originated-records_labels.zip', '98739', '4.8 MB'),
'2012': HmdaDataFile('hmda_2012_ct_originated-records_labels.zip', '115361', '5.49 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '66025', '2.19 MB'),
'2007': HmdaDataFile('hmda_2007_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '89152', '2.98 MB'),
'2017': HmdaDataFile('hmda_2017_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '57398', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '59656', '2.2 MB'),
'2014': HmdaDataFile('hmda_2014_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '49553', '1.74 MB'),
'2008': HmdaDataFile('hmda_2008_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '68658', '2.25 MB'),
'2009': HmdaDataFile('hmda_2009_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '104310', '3.22 MB'),
'2011': HmdaDataFile('hmda_2011_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '78978', '2.66 MB'),
'2010': HmdaDataFile('hmda_2010_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '93797', '3.09 MB'),
'2013': HmdaDataFile('hmda_2013_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '87390', '2.84 MB'),
'2012': HmdaDataFile('hmda_2012_ct_first-lien-owner-occupied-1-4-family-records_codes.zip', '105049', '3.31 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ct_all-records_codes.zip', '146885', '5.17 MB'),
'2007': HmdaDataFile('hmda_2007_ct_all-records_codes.zip', '301760', '10.45 MB'),
'2017': HmdaDataFile('hmda_2017_ct_all-records_codes.zip', '129401', '3.24 MB'),
'2015': HmdaDataFile('hmda_2015_ct_all-records_codes.zip', '132491', '5.05 MB'),
'2014': HmdaDataFile('hmda_2014_ct_all-records_codes.zip', '114931', '4.22 MB'),
'2008': HmdaDataFile('hmda_2008_ct_all-records_codes.zip', '193168', '6.73 MB'),
'2009': HmdaDataFile('hmda_2009_ct_all-records_codes.zip', '235997', '7.73 MB'),
'2011': HmdaDataFile('hmda_2011_ct_all-records_codes.zip', '177460', '6.28 MB'),
'2010': HmdaDataFile('hmda_2010_ct_all-records_codes.zip', '204936', '7.18 MB'),
'2013': HmdaDataFile('hmda_2013_ct_all-records_codes.zip', '187158', '6.56 MB'),
'2012': HmdaDataFile('hmda_2012_ct_all-records_codes.zip', '214191', '7.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ct_originated-records_codes.zip', '75239', '2.54 MB'),
'2007': HmdaDataFile('hmda_2007_ct_originated-records_codes.zip', '126259', '4.28 MB'),
'2017': HmdaDataFile('hmda_2017_ct_originated-records_codes.zip', '67000', '1.55 MB'),
'2015': HmdaDataFile('hmda_2015_ct_originated-records_codes.zip', '68865', '2.57 MB'),
'2014': HmdaDataFile('hmda_2014_ct_originated-records_codes.zip', '58456', '2.07 MB'),
'2008': HmdaDataFile('hmda_2008_ct_originated-records_codes.zip', '84484', '2.79 MB'),
'2009': HmdaDataFile('hmda_2009_ct_originated-records_codes.zip', '113317', '3.55 MB'),
'2011': HmdaDataFile('hmda_2011_ct_originated-records_codes.zip', '87795', '3.04 MB'),
'2010': HmdaDataFile('hmda_2010_ct_originated-records_codes.zip', '102545', '3.46 MB'),
'2013': HmdaDataFile('hmda_2013_ct_originated-records_codes.zip', '98739', '3.24 MB'),
'2012': HmdaDataFile('hmda_2012_ct_originated-records_codes.zip', '115361', '3.66 MB')
}
}
},
'nh': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '32293', '1.45 MB'),
'2007': HmdaDataFile('hmda_2007_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '30786', '1.35 MB'),
'2017': HmdaDataFile('hmda_2017_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '28246', '825.86 KB'),
'2015': HmdaDataFile('hmda_2015_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '27470', '1.33 MB'),
'2014': HmdaDataFile('hmda_2014_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '21418', '953.08 KB'),
'2008': HmdaDataFile('hmda_2008_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '25186', '1.1 MB'),
'2009': HmdaDataFile('hmda_2009_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '40099', '1.65 MB'),
'2011': HmdaDataFile('hmda_2011_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '30214', '1.28 MB'),
'2010': HmdaDataFile('hmda_2010_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '35463', '1.49 MB'),
'2013': HmdaDataFile('hmda_2013_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '34908', '1.55 MB'),
'2012': HmdaDataFile('hmda_2012_nh_first-lien-owner-occupied-1-4-family-records_labels.zip', '41909', '1.8 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nh_all-records_labels.zip', '72628', '3.51 MB'),
'2007': HmdaDataFile('hmda_2007_nh_all-records_labels.zip', '116500', '5.13 MB'),
'2017': HmdaDataFile('hmda_2017_nh_all-records_labels.zip', '65016', '2 MB'),
'2015': HmdaDataFile('hmda_2015_nh_all-records_labels.zip', '63482', '3.36 MB'),
'2014': HmdaDataFile('hmda_2014_nh_all-records_labels.zip', '53373', '2.55 MB'),
'2008': HmdaDataFile('hmda_2008_nh_all-records_labels.zip', '78591', '3.53 MB'),
'2009': HmdaDataFile('hmda_2009_nh_all-records_labels.zip', '100680', '4.29 MB'),
'2011': HmdaDataFile('hmda_2011_nh_all-records_labels.zip', '75090', '3.48 MB'),
'2010': HmdaDataFile('hmda_2010_nh_all-records_labels.zip', '85990', '3.93 MB'),
'2013': HmdaDataFile('hmda_2013_nh_all-records_labels.zip', '80737', '3.88 MB'),
'2012': HmdaDataFile('hmda_2012_nh_all-records_labels.zip', '92574', '4.33 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nh_originated-records_labels.zip', '38262', '1.76 MB'),
'2007': HmdaDataFile('hmda_2007_nh_originated-records_labels.zip', '46139', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_nh_originated-records_labels.zip', '34290', '1.02 MB'),
'2015': HmdaDataFile('hmda_2015_nh_originated-records_labels.zip', '33120', '1.64 MB'),
'2014': HmdaDataFile('hmda_2014_nh_originated-records_labels.zip', '26589', '1.21 MB'),
'2008': HmdaDataFile('hmda_2008_nh_originated-records_labels.zip', '32850', '1.46 MB'),
'2009': HmdaDataFile('hmda_2009_nh_originated-records_labels.zip', '45790', '1.91 MB'),
'2011': HmdaDataFile('hmda_2011_nh_originated-records_labels.zip', '35321', '1.53 MB'),
'2010': HmdaDataFile('hmda_2010_nh_originated-records_labels.zip', '40696', '1.75 MB'),
'2013': HmdaDataFile('hmda_2013_nh_originated-records_labels.zip', '41589', '1.88 MB'),
'2012': HmdaDataFile('hmda_2012_nh_originated-records_labels.zip', '48098', '2.12 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '32293', '929.81 KB'),
'2007': HmdaDataFile('hmda_2007_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '30786', '870.93 KB'),
'2017': HmdaDataFile('hmda_2017_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '28246', '579.48 KB'),
'2015': HmdaDataFile('hmda_2015_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '27470', '880.79 KB'),
'2014': HmdaDataFile('hmda_2014_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '21418', '605.7 KB'),
'2008': HmdaDataFile('hmda_2008_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '25186', '724.38 KB'),
'2009': HmdaDataFile('hmda_2009_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '40099', '1.1 MB'),
'2011': HmdaDataFile('hmda_2011_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '30214', '801.84 KB'),
'2010': HmdaDataFile('hmda_2010_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '35463', '934.66 KB'),
'2013': HmdaDataFile('hmda_2013_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '34908', '991.18 KB'),
'2012': HmdaDataFile('hmda_2012_nh_first-lien-owner-occupied-1-4-family-records_codes.zip', '41909', '1.14 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nh_all-records_codes.zip', '72628', '2.16 MB'),
'2007': HmdaDataFile('hmda_2007_nh_all-records_codes.zip', '116500', '3.28 MB'),
'2017': HmdaDataFile('hmda_2017_nh_all-records_codes.zip', '65016', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_nh_all-records_codes.zip', '63482', '2.15 MB'),
'2014': HmdaDataFile('hmda_2014_nh_all-records_codes.zip', '53373', '1.56 MB'),
'2008': HmdaDataFile('hmda_2008_nh_all-records_codes.zip', '78591', '2.27 MB'),
'2009': HmdaDataFile('hmda_2009_nh_all-records_codes.zip', '100680', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_nh_all-records_codes.zip', '75090', '2.12 MB'),
'2010': HmdaDataFile('hmda_2010_nh_all-records_codes.zip', '85990', '2.4 MB'),
'2013': HmdaDataFile('hmda_2013_nh_all-records_codes.zip', '80737', '2.4 MB'),
'2012': HmdaDataFile('hmda_2012_nh_all-records_codes.zip', '92574', '2.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nh_originated-records_codes.zip', '38262', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_nh_originated-records_codes.zip', '46139', '1.31 MB'),
'2017': HmdaDataFile('hmda_2017_nh_originated-records_codes.zip', '34290', '702.18 KB'),
'2015': HmdaDataFile('hmda_2015_nh_originated-records_codes.zip', '33120', '1.08 MB'),
'2014': HmdaDataFile('hmda_2014_nh_originated-records_codes.zip', '26589', '760.91 KB'),
'2008': HmdaDataFile('hmda_2008_nh_originated-records_codes.zip', '32850', '959.54 KB'),
'2009': HmdaDataFile('hmda_2009_nh_originated-records_codes.zip', '45790', '1.27 MB'),
'2011': HmdaDataFile('hmda_2011_nh_originated-records_codes.zip', '35321', '955.91 KB'),
'2010': HmdaDataFile('hmda_2010_nh_originated-records_codes.zip', '40696', '1.09 MB'),
'2013': HmdaDataFile('hmda_2013_nh_originated-records_codes.zip', '41589', '1.2 MB'),
'2012': HmdaDataFile('hmda_2012_nh_originated-records_codes.zip', '48098', '1.33 MB')
}
}
},
'nj': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '168437', '8.44 MB'),
'2007': HmdaDataFile('hmda_2007_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '222771', '10.95 MB'),
'2017': HmdaDataFile('hmda_2017_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '141365', '4.57 MB'),
'2015': HmdaDataFile('hmda_2015_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '147544', '8.22 MB'),
'2014': HmdaDataFile('hmda_2014_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '113481', '5.78 MB'),
'2008': HmdaDataFile('hmda_2008_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '166597', '8.06 MB'),
'2009': HmdaDataFile('hmda_2009_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '247580', '11.18 MB'),
'2011': HmdaDataFile('hmda_2011_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '184520', '9.28 MB'),
'2010': HmdaDataFile('hmda_2010_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '206564', '10.4 MB'),
'2013': HmdaDataFile('hmda_2013_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '194532', '9.73 MB'),
'2012': HmdaDataFile('hmda_2012_nj_first-lien-owner-occupied-1-4-family-records_labels.zip', '237169', '11.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nj_all-records_labels.zip', '399389', '21.26 MB'),
'2007': HmdaDataFile('hmda_2007_nj_all-records_labels.zip', '808103', '39.11 MB'),
'2017': HmdaDataFile('hmda_2017_nj_all-records_labels.zip', '349563', '12.38 MB'),
'2015': HmdaDataFile('hmda_2015_nj_all-records_labels.zip', '354746', '21.29 MB'),
'2014': HmdaDataFile('hmda_2014_nj_all-records_labels.zip', '289377', '15.65 MB'),
'2008': HmdaDataFile('hmda_2008_nj_all-records_labels.zip', '514816', '25.05 MB'),
'2009': HmdaDataFile('hmda_2009_nj_all-records_labels.zip', '613066', '28.18 MB'),
'2011': HmdaDataFile('hmda_2011_nj_all-records_labels.zip', '451221', '24.28 MB'),
'2010': HmdaDataFile('hmda_2010_nj_all-records_labels.zip', '499489', '27 MB'),
'2013': HmdaDataFile('hmda_2013_nj_all-records_labels.zip', '460264', '24.64 MB'),
'2012': HmdaDataFile('hmda_2012_nj_all-records_labels.zip', '541802', '29.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nj_originated-records_labels.zip', '194913', '9.86 MB'),
'2007': HmdaDataFile('hmda_2007_nj_originated-records_labels.zip', '317276', '15.57 MB'),
'2017': HmdaDataFile('hmda_2017_nj_originated-records_labels.zip', '169196', '5.6 MB'),
'2015': HmdaDataFile('hmda_2015_nj_originated-records_labels.zip', '171685', '9.68 MB'),
'2014': HmdaDataFile('hmda_2014_nj_originated-records_labels.zip', '135675', '6.98 MB'),
'2008': HmdaDataFile('hmda_2008_nj_originated-records_labels.zip', '208721', '10.17 MB'),
'2009': HmdaDataFile('hmda_2009_nj_originated-records_labels.zip', '274489', '12.53 MB'),
'2011': HmdaDataFile('hmda_2011_nj_originated-records_labels.zip', '210948', '10.71 MB'),
'2010': HmdaDataFile('hmda_2010_nj_originated-records_labels.zip', '232001', '11.8 MB'),
'2013': HmdaDataFile('hmda_2013_nj_originated-records_labels.zip', '226181', '11.43 MB'),
'2012': HmdaDataFile('hmda_2012_nj_originated-records_labels.zip', '269377', '13.64 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '168437', '6.08 MB'),
'2007': HmdaDataFile('hmda_2007_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '222771', '7.81 MB'),
'2017': HmdaDataFile('hmda_2017_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '141365', '3.22 MB'),
'2015': HmdaDataFile('hmda_2015_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '147544', '5.93 MB'),
'2014': HmdaDataFile('hmda_2014_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '113481', '4.17 MB'),
'2008': HmdaDataFile('hmda_2008_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '166597', '5.77 MB'),
'2009': HmdaDataFile('hmda_2009_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '247580', '8.06 MB'),
'2011': HmdaDataFile('hmda_2011_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '184520', '6.49 MB'),
'2010': HmdaDataFile('hmda_2010_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '206564', '7.36 MB'),
'2013': HmdaDataFile('hmda_2013_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '194532', '6.89 MB'),
'2012': HmdaDataFile('hmda_2012_nj_first-lien-owner-occupied-1-4-family-records_codes.zip', '237169', '8.4 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nj_all-records_codes.zip', '399389', '14.78 MB'),
'2007': HmdaDataFile('hmda_2007_nj_all-records_codes.zip', '808103', '27.04 MB'),
'2017': HmdaDataFile('hmda_2017_nj_all-records_codes.zip', '349563', '8.1 MB'),
'2015': HmdaDataFile('hmda_2015_nj_all-records_codes.zip', '354746', '14.72 MB'),
'2014': HmdaDataFile('hmda_2014_nj_all-records_codes.zip', '289377', '10.83 MB'),
'2008': HmdaDataFile('hmda_2008_nj_all-records_codes.zip', '514816', '17.43 MB'),
'2009': HmdaDataFile('hmda_2009_nj_all-records_codes.zip', '613066', '19.76 MB'),
'2011': HmdaDataFile('hmda_2011_nj_all-records_codes.zip', '451221', '16.47 MB'),
'2010': HmdaDataFile('hmda_2010_nj_all-records_codes.zip', '499489', '18.44 MB'),
'2013': HmdaDataFile('hmda_2013_nj_all-records_codes.zip', '460264', '16.86 MB'),
'2012': HmdaDataFile('hmda_2012_nj_all-records_codes.zip', '541802', '19.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nj_originated-records_codes.zip', '194913', '7.06 MB'),
'2007': HmdaDataFile('hmda_2007_nj_originated-records_codes.zip', '317276', '11.05 MB'),
'2017': HmdaDataFile('hmda_2017_nj_originated-records_codes.zip', '169196', '3.91 MB'),
'2015': HmdaDataFile('hmda_2015_nj_originated-records_codes.zip', '171685', '6.93 MB'),
'2014': HmdaDataFile('hmda_2014_nj_originated-records_codes.zip', '135675', '5 MB'),
'2008': HmdaDataFile('hmda_2008_nj_originated-records_codes.zip', '208721', '7.24 MB'),
'2009': HmdaDataFile('hmda_2009_nj_originated-records_codes.zip', '274489', '9.01 MB'),
'2011': HmdaDataFile('hmda_2011_nj_originated-records_codes.zip', '210948', '7.44 MB'),
'2010': HmdaDataFile('hmda_2010_nj_originated-records_codes.zip', '232001', '8.28 MB'),
'2013': HmdaDataFile('hmda_2013_nj_originated-records_codes.zip', '226181', '8.05 MB'),
'2012': HmdaDataFile('hmda_2012_nj_originated-records_codes.zip', '269377', '9.59 MB')
}
}
},
'nm': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '32653', '1.47 MB'),
'2007': HmdaDataFile('hmda_2007_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '47002', '2.07 MB'),
'2017': HmdaDataFile('hmda_2017_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '28982', '924.59 KB'),
'2015': HmdaDataFile('hmda_2015_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '29038', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '24491', '1.13 MB'),
'2008': HmdaDataFile('hmda_2008_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '35421', '1.54 MB'),
'2009': HmdaDataFile('hmda_2009_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '45147', '1.83 MB'),
'2011': HmdaDataFile('hmda_2011_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '31586', '1.35 MB'),
'2010': HmdaDataFile('hmda_2010_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '36691', '1.68 MB'),
'2013': HmdaDataFile('hmda_2013_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '38141', '1.77 MB'),
'2012': HmdaDataFile('hmda_2012_nm_first-lien-owner-occupied-1-4-family-records_labels.zip', '42629', '1.91 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nm_all-records_labels.zip', '89390', '4.48 MB'),
'2007': HmdaDataFile('hmda_2007_nm_all-records_labels.zip', '178911', '8.19 MB'),
'2017': HmdaDataFile('hmda_2017_nm_all-records_labels.zip', '80423', '2.86 MB'),
'2015': HmdaDataFile('hmda_2015_nm_all-records_labels.zip', '79236', '4.37 MB'),
'2014': HmdaDataFile('hmda_2014_nm_all-records_labels.zip', '71841', '3.67 MB'),
'2008': HmdaDataFile('hmda_2008_nm_all-records_labels.zip', '114678', '5.3 MB'),
'2009': HmdaDataFile('hmda_2009_nm_all-records_labels.zip', '123495', '5.37 MB'),
'2011': HmdaDataFile('hmda_2011_nm_all-records_labels.zip', '90785', '4.39 MB'),
'2010': HmdaDataFile('hmda_2010_nm_all-records_labels.zip', '104045', '5.33 MB'),
'2013': HmdaDataFile('hmda_2013_nm_all-records_labels.zip', '102217', '5.23 MB'),
'2012': HmdaDataFile('hmda_2012_nm_all-records_labels.zip', '108074', '5.4 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nm_originated-records_labels.zip', '41334', '1.93 MB'),
'2007': HmdaDataFile('hmda_2007_nm_originated-records_labels.zip', '69876', '3.15 MB'),
'2017': HmdaDataFile('hmda_2017_nm_originated-records_labels.zip', '37465', '1.24 MB'),
'2015': HmdaDataFile('hmda_2015_nm_originated-records_labels.zip', '37572', '1.93 MB'),
'2014': HmdaDataFile('hmda_2014_nm_originated-records_labels.zip', '32547', '1.57 MB'),
'2008': HmdaDataFile('hmda_2008_nm_originated-records_labels.zip', '46512', '2.09 MB'),
'2009': HmdaDataFile('hmda_2009_nm_originated-records_labels.zip', '54007', '2.27 MB'),
'2011': HmdaDataFile('hmda_2011_nm_originated-records_labels.zip', '39979', '1.8 MB'),
'2010': HmdaDataFile('hmda_2010_nm_originated-records_labels.zip', '45261', '2.12 MB'),
'2013': HmdaDataFile('hmda_2013_nm_originated-records_labels.zip', '49389', '2.34 MB'),
'2012': HmdaDataFile('hmda_2012_nm_originated-records_labels.zip', '53038', '2.41 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '32653', '980.97 KB'),
'2007': HmdaDataFile('hmda_2007_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '47002', '1.42 MB'),
'2017': HmdaDataFile('hmda_2017_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '28982', '644.64 KB'),
'2015': HmdaDataFile('hmda_2015_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '29038', '989.53 KB'),
'2014': HmdaDataFile('hmda_2014_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '24491', '759.74 KB'),
'2008': HmdaDataFile('hmda_2008_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '35421', '1.05 MB'),
'2009': HmdaDataFile('hmda_2009_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '45147', '1.26 MB'),
'2011': HmdaDataFile('hmda_2011_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '31586', '892.04 KB'),
'2010': HmdaDataFile('hmda_2010_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '36691', '1.12 MB'),
'2013': HmdaDataFile('hmda_2013_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '38141', '1.18 MB'),
'2012': HmdaDataFile('hmda_2012_nm_first-lien-owner-occupied-1-4-family-records_codes.zip', '42629', '1.26 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nm_all-records_codes.zip', '89390', '2.89 MB'),
'2007': HmdaDataFile('hmda_2007_nm_all-records_codes.zip', '178911', '5.48 MB'),
'2017': HmdaDataFile('hmda_2017_nm_all-records_codes.zip', '80423', '1.88 MB'),
'2015': HmdaDataFile('hmda_2015_nm_all-records_codes.zip', '79236', '2.87 MB'),
'2014': HmdaDataFile('hmda_2014_nm_all-records_codes.zip', '71841', '2.38 MB'),
'2008': HmdaDataFile('hmda_2008_nm_all-records_codes.zip', '114678', '3.55 MB'),
'2009': HmdaDataFile('hmda_2009_nm_all-records_codes.zip', '123495', '3.6 MB'),
'2011': HmdaDataFile('hmda_2011_nm_all-records_codes.zip', '90785', '2.81 MB'),
'2010': HmdaDataFile('hmda_2010_nm_all-records_codes.zip', '104045', '3.42 MB'),
'2013': HmdaDataFile('hmda_2013_nm_all-records_codes.zip', '102217', '3.39 MB'),
'2012': HmdaDataFile('hmda_2012_nm_all-records_codes.zip', '108074', '3.48 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nm_originated-records_codes.zip', '41334', '1.3 MB'),
'2007': HmdaDataFile('hmda_2007_nm_originated-records_codes.zip', '69876', '2.15 MB'),
'2017': HmdaDataFile('hmda_2017_nm_originated-records_codes.zip', '37465', '864.54 KB'),
'2015': HmdaDataFile('hmda_2015_nm_originated-records_codes.zip', '37572', '1.32 MB'),
'2014': HmdaDataFile('hmda_2014_nm_originated-records_codes.zip', '32547', '1.05 MB'),
'2008': HmdaDataFile('hmda_2008_nm_originated-records_codes.zip', '46512', '1.43 MB'),
'2009': HmdaDataFile('hmda_2009_nm_originated-records_codes.zip', '54007', '1.56 MB'),
'2011': HmdaDataFile('hmda_2011_nm_originated-records_codes.zip', '39979', '1.19 MB'),
'2010': HmdaDataFile('hmda_2010_nm_originated-records_codes.zip', '45261', '1.4 MB'),
'2013': HmdaDataFile('hmda_2013_nm_originated-records_codes.zip', '49389', '1.56 MB'),
'2012': HmdaDataFile('hmda_2012_nm_originated-records_codes.zip', '53038', '1.59 MB')
}
}
},
'tx': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '513200', '26.59 MB'),
'2007': HmdaDataFile('hmda_2007_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '454756', '22.34 MB'),
'2017': HmdaDataFile('hmda_2017_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '457884', '15.35 MB'),
'2015': HmdaDataFile('hmda_2015_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '460596', '26.85 MB'),
'2014': HmdaDataFile('hmda_2014_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '391921', '20.38 MB'),
'2008': HmdaDataFile('hmda_2008_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '364311', '17.57 MB'),
'2009': HmdaDataFile('hmda_2009_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '448222', '20.5 MB'),
'2011': HmdaDataFile('hmda_2011_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '391184', '19.56 MB'),
'2010': HmdaDataFile('hmda_2010_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '405050', '20.17 MB'),
'2013': HmdaDataFile('hmda_2013_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '503627', '26.39 MB'),
'2012': HmdaDataFile('hmda_2012_tx_first-lien-owner-occupied-1-4-family-records_labels.zip', '502655', '26.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tx_all-records_labels.zip', '1266767', '70.12 MB'),
'2007': HmdaDataFile('hmda_2007_tx_all-records_labels.zip', '1723576', '83.63 MB'),
'2017': HmdaDataFile('hmda_2017_tx_all-records_labels.zip', '1148206', '42.77 MB'),
'2015': HmdaDataFile('hmda_2015_tx_all-records_labels.zip', '1139573', '71.35 MB'),
'2014': HmdaDataFile('hmda_2014_tx_all-records_labels.zip', '1011598', '55.94 MB'),
'2008': HmdaDataFile('hmda_2008_tx_all-records_labels.zip', '1204457', '58.59 MB'),
'2009': HmdaDataFile('hmda_2009_tx_all-records_labels.zip', '1242037', '57.86 MB'),
'2011': HmdaDataFile('hmda_2011_tx_all-records_labels.zip', '1038591', '56.64 MB'),
'2010': HmdaDataFile('hmda_2010_tx_all-records_labels.zip', '1063486', '58.04 MB'),
'2013': HmdaDataFile('hmda_2013_tx_all-records_labels.zip', '1254738', '70.8 MB'),
'2012': HmdaDataFile('hmda_2012_tx_all-records_labels.zip', '1221801', '69.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tx_originated-records_labels.zip', '613325', '32.57 MB'),
'2007': HmdaDataFile('hmda_2007_tx_originated-records_labels.zip', '653817', '32.47 MB'),
'2017': HmdaDataFile('hmda_2017_tx_originated-records_labels.zip', '559492', '19.3 MB'),
'2015': HmdaDataFile('hmda_2015_tx_originated-records_labels.zip', '557266', '33.01 MB'),
'2014': HmdaDataFile('hmda_2014_tx_originated-records_labels.zip', '484747', '25.75 MB'),
'2008': HmdaDataFile('hmda_2008_tx_originated-records_labels.zip', '473701', '23.41 MB'),
'2009': HmdaDataFile('hmda_2009_tx_originated-records_labels.zip', '520422', '24.41 MB'),
'2011': HmdaDataFile('hmda_2011_tx_originated-records_labels.zip', '466338', '24 MB'),
'2010': HmdaDataFile('hmda_2010_tx_originated-records_labels.zip', '476566', '24.45 MB'),
'2013': HmdaDataFile('hmda_2013_tx_originated-records_labels.zip', '611180', '32.38 MB'),
'2012': HmdaDataFile('hmda_2012_tx_originated-records_labels.zip', '594151', '31.5 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '513200', '18.9 MB'),
'2007': HmdaDataFile('hmda_2007_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '454756', '16.07 MB'),
'2017': HmdaDataFile('hmda_2017_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '457884', '10.67 MB'),
'2015': HmdaDataFile('hmda_2015_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '460596', '19.03 MB'),
'2014': HmdaDataFile('hmda_2014_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '391921', '14.47 MB'),
'2008': HmdaDataFile('hmda_2008_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '364311', '12.62 MB'),
'2009': HmdaDataFile('hmda_2009_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '448222', '14.81 MB'),
'2011': HmdaDataFile('hmda_2011_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '391184', '13.52 MB'),
'2010': HmdaDataFile('hmda_2010_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '405050', '13.93 MB'),
'2013': HmdaDataFile('hmda_2013_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '503627', '18.83 MB'),
'2012': HmdaDataFile('hmda_2012_tx_first-lien-owner-occupied-1-4-family-records_codes.zip', '502655', '18.62 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tx_all-records_codes.zip', '1266767', '48.14 MB'),
'2007': HmdaDataFile('hmda_2007_tx_all-records_codes.zip', '1723576', '58.17 MB'),
'2017': HmdaDataFile('hmda_2017_tx_all-records_codes.zip', '1148206', '27.41 MB'),
'2015': HmdaDataFile('hmda_2015_tx_all-records_codes.zip', '1139573', '48.5 MB'),
'2014': HmdaDataFile('hmda_2014_tx_all-records_codes.zip', '1011598', '38.26 MB'),
'2008': HmdaDataFile('hmda_2008_tx_all-records_codes.zip', '1204457', '40.77 MB'),
'2009': HmdaDataFile('hmda_2009_tx_all-records_codes.zip', '1242037', '40.49 MB'),
'2011': HmdaDataFile('hmda_2011_tx_all-records_codes.zip', '1038591', '38.13 MB'),
'2010': HmdaDataFile('hmda_2010_tx_all-records_codes.zip', '1063486', '39.1 MB'),
'2013': HmdaDataFile('hmda_2013_tx_all-records_codes.zip', '1254738', '48.99 MB'),
'2012': HmdaDataFile('hmda_2012_tx_all-records_codes.zip', '1221801', '47.68 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tx_originated-records_codes.zip', '613325', '23.08 MB'),
'2007': HmdaDataFile('hmda_2007_tx_originated-records_codes.zip', '653817', '23.29 MB'),
'2017': HmdaDataFile('hmda_2017_tx_originated-records_codes.zip', '559492', '13.3 MB'),
'2015': HmdaDataFile('hmda_2015_tx_originated-records_codes.zip', '557266', '23.23 MB'),
'2014': HmdaDataFile('hmda_2014_tx_originated-records_codes.zip', '484747', '18.19 MB'),
'2008': HmdaDataFile('hmda_2008_tx_originated-records_codes.zip', '473701', '16.77 MB'),
'2009': HmdaDataFile('hmda_2009_tx_originated-records_codes.zip', '520422', '17.57 MB'),
'2011': HmdaDataFile('hmda_2011_tx_originated-records_codes.zip', '466338', '16.56 MB'),
'2010': HmdaDataFile('hmda_2010_tx_originated-records_codes.zip', '476566', '16.85 MB'),
'2013': HmdaDataFile('hmda_2013_tx_originated-records_codes.zip', '611180', '22.91 MB'),
'2012': HmdaDataFile('hmda_2012_tx_originated-records_codes.zip', '594151', '22.16 MB')
}
}
},
'la': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '72627', '3.46 MB'),
'2007': HmdaDataFile('hmda_2007_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '86710', '4.14 MB'),
'2017': HmdaDataFile('hmda_2017_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '61729', '1.89 MB'),
'2015': HmdaDataFile('hmda_2015_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '67942', '3.69 MB'),
'2014': HmdaDataFile('hmda_2014_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '59398', '2.86 MB'),
'2008': HmdaDataFile('hmda_2008_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '69191', '3.26 MB'),
'2009': HmdaDataFile('hmda_2009_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '81760', '3.74 MB'),
'2011': HmdaDataFile('hmda_2011_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '69735', '3.2 MB'),
'2010': HmdaDataFile('hmda_2010_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '75645', '3.55 MB'),
'2013': HmdaDataFile('hmda_2013_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '77713', '3.68 MB'),
'2012': HmdaDataFile('hmda_2012_la_first-lien-owner-occupied-1-4-family-records_labels.zip', '86148', '4.02 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_la_all-records_labels.zip', '195937', '10.16 MB'),
'2007': HmdaDataFile('hmda_2007_la_all-records_labels.zip', '300738', '14.74 MB'),
'2017': HmdaDataFile('hmda_2017_la_all-records_labels.zip', '173079', '6.06 MB'),
'2015': HmdaDataFile('hmda_2015_la_all-records_labels.zip', '180533', '10.55 MB'),
'2014': HmdaDataFile('hmda_2014_la_all-records_labels.zip', '170514', '8.9 MB'),
'2008': HmdaDataFile('hmda_2008_la_all-records_labels.zip', '221773', '11.04 MB'),
'2009': HmdaDataFile('hmda_2009_la_all-records_labels.zip', '231242', '11.32 MB'),
'2011': HmdaDataFile('hmda_2011_la_all-records_labels.zip', '210644', '10.99 MB'),
'2010': HmdaDataFile('hmda_2010_la_all-records_labels.zip', '214403', '11.27 MB'),
'2013': HmdaDataFile('hmda_2013_la_all-records_labels.zip', '219546', '11.6 MB'),
'2012': HmdaDataFile('hmda_2012_la_all-records_labels.zip', '231946', '12.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_la_originated-records_labels.zip', '94261', '4.64 MB'),
'2007': HmdaDataFile('hmda_2007_la_originated-records_labels.zip', '126278', '6.16 MB'),
'2017': HmdaDataFile('hmda_2017_la_originated-records_labels.zip', '83323', '2.62 MB'),
'2015': HmdaDataFile('hmda_2015_la_originated-records_labels.zip', '89327', '4.95 MB'),
'2014': HmdaDataFile('hmda_2014_la_originated-records_labels.zip', '81230', '4.06 MB'),
'2008': HmdaDataFile('hmda_2008_la_originated-records_labels.zip', '97778', '4.77 MB'),
'2009': HmdaDataFile('hmda_2009_la_originated-records_labels.zip', '103928', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_la_originated-records_labels.zip', '90349', '4.38 MB'),
'2010': HmdaDataFile('hmda_2010_la_originated-records_labels.zip', '96150', '4.64 MB'),
'2013': HmdaDataFile('hmda_2013_la_originated-records_labels.zip', '101849', '4.89 MB'),
'2012': HmdaDataFile('hmda_2012_la_originated-records_labels.zip', '107636', '5.1 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '72627', '2.33 MB'),
'2007': HmdaDataFile('hmda_2007_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '86710', '2.79 MB'),
'2017': HmdaDataFile('hmda_2017_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '61729', '1.36 MB'),
'2015': HmdaDataFile('hmda_2015_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '67942', '2.53 MB'),
'2014': HmdaDataFile('hmda_2014_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '59398', '1.93 MB'),
'2008': HmdaDataFile('hmda_2008_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '69191', '2.23 MB'),
'2009': HmdaDataFile('hmda_2009_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '81760', '2.59 MB'),
'2011': HmdaDataFile('hmda_2011_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '69735', '2.12 MB'),
'2010': HmdaDataFile('hmda_2010_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '75645', '2.36 MB'),
'2013': HmdaDataFile('hmda_2013_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '77713', '2.44 MB'),
'2012': HmdaDataFile('hmda_2012_la_first-lien-owner-occupied-1-4-family-records_codes.zip', '86148', '2.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_la_all-records_codes.zip', '195937', '6.65 MB'),
'2007': HmdaDataFile('hmda_2007_la_all-records_codes.zip', '300738', '9.84 MB'),
'2017': HmdaDataFile('hmda_2017_la_all-records_codes.zip', '173079', '4 MB'),
'2015': HmdaDataFile('hmda_2015_la_all-records_codes.zip', '180533', '6.97 MB'),
'2014': HmdaDataFile('hmda_2014_la_all-records_codes.zip', '170514', '5.81 MB'),
'2008': HmdaDataFile('hmda_2008_la_all-records_codes.zip', '221773', '7.43 MB'),
'2009': HmdaDataFile('hmda_2009_la_all-records_codes.zip', '231242', '7.74 MB'),
'2011': HmdaDataFile('hmda_2011_la_all-records_codes.zip', '210644', '7.15 MB'),
'2010': HmdaDataFile('hmda_2010_la_all-records_codes.zip', '214403', '7.36 MB'),
'2013': HmdaDataFile('hmda_2013_la_all-records_codes.zip', '219546', '7.57 MB'),
'2012': HmdaDataFile('hmda_2012_la_all-records_codes.zip', '231946', '7.91 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_la_originated-records_codes.zip', '94261', '3.12 MB'),
'2007': HmdaDataFile('hmda_2007_la_originated-records_codes.zip', '126278', '4.18 MB'),
'2017': HmdaDataFile('hmda_2017_la_originated-records_codes.zip', '83323', '1.86 MB'),
'2015': HmdaDataFile('hmda_2015_la_originated-records_codes.zip', '89327', '3.37 MB'),
'2014': HmdaDataFile('hmda_2014_la_originated-records_codes.zip', '81230', '2.72 MB'),
'2008': HmdaDataFile('hmda_2008_la_originated-records_codes.zip', '97778', '3.29 MB'),
'2009': HmdaDataFile('hmda_2009_la_originated-records_codes.zip', '103928', '3.38 MB'),
'2011': HmdaDataFile('hmda_2011_la_originated-records_codes.zip', '90349', '2.9 MB'),
'2010': HmdaDataFile('hmda_2010_la_originated-records_codes.zip', '96150', '3.06 MB'),
'2013': HmdaDataFile('hmda_2013_la_originated-records_codes.zip', '101849', '3.22 MB'),
'2012': HmdaDataFile('hmda_2012_la_originated-records_codes.zip', '107636', '3.34 MB')
}
}
},
'wa': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '225231', '11.2 MB'),
'2007': HmdaDataFile('hmda_2007_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '215619', '10.08 MB'),
'2017': HmdaDataFile('hmda_2017_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '191532', '6.03 MB'),
'2015': HmdaDataFile('hmda_2015_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '183367', '10.31 MB'),
'2014': HmdaDataFile('hmda_2014_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '133913', '6.66 MB'),
'2008': HmdaDataFile('hmda_2008_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '165363', '7.57 MB'),
'2009': HmdaDataFile('hmda_2009_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '255387', '10.98 MB'),
'2011': HmdaDataFile('hmda_2011_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '174449', '8.42 MB'),
'2010': HmdaDataFile('hmda_2010_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '211008', '10.2 MB'),
'2013': HmdaDataFile('hmda_2013_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '201071', '10.04 MB'),
'2012': HmdaDataFile('hmda_2012_wa_first-lien-owner-occupied-1-4-family-records_labels.zip', '245095', '11.94 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wa_all-records_labels.zip', '466566', '24.68 MB'),
'2007': HmdaDataFile('hmda_2007_wa_all-records_labels.zip', '722481', '33.39 MB'),
'2017': HmdaDataFile('hmda_2017_wa_all-records_labels.zip', '402196', '13.8 MB'),
'2015': HmdaDataFile('hmda_2015_wa_all-records_labels.zip', '387805', '23.58 MB'),
'2014': HmdaDataFile('hmda_2014_wa_all-records_labels.zip', '311425', '16.4 MB'),
'2008': HmdaDataFile('hmda_2008_wa_all-records_labels.zip', '485622', '22.64 MB'),
'2009': HmdaDataFile('hmda_2009_wa_all-records_labels.zip', '590758', '25.93 MB'),
'2011': HmdaDataFile('hmda_2011_wa_all-records_labels.zip', '406149', '20.95 MB'),
'2010': HmdaDataFile('hmda_2010_wa_all-records_labels.zip', '473922', '24.62 MB'),
'2013': HmdaDataFile('hmda_2013_wa_all-records_labels.zip', '448753', '23.93 MB'),
'2012': HmdaDataFile('hmda_2012_wa_all-records_labels.zip', '519479', '27.08 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wa_originated-records_labels.zip', '263712', '13.26 MB'),
'2007': HmdaDataFile('hmda_2007_wa_originated-records_labels.zip', '310267', '14.49 MB'),
'2017': HmdaDataFile('hmda_2017_wa_originated-records_labels.zip', '230076', '7.35 MB'),
'2015': HmdaDataFile('hmda_2015_wa_originated-records_labels.zip', '218772', '12.48 MB'),
'2014': HmdaDataFile('hmda_2014_wa_originated-records_labels.zip', '166279', '8.37 MB'),
'2008': HmdaDataFile('hmda_2008_wa_originated-records_labels.zip', '207841', '9.62 MB'),
'2009': HmdaDataFile('hmda_2009_wa_originated-records_labels.zip', '286416', '12.41 MB'),
'2011': HmdaDataFile('hmda_2011_wa_originated-records_labels.zip', '205550', '10.07 MB'),
'2010': HmdaDataFile('hmda_2010_wa_originated-records_labels.zip', '241128', '11.8 MB'),
'2013': HmdaDataFile('hmda_2013_wa_originated-records_labels.zip', '246580', '12.48 MB'),
'2012': HmdaDataFile('hmda_2012_wa_originated-records_labels.zip', '287748', '14.21 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '225231', '7.74 MB'),
'2007': HmdaDataFile('hmda_2007_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '215619', '7.08 MB'),
'2017': HmdaDataFile('hmda_2017_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '191532', '4.12 MB'),
'2015': HmdaDataFile('hmda_2015_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '183367', '7.25 MB'),
'2014': HmdaDataFile('hmda_2014_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '133913', '4.63 MB'),
'2008': HmdaDataFile('hmda_2008_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '165363', '5.29 MB'),
'2009': HmdaDataFile('hmda_2009_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '255387', '7.8 MB'),
'2011': HmdaDataFile('hmda_2011_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '174449', '5.8 MB'),
'2010': HmdaDataFile('hmda_2010_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '211008', '6.97 MB'),
'2013': HmdaDataFile('hmda_2013_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '201071', '6.95 MB'),
'2012': HmdaDataFile('hmda_2012_wa_first-lien-owner-occupied-1-4-family-records_codes.zip', '245095', '8.25 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wa_all-records_codes.zip', '466566', '16.4 MB'),
'2007': HmdaDataFile('hmda_2007_wa_all-records_codes.zip', '722481', '22.74 MB'),
'2017': HmdaDataFile('hmda_2017_wa_all-records_codes.zip', '402196', '8.75 MB'),
'2015': HmdaDataFile('hmda_2015_wa_all-records_codes.zip', '387805', '15.87 MB'),
'2014': HmdaDataFile('hmda_2014_wa_all-records_codes.zip', '311425', '10.92 MB'),
'2008': HmdaDataFile('hmda_2008_wa_all-records_codes.zip', '485622', '15.32 MB'),
'2009': HmdaDataFile('hmda_2009_wa_all-records_codes.zip', '590758', '17.9 MB'),
'2011': HmdaDataFile('hmda_2011_wa_all-records_codes.zip', '406149', '13.93 MB'),
'2010': HmdaDataFile('hmda_2010_wa_all-records_codes.zip', '473922', '16.26 MB'),
'2013': HmdaDataFile('hmda_2013_wa_all-records_codes.zip', '448753', '15.96 MB'),
'2012': HmdaDataFile('hmda_2012_wa_all-records_codes.zip', '519479', '18.1 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wa_originated-records_codes.zip', '263712', '9.1 MB'),
'2007': HmdaDataFile('hmda_2007_wa_originated-records_codes.zip', '310267', '10.15 MB'),
'2017': HmdaDataFile('hmda_2017_wa_originated-records_codes.zip', '230076', '4.96 MB'),
'2015': HmdaDataFile('hmda_2015_wa_originated-records_codes.zip', '218772', '8.71 MB'),
'2014': HmdaDataFile('hmda_2014_wa_originated-records_codes.zip', '166279', '5.77 MB'),
'2008': HmdaDataFile('hmda_2008_wa_originated-records_codes.zip', '207841', '6.67 MB'),
'2009': HmdaDataFile('hmda_2009_wa_originated-records_codes.zip', '286416', '8.79 MB'),
'2011': HmdaDataFile('hmda_2011_wa_originated-records_codes.zip', '205550', '6.9 MB'),
'2010': HmdaDataFile('hmda_2010_wa_originated-records_codes.zip', '241128', '8.02 MB'),
'2013': HmdaDataFile('hmda_2013_wa_originated-records_codes.zip', '246580', '8.58 MB'),
'2012': HmdaDataFile('hmda_2012_wa_originated-records_codes.zip', '287748', '9.76 MB')
}
}
},
'nc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '220677', '10.93 MB'),
'2007': HmdaDataFile('hmda_2007_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '228163', '10.68 MB'),
'2017': HmdaDataFile('hmda_2017_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '196986', '6.12 MB'),
'2015': HmdaDataFile('hmda_2015_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '190281', '11.14 MB'),
'2014': HmdaDataFile('hmda_2014_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '154486', '7.77 MB'),
'2008': HmdaDataFile('hmda_2008_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '202793', '9.24 MB'),
'2009': HmdaDataFile('hmda_2009_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '263921', '11.18 MB'),
'2011': HmdaDataFile('hmda_2011_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '189738', '9.12 MB'),
'2010': HmdaDataFile('hmda_2010_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '211447', '10.31 MB'),
'2013': HmdaDataFile('hmda_2013_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '224534', '11.35 MB'),
'2012': HmdaDataFile('hmda_2012_nc_first-lien-owner-occupied-1-4-family-records_labels.zip', '240943', '12.17 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nc_all-records_labels.zip', '519897', '27.86 MB'),
'2007': HmdaDataFile('hmda_2007_nc_all-records_labels.zip', '779619', '36.44 MB'),
'2017': HmdaDataFile('hmda_2017_nc_all-records_labels.zip', '464109', '16.24 MB'),
'2015': HmdaDataFile('hmda_2015_nc_all-records_labels.zip', '457002', '29.06 MB'),
'2014': HmdaDataFile('hmda_2014_nc_all-records_labels.zip', '392549', '21.12 MB'),
'2008': HmdaDataFile('hmda_2008_nc_all-records_labels.zip', '575937', '26.69 MB'),
'2009': HmdaDataFile('hmda_2009_nc_all-records_labels.zip', '617968', '27.09 MB'),
'2011': HmdaDataFile('hmda_2011_nc_all-records_labels.zip', '476288', '24.93 MB'),
'2010': HmdaDataFile('hmda_2010_nc_all-records_labels.zip', '511912', '26.98 MB'),
'2013': HmdaDataFile('hmda_2013_nc_all-records_labels.zip', '562524', '30.51 MB'),
'2012': HmdaDataFile('hmda_2012_nc_all-records_labels.zip', '578793', '31.47 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nc_originated-records_labels.zip', '262765', '13.32 MB'),
'2007': HmdaDataFile('hmda_2007_nc_originated-records_labels.zip', '337640', '15.84 MB'),
'2017': HmdaDataFile('hmda_2017_nc_originated-records_labels.zip', '240128', '7.69 MB'),
'2015': HmdaDataFile('hmda_2015_nc_originated-records_labels.zip', '231114', '13.82 MB'),
'2014': HmdaDataFile('hmda_2014_nc_originated-records_labels.zip', '192473', '9.85 MB'),
'2008': HmdaDataFile('hmda_2008_nc_originated-records_labels.zip', '260693', '12.04 MB'),
'2009': HmdaDataFile('hmda_2009_nc_originated-records_labels.zip', '303410', '13.01 MB'),
'2011': HmdaDataFile('hmda_2011_nc_originated-records_labels.zip', '227079', '11.18 MB'),
'2010': HmdaDataFile('hmda_2010_nc_originated-records_labels.zip', '248590', '12.36 MB'),
'2013': HmdaDataFile('hmda_2013_nc_originated-records_labels.zip', '278062', '14.32 MB'),
'2012': HmdaDataFile('hmda_2012_nc_originated-records_labels.zip', '289407', '14.88 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '220677', '7.49 MB'),
'2007': HmdaDataFile('hmda_2007_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '228163', '7.37 MB'),
'2017': HmdaDataFile('hmda_2017_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '196986', '4.24 MB'),
'2015': HmdaDataFile('hmda_2015_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '190281', '7.64 MB'),
'2014': HmdaDataFile('hmda_2014_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '154486', '5.35 MB'),
'2008': HmdaDataFile('hmda_2008_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '202793', '6.4 MB'),
'2009': HmdaDataFile('hmda_2009_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '263921', '7.94 MB'),
'2011': HmdaDataFile('hmda_2011_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '189738', '6.03 MB'),
'2010': HmdaDataFile('hmda_2010_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '211447', '6.9 MB'),
'2013': HmdaDataFile('hmda_2013_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '224534', '7.74 MB'),
'2012': HmdaDataFile('hmda_2012_nc_first-lien-owner-occupied-1-4-family-records_codes.zip', '240943', '8.33 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nc_all-records_codes.zip', '519897', '18.52 MB'),
'2007': HmdaDataFile('hmda_2007_nc_all-records_codes.zip', '779619', '24.55 MB'),
'2017': HmdaDataFile('hmda_2017_nc_all-records_codes.zip', '464109', '10.37 MB'),
'2015': HmdaDataFile('hmda_2015_nc_all-records_codes.zip', '457002', '19.13 MB'),
'2014': HmdaDataFile('hmda_2014_nc_all-records_codes.zip', '392549', '13.94 MB'),
'2008': HmdaDataFile('hmda_2008_nc_all-records_codes.zip', '575937', '17.94 MB'),
'2009': HmdaDataFile('hmda_2009_nc_all-records_codes.zip', '617968', '18.62 MB'),
'2011': HmdaDataFile('hmda_2011_nc_all-records_codes.zip', '476288', '15.9 MB'),
'2010': HmdaDataFile('hmda_2010_nc_all-records_codes.zip', '511912', '17.45 MB'),
'2013': HmdaDataFile('hmda_2013_nc_all-records_codes.zip', '562524', '20.11 MB'),
'2012': HmdaDataFile('hmda_2012_nc_all-records_codes.zip', '578793', '20.81 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nc_originated-records_codes.zip', '262765', '9.11 MB'),
'2007': HmdaDataFile('hmda_2007_nc_originated-records_codes.zip', '337640', '10.93 MB'),
'2017': HmdaDataFile('hmda_2017_nc_originated-records_codes.zip', '240128', '5.27 MB'),
'2015': HmdaDataFile('hmda_2015_nc_originated-records_codes.zip', '231114', '9.42 MB'),
'2014': HmdaDataFile('hmda_2014_nc_originated-records_codes.zip', '192473', '6.73 MB'),
'2008': HmdaDataFile('hmda_2008_nc_originated-records_codes.zip', '260693', '8.29 MB'),
'2009': HmdaDataFile('hmda_2009_nc_originated-records_codes.zip', '303410', '9.19 MB'),
'2011': HmdaDataFile('hmda_2011_nc_originated-records_codes.zip', '227079', '7.35 MB'),
'2010': HmdaDataFile('hmda_2010_nc_originated-records_codes.zip', '248590', '8.23 MB'),
'2013': HmdaDataFile('hmda_2013_nc_originated-records_codes.zip', '278062', '9.71 MB'),
'2012': HmdaDataFile('hmda_2012_nc_originated-records_codes.zip', '289407', '10.13 MB')
}
}
},
'nd': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15775', '603.58 KB'),
'2007': HmdaDataFile('hmda_2007_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '11613', '456.02 KB'),
'2017': HmdaDataFile('hmda_2017_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12661', '336.26 KB'),
'2015': HmdaDataFile('hmda_2015_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15228', '660.94 KB'),
'2014': HmdaDataFile('hmda_2014_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12935', '513.04 KB'),
'2008': HmdaDataFile('hmda_2008_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12079', '485.38 KB'),
'2009': HmdaDataFile('hmda_2009_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17245', '600.63 KB'),
'2011': HmdaDataFile('hmda_2011_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '14852', '543.35 KB'),
'2010': HmdaDataFile('hmda_2010_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '16629', '606.86 KB'),
'2013': HmdaDataFile('hmda_2013_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17550', '663.49 KB'),
'2012': HmdaDataFile('hmda_2012_nd_first-lien-owner-occupied-1-4-family-records_labels.zip', '19995', '730.61 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nd_all-records_labels.zip', '32670', '1.35 MB'),
'2007': HmdaDataFile('hmda_2007_nd_all-records_labels.zip', '32081', '1.34 MB'),
'2017': HmdaDataFile('hmda_2017_nd_all-records_labels.zip', '25521', '733.11 KB'),
'2015': HmdaDataFile('hmda_2015_nd_all-records_labels.zip', '31382', '1.48 MB'),
'2014': HmdaDataFile('hmda_2014_nd_all-records_labels.zip', '27698', '1.18 MB'),
'2008': HmdaDataFile('hmda_2008_nd_all-records_labels.zip', '28946', '1.25 MB'),
'2009': HmdaDataFile('hmda_2009_nd_all-records_labels.zip', '35789', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_nd_all-records_labels.zip', '30234', '1.22 MB'),
'2010': HmdaDataFile('hmda_2010_nd_all-records_labels.zip', '32754', '1.34 MB'),
'2013': HmdaDataFile('hmda_2013_nd_all-records_labels.zip', '35576', '1.48 MB'),
'2012': HmdaDataFile('hmda_2012_nd_all-records_labels.zip', '37581', '1.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nd_originated-records_labels.zip', '19580', '772.47 KB'),
'2007': HmdaDataFile('hmda_2007_nd_originated-records_labels.zip', '17673', '689.98 KB'),
'2017': HmdaDataFile('hmda_2017_nd_originated-records_labels.zip', '16016', '429.87 KB'),
'2015': HmdaDataFile('hmda_2015_nd_originated-records_labels.zip', '19045', '847.76 KB'),
'2014': HmdaDataFile('hmda_2014_nd_originated-records_labels.zip', '17180', '693.97 KB'),
'2008': HmdaDataFile('hmda_2008_nd_originated-records_labels.zip', '16806', '683.54 KB'),
'2009': HmdaDataFile('hmda_2009_nd_originated-records_labels.zip', '21080', '749.43 KB'),
'2011': HmdaDataFile('hmda_2011_nd_originated-records_labels.zip', '18548', '698.61 KB'),
'2010': HmdaDataFile('hmda_2010_nd_originated-records_labels.zip', '20218', '761.83 KB'),
'2013': HmdaDataFile('hmda_2013_nd_originated-records_labels.zip', '22042', '860.22 KB'),
'2012': HmdaDataFile('hmda_2012_nd_originated-records_labels.zip', '24096', '905.02 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15775', '388.61 KB'),
'2007': HmdaDataFile('hmda_2007_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '11613', '292.47 KB'),
'2017': HmdaDataFile('hmda_2017_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12661', '238.72 KB'),
'2015': HmdaDataFile('hmda_2015_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15228', '425.66 KB'),
'2014': HmdaDataFile('hmda_2014_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12935', '330.05 KB'),
'2008': HmdaDataFile('hmda_2008_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12079', '313.83 KB'),
'2009': HmdaDataFile('hmda_2009_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17245', '399.68 KB'),
'2011': HmdaDataFile('hmda_2011_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '14852', '348.81 KB'),
'2010': HmdaDataFile('hmda_2010_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '16629', '390.2 KB'),
'2013': HmdaDataFile('hmda_2013_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17550', '426.9 KB'),
'2012': HmdaDataFile('hmda_2012_nd_first-lien-owner-occupied-1-4-family-records_codes.zip', '19995', '472.27 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nd_all-records_codes.zip', '32670', '832.59 KB'),
'2007': HmdaDataFile('hmda_2007_nd_all-records_codes.zip', '32081', '834.25 KB'),
'2017': HmdaDataFile('hmda_2017_nd_all-records_codes.zip', '25521', '487.38 KB'),
'2015': HmdaDataFile('hmda_2015_nd_all-records_codes.zip', '31382', '919.39 KB'),
'2014': HmdaDataFile('hmda_2014_nd_all-records_codes.zip', '27698', '726.15 KB'),
'2008': HmdaDataFile('hmda_2008_nd_all-records_codes.zip', '28946', '781.35 KB'),
'2009': HmdaDataFile('hmda_2009_nd_all-records_codes.zip', '35789', '876.82 KB'),
'2011': HmdaDataFile('hmda_2011_nd_all-records_codes.zip', '30234', '748.63 KB'),
'2010': HmdaDataFile('hmda_2010_nd_all-records_codes.zip', '32754', '826.34 KB'),
'2013': HmdaDataFile('hmda_2013_nd_all-records_codes.zip', '35576', '908.14 KB'),
'2012': HmdaDataFile('hmda_2012_nd_all-records_codes.zip', '37581', '943.93 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nd_originated-records_codes.zip', '19580', '486.75 KB'),
'2007': HmdaDataFile('hmda_2007_nd_originated-records_codes.zip', '17673', '439.38 KB'),
'2017': HmdaDataFile('hmda_2017_nd_originated-records_codes.zip', '16016', '299.25 KB'),
'2015': HmdaDataFile('hmda_2015_nd_originated-records_codes.zip', '19045', '537.29 KB'),
'2014': HmdaDataFile('hmda_2014_nd_originated-records_codes.zip', '17180', '437.05 KB'),
'2008': HmdaDataFile('hmda_2008_nd_originated-records_codes.zip', '16806', '438.4 KB'),
'2009': HmdaDataFile('hmda_2009_nd_originated-records_codes.zip', '21080', '495.22 KB'),
'2011': HmdaDataFile('hmda_2011_nd_originated-records_codes.zip', '18548', '440.47 KB'),
'2010': HmdaDataFile('hmda_2010_nd_originated-records_codes.zip', '20218', '482.84 KB'),
'2013': HmdaDataFile('hmda_2013_nd_originated-records_codes.zip', '22042', '542.74 KB'),
'2012': HmdaDataFile('hmda_2012_nd_originated-records_codes.zip', '24096', '576.32 KB')
}
}
},
'ne': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '42823', '1.96 MB'),
'2007': HmdaDataFile('hmda_2007_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '35882', '1.57 MB'),
'2017': HmdaDataFile('hmda_2017_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '34951', '1.05 MB'),
'2015': HmdaDataFile('hmda_2015_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '38670', '1.94 MB'),
'2014': HmdaDataFile('hmda_2014_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '30830', '1.5 MB'),
'2008': HmdaDataFile('hmda_2008_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '33590', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '56987', '2.26 MB'),
'2011': HmdaDataFile('hmda_2011_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '42269', '1.83 MB'),
'2010': HmdaDataFile('hmda_2010_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '51870', '2.25 MB'),
'2013': HmdaDataFile('hmda_2013_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '45909', '2.1 MB'),
'2012': HmdaDataFile('hmda_2012_ne_first-lien-owner-occupied-1-4-family-records_labels.zip', '57432', '2.6 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ne_all-records_labels.zip', '89068', '4.35 MB'),
'2007': HmdaDataFile('hmda_2007_ne_all-records_labels.zip', '112752', '5.1 MB'),
'2017': HmdaDataFile('hmda_2017_ne_all-records_labels.zip', '74966', '2.4 MB'),
'2015': HmdaDataFile('hmda_2015_ne_all-records_labels.zip', '82331', '4.47 MB'),
'2014': HmdaDataFile('hmda_2014_ne_all-records_labels.zip', '68559', '3.58 MB'),
'2008': HmdaDataFile('hmda_2008_ne_all-records_labels.zip', '88586', '3.98 MB'),
'2009': HmdaDataFile('hmda_2009_ne_all-records_labels.zip', '117158', '4.99 MB'),
'2011': HmdaDataFile('hmda_2011_ne_all-records_labels.zip', '90603', '4.29 MB'),
'2010': HmdaDataFile('hmda_2010_ne_all-records_labels.zip', '105043', '4.96 MB'),
'2013': HmdaDataFile('hmda_2013_ne_all-records_labels.zip', '97423', '4.78 MB'),
'2012': HmdaDataFile('hmda_2012_ne_all-records_labels.zip', '113860', '5.56 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ne_originated-records_labels.zip', '52019', '2.41 MB'),
'2007': HmdaDataFile('hmda_2007_ne_originated-records_labels.zip', '51923', '2.26 MB'),
'2017': HmdaDataFile('hmda_2017_ne_originated-records_labels.zip', '43786', '1.32 MB'),
'2015': HmdaDataFile('hmda_2015_ne_originated-records_labels.zip', '48157', '2.46 MB'),
'2014': HmdaDataFile('hmda_2014_ne_originated-records_labels.zip', '39960', '1.98 MB'),
'2008': HmdaDataFile('hmda_2008_ne_originated-records_labels.zip', '44333', '1.92 MB'),
'2009': HmdaDataFile('hmda_2009_ne_originated-records_labels.zip', '65475', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ne_originated-records_labels.zip', '50258', '2.23 MB'),
'2010': HmdaDataFile('hmda_2010_ne_originated-records_labels.zip', '59744', '2.64 MB'),
'2013': HmdaDataFile('hmda_2013_ne_originated-records_labels.zip', '56003', '2.6 MB'),
'2012': HmdaDataFile('hmda_2012_ne_originated-records_labels.zip', '67081', '3.09 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '42823', '1.28 MB'),
'2007': HmdaDataFile('hmda_2007_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '35882', '1.06 MB'),
'2017': HmdaDataFile('hmda_2017_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '34951', '753.37 KB'),
'2015': HmdaDataFile('hmda_2015_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '38670', '1.3 MB'),
'2014': HmdaDataFile('hmda_2014_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '30830', '988.96 KB'),
'2008': HmdaDataFile('hmda_2008_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '33590', '983.97 KB'),
'2009': HmdaDataFile('hmda_2009_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '56987', '1.58 MB'),
'2011': HmdaDataFile('hmda_2011_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '42269', '1.2 MB'),
'2010': HmdaDataFile('hmda_2010_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '51870', '1.48 MB'),
'2013': HmdaDataFile('hmda_2013_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '45909', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_ne_first-lien-owner-occupied-1-4-family-records_codes.zip', '57432', '1.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ne_all-records_codes.zip', '89068', '2.76 MB'),
'2007': HmdaDataFile('hmda_2007_ne_all-records_codes.zip', '112752', '3.37 MB'),
'2017': HmdaDataFile('hmda_2017_ne_all-records_codes.zip', '74966', '1.6 MB'),
'2015': HmdaDataFile('hmda_2015_ne_all-records_codes.zip', '82331', '2.91 MB'),
'2014': HmdaDataFile('hmda_2014_ne_all-records_codes.zip', '68559', '2.27 MB'),
'2008': HmdaDataFile('hmda_2008_ne_all-records_codes.zip', '88586', '2.64 MB'),
'2009': HmdaDataFile('hmda_2009_ne_all-records_codes.zip', '117158', '3.39 MB'),
'2011': HmdaDataFile('hmda_2011_ne_all-records_codes.zip', '90603', '2.71 MB'),
'2010': HmdaDataFile('hmda_2010_ne_all-records_codes.zip', '105043', '3.16 MB'),
'2013': HmdaDataFile('hmda_2013_ne_all-records_codes.zip', '97423', '3.04 MB'),
'2012': HmdaDataFile('hmda_2012_ne_all-records_codes.zip', '113860', '3.54 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ne_originated-records_codes.zip', '52019', '1.56 MB'),
'2007': HmdaDataFile('hmda_2007_ne_originated-records_codes.zip', '51923', '1.52 MB'),
'2017': HmdaDataFile('hmda_2017_ne_originated-records_codes.zip', '43786', '932.71 KB'),
'2015': HmdaDataFile('hmda_2015_ne_originated-records_codes.zip', '48157', '1.64 MB'),
'2014': HmdaDataFile('hmda_2014_ne_originated-records_codes.zip', '39960', '1.29 MB'),
'2008': HmdaDataFile('hmda_2008_ne_originated-records_codes.zip', '44333', '1.3 MB'),
'2009': HmdaDataFile('hmda_2009_ne_originated-records_codes.zip', '65475', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_ne_originated-records_codes.zip', '50258', '1.45 MB'),
'2010': HmdaDataFile('hmda_2010_ne_originated-records_codes.zip', '59744', '1.72 MB'),
'2013': HmdaDataFile('hmda_2013_ne_originated-records_codes.zip', '56003', '1.69 MB'),
'2012': HmdaDataFile('hmda_2012_ne_originated-records_codes.zip', '67081', '2.01 MB')
}
}
},
'tn': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '144529', '7.15 MB'),
'2007': HmdaDataFile('hmda_2007_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '151937', '7.17 MB'),
'2017': HmdaDataFile('hmda_2017_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '133331', '4.11 MB'),
'2015': HmdaDataFile('hmda_2015_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '126852', '7.13 MB'),
'2014': HmdaDataFile('hmda_2014_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '103024', '5.17 MB'),
'2008': HmdaDataFile('hmda_2008_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '123580', '5.68 MB'),
'2009': HmdaDataFile('hmda_2009_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '160077', '6.99 MB'),
'2011': HmdaDataFile('hmda_2011_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '113228', '5.37 MB'),
'2010': HmdaDataFile('hmda_2010_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '128508', '6.09 MB'),
'2013': HmdaDataFile('hmda_2013_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '141210', '6.95 MB'),
'2012': HmdaDataFile('hmda_2012_tn_first-lien-owner-occupied-1-4-family-records_labels.zip', '151750', '7.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tn_all-records_labels.zip', '350490', '18.5 MB'),
'2007': HmdaDataFile('hmda_2007_tn_all-records_labels.zip', '512117', '24.54 MB'),
'2017': HmdaDataFile('hmda_2017_tn_all-records_labels.zip', '326416', '11.11 MB'),
'2015': HmdaDataFile('hmda_2015_tn_all-records_labels.zip', '305114', '18.47 MB'),
'2014': HmdaDataFile('hmda_2014_tn_all-records_labels.zip', '265214', '14.16 MB'),
'2008': HmdaDataFile('hmda_2008_tn_all-records_labels.zip', '365839', '17.39 MB'),
'2009': HmdaDataFile('hmda_2009_tn_all-records_labels.zip', '406028', '18.65 MB'),
'2011': HmdaDataFile('hmda_2011_tn_all-records_labels.zip', '304377', '16.01 MB'),
'2010': HmdaDataFile('hmda_2010_tn_all-records_labels.zip', '335917', '17.65 MB'),
'2013': HmdaDataFile('hmda_2013_tn_all-records_labels.zip', '358454', '19.31 MB'),
'2012': HmdaDataFile('hmda_2012_tn_all-records_labels.zip', '373362', '20.16 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tn_originated-records_labels.zip', '174965', '8.84 MB'),
'2007': HmdaDataFile('hmda_2007_tn_originated-records_labels.zip', '217392', '10.46 MB'),
'2017': HmdaDataFile('hmda_2017_tn_originated-records_labels.zip', '164577', '5.23 MB'),
'2015': HmdaDataFile('hmda_2015_tn_originated-records_labels.zip', '155616', '8.89 MB'),
'2014': HmdaDataFile('hmda_2014_tn_originated-records_labels.zip', '131171', '6.76 MB'),
'2008': HmdaDataFile('hmda_2008_tn_originated-records_labels.zip', '163188', '7.73 MB'),
'2009': HmdaDataFile('hmda_2009_tn_originated-records_labels.zip', '187776', '8.43 MB'),
'2011': HmdaDataFile('hmda_2011_tn_originated-records_labels.zip', '137943', '6.74 MB'),
'2010': HmdaDataFile('hmda_2010_tn_originated-records_labels.zip', '153282', '7.46 MB'),
'2013': HmdaDataFile('hmda_2013_tn_originated-records_labels.zip', '172612', '8.61 MB'),
'2012': HmdaDataFile('hmda_2012_tn_originated-records_labels.zip', '180686', '9.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '144529', '4.86 MB'),
'2007': HmdaDataFile('hmda_2007_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '151937', '4.92 MB'),
'2017': HmdaDataFile('hmda_2017_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '133331', '2.88 MB'),
'2015': HmdaDataFile('hmda_2015_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '126852', '4.92 MB'),
'2014': HmdaDataFile('hmda_2014_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '103024', '3.53 MB'),
'2008': HmdaDataFile('hmda_2008_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '123580', '3.92 MB'),
'2009': HmdaDataFile('hmda_2009_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '160077', '4.89 MB'),
'2011': HmdaDataFile('hmda_2011_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '113228', '3.56 MB'),
'2010': HmdaDataFile('hmda_2010_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '128508', '4.04 MB'),
'2013': HmdaDataFile('hmda_2013_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '141210', '4.68 MB'),
'2012': HmdaDataFile('hmda_2012_tn_first-lien-owner-occupied-1-4-family-records_codes.zip', '151750', '5.03 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_tn_all-records_codes.zip', '350490', '12.18 MB'),
'2007': HmdaDataFile('hmda_2007_tn_all-records_codes.zip', '512117', '16.43 MB'),
'2017': HmdaDataFile('hmda_2017_tn_all-records_codes.zip', '326416', '7.18 MB'),
'2015': HmdaDataFile('hmda_2015_tn_all-records_codes.zip', '305114', '12.28 MB'),
'2014': HmdaDataFile('hmda_2014_tn_all-records_codes.zip', '265214', '9.33 MB'),
'2008': HmdaDataFile('hmda_2008_tn_all-records_codes.zip', '365839', '11.7 MB'),
'2009': HmdaDataFile('hmda_2009_tn_all-records_codes.zip', '406028', '12.76 MB'),
'2011': HmdaDataFile('hmda_2011_tn_all-records_codes.zip', '304377', '10.33 MB'),
'2010': HmdaDataFile('hmda_2010_tn_all-records_codes.zip', '335917', '11.45 MB'),
'2013': HmdaDataFile('hmda_2013_tn_all-records_codes.zip', '358454', '12.68 MB'),
'2012': HmdaDataFile('hmda_2012_tn_all-records_codes.zip', '373362', '13.3 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_tn_originated-records_codes.zip', '174965', '6 MB'),
'2007': HmdaDataFile('hmda_2007_tn_originated-records_codes.zip', '217392', '7.22 MB'),
'2017': HmdaDataFile('hmda_2017_tn_originated-records_codes.zip', '164577', '3.62 MB'),
'2015': HmdaDataFile('hmda_2015_tn_originated-records_codes.zip', '155616', '6.1 MB'),
'2014': HmdaDataFile('hmda_2014_tn_originated-records_codes.zip', '131171', '4.61 MB'),
'2008': HmdaDataFile('hmda_2008_tn_originated-records_codes.zip', '163188', '5.35 MB'),
'2009': HmdaDataFile('hmda_2009_tn_originated-records_codes.zip', '187776', '5.9 MB'),
'2011': HmdaDataFile('hmda_2011_tn_originated-records_codes.zip', '137943', '4.45 MB'),
'2010': HmdaDataFile('hmda_2010_tn_originated-records_codes.zip', '153282', '4.93 MB'),
'2013': HmdaDataFile('hmda_2013_tn_originated-records_codes.zip', '172612', '5.75 MB'),
'2012': HmdaDataFile('hmda_2012_tn_originated-records_codes.zip', '180686', '6.04 MB')
}
}
},
'ny': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '199204', '10.43 MB'),
'2007': HmdaDataFile('hmda_2007_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '281690', '14.06 MB'),
'2017': HmdaDataFile('hmda_2017_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '187337', '6.57 MB'),
'2015': HmdaDataFile('hmda_2015_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '182621', '10.44 MB'),
'2014': HmdaDataFile('hmda_2014_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '156647', '8.24 MB'),
'2008': HmdaDataFile('hmda_2008_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '195917', '9.68 MB'),
'2009': HmdaDataFile('hmda_2009_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '245556', '11.46 MB'),
'2011': HmdaDataFile('hmda_2011_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '201157', '10.32 MB'),
'2010': HmdaDataFile('hmda_2010_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '217296', '11.01 MB'),
'2013': HmdaDataFile('hmda_2013_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '225700', '11.8 MB'),
'2012': HmdaDataFile('hmda_2012_ny_first-lien-owner-occupied-1-4-family-records_labels.zip', '242701', '12.39 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ny_all-records_labels.zip', '477313', '26.86 MB'),
'2007': HmdaDataFile('hmda_2007_ny_all-records_labels.zip', '1009451', '51.35 MB'),
'2017': HmdaDataFile('hmda_2017_ny_all-records_labels.zip', '446902', '17.47 MB'),
'2015': HmdaDataFile('hmda_2015_ny_all-records_labels.zip', '439654', '27.13 MB'),
'2014': HmdaDataFile('hmda_2014_ny_all-records_labels.zip', '389279', '22.13 MB'),
'2008': HmdaDataFile('hmda_2008_ny_all-records_labels.zip', '644647', '33.21 MB'),
'2009': HmdaDataFile('hmda_2009_ny_all-records_labels.zip', '645487', '31.84 MB'),
'2011': HmdaDataFile('hmda_2011_ny_all-records_labels.zip', '503733', '28.17 MB'),
'2010': HmdaDataFile('hmda_2010_ny_all-records_labels.zip', '529869', '29.46 MB'),
'2013': HmdaDataFile('hmda_2013_ny_all-records_labels.zip', '539217', '30.46 MB'),
'2012': HmdaDataFile('hmda_2012_ny_all-records_labels.zip', '566980', '31.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ny_originated-records_labels.zip', '246292', '13.15 MB'),
'2007': HmdaDataFile('hmda_2007_ny_originated-records_labels.zip', '398639', '20.13 MB'),
'2017': HmdaDataFile('hmda_2017_ny_originated-records_labels.zip', '236499', '8.45 MB'),
'2015': HmdaDataFile('hmda_2015_ny_originated-records_labels.zip', '228054', '13.23 MB'),
'2014': HmdaDataFile('hmda_2014_ny_originated-records_labels.zip', '198817', '10.63 MB'),
'2008': HmdaDataFile('hmda_2008_ny_originated-records_labels.zip', '252826', '12.72 MB'),
'2009': HmdaDataFile('hmda_2009_ny_originated-records_labels.zip', '281652', '13.38 MB'),
'2011': HmdaDataFile('hmda_2011_ny_originated-records_labels.zip', '238554', '12.42 MB'),
'2010': HmdaDataFile('hmda_2010_ny_originated-records_labels.zip', '252729', '13.07 MB'),
'2013': HmdaDataFile('hmda_2013_ny_originated-records_labels.zip', '271829', '14.42 MB'),
'2012': HmdaDataFile('hmda_2012_ny_originated-records_labels.zip', '285106', '14.71 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '199204', '7.47 MB'),
'2007': HmdaDataFile('hmda_2007_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '281690', '10.14 MB'),
'2017': HmdaDataFile('hmda_2017_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '187337', '4.66 MB'),
'2015': HmdaDataFile('hmda_2015_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '182621', '7.46 MB'),
'2014': HmdaDataFile('hmda_2014_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '156647', '5.87 MB'),
'2008': HmdaDataFile('hmda_2008_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '195917', '6.98 MB'),
'2009': HmdaDataFile('hmda_2009_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '245556', '8.36 MB'),
'2011': HmdaDataFile('hmda_2011_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '201157', '7.36 MB'),
'2010': HmdaDataFile('hmda_2010_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '217296', '7.85 MB'),
'2013': HmdaDataFile('hmda_2013_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '225700', '8.44 MB'),
'2012': HmdaDataFile('hmda_2012_ny_first-lien-owner-occupied-1-4-family-records_codes.zip', '242701', '8.82 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ny_all-records_codes.zip', '477313', '18.61 MB'),
'2007': HmdaDataFile('hmda_2007_ny_all-records_codes.zip', '1009451', '35.76 MB'),
'2017': HmdaDataFile('hmda_2017_ny_all-records_codes.zip', '446902', '11.54 MB'),
'2015': HmdaDataFile('hmda_2015_ny_all-records_codes.zip', '439654', '18.57 MB'),
'2014': HmdaDataFile('hmda_2014_ny_all-records_codes.zip', '389279', '15.25 MB'),
'2008': HmdaDataFile('hmda_2008_ny_all-records_codes.zip', '644647', '23.18 MB'),
'2009': HmdaDataFile('hmda_2009_ny_all-records_codes.zip', '645487', '22.45 MB'),
'2011': HmdaDataFile('hmda_2011_ny_all-records_codes.zip', '503733', '19.55 MB'),
'2010': HmdaDataFile('hmda_2010_ny_all-records_codes.zip', '529869', '20.47 MB'),
'2013': HmdaDataFile('hmda_2013_ny_all-records_codes.zip', '539217', '21.23 MB'),
'2012': HmdaDataFile('hmda_2012_ny_all-records_codes.zip', '566980', '21.92 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ny_originated-records_codes.zip', '246292', '9.34 MB'),
'2007': HmdaDataFile('hmda_2007_ny_originated-records_codes.zip', '398639', '14.43 MB'),
'2017': HmdaDataFile('hmda_2017_ny_originated-records_codes.zip', '236499', '5.9 MB'),
'2015': HmdaDataFile('hmda_2015_ny_originated-records_codes.zip', '228054', '9.33 MB'),
'2014': HmdaDataFile('hmda_2014_ny_originated-records_codes.zip', '198817', '7.49 MB'),
'2008': HmdaDataFile('hmda_2008_ny_originated-records_codes.zip', '252826', '9.14 MB'),
'2009': HmdaDataFile('hmda_2009_ny_originated-records_codes.zip', '281652', '9.73 MB'),
'2011': HmdaDataFile('hmda_2011_ny_originated-records_codes.zip', '238554', '8.78 MB'),
'2010': HmdaDataFile('hmda_2010_ny_originated-records_codes.zip', '252729', '9.27 MB'),
'2013': HmdaDataFile('hmda_2013_ny_originated-records_codes.zip', '271829', '10.23 MB'),
'2012': HmdaDataFile('hmda_2012_ny_originated-records_codes.zip', '285106', '10.38 MB')
}
}
},
'pa': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '226492', '11.67 MB'),
'2007': HmdaDataFile('hmda_2007_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '276621', '13.86 MB'),
'2017': HmdaDataFile('hmda_2017_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '200046', '6.74 MB'),
'2015': HmdaDataFile('hmda_2015_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '206884', '12.08 MB'),
'2014': HmdaDataFile('hmda_2014_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '175312', '8.94 MB'),
'2008': HmdaDataFile('hmda_2008_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '230119', '11.38 MB'),
'2009': HmdaDataFile('hmda_2009_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '318871', '14.7 MB'),
'2011': HmdaDataFile('hmda_2011_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '247649', '12.37 MB'),
'2010': HmdaDataFile('hmda_2010_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '280178', '14.07 MB'),
'2013': HmdaDataFile('hmda_2013_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '269808', '13.79 MB'),
'2012': HmdaDataFile('hmda_2012_pa_first-lien-owner-occupied-1-4-family-records_labels.zip', '313974', '15.99 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pa_all-records_labels.zip', '526005', '29.04 MB'),
'2007': HmdaDataFile('hmda_2007_pa_all-records_labels.zip', '992904', '49.57 MB'),
'2017': HmdaDataFile('hmda_2017_pa_all-records_labels.zip', '473757', '17.91 MB'),
'2015': HmdaDataFile('hmda_2015_pa_all-records_labels.zip', '481331', '30.25 MB'),
'2014': HmdaDataFile('hmda_2014_pa_all-records_labels.zip', '427665', '23.41 MB'),
'2008': HmdaDataFile('hmda_2008_pa_all-records_labels.zip', '713995', '35.8 MB'),
'2009': HmdaDataFile('hmda_2009_pa_all-records_labels.zip', '768245', '37 MB'),
'2011': HmdaDataFile('hmda_2011_pa_all-records_labels.zip', '593240', '32.54 MB'),
'2010': HmdaDataFile('hmda_2010_pa_all-records_labels.zip', '659124', '36.22 MB'),
'2013': HmdaDataFile('hmda_2013_pa_all-records_labels.zip', '619770', '34.49 MB'),
'2012': HmdaDataFile('hmda_2012_pa_all-records_labels.zip', '695500', '38.56 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pa_originated-records_labels.zip', '273334', '14.35 MB'),
'2007': HmdaDataFile('hmda_2007_pa_originated-records_labels.zip', '427955', '21.49 MB'),
'2017': HmdaDataFile('hmda_2017_pa_originated-records_labels.zip', '249620', '8.62 MB'),
'2015': HmdaDataFile('hmda_2015_pa_originated-records_labels.zip', '254361', '15.02 MB'),
'2014': HmdaDataFile('hmda_2014_pa_originated-records_labels.zip', '222765', '11.62 MB'),
'2008': HmdaDataFile('hmda_2008_pa_originated-records_labels.zip', '322031', '16.08 MB'),
'2009': HmdaDataFile('hmda_2009_pa_originated-records_labels.zip', '374957', '17.61 MB'),
'2011': HmdaDataFile('hmda_2011_pa_originated-records_labels.zip', '297874', '15.19 MB'),
'2010': HmdaDataFile('hmda_2010_pa_originated-records_labels.zip', '334150', '17.16 MB'),
'2013': HmdaDataFile('hmda_2013_pa_originated-records_labels.zip', '328425', '16.99 MB'),
'2012': HmdaDataFile('hmda_2012_pa_originated-records_labels.zip', '369571', '19.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '226492', '8.16 MB'),
'2007': HmdaDataFile('hmda_2007_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '276621', '9.82 MB'),
'2017': HmdaDataFile('hmda_2017_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '200046', '4.79 MB'),
'2015': HmdaDataFile('hmda_2015_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '206884', '8.4 MB'),
'2014': HmdaDataFile('hmda_2014_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '175312', '6.24 MB'),
'2008': HmdaDataFile('hmda_2008_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '230119', '8.1 MB'),
'2009': HmdaDataFile('hmda_2009_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '318871', '10.64 MB'),
'2011': HmdaDataFile('hmda_2011_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '247649', '8.65 MB'),
'2010': HmdaDataFile('hmda_2010_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '280178', '9.85 MB'),
'2013': HmdaDataFile('hmda_2013_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '269808', '9.72 MB'),
'2012': HmdaDataFile('hmda_2012_pa_first-lien-owner-occupied-1-4-family-records_codes.zip', '313974', '11.2 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pa_all-records_codes.zip', '526005', '19.65 MB'),
'2007': HmdaDataFile('hmda_2007_pa_all-records_codes.zip', '992904', '34.13 MB'),
'2017': HmdaDataFile('hmda_2017_pa_all-records_codes.zip', '473757', '11.77 MB'),
'2015': HmdaDataFile('hmda_2015_pa_all-records_codes.zip', '481331', '20.19 MB'),
'2014': HmdaDataFile('hmda_2014_pa_all-records_codes.zip', '427665', '15.79 MB'),
'2008': HmdaDataFile('hmda_2008_pa_all-records_codes.zip', '713995', '24.78 MB'),
'2009': HmdaDataFile('hmda_2009_pa_all-records_codes.zip', '768245', '25.9 MB'),
'2011': HmdaDataFile('hmda_2011_pa_all-records_codes.zip', '593240', '22.19 MB'),
'2010': HmdaDataFile('hmda_2010_pa_all-records_codes.zip', '659124', '24.67 MB'),
'2013': HmdaDataFile('hmda_2013_pa_all-records_codes.zip', '619770', '23.66 MB'),
'2012': HmdaDataFile('hmda_2012_pa_all-records_codes.zip', '695500', '26.36 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pa_originated-records_codes.zip', '273334', '9.99 MB'),
'2007': HmdaDataFile('hmda_2007_pa_originated-records_codes.zip', '427955', '15.19 MB'),
'2017': HmdaDataFile('hmda_2017_pa_originated-records_codes.zip', '249620', '6.04 MB'),
'2015': HmdaDataFile('hmda_2015_pa_originated-records_codes.zip', '254361', '10.34 MB'),
'2014': HmdaDataFile('hmda_2014_pa_originated-records_codes.zip', '222765', '8.07 MB'),
'2008': HmdaDataFile('hmda_2008_pa_originated-records_codes.zip', '322031', '11.45 MB'),
'2009': HmdaDataFile('hmda_2009_pa_originated-records_codes.zip', '374957', '12.74 MB'),
'2011': HmdaDataFile('hmda_2011_pa_originated-records_codes.zip', '297874', '10.53 MB'),
'2010': HmdaDataFile('hmda_2010_pa_originated-records_codes.zip', '334150', '11.94 MB'),
'2013': HmdaDataFile('hmda_2013_pa_originated-records_codes.zip', '328425', '11.84 MB'),
'2012': HmdaDataFile('hmda_2012_pa_originated-records_codes.zip', '369571', '13.21 MB')
}
}
},
'ca': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '1007181', '54.33 MB'),
'2007': HmdaDataFile('hmda_2007_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '870274', '44.14 MB'),
'2017': HmdaDataFile('hmda_2017_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '721751', '25.58 MB'),
'2015': HmdaDataFile('hmda_2015_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '840549', '50.22 MB'),
'2014': HmdaDataFile('hmda_2014_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '617503', '32.89 MB'),
'2008': HmdaDataFile('hmda_2008_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '540095', '26.82 MB'),
'2009': HmdaDataFile('hmda_2009_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '871218', '40.28 MB'),
'2011': HmdaDataFile('hmda_2011_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '777800', '41.39 MB'),
'2010': HmdaDataFile('hmda_2010_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '867778', '45.96 MB'),
'2013': HmdaDataFile('hmda_2013_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '943566', '51.22 MB'),
'2012': HmdaDataFile('hmda_2012_ca_first-lien-owner-occupied-1-4-family-records_labels.zip', '1179705', '63.7 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ca_all-records_labels.zip', '2235971', '128.67 MB'),
'2007': HmdaDataFile('hmda_2007_ca_all-records_labels.zip', '3425570', '178.89 MB'),
'2017': HmdaDataFile('hmda_2017_ca_all-records_labels.zip', '1714459', '68.82 MB'),
'2015': HmdaDataFile('hmda_2015_ca_all-records_labels.zip', '1878495', '120.02 MB'),
'2014': HmdaDataFile('hmda_2014_ca_all-records_labels.zip', '1436457', '81.73 MB'),
'2008': HmdaDataFile('hmda_2008_ca_all-records_labels.zip', '1843875', '91.09 MB'),
'2009': HmdaDataFile('hmda_2009_ca_all-records_labels.zip', '2186032', '102.95 MB'),
'2011': HmdaDataFile('hmda_2011_ca_all-records_labels.zip', '1914815', '108.47 MB'),
'2010': HmdaDataFile('hmda_2010_ca_all-records_labels.zip', '2007593', '113.6 MB'),
'2013': HmdaDataFile('hmda_2013_ca_all-records_labels.zip', '2161214', '124.84 MB'),
'2012': HmdaDataFile('hmda_2012_ca_all-records_labels.zip', '2541978', '146.32 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ca_originated-records_labels.zip', '1172541', '63.86 MB'),
'2007': HmdaDataFile('hmda_2007_ca_originated-records_labels.zip', '1233502', '62.47 MB'),
'2017': HmdaDataFile('hmda_2017_ca_originated-records_labels.zip', '877753', '31.88 MB'),
'2015': HmdaDataFile('hmda_2015_ca_originated-records_labels.zip', '993335', '60.03 MB'),
'2014': HmdaDataFile('hmda_2014_ca_originated-records_labels.zip', '750422', '40.44 MB'),
'2008': HmdaDataFile('hmda_2008_ca_originated-records_labels.zip', '672822', '33.65 MB'),
'2009': HmdaDataFile('hmda_2009_ca_originated-records_labels.zip', '972974', '45.3 MB'),
'2011': HmdaDataFile('hmda_2011_ca_originated-records_labels.zip', '917070', '49.28 MB'),
'2010': HmdaDataFile('hmda_2010_ca_originated-records_labels.zip', '980348', '52.43 MB'),
'2013': HmdaDataFile('hmda_2013_ca_originated-records_labels.zip', '1153965', '63.32 MB'),
'2012': HmdaDataFile('hmda_2012_ca_originated-records_labels.zip', '1391720', '75.92 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '1007181', '40 MB'),
'2007': HmdaDataFile('hmda_2007_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '870274', '32.16 MB'),
'2017': HmdaDataFile('hmda_2017_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '721751', '17.18 MB'),
'2015': HmdaDataFile('hmda_2015_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '840549', '35.92 MB'),
'2014': HmdaDataFile('hmda_2014_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '617503', '24.08 MB'),
'2008': HmdaDataFile('hmda_2008_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '540095', '19.47 MB'),
'2009': HmdaDataFile('hmda_2009_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '871218', '29.59 MB'),
'2011': HmdaDataFile('hmda_2011_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '777800', '30.2 MB'),
'2010': HmdaDataFile('hmda_2010_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '867778', '33.54 MB'),
'2013': HmdaDataFile('hmda_2013_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '943566', '37.62 MB'),
'2012': HmdaDataFile('hmda_2012_ca_first-lien-owner-occupied-1-4-family-records_codes.zip', '1179705', '46.88 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ca_all-records_codes.zip', '2235971', '91.67 MB'),
'2007': HmdaDataFile('hmda_2007_ca_all-records_codes.zip', '3425570', '117.94 MB'),
'2017': HmdaDataFile('hmda_2017_ca_all-records_codes.zip', '1714459', '42.19 MB'),
'2015': HmdaDataFile('hmda_2015_ca_all-records_codes.zip', '1878495', '82.3 MB'),
'2014': HmdaDataFile('hmda_2014_ca_all-records_codes.zip', '1436457', '57.96 MB'),
'2008': HmdaDataFile('hmda_2008_ca_all-records_codes.zip', '1843875', '63.68 MB'),
'2009': HmdaDataFile('hmda_2009_ca_all-records_codes.zip', '2186032', '72.66 MB'),
'2011': HmdaDataFile('hmda_2011_ca_all-records_codes.zip', '1914815', '76.92 MB'),
'2010': HmdaDataFile('hmda_2010_ca_all-records_codes.zip', '2007593', '80.32 MB'),
'2013': HmdaDataFile('hmda_2013_ca_all-records_codes.zip', '2161214', '88.6 MB'),
'2012': HmdaDataFile('hmda_2012_ca_all-records_codes.zip', '2541978', '104.3 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ca_originated-records_codes.zip', '1172541', '46.78 MB'),
'2007': HmdaDataFile('hmda_2007_ca_originated-records_codes.zip', '1233502', '45.28 MB'),
'2017': HmdaDataFile('hmda_2017_ca_originated-records_codes.zip', '877753', '21.13 MB'),
'2015': HmdaDataFile('hmda_2015_ca_originated-records_codes.zip', '993335', '42.66 MB'),
'2014': HmdaDataFile('hmda_2014_ca_originated-records_codes.zip', '750422', '29.45 MB'),
'2008': HmdaDataFile('hmda_2008_ca_originated-records_codes.zip', '672822', '24.32 MB'),
'2009': HmdaDataFile('hmda_2009_ca_originated-records_codes.zip', '972974', '33.15 MB'),
'2011': HmdaDataFile('hmda_2011_ca_originated-records_codes.zip', '917070', '35.8 MB'),
'2010': HmdaDataFile('hmda_2010_ca_originated-records_codes.zip', '980348', '38.11 MB'),
'2013': HmdaDataFile('hmda_2013_ca_originated-records_codes.zip', '1153965', '46.26 MB'),
'2012': HmdaDataFile('hmda_2012_ca_originated-records_codes.zip', '1391720', '55.58 MB')
}
}
},
'nv': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '84481', '3.96 MB'),
'2007': HmdaDataFile('hmda_2007_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '73747', '3.21 MB'),
'2017': HmdaDataFile('hmda_2017_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '76783', '2.28 MB'),
'2015': HmdaDataFile('hmda_2015_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '69214', '3.49 MB'),
'2014': HmdaDataFile('hmda_2014_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '49799', '2.4 MB'),
'2008': HmdaDataFile('hmda_2008_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '48287', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '59437', '2.29 MB'),
'2011': HmdaDataFile('hmda_2011_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '39765', '1.65 MB'),
'2010': HmdaDataFile('hmda_2010_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '47126', '2.01 MB'),
'2013': HmdaDataFile('hmda_2013_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '62574', '2.95 MB'),
'2012': HmdaDataFile('hmda_2012_nv_first-lien-owner-occupied-1-4-family-records_labels.zip', '67783', '3.09 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nv_all-records_labels.zip', '196764', '9.88 MB'),
'2007': HmdaDataFile('hmda_2007_nv_all-records_labels.zip', '326985', '14.53 MB'),
'2017': HmdaDataFile('hmda_2017_nv_all-records_labels.zip', '178587', '5.91 MB'),
'2015': HmdaDataFile('hmda_2015_nv_all-records_labels.zip', '158259', '8.69 MB'),
'2014': HmdaDataFile('hmda_2014_nv_all-records_labels.zip', '119744', '6.27 MB'),
'2008': HmdaDataFile('hmda_2008_nv_all-records_labels.zip', '163606', '6.93 MB'),
'2009': HmdaDataFile('hmda_2009_nv_all-records_labels.zip', '168658', '6.88 MB'),
'2011': HmdaDataFile('hmda_2011_nv_all-records_labels.zip', '112858', '5.27 MB'),
'2010': HmdaDataFile('hmda_2010_nv_all-records_labels.zip', '126313', '5.98 MB'),
'2013': HmdaDataFile('hmda_2013_nv_all-records_labels.zip', '151453', '7.8 MB'),
'2012': HmdaDataFile('hmda_2012_nv_all-records_labels.zip', '161343', '8.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nv_originated-records_labels.zip', '97926', '4.69 MB'),
'2007': HmdaDataFile('hmda_2007_nv_originated-records_labels.zip', '113248', '5 MB'),
'2017': HmdaDataFile('hmda_2017_nv_originated-records_labels.zip', '91540', '2.8 MB'),
'2015': HmdaDataFile('hmda_2015_nv_originated-records_labels.zip', '81624', '4.21 MB'),
'2014': HmdaDataFile('hmda_2014_nv_originated-records_labels.zip', '61757', '3.04 MB'),
'2008': HmdaDataFile('hmda_2008_nv_originated-records_labels.zip', '62592', '2.58 MB'),
'2009': HmdaDataFile('hmda_2009_nv_originated-records_labels.zip', '70049', '2.77 MB'),
'2011': HmdaDataFile('hmda_2011_nv_originated-records_labels.zip', '51445', '2.24 MB'),
'2010': HmdaDataFile('hmda_2010_nv_originated-records_labels.zip', '57173', '2.51 MB'),
'2013': HmdaDataFile('hmda_2013_nv_originated-records_labels.zip', '82011', '3.96 MB'),
'2012': HmdaDataFile('hmda_2012_nv_originated-records_labels.zip', '86528', '4.04 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '84481', '2.78 MB'),
'2007': HmdaDataFile('hmda_2007_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '73747', '2.26 MB'),
'2017': HmdaDataFile('hmda_2017_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '76783', '1.59 MB'),
'2015': HmdaDataFile('hmda_2015_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '69214', '2.48 MB'),
'2014': HmdaDataFile('hmda_2014_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '49799', '1.68 MB'),
'2008': HmdaDataFile('hmda_2008_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '48287', '1.36 MB'),
'2009': HmdaDataFile('hmda_2009_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '59437', '1.63 MB'),
'2011': HmdaDataFile('hmda_2011_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '39765', '1.13 MB'),
'2010': HmdaDataFile('hmda_2010_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '47126', '1.37 MB'),
'2013': HmdaDataFile('hmda_2013_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '62574', '2.07 MB'),
'2012': HmdaDataFile('hmda_2012_nv_first-lien-owner-occupied-1-4-family-records_codes.zip', '67783', '2.17 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_nv_all-records_codes.zip', '196764', '6.71 MB'),
'2007': HmdaDataFile('hmda_2007_nv_all-records_codes.zip', '326985', '9.91 MB'),
'2017': HmdaDataFile('hmda_2017_nv_all-records_codes.zip', '178587', '3.8 MB'),
'2015': HmdaDataFile('hmda_2015_nv_all-records_codes.zip', '158259', '5.91 MB'),
'2014': HmdaDataFile('hmda_2014_nv_all-records_codes.zip', '119744', '4.26 MB'),
'2008': HmdaDataFile('hmda_2008_nv_all-records_codes.zip', '163606', '4.74 MB'),
'2009': HmdaDataFile('hmda_2009_nv_all-records_codes.zip', '168658', '4.74 MB'),
'2011': HmdaDataFile('hmda_2011_nv_all-records_codes.zip', '112858', '3.48 MB'),
'2010': HmdaDataFile('hmda_2010_nv_all-records_codes.zip', '126313', '3.93 MB'),
'2013': HmdaDataFile('hmda_2013_nv_all-records_codes.zip', '151453', '5.33 MB'),
'2012': HmdaDataFile('hmda_2012_nv_all-records_codes.zip', '161343', '5.54 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_nv_originated-records_codes.zip', '97926', '3.28 MB'),
'2007': HmdaDataFile('hmda_2007_nv_originated-records_codes.zip', '113248', '3.54 MB'),
'2017': HmdaDataFile('hmda_2017_nv_originated-records_codes.zip', '91540', '1.93 MB'),
'2015': HmdaDataFile('hmda_2015_nv_originated-records_codes.zip', '81624', '2.98 MB'),
'2014': HmdaDataFile('hmda_2014_nv_originated-records_codes.zip', '61757', '2.13 MB'),
'2008': HmdaDataFile('hmda_2008_nv_originated-records_codes.zip', '62592', '1.82 MB'),
'2009': HmdaDataFile('hmda_2009_nv_originated-records_codes.zip', '70049', '1.98 MB'),
'2011': HmdaDataFile('hmda_2011_nv_originated-records_codes.zip', '51445', '1.52 MB'),
'2010': HmdaDataFile('hmda_2010_nv_originated-records_codes.zip', '57173', '1.7 MB'),
'2013': HmdaDataFile('hmda_2013_nv_originated-records_codes.zip', '82011', '2.79 MB'),
'2012': HmdaDataFile('hmda_2012_nv_originated-records_codes.zip', '86528', '2.82 MB')
}
}
},
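# A minimal lookup sketch (hypothetical), written as comments so the enclosing
# dict literal stays valid Python. It assumes this nested literal is bound to a
# module-level name such as HMDA_DATA_FILES; that binding is not visible in this
# excerpt, and the helper name is illustrative only. Keys nest as
# state -> format ('labels'/'codes') -> record set -> year:
#
#     def get_hmda_file(state, fmt, record_set, year):
#         """E.g. get_hmda_file('pr', 'codes', 'all-records', '2016')."""
#         return HMDA_DATA_FILES[state][fmt][record_set][year]
#
#     f = get_hmda_file('nv', 'labels', 'originated-records', '2016')
#     # -> HmdaDataFile('hmda_2016_nv_originated-records_labels.zip', '97926', '4.69 MB')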
'pr': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '16754', '783.1 KB'),
'2007': HmdaDataFile('hmda_2007_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '35822', '1.56 MB'),
'2017': HmdaDataFile('hmda_2017_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '12257', '361.87 KB'),
'2015': HmdaDataFile('hmda_2015_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '18782', '893.63 KB'),
'2014': HmdaDataFile('hmda_2014_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '20640', '942.76 KB'),
'2008': HmdaDataFile('hmda_2008_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '36283', '1.5 MB'),
'2009': HmdaDataFile('hmda_2009_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '34669', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '25832', '1.11 MB'),
'2010': HmdaDataFile('hmda_2010_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '25342', '1.08 MB'),
'2013': HmdaDataFile('hmda_2013_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '30144', '1.35 MB'),
'2012': HmdaDataFile('hmda_2012_pr_first-lien-owner-occupied-1-4-family-records_labels.zip', '31083', '1.38 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pr_all-records_labels.zip', '55699', '2.78 MB'),
'2007': HmdaDataFile('hmda_2007_pr_all-records_labels.zip', '141905', '6.23 MB'),
'2017': HmdaDataFile('hmda_2017_pr_all-records_labels.zip', '41775', '1.3 MB'),
'2015': HmdaDataFile('hmda_2015_pr_all-records_labels.zip', '58798', '2.92 MB'),
'2014': HmdaDataFile('hmda_2014_pr_all-records_labels.zip', '69716', '3.45 MB'),
'2008': HmdaDataFile('hmda_2008_pr_all-records_labels.zip', '121121', '5.16 MB'),
'2009': HmdaDataFile('hmda_2009_pr_all-records_labels.zip', '117907', '4.79 MB'),
'2011': HmdaDataFile('hmda_2011_pr_all-records_labels.zip', '85316', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_pr_all-records_labels.zip', '83046', '3.88 MB'),
'2013': HmdaDataFile('hmda_2013_pr_all-records_labels.zip', '96530', '4.63 MB'),
'2012': HmdaDataFile('hmda_2012_pr_all-records_labels.zip', '87162', '4.17 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pr_originated-records_labels.zip', '26124', '1.23 MB'),
'2007': HmdaDataFile('hmda_2007_pr_originated-records_labels.zip', '57036', '2.51 MB'),
'2017': HmdaDataFile('hmda_2017_pr_originated-records_labels.zip', '19395', '576.04 KB'),
'2015': HmdaDataFile('hmda_2015_pr_originated-records_labels.zip', '28616', '1.37 MB'),
'2014': HmdaDataFile('hmda_2014_pr_originated-records_labels.zip', '30528', '1.42 MB'),
'2008': HmdaDataFile('hmda_2008_pr_originated-records_labels.zip', '50632', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_pr_originated-records_labels.zip', '46237', '1.86 MB'),
'2011': HmdaDataFile('hmda_2011_pr_originated-records_labels.zip', '37660', '1.66 MB'),
'2010': HmdaDataFile('hmda_2010_pr_originated-records_labels.zip', '35121', '1.55 MB'),
'2013': HmdaDataFile('hmda_2013_pr_originated-records_labels.zip', '40075', '1.8 MB'),
'2012': HmdaDataFile('hmda_2012_pr_originated-records_labels.zip', '40616', '1.81 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '16754', '536.87 KB'),
'2007': HmdaDataFile('hmda_2007_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '35822', '1.08 MB'),
'2017': HmdaDataFile('hmda_2017_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '12257', '255.16 KB'),
'2015': HmdaDataFile('hmda_2015_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '18782', '614.78 KB'),
'2014': HmdaDataFile('hmda_2014_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '20640', '647.5 KB'),
'2008': HmdaDataFile('hmda_2008_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '36283', '1.05 MB'),
'2009': HmdaDataFile('hmda_2009_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '34669', '973.6 KB'),
'2011': HmdaDataFile('hmda_2011_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '25832', '756.02 KB'),
'2010': HmdaDataFile('hmda_2010_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '25342', '737.14 KB'),
'2013': HmdaDataFile('hmda_2013_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '30144', '925.82 KB'),
'2012': HmdaDataFile('hmda_2012_pr_first-lien-owner-occupied-1-4-family-records_codes.zip', '31083', '942.8 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_pr_all-records_codes.zip', '55699', '1.82 MB'),
'2007': HmdaDataFile('hmda_2007_pr_all-records_codes.zip', '141905', '4.18 MB'),
'2017': HmdaDataFile('hmda_2017_pr_all-records_codes.zip', '41775', '873.01 KB'),
'2015': HmdaDataFile('hmda_2015_pr_all-records_codes.zip', '58798', '1.94 MB'),
'2014': HmdaDataFile('hmda_2014_pr_all-records_codes.zip', '69716', '2.27 MB'),
'2008': HmdaDataFile('hmda_2008_pr_all-records_codes.zip', '121121', '3.48 MB'),
'2009': HmdaDataFile('hmda_2009_pr_all-records_codes.zip', '117907', '3.28 MB'),
'2011': HmdaDataFile('hmda_2011_pr_all-records_codes.zip', '85316', '2.56 MB'),
'2010': HmdaDataFile('hmda_2010_pr_all-records_codes.zip', '83046', '2.54 MB'),
'2013': HmdaDataFile('hmda_2013_pr_all-records_codes.zip', '96530', '3.04 MB'),
'2012': HmdaDataFile('hmda_2012_pr_all-records_codes.zip', '87162', '2.75 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_pr_originated-records_codes.zip', '26124', '835.09 KB'),
'2007': HmdaDataFile('hmda_2007_pr_originated-records_codes.zip', '57036', '1.73 MB'),
'2017': HmdaDataFile('hmda_2017_pr_originated-records_codes.zip', '19395', '406.77 KB'),
'2015': HmdaDataFile('hmda_2015_pr_originated-records_codes.zip', '28616', '942.61 KB'),
'2014': HmdaDataFile('hmda_2014_pr_originated-records_codes.zip', '30528', '963.45 KB'),
'2008': HmdaDataFile('hmda_2008_pr_originated-records_codes.zip', '50632', '1.49 MB'),
'2009': HmdaDataFile('hmda_2009_pr_originated-records_codes.zip', '46237', '1.32 MB'),
'2011': HmdaDataFile('hmda_2011_pr_originated-records_codes.zip', '37660', '1.13 MB'),
'2010': HmdaDataFile('hmda_2010_pr_originated-records_codes.zip', '35121', '1.05 MB'),
'2013': HmdaDataFile('hmda_2013_pr_originated-records_codes.zip', '40075', '1.23 MB'),
'2012': HmdaDataFile('hmda_2012_pr_originated-records_codes.zip', '40616', '1.24 MB')
}
}
},
'de': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '21457', '907.27 KB'),
'2007': HmdaDataFile('hmda_2007_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '24785', '1.05 MB'),
'2017': HmdaDataFile('hmda_2017_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18891', '540.64 KB'),
'2015': HmdaDataFile('hmda_2015_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18365', '848.09 KB'),
'2014': HmdaDataFile('hmda_2014_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '14206', '642.68 KB'),
'2008': HmdaDataFile('hmda_2008_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '19350', '807.78 KB'),
'2009': HmdaDataFile('hmda_2009_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '26431', '1.05 MB'),
'2011': HmdaDataFile('hmda_2011_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '18557', '764.88 KB'),
'2010': HmdaDataFile('hmda_2010_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '21244', '862.31 KB'),
'2013': HmdaDataFile('hmda_2013_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '22603', '952.26 KB'),
'2012': HmdaDataFile('hmda_2012_de_first-lien-owner-occupied-1-4-family-records_labels.zip', '25372', '1.06 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_de_all-records_labels.zip', '56002', '2.64 MB'),
'2007': HmdaDataFile('hmda_2007_de_all-records_labels.zip', '102001', '4.48 MB'),
'2017': HmdaDataFile('hmda_2017_de_all-records_labels.zip', '49695', '1.64 MB'),
'2015': HmdaDataFile('hmda_2015_de_all-records_labels.zip', '48176', '2.44 MB'),
'2014': HmdaDataFile('hmda_2014_de_all-records_labels.zip', '39784', '2 MB'),
'2008': HmdaDataFile('hmda_2008_de_all-records_labels.zip', '68856', '3.06 MB'),
'2009': HmdaDataFile('hmda_2009_de_all-records_labels.zip', '72398', '3.14 MB'),
'2011': HmdaDataFile('hmda_2011_de_all-records_labels.zip', '52039', '2.43 MB'),
'2010': HmdaDataFile('hmda_2010_de_all-records_labels.zip', '57559', '2.66 MB'),
'2013': HmdaDataFile('hmda_2013_de_all-records_labels.zip', '59314', '2.83 MB'),
'2012': HmdaDataFile('hmda_2012_de_all-records_labels.zip', '65069', '3.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_de_originated-records_labels.zip', '27296', '1.18 MB'),
'2007': HmdaDataFile('hmda_2007_de_originated-records_labels.zip', '40053', '1.69 MB'),
'2017': HmdaDataFile('hmda_2017_de_originated-records_labels.zip', '24719', '718.05 KB'),
'2015': HmdaDataFile('hmda_2015_de_originated-records_labels.zip', '24062', '1.14 MB'),
'2014': HmdaDataFile('hmda_2014_de_originated-records_labels.zip', '19181', '882.64 KB'),
'2008': HmdaDataFile('hmda_2008_de_originated-records_labels.zip', '27619', '1.17 MB'),
'2009': HmdaDataFile('hmda_2009_de_originated-records_labels.zip', '32995', '1.31 MB'),
'2011': HmdaDataFile('hmda_2011_de_originated-records_labels.zip', '24377', '1.01 MB'),
'2010': HmdaDataFile('hmda_2010_de_originated-records_labels.zip', '26997', '1.1 MB'),
'2013': HmdaDataFile('hmda_2013_de_originated-records_labels.zip', '30167', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_de_originated-records_labels.zip', '32868', '1.38 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '21457', '600.87 KB'),
'2007': HmdaDataFile('hmda_2007_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '24785', '699.02 KB'),
'2017': HmdaDataFile('hmda_2017_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18891', '376.86 KB'),
'2015': HmdaDataFile('hmda_2015_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18365', '563.01 KB'),
'2014': HmdaDataFile('hmda_2014_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '14206', '424.11 KB'),
'2008': HmdaDataFile('hmda_2008_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '19350', '544.41 KB'),
'2009': HmdaDataFile('hmda_2009_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '26431', '709.46 KB'),
'2011': HmdaDataFile('hmda_2011_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '18557', '497.81 KB'),
'2010': HmdaDataFile('hmda_2010_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '21244', '559.01 KB'),
'2013': HmdaDataFile('hmda_2013_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '22603', '621.51 KB'),
'2012': HmdaDataFile('hmda_2012_de_first-lien-owner-occupied-1-4-family-records_codes.zip', '25372', '689 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_de_all-records_codes.zip', '56002', '1.68 MB'),
'2007': HmdaDataFile('hmda_2007_de_all-records_codes.zip', '102001', '2.91 MB'),
'2017': HmdaDataFile('hmda_2017_de_all-records_codes.zip', '49695', '1.09 MB'),
'2015': HmdaDataFile('hmda_2015_de_all-records_codes.zip', '48176', '1.54 MB'),
'2014': HmdaDataFile('hmda_2014_de_all-records_codes.zip', '39784', '1.26 MB'),
'2008': HmdaDataFile('hmda_2008_de_all-records_codes.zip', '68856', '2 MB'),
'2009': HmdaDataFile('hmda_2009_de_all-records_codes.zip', '72398', '2.07 MB'),
'2011': HmdaDataFile('hmda_2011_de_all-records_codes.zip', '52039', '1.52 MB'),
'2010': HmdaDataFile('hmda_2010_de_all-records_codes.zip', '57559', '1.66 MB'),
'2013': HmdaDataFile('hmda_2013_de_all-records_codes.zip', '59314', '1.77 MB'),
'2012': HmdaDataFile('hmda_2012_de_all-records_codes.zip', '65069', '1.92 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_de_originated-records_codes.zip', '27296', '769.27 KB'),
'2007': HmdaDataFile('hmda_2007_de_originated-records_codes.zip', '40053', '1.12 MB'),
'2017': HmdaDataFile('hmda_2017_de_originated-records_codes.zip', '24719', '494.64 KB'),
'2015': HmdaDataFile('hmda_2015_de_originated-records_codes.zip', '24062', '748.52 KB'),
'2014': HmdaDataFile('hmda_2014_de_originated-records_codes.zip', '19181', '575.97 KB'),
'2008': HmdaDataFile('hmda_2008_de_originated-records_codes.zip', '27619', '775.49 KB'),
'2009': HmdaDataFile('hmda_2009_de_originated-records_codes.zip', '32995', '882.98 KB'),
'2011': HmdaDataFile('hmda_2011_de_originated-records_codes.zip', '24377', '652.39 KB'),
'2010': HmdaDataFile('hmda_2010_de_originated-records_codes.zip', '26997', '709.17 KB'),
'2013': HmdaDataFile('hmda_2013_de_originated-records_codes.zip', '30167', '829.22 KB'),
'2012': HmdaDataFile('hmda_2012_de_originated-records_codes.zip', '32868', '897.42 KB')
}
}
},
'dc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '15924', '720.67 KB'),
'2007': HmdaDataFile('hmda_2007_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '16676', '696.13 KB'),
'2017': HmdaDataFile('hmda_2017_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '12419', '384.38 KB'),
'2015': HmdaDataFile('hmda_2015_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '14290', '659.64 KB'),
'2014': HmdaDataFile('hmda_2014_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '11399', '518.92 KB'),
'2008': HmdaDataFile('hmda_2008_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '12148', '506.83 KB'),
'2009': HmdaDataFile('hmda_2009_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '18262', '731.02 KB'),
'2011': HmdaDataFile('hmda_2011_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '15280', '677.01 KB'),
'2010': HmdaDataFile('hmda_2010_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '16225', '712.21 KB'),
'2013': HmdaDataFile('hmda_2013_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '17499', '785.67 KB'),
'2012': HmdaDataFile('hmda_2012_dc_first-lien-owner-occupied-1-4-family-records_labels.zip', '20716', '908.46 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_dc_all-records_labels.zip', '38399', '1.87 MB'),
'2007': HmdaDataFile('hmda_2007_dc_all-records_labels.zip', '53480', '2.43 MB'),
'2017': HmdaDataFile('hmda_2017_dc_all-records_labels.zip', '30927', '1.07 MB'),
'2015': HmdaDataFile('hmda_2015_dc_all-records_labels.zip', '34958', '1.77 MB'),
'2014': HmdaDataFile('hmda_2014_dc_all-records_labels.zip', '28672', '1.42 MB'),
'2008': HmdaDataFile('hmda_2008_dc_all-records_labels.zip', '33505', '1.55 MB'),
'2009': HmdaDataFile('hmda_2009_dc_all-records_labels.zip', '43540', '1.9 MB'),
'2011': HmdaDataFile('hmda_2011_dc_all-records_labels.zip', '37657', '1.81 MB'),
'2010': HmdaDataFile('hmda_2010_dc_all-records_labels.zip', '38173', '1.84 MB'),
'2013': HmdaDataFile('hmda_2013_dc_all-records_labels.zip', '43020', '2.11 MB'),
'2012': HmdaDataFile('hmda_2012_dc_all-records_labels.zip', '48621', '2.34 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_dc_originated-records_labels.zip', '19324', '891.94 KB'),
'2007': HmdaDataFile('hmda_2007_dc_originated-records_labels.zip', '23948', '998.05 KB'),
'2017': HmdaDataFile('hmda_2017_dc_originated-records_labels.zip', '15414', '483 KB'),
'2015': HmdaDataFile('hmda_2015_dc_originated-records_labels.zip', '17821', '839.5 KB'),
'2014': HmdaDataFile('hmda_2014_dc_originated-records_labels.zip', '14547', '676.21 KB'),
'2008': HmdaDataFile('hmda_2008_dc_originated-records_labels.zip', '15212', '643.03 KB'),
'2009': HmdaDataFile('hmda_2009_dc_originated-records_labels.zip', '20287', '823.05 KB'),
'2011': HmdaDataFile('hmda_2011_dc_originated-records_labels.zip', '18123', '817.1 KB'),
'2010': HmdaDataFile('hmda_2010_dc_originated-records_labels.zip', '18511', '828.68 KB'),
'2013': HmdaDataFile('hmda_2013_dc_originated-records_labels.zip', '21877', '1 MB'),
'2012': HmdaDataFile('hmda_2012_dc_originated-records_labels.zip', '24842', '1.12 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '15924', '457.4 KB'),
'2007': HmdaDataFile('hmda_2007_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '16676', '454.07 KB'),
'2017': HmdaDataFile('hmda_2017_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '12419', '259.55 KB'),
'2015': HmdaDataFile('hmda_2015_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '14290', '415.87 KB'),
'2014': HmdaDataFile('hmda_2014_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '11399', '327.64 KB'),
'2008': HmdaDataFile('hmda_2008_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '12148', '331.88 KB'),
'2009': HmdaDataFile('hmda_2009_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '18262', '485.74 KB'),
'2011': HmdaDataFile('hmda_2011_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '15280', '423.44 KB'),
'2010': HmdaDataFile('hmda_2010_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '16225', '446.48 KB'),
'2013': HmdaDataFile('hmda_2013_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '17499', '493.75 KB'),
'2012': HmdaDataFile('hmda_2012_dc_first-lien-owner-occupied-1-4-family-records_codes.zip', '20716', '567.98 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_dc_all-records_codes.zip', '38399', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_dc_all-records_codes.zip', '53480', '1.55 MB'),
'2017': HmdaDataFile('hmda_2017_dc_all-records_codes.zip', '30927', '685.42 KB'),
'2015': HmdaDataFile('hmda_2015_dc_all-records_codes.zip', '34958', '1.07 MB'),
'2014': HmdaDataFile('hmda_2014_dc_all-records_codes.zip', '28672', '862.7 KB'),
'2008': HmdaDataFile('hmda_2008_dc_all-records_codes.zip', '33505', '982.73 KB'),
'2009': HmdaDataFile('hmda_2009_dc_all-records_codes.zip', '43540', '1.21 MB'),
'2011': HmdaDataFile('hmda_2011_dc_all-records_codes.zip', '37657', '1.1 MB'),
'2010': HmdaDataFile('hmda_2010_dc_all-records_codes.zip', '38173', '1.11 MB'),
'2013': HmdaDataFile('hmda_2013_dc_all-records_codes.zip', '43020', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_dc_all-records_codes.zip', '48621', '1.42 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_dc_originated-records_codes.zip', '19324', '561.87 KB'),
'2007': HmdaDataFile('hmda_2007_dc_originated-records_codes.zip', '23948', '647.59 KB'),
'2017': HmdaDataFile('hmda_2017_dc_originated-records_codes.zip', '15414', '321.38 KB'),
'2015': HmdaDataFile('hmda_2015_dc_originated-records_codes.zip', '17821', '524.36 KB'),
'2014': HmdaDataFile('hmda_2014_dc_originated-records_codes.zip', '14547', '423.92 KB'),
'2008': HmdaDataFile('hmda_2008_dc_originated-records_codes.zip', '15212', '416.59 KB'),
'2009': HmdaDataFile('hmda_2009_dc_originated-records_codes.zip', '20287', '542.45 KB'),
'2011': HmdaDataFile('hmda_2011_dc_originated-records_codes.zip', '18123', '507.21 KB'),
'2010': HmdaDataFile('hmda_2010_dc_originated-records_codes.zip', '18511', '514.35 KB'),
'2013': HmdaDataFile('hmda_2013_dc_originated-records_codes.zip', '21877', '626.77 KB'),
'2012': HmdaDataFile('hmda_2012_dc_originated-records_codes.zip', '24842', '691.5 KB')
}
}
},
'wi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '141247', '6.63 MB'),
'2007': HmdaDataFile('hmda_2007_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '140939', '6.7 MB'),
'2017': HmdaDataFile('hmda_2017_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '118149', '3.66 MB'),
'2015': HmdaDataFile('hmda_2015_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '126112', '6.94 MB'),
'2014': HmdaDataFile('hmda_2014_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '99338', '5.08 MB'),
'2008': HmdaDataFile('hmda_2008_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '140787', '6.42 MB'),
'2009': HmdaDataFile('hmda_2009_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '233978', '9.45 MB'),
'2011': HmdaDataFile('hmda_2011_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '159084', '7.03 MB'),
'2010': HmdaDataFile('hmda_2010_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '197382', '8.86 MB'),
'2013': HmdaDataFile('hmda_2013_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '155945', '7.35 MB'),
'2012': HmdaDataFile('hmda_2012_wi_first-lien-owner-occupied-1-4-family-records_labels.zip', '219594', '10.08 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wi_all-records_labels.zip', '277224', '14.04 MB'),
'2007': HmdaDataFile('hmda_2007_wi_all-records_labels.zip', '460622', '22.04 MB'),
'2017': HmdaDataFile('hmda_2017_wi_all-records_labels.zip', '237542', '8.25 MB'),
'2015': HmdaDataFile('hmda_2015_wi_all-records_labels.zip', '250077', '14.91 MB'),
'2014': HmdaDataFile('hmda_2014_wi_all-records_labels.zip', '207239', '11.5 MB'),
'2008': HmdaDataFile('hmda_2008_wi_all-records_labels.zip', '359119', '16.92 MB'),
'2009': HmdaDataFile('hmda_2009_wi_all-records_labels.zip', '475760', '20.37 MB'),
'2011': HmdaDataFile('hmda_2011_wi_all-records_labels.zip', '324321', '15.85 MB'),
'2010': HmdaDataFile('hmda_2010_wi_all-records_labels.zip', '394638', '19.35 MB'),
'2013': HmdaDataFile('hmda_2013_wi_all-records_labels.zip', '306118', '15.66 MB'),
'2012': HmdaDataFile('hmda_2012_wi_all-records_labels.zip', '398029', '19.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wi_originated-records_labels.zip', '168678', '8.06 MB'),
'2007': HmdaDataFile('hmda_2007_wi_originated-records_labels.zip', '211916', '10.01 MB'),
'2017': HmdaDataFile('hmda_2017_wi_originated-records_labels.zip', '146251', '4.59 MB'),
'2015': HmdaDataFile('hmda_2015_wi_originated-records_labels.zip', '153515', '8.58 MB'),
'2014': HmdaDataFile('hmda_2014_wi_originated-records_labels.zip', '124916', '6.51 MB'),
'2008': HmdaDataFile('hmda_2008_wi_originated-records_labels.zip', '187234', '8.58 MB'),
'2009': HmdaDataFile('hmda_2009_wi_originated-records_labels.zip', '270190', '11.1 MB'),
'2011': HmdaDataFile('hmda_2011_wi_originated-records_labels.zip', '188288', '8.56 MB'),
'2010': HmdaDataFile('hmda_2010_wi_originated-records_labels.zip', '228537', '10.45 MB'),
'2013': HmdaDataFile('hmda_2013_wi_originated-records_labels.zip', '188524', '9.07 MB'),
'2012': HmdaDataFile('hmda_2012_wi_originated-records_labels.zip', '253728', '11.88 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '141247', '4.45 MB'),
'2007': HmdaDataFile('hmda_2007_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '140939', '4.6 MB'),
'2017': HmdaDataFile('hmda_2017_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '118149', '2.59 MB'),
'2015': HmdaDataFile('hmda_2015_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '126112', '4.69 MB'),
'2014': HmdaDataFile('hmda_2014_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '99338', '3.39 MB'),
'2008': HmdaDataFile('hmda_2008_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '140787', '4.45 MB'),
'2009': HmdaDataFile('hmda_2009_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '233978', '6.72 MB'),
'2011': HmdaDataFile('hmda_2011_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '159084', '4.64 MB'),
'2010': HmdaDataFile('hmda_2010_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '197382', '5.93 MB'),
'2013': HmdaDataFile('hmda_2013_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '155945', '4.93 MB'),
'2012': HmdaDataFile('hmda_2012_wi_first-lien-owner-occupied-1-4-family-records_codes.zip', '219594', '6.71 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wi_all-records_codes.zip', '277224', '9.08 MB'),
'2007': HmdaDataFile('hmda_2007_wi_all-records_codes.zip', '460622', '14.82 MB'),
'2017': HmdaDataFile('hmda_2017_wi_all-records_codes.zip', '237542', '5.41 MB'),
'2015': HmdaDataFile('hmda_2015_wi_all-records_codes.zip', '250077', '9.7 MB'),
'2014': HmdaDataFile('hmda_2014_wi_all-records_codes.zip', '207239', '7.39 MB'),
'2008': HmdaDataFile('hmda_2008_wi_all-records_codes.zip', '359119', '11.47 MB'),
'2009': HmdaDataFile('hmda_2009_wi_all-records_codes.zip', '475760', '14.08 MB'),
'2011': HmdaDataFile('hmda_2011_wi_all-records_codes.zip', '324321', '10.14 MB'),
'2010': HmdaDataFile('hmda_2010_wi_all-records_codes.zip', '394638', '12.55 MB'),
'2013': HmdaDataFile('hmda_2013_wi_all-records_codes.zip', '306118', '10.17 MB'),
'2012': HmdaDataFile('hmda_2012_wi_all-records_codes.zip', '398029', '12.88 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wi_originated-records_codes.zip', '168678', '5.37 MB'),
'2007': HmdaDataFile('hmda_2007_wi_originated-records_codes.zip', '211916', '6.85 MB'),
'2017': HmdaDataFile('hmda_2017_wi_originated-records_codes.zip', '146251', '3.19 MB'),
'2015': HmdaDataFile('hmda_2015_wi_originated-records_codes.zip', '153515', '5.74 MB'),
'2014': HmdaDataFile('hmda_2014_wi_originated-records_codes.zip', '124916', '4.3 MB'),
'2008': HmdaDataFile('hmda_2008_wi_originated-records_codes.zip', '187234', '5.9 MB'),
'2009': HmdaDataFile('hmda_2009_wi_originated-records_codes.zip', '270190', '7.86 MB'),
'2011': HmdaDataFile('hmda_2011_wi_originated-records_codes.zip', '188288', '5.6 MB'),
'2010': HmdaDataFile('hmda_2010_wi_originated-records_codes.zip', '228537', '6.95 MB'),
'2013': HmdaDataFile('hmda_2013_wi_originated-records_codes.zip', '188524', '6.03 MB'),
'2012': HmdaDataFile('hmda_2012_wi_originated-records_codes.zip', '253728', '7.85 MB')
}
}
},
'wv': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '23752', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '32932', '1.43 MB'),
'2017': HmdaDataFile('hmda_2017_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '22635', '775.09 KB'),
'2015': HmdaDataFile('hmda_2015_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '22216', '1.16 MB'),
'2014': HmdaDataFile('hmda_2014_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '20952', '925.27 KB'),
'2008': HmdaDataFile('hmda_2008_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '27892', '1.25 MB'),
'2009': HmdaDataFile('hmda_2009_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '31766', '1.38 MB'),
'2011': HmdaDataFile('hmda_2011_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '25662', '1.09 MB'),
'2010': HmdaDataFile('hmda_2010_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '26690', '1.13 MB'),
'2013': HmdaDataFile('hmda_2013_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '29841', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_wv_first-lien-owner-occupied-1-4-family-records_labels.zip', '30400', '1.36 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wv_all-records_labels.zip', '59932', '3.14 MB'),
'2007': HmdaDataFile('hmda_2007_wv_all-records_labels.zip', '108639', '4.82 MB'),
'2017': HmdaDataFile('hmda_2017_wv_all-records_labels.zip', '56407', '2.02 MB'),
'2015': HmdaDataFile('hmda_2015_wv_all-records_labels.zip', '56189', '3.18 MB'),
'2014': HmdaDataFile('hmda_2014_wv_all-records_labels.zip', '53804', '2.59 MB'),
'2008': HmdaDataFile('hmda_2008_wv_all-records_labels.zip', '82256', '3.81 MB'),
'2009': HmdaDataFile('hmda_2009_wv_all-records_labels.zip', '78726', '3.62 MB'),
'2011': HmdaDataFile('hmda_2011_wv_all-records_labels.zip', '65054', '3.02 MB'),
'2010': HmdaDataFile('hmda_2010_wv_all-records_labels.zip', '67127', '3.14 MB'),
'2013': HmdaDataFile('hmda_2013_wv_all-records_labels.zip', '71730', '3.58 MB'),
'2012': HmdaDataFile('hmda_2012_wv_all-records_labels.zip', '71668', '3.48 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wv_originated-records_labels.zip', '31084', '1.54 MB'),
'2007': HmdaDataFile('hmda_2007_wv_originated-records_labels.zip', '49942', '2.16 MB'),
'2017': HmdaDataFile('hmda_2017_wv_originated-records_labels.zip', '29490', '1.02 MB'),
'2015': HmdaDataFile('hmda_2015_wv_originated-records_labels.zip', '29892', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_wv_originated-records_labels.zip', '28288', '1.28 MB'),
'2008': HmdaDataFile('hmda_2008_wv_originated-records_labels.zip', '39173', '1.77 MB'),
'2009': HmdaDataFile('hmda_2009_wv_originated-records_labels.zip', '40090', '1.78 MB'),
'2011': HmdaDataFile('hmda_2011_wv_originated-records_labels.zip', '33089', '1.44 MB'),
'2010': HmdaDataFile('hmda_2010_wv_originated-records_labels.zip', '34262', '1.49 MB'),
'2013': HmdaDataFile('hmda_2013_wv_originated-records_labels.zip', '38630', '1.82 MB'),
'2012': HmdaDataFile('hmda_2012_wv_originated-records_labels.zip', '38585', '1.77 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '23752', '731.44 KB'),
'2007': HmdaDataFile('hmda_2007_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '32932', '927.05 KB'),
'2017': HmdaDataFile('hmda_2017_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '22635', '535.47 KB'),
'2015': HmdaDataFile('hmda_2015_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '22216', '750.99 KB'),
'2014': HmdaDataFile('hmda_2014_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '20952', '586.49 KB'),
'2008': HmdaDataFile('hmda_2008_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '27892', '805.27 KB'),
'2009': HmdaDataFile('hmda_2009_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '31766', '906.21 KB'),
'2011': HmdaDataFile('hmda_2011_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '25662', '682.61 KB'),
'2010': HmdaDataFile('hmda_2010_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '26690', '718.48 KB'),
'2013': HmdaDataFile('hmda_2013_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '29841', '869.85 KB'),
'2012': HmdaDataFile('hmda_2012_wv_first-lien-owner-occupied-1-4-family-records_codes.zip', '30400', '859.14 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wv_all-records_codes.zip', '59932', '1.9 MB'),
'2007': HmdaDataFile('hmda_2007_wv_all-records_codes.zip', '108639', '3.05 MB'),
'2017': HmdaDataFile('hmda_2017_wv_all-records_codes.zip', '56407', '1.32 MB'),
'2015': HmdaDataFile('hmda_2015_wv_all-records_codes.zip', '56189', '1.97 MB'),
'2014': HmdaDataFile('hmda_2014_wv_all-records_codes.zip', '53804', '1.57 MB'),
'2008': HmdaDataFile('hmda_2008_wv_all-records_codes.zip', '82256', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_wv_all-records_codes.zip', '78726', '2.31 MB'),
'2011': HmdaDataFile('hmda_2011_wv_all-records_codes.zip', '65054', '1.83 MB'),
'2010': HmdaDataFile('hmda_2010_wv_all-records_codes.zip', '67127', '1.91 MB'),
'2013': HmdaDataFile('hmda_2013_wv_all-records_codes.zip', '71730', '2.18 MB'),
'2012': HmdaDataFile('hmda_2012_wv_all-records_codes.zip', '71668', '2.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wv_originated-records_codes.zip', '31084', '966.37 KB'),
'2007': HmdaDataFile('hmda_2007_wv_originated-records_codes.zip', '49942', '1.39 MB'),
'2017': HmdaDataFile('hmda_2017_wv_originated-records_codes.zip', '29490', '694.72 KB'),
'2015': HmdaDataFile('hmda_2015_wv_originated-records_codes.zip', '29892', '1.02 MB'),
'2014': HmdaDataFile('hmda_2014_wv_originated-records_codes.zip', '28288', '806.54 KB'),
'2008': HmdaDataFile('hmda_2008_wv_originated-records_codes.zip', '39173', '1.14 MB'),
'2009': HmdaDataFile('hmda_2009_wv_originated-records_codes.zip', '40090', '1.16 MB'),
'2011': HmdaDataFile('hmda_2011_wv_originated-records_codes.zip', '33089', '896.24 KB'),
'2010': HmdaDataFile('hmda_2010_wv_originated-records_codes.zip', '34262', '933.86 KB'),
'2013': HmdaDataFile('hmda_2013_wv_originated-records_codes.zip', '38630', '1.14 MB'),
'2012': HmdaDataFile('hmda_2012_wv_originated-records_codes.zip', '38585', '1.11 MB')
}
}
},
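# The archive names above follow a regular pattern,
# hmda_{year}_{state}_{record-set}_{labels|codes}.zip, so the key path can be
# recovered from a filename alone. A small parsing sketch (the regex and
# variable names are illustrative, not part of this module):
#
#     import re
#
#     FILENAME_RE = re.compile(r"hmda_(\d{4})_([a-z]{2})_(.+)_(labels|codes)\.zip")
#     m = FILENAME_RE.match('hmda_2013_wv_all-records_labels.zip')
#     # m.groups() -> ('2013', 'wv', 'all-records', 'labels')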
'hi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '25790', '1.11 MB'),
'2007': HmdaDataFile('hmda_2007_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '26443', '1.11 MB'),
'2017': HmdaDataFile('hmda_2017_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '20146', '641.16 KB'),
'2015': HmdaDataFile('hmda_2015_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '21679', '993.8 KB'),
'2014': HmdaDataFile('hmda_2014_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '16100', '691.11 KB'),
'2008': HmdaDataFile('hmda_2008_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '19577', '808.1 KB'),
'2009': HmdaDataFile('hmda_2009_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '31476', '1.18 MB'),
'2011': HmdaDataFile('hmda_2011_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '22003', '927.28 KB'),
'2010': HmdaDataFile('hmda_2010_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '24636', '1.02 MB'),
'2013': HmdaDataFile('hmda_2013_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '26347', '1.13 MB'),
'2012': HmdaDataFile('hmda_2012_hi_first-lien-owner-occupied-1-4-family-records_labels.zip', '31689', '1.34 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_hi_all-records_labels.zip', '57857', '2.66 MB'),
'2007': HmdaDataFile('hmda_2007_hi_all-records_labels.zip', '97609', '4.13 MB'),
'2017': HmdaDataFile('hmda_2017_hi_all-records_labels.zip', '44868', '1.52 MB'),
'2015': HmdaDataFile('hmda_2015_hi_all-records_labels.zip', '48255', '2.42 MB'),
'2014': HmdaDataFile('hmda_2014_hi_all-records_labels.zip', '39152', '1.81 MB'),
'2008': HmdaDataFile('hmda_2008_hi_all-records_labels.zip', '58044', '2.51 MB'),
'2009': HmdaDataFile('hmda_2009_hi_all-records_labels.zip', '72505', '2.88 MB'),
'2011': HmdaDataFile('hmda_2011_hi_all-records_labels.zip', '52036', '2.39 MB'),
'2010': HmdaDataFile('hmda_2010_hi_all-records_labels.zip', '57360', '2.61 MB'),
'2013': HmdaDataFile('hmda_2013_hi_all-records_labels.zip', '62718', '2.93 MB'),
'2012': HmdaDataFile('hmda_2012_hi_all-records_labels.zip', '69807', '3.22 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_hi_originated-records_labels.zip', '32822', '1.42 MB'),
'2007': HmdaDataFile('hmda_2007_hi_originated-records_labels.zip', '39270', '1.64 MB'),
'2017': HmdaDataFile('hmda_2017_hi_originated-records_labels.zip', '26553', '842.99 KB'),
'2015': HmdaDataFile('hmda_2015_hi_originated-records_labels.zip', '28778', '1.34 MB'),
'2014': HmdaDataFile('hmda_2014_hi_originated-records_labels.zip', '22377', '977.23 KB'),
'2008': HmdaDataFile('hmda_2008_hi_originated-records_labels.zip', '25770', '1.07 MB'),
'2009': HmdaDataFile('hmda_2009_hi_originated-records_labels.zip', '36594', '1.37 MB'),
'2011': HmdaDataFile('hmda_2011_hi_originated-records_labels.zip', '27526', '1.18 MB'),
'2010': HmdaDataFile('hmda_2010_hi_originated-records_labels.zip', '29807', '1.25 MB'),
'2013': HmdaDataFile('hmda_2013_hi_originated-records_labels.zip', '36581', '1.6 MB'),
'2012': HmdaDataFile('hmda_2012_hi_originated-records_labels.zip', '40668', '1.75 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '25790', '747.28 KB'),
'2007': HmdaDataFile('hmda_2007_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '26443', '769.57 KB'),
'2017': HmdaDataFile('hmda_2017_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '20146', '442.43 KB'),
'2015': HmdaDataFile('hmda_2015_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '21679', '682.34 KB'),
'2014': HmdaDataFile('hmda_2014_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '16100', '467.14 KB'),
'2008': HmdaDataFile('hmda_2008_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '19577', '565.45 KB'),
'2009': HmdaDataFile('hmda_2009_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '31476', '829.34 KB'),
'2011': HmdaDataFile('hmda_2011_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '22003', '617.15 KB'),
'2010': HmdaDataFile('hmda_2010_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '24636', '682.86 KB'),
'2013': HmdaDataFile('hmda_2013_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '26347', '767.75 KB'),
'2012': HmdaDataFile('hmda_2012_hi_first-lien-owner-occupied-1-4-family-records_codes.zip', '31689', '898.68 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_hi_all-records_codes.zip', '57857', '1.73 MB'),
'2007': HmdaDataFile('hmda_2007_hi_all-records_codes.zip', '97609', '2.81 MB'),
'2017': HmdaDataFile('hmda_2017_hi_all-records_codes.zip', '44868', '991.92 KB'),
'2015': HmdaDataFile('hmda_2015_hi_all-records_codes.zip', '48255', '1.59 MB'),
'2014': HmdaDataFile('hmda_2014_hi_all-records_codes.zip', '39152', '1.17 MB'),
'2008': HmdaDataFile('hmda_2008_hi_all-records_codes.zip', '58044', '1.7 MB'),
'2009': HmdaDataFile('hmda_2009_hi_all-records_codes.zip', '72505', '1.97 MB'),
'2011': HmdaDataFile('hmda_2011_hi_all-records_codes.zip', '52036', '1.54 MB'),
'2010': HmdaDataFile('hmda_2010_hi_all-records_codes.zip', '57360', '1.69 MB'),
'2013': HmdaDataFile('hmda_2013_hi_all-records_codes.zip', '62718', '1.9 MB'),
'2012': HmdaDataFile('hmda_2012_hi_all-records_codes.zip', '69807', '2.09 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_hi_originated-records_codes.zip', '32822', '955.73 KB'),
'2007': HmdaDataFile('hmda_2007_hi_originated-records_codes.zip', '39270', '1.15 MB'),
'2017': HmdaDataFile('hmda_2017_hi_originated-records_codes.zip', '26553', '577.19 KB'),
'2015': HmdaDataFile('hmda_2015_hi_originated-records_codes.zip', '28778', '909.22 KB'),
'2014': HmdaDataFile('hmda_2014_hi_originated-records_codes.zip', '22377', '650.88 KB'),
'2008': HmdaDataFile('hmda_2008_hi_originated-records_codes.zip', '25770', '741.44 KB'),
'2009': HmdaDataFile('hmda_2009_hi_originated-records_codes.zip', '36594', '946.61 KB'),
'2011': HmdaDataFile('hmda_2011_hi_originated-records_codes.zip', '27526', '777.03 KB'),
'2010': HmdaDataFile('hmda_2010_hi_originated-records_codes.zip', '29807', '833.67 KB'),
'2013': HmdaDataFile('hmda_2013_hi_originated-records_codes.zip', '36581', '1.06 MB'),
'2012': HmdaDataFile('hmda_2012_hi_originated-records_codes.zip', '40668', '1.17 MB')
}
}
},
'ok': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '61711', '2.99 MB'),
'2007': HmdaDataFile('hmda_2007_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '71771', '3.36 MB'),
'2017': HmdaDataFile('hmda_2017_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '56021', '1.71 MB'),
'2015': HmdaDataFile('hmda_2015_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '57409', '3.04 MB'),
'2014': HmdaDataFile('hmda_2014_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '53820', '2.56 MB'),
'2008': HmdaDataFile('hmda_2008_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '64839', '3.04 MB'),
'2009': HmdaDataFile('hmda_2009_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '83962', '3.72 MB'),
'2011': HmdaDataFile('hmda_2011_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '62083', '2.92 MB'),
'2010': HmdaDataFile('hmda_2010_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '68205', '3.24 MB'),
'2013': HmdaDataFile('hmda_2013_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '68275', '3.28 MB'),
'2012': HmdaDataFile('hmda_2012_ok_first-lien-owner-occupied-1-4-family-records_labels.zip', '77297', '3.66 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ok_all-records_labels.zip', '165463', '8.55 MB'),
'2007': HmdaDataFile('hmda_2007_ok_all-records_labels.zip', '250763', '12.15 MB'),
'2017': HmdaDataFile('hmda_2017_ok_all-records_labels.zip', '150838', '5.09 MB'),
'2015': HmdaDataFile('hmda_2015_ok_all-records_labels.zip', '152804', '8.67 MB'),
'2014': HmdaDataFile('hmda_2014_ok_all-records_labels.zip', '146824', '7.59 MB'),
'2008': HmdaDataFile('hmda_2008_ok_all-records_labels.zip', '194552', '9.58 MB'),
'2009': HmdaDataFile('hmda_2009_ok_all-records_labels.zip', '219393', '10.29 MB'),
'2011': HmdaDataFile('hmda_2011_ok_all-records_labels.zip', '163798', '8.6 MB'),
'2010': HmdaDataFile('hmda_2010_ok_all-records_labels.zip', '177023', '9.25 MB'),
'2013': HmdaDataFile('hmda_2013_ok_all-records_labels.zip', '180860', '9.5 MB'),
'2012': HmdaDataFile('hmda_2012_ok_all-records_labels.zip', '189005', '9.82 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ok_originated-records_labels.zip', '84730', '4.21 MB'),
'2007': HmdaDataFile('hmda_2007_ok_originated-records_labels.zip', '107739', '5.16 MB'),
'2017': HmdaDataFile('hmda_2017_ok_originated-records_labels.zip', '78302', '2.43 MB'),
'2015': HmdaDataFile('hmda_2015_ok_originated-records_labels.zip', '79611', '4.27 MB'),
'2014': HmdaDataFile('hmda_2014_ok_originated-records_labels.zip', '76088', '3.74 MB'),
'2008': HmdaDataFile('hmda_2008_ok_originated-records_labels.zip', '90679', '4.34 MB'),
'2009': HmdaDataFile('hmda_2009_ok_originated-records_labels.zip', '105102', '4.8 MB'),
'2011': HmdaDataFile('hmda_2011_ok_originated-records_labels.zip', '81562', '3.94 MB'),
'2010': HmdaDataFile('hmda_2010_ok_originated-records_labels.zip', '87420', '4.24 MB'),
'2013': HmdaDataFile('hmda_2013_ok_originated-records_labels.zip', '91830', '4.49 MB'),
'2012': HmdaDataFile('hmda_2012_ok_originated-records_labels.zip', '98582', '4.73 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '61711', '2.07 MB'),
'2007': HmdaDataFile('hmda_2007_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '71771', '2.36 MB'),
'2017': HmdaDataFile('hmda_2017_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '56021', '1.24 MB'),
'2015': HmdaDataFile('hmda_2015_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '57409', '2.11 MB'),
'2014': HmdaDataFile('hmda_2014_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '53820', '1.76 MB'),
'2008': HmdaDataFile('hmda_2008_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '64839', '2.14 MB'),
'2009': HmdaDataFile('hmda_2009_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '83962', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '62083', '1.97 MB'),
'2010': HmdaDataFile('hmda_2010_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '68205', '2.2 MB'),
'2013': HmdaDataFile('hmda_2013_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '68275', '2.25 MB'),
'2012': HmdaDataFile('hmda_2012_ok_first-lien-owner-occupied-1-4-family-records_codes.zip', '77297', '2.49 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ok_all-records_codes.zip', '165463', '5.67 MB'),
'2007': HmdaDataFile('hmda_2007_ok_all-records_codes.zip', '250763', '8.32 MB'),
'2017': HmdaDataFile('hmda_2017_ok_all-records_codes.zip', '150838', '3.38 MB'),
'2015': HmdaDataFile('hmda_2015_ok_all-records_codes.zip', '152804', '5.81 MB'),
'2014': HmdaDataFile('hmda_2014_ok_all-records_codes.zip', '146824', '5.04 MB'),
'2008': HmdaDataFile('hmda_2008_ok_all-records_codes.zip', '194552', '6.58 MB'),
'2009': HmdaDataFile('hmda_2009_ok_all-records_codes.zip', '219393', '7.12 MB'),
'2011': HmdaDataFile('hmda_2011_ok_all-records_codes.zip', '163798', '5.66 MB'),
'2010': HmdaDataFile('hmda_2010_ok_all-records_codes.zip', '177023', '6.13 MB'),
'2013': HmdaDataFile('hmda_2013_ok_all-records_codes.zip', '180860', '6.33 MB'),
'2012': HmdaDataFile('hmda_2012_ok_all-records_codes.zip', '189005', '6.53 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ok_originated-records_codes.zip', '84730', '2.89 MB'),
'2007': HmdaDataFile('hmda_2007_ok_originated-records_codes.zip', '107739', '3.62 MB'),
'2017': HmdaDataFile('hmda_2017_ok_originated-records_codes.zip', '78302', '1.73 MB'),
'2015': HmdaDataFile('hmda_2015_ok_originated-records_codes.zip', '79611', '2.95 MB'),
'2014': HmdaDataFile('hmda_2014_ok_originated-records_codes.zip', '76088', '2.55 MB'),
'2008': HmdaDataFile('hmda_2008_ok_originated-records_codes.zip', '90679', '3.04 MB'),
'2009': HmdaDataFile('hmda_2009_ok_originated-records_codes.zip', '105102', '3.39 MB'),
'2011': HmdaDataFile('hmda_2011_ok_originated-records_codes.zip', '81562', '2.64 MB'),
'2010': HmdaDataFile('hmda_2010_ok_originated-records_codes.zip', '87420', '2.87 MB'),
'2013': HmdaDataFile('hmda_2013_ok_originated-records_codes.zip', '91830', '3.05 MB'),
'2012': HmdaDataFile('hmda_2012_ok_originated-records_codes.zip', '98582', '3.19 MB')
}
}
},
'fl': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '415338', '21.43 MB'),
'2007': HmdaDataFile('hmda_2007_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '501111', '25.12 MB'),
'2017': HmdaDataFile('hmda_2017_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '396688', '13.19 MB'),
'2015': HmdaDataFile('hmda_2015_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '347883', '21.05 MB'),
'2014': HmdaDataFile('hmda_2014_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '272904', '13.99 MB'),
'2008': HmdaDataFile('hmda_2008_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '258117', '12.67 MB'),
'2009': HmdaDataFile('hmda_2009_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '263772', '12.4 MB'),
'2011': HmdaDataFile('hmda_2011_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '231239', '11.59 MB'),
'2010': HmdaDataFile('hmda_2010_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '240644', '12.01 MB'),
'2013': HmdaDataFile('hmda_2013_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '365896', '19.08 MB'),
'2012': HmdaDataFile('hmda_2012_fl_first-lien-owner-occupied-1-4-family-records_labels.zip', '373288', '19.2 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_fl_all-records_labels.zip', '1043942', '58 MB'),
'2007': HmdaDataFile('hmda_2007_fl_all-records_labels.zip', '2006660', '102.81 MB'),
'2017': HmdaDataFile('hmda_2017_fl_all-records_labels.zip', '1018763', '38.28 MB'),
'2015': HmdaDataFile('hmda_2015_fl_all-records_labels.zip', '893206', '57.89 MB'),
'2014': HmdaDataFile('hmda_2014_fl_all-records_labels.zip', '732825', '40.79 MB'),
'2008': HmdaDataFile('hmda_2008_fl_all-records_labels.zip', '962944', '49.47 MB'),
'2009': HmdaDataFile('hmda_2009_fl_all-records_labels.zip', '806975', '40.26 MB'),
'2011': HmdaDataFile('hmda_2011_fl_all-records_labels.zip', '647776', '35.66 MB'),
'2010': HmdaDataFile('hmda_2010_fl_all-records_labels.zip', '675688', '37.38 MB'),
'2013': HmdaDataFile('hmda_2013_fl_all-records_labels.zip', '948672', '53.81 MB'),
'2012': HmdaDataFile('hmda_2012_fl_all-records_labels.zip', '919923', '51.66 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_fl_originated-records_labels.zip', '506394', '26.46 MB'),
'2007': HmdaDataFile('hmda_2007_fl_originated-records_labels.zip', '735174', '36.57 MB'),
'2017': HmdaDataFile('hmda_2017_fl_originated-records_labels.zip', '492702', '16.61 MB'),
'2015': HmdaDataFile('hmda_2015_fl_originated-records_labels.zip', '434779', '26.78 MB'),
'2014': HmdaDataFile('hmda_2014_fl_originated-records_labels.zip', '349696', '18.14 MB'),
'2008': HmdaDataFile('hmda_2008_fl_originated-records_labels.zip', '344859', '17 MB'),
'2009': HmdaDataFile('hmda_2009_fl_originated-records_labels.zip', '318689', '15.06 MB'),
'2011': HmdaDataFile('hmda_2011_fl_originated-records_labels.zip', '288632', '14.64 MB'),
'2010': HmdaDataFile('hmda_2010_fl_originated-records_labels.zip', '292476', '14.8 MB'),
'2013': HmdaDataFile('hmda_2013_fl_originated-records_labels.zip', '467201', '24.7 MB'),
'2012': HmdaDataFile('hmda_2012_fl_originated-records_labels.zip', '462049', '24.13 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '415338', '15.17 MB'),
'2007': HmdaDataFile('hmda_2007_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '501111', '17.95 MB'),
'2017': HmdaDataFile('hmda_2017_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '396688', '9.14 MB'),
'2015': HmdaDataFile('hmda_2015_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '347883', '14.62 MB'),
'2014': HmdaDataFile('hmda_2014_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '272904', '9.87 MB'),
'2008': HmdaDataFile('hmda_2008_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '258117', '8.93 MB'),
'2009': HmdaDataFile('hmda_2009_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '263772', '8.88 MB'),
'2011': HmdaDataFile('hmda_2011_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '231239', '8 MB'),
'2010': HmdaDataFile('hmda_2010_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '240644', '8.28 MB'),
'2013': HmdaDataFile('hmda_2013_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '365896', '13.41 MB'),
'2012': HmdaDataFile('hmda_2012_fl_first-lien-owner-occupied-1-4-family-records_codes.zip', '373288', '13.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_fl_all-records_codes.zip', '1043942', '40.03 MB'),
'2007': HmdaDataFile('hmda_2007_fl_all-records_codes.zip', '2006660', '71.9 MB'),
'2017': HmdaDataFile('hmda_2017_fl_all-records_codes.zip', '1018763', '24.91 MB'),
'2015': HmdaDataFile('hmda_2015_fl_all-records_codes.zip', '893206', '38.26 MB'),
'2014': HmdaDataFile('hmda_2014_fl_all-records_codes.zip', '732825', '28.16 MB'),
'2008': HmdaDataFile('hmda_2008_fl_all-records_codes.zip', '962944', '34.41 MB'),
'2009': HmdaDataFile('hmda_2009_fl_all-records_codes.zip', '806975', '28.11 MB'),
'2011': HmdaDataFile('hmda_2011_fl_all-records_codes.zip', '647776', '23.83 MB'),
'2010': HmdaDataFile('hmda_2010_fl_all-records_codes.zip', '675688', '24.98 MB'),
'2013': HmdaDataFile('hmda_2013_fl_all-records_codes.zip', '948672', '36.68 MB'),
'2012': HmdaDataFile('hmda_2012_fl_all-records_codes.zip', '919923', '35.16 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_fl_originated-records_codes.zip', '506394', '18.65 MB'),
'2007': HmdaDataFile('hmda_2007_fl_originated-records_codes.zip', '735174', '26.09 MB'),
'2017': HmdaDataFile('hmda_2017_fl_originated-records_codes.zip', '492702', '11.34 MB'),
'2015': HmdaDataFile('hmda_2015_fl_originated-records_codes.zip', '434779', '18.42 MB'),
'2014': HmdaDataFile('hmda_2014_fl_originated-records_codes.zip', '349696', '12.69 MB'),
'2008': HmdaDataFile('hmda_2008_fl_originated-records_codes.zip', '344859', '11.94 MB'),
'2009': HmdaDataFile('hmda_2009_fl_originated-records_codes.zip', '318689', '10.73 MB'),
'2011': HmdaDataFile('hmda_2011_fl_originated-records_codes.zip', '288632', '10.03 MB'),
'2010': HmdaDataFile('hmda_2010_fl_originated-records_codes.zip', '292476', '10.16 MB'),
'2013': HmdaDataFile('hmda_2013_fl_originated-records_codes.zip', '467201', '17.24 MB'),
'2012': HmdaDataFile('hmda_2012_fl_originated-records_codes.zip', '462049', '16.84 MB')
}
}
},
'wy': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12342', '483.6 KB'),
'2007': HmdaDataFile('hmda_2007_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '13857', '521.55 KB'),
'2017': HmdaDataFile('hmda_2017_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '10510', '270.62 KB'),
'2015': HmdaDataFile('hmda_2015_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '11880', '503.22 KB'),
'2014': HmdaDataFile('hmda_2014_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '10422', '425.8 KB'),
'2008': HmdaDataFile('hmda_2008_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12387', '474.69 KB'),
'2009': HmdaDataFile('hmda_2009_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '16912', '594.41 KB'),
'2011': HmdaDataFile('hmda_2011_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '11423', '470.67 KB'),
'2010': HmdaDataFile('hmda_2010_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '12694', '489.69 KB'),
'2013': HmdaDataFile('hmda_2013_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '14178', '537.95 KB'),
'2012': HmdaDataFile('hmda_2012_wy_first-lien-owner-occupied-1-4-family-records_labels.zip', '15410', '568.4 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wy_all-records_labels.zip', '30163', '1.29 MB'),
'2007': HmdaDataFile('hmda_2007_wy_all-records_labels.zip', '48234', '1.94 MB'),
'2017': HmdaDataFile('hmda_2017_wy_all-records_labels.zip', '26154', '743.51 KB'),
'2015': HmdaDataFile('hmda_2015_wy_all-records_labels.zip', '28641', '1.34 MB'),
'2014': HmdaDataFile('hmda_2014_wy_all-records_labels.zip', '25049', '1.13 MB'),
'2008': HmdaDataFile('hmda_2008_wy_all-records_labels.zip', '35748', '1.45 MB'),
'2009': HmdaDataFile('hmda_2009_wy_all-records_labels.zip', '41659', '1.59 MB'),
'2011': HmdaDataFile('hmda_2011_wy_all-records_labels.zip', '28465', '1.31 MB'),
'2010': HmdaDataFile('hmda_2010_wy_all-records_labels.zip', '32035', '1.37 MB'),
'2013': HmdaDataFile('hmda_2013_wy_all-records_labels.zip', '32956', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_wy_all-records_labels.zip', '34092', '1.4 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wy_originated-records_labels.zip', '15484', '616.35 KB'),
'2007': HmdaDataFile('hmda_2007_wy_originated-records_labels.zip', '21052', '815.34 KB'),
'2017': HmdaDataFile('hmda_2017_wy_originated-records_labels.zip', '13702', '356.27 KB'),
'2015': HmdaDataFile('hmda_2015_wy_originated-records_labels.zip', '15107', '651.6 KB'),
'2014': HmdaDataFile('hmda_2014_wy_originated-records_labels.zip', '13556', '566.41 KB'),
'2008': HmdaDataFile('hmda_2008_wy_originated-records_labels.zip', '16892', '658.73 KB'),
'2009': HmdaDataFile('hmda_2009_wy_originated-records_labels.zip', '20290', '727.57 KB'),
'2011': HmdaDataFile('hmda_2011_wy_originated-records_labels.zip', '14507', '613.35 KB'),
'2010': HmdaDataFile('hmda_2010_wy_originated-records_labels.zip', '15602', '616.08 KB'),
'2013': HmdaDataFile('hmda_2013_wy_originated-records_labels.zip', '17900', '691.83 KB'),
'2012': HmdaDataFile('hmda_2012_wy_originated-records_labels.zip', '19114', '723.04 KB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12342', '312.27 KB'),
'2007': HmdaDataFile('hmda_2007_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '13857', '338.82 KB'),
'2017': HmdaDataFile('hmda_2017_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '10510', '187.36 KB'),
'2015': HmdaDataFile('hmda_2015_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '11880', '319.17 KB'),
'2014': HmdaDataFile('hmda_2014_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '10422', '275.68 KB'),
'2008': HmdaDataFile('hmda_2008_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12387', '309.95 KB'),
'2009': HmdaDataFile('hmda_2009_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '16912', '397.25 KB'),
'2011': HmdaDataFile('hmda_2011_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '11423', '302.39 KB'),
'2010': HmdaDataFile('hmda_2010_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '12694', '313.52 KB'),
'2013': HmdaDataFile('hmda_2013_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '14178', '348.1 KB'),
'2012': HmdaDataFile('hmda_2012_wy_first-lien-owner-occupied-1-4-family-records_codes.zip', '15410', '368.87 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_wy_all-records_codes.zip', '30163', '802.03 KB'),
'2007': HmdaDataFile('hmda_2007_wy_all-records_codes.zip', '48234', '1.23 MB'),
'2017': HmdaDataFile('hmda_2017_wy_all-records_codes.zip', '26154', '481.64 KB'),
'2015': HmdaDataFile('hmda_2015_wy_all-records_codes.zip', '28641', '812.32 KB'),
'2014': HmdaDataFile('hmda_2014_wy_all-records_codes.zip', '25049', '695.65 KB'),
'2008': HmdaDataFile('hmda_2008_wy_all-records_codes.zip', '35748', '923.02 KB'),
'2009': HmdaDataFile('hmda_2009_wy_all-records_codes.zip', '41659', '1.02 MB'),
'2011': HmdaDataFile('hmda_2011_wy_all-records_codes.zip', '28465', '807.3 KB'),
'2010': HmdaDataFile('hmda_2010_wy_all-records_codes.zip', '32035', '844.63 KB'),
'2013': HmdaDataFile('hmda_2013_wy_all-records_codes.zip', '32956', '857.37 KB'),
'2012': HmdaDataFile('hmda_2012_wy_all-records_codes.zip', '34092', '878.55 KB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_wy_originated-records_codes.zip', '15484', '395.6 KB'),
'2007': HmdaDataFile('hmda_2007_wy_originated-records_codes.zip', '21052', '528.23 KB'),
'2017': HmdaDataFile('hmda_2017_wy_originated-records_codes.zip', '13702', '246.43 KB'),
'2015': HmdaDataFile('hmda_2015_wy_originated-records_codes.zip', '15107', '410.78 KB'),
'2014': HmdaDataFile('hmda_2014_wy_originated-records_codes.zip', '13556', '363.31 KB'),
'2008': HmdaDataFile('hmda_2008_wy_originated-records_codes.zip', '16892', '430.54 KB'),
'2009': HmdaDataFile('hmda_2009_wy_originated-records_codes.zip', '20290', '484.44 KB'),
'2011': HmdaDataFile('hmda_2011_wy_originated-records_codes.zip', '14507', '390.89 KB'),
'2010': HmdaDataFile('hmda_2010_wy_originated-records_codes.zip', '15602', '393.56 KB'),
'2013': HmdaDataFile('hmda_2013_wy_originated-records_codes.zip', '17900', '445.73 KB'),
'2012': HmdaDataFile('hmda_2012_wy_originated-records_codes.zip', '19114', '467.43 KB')
}
}
},
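# me: Maine. Leaf values below are HmdaDataFile(zip filename, record count, archive size);
# the field meanings are inferred from the data itself (the middle value matches across the
# labels/codes variants of the same slice, so it is presumably the record count).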
'me': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '25576', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '28469', '1.25 MB'),
'2017': HmdaDataFile('hmda_2017_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '23089', '660.63 KB'),
'2015': HmdaDataFile('hmda_2015_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '22073', '1.06 MB'),
'2014': HmdaDataFile('hmda_2014_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '18699', '819.03 KB'),
'2008': HmdaDataFile('hmda_2008_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '23318', '1.01 MB'),
'2009': HmdaDataFile('hmda_2009_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '35977', '1.45 MB'),
'2011': HmdaDataFile('hmda_2011_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '24858', '1.05 MB'),
'2010': HmdaDataFile('hmda_2010_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '29685', '1.3 MB'),
'2013': HmdaDataFile('hmda_2013_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '28892', '1.25 MB'),
'2012': HmdaDataFile('hmda_2012_me_first-lien-owner-occupied-1-4-family-records_labels.zip', '32810', '1.4 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_me_all-records_labels.zip', '64142', '3.02 MB'),
'2007': HmdaDataFile('hmda_2007_me_all-records_labels.zip', '102877', '4.79 MB'),
'2017': HmdaDataFile('hmda_2017_me_all-records_labels.zip', '58188', '1.77 MB'),
'2015': HmdaDataFile('hmda_2015_me_all-records_labels.zip', '56450', '2.96 MB'),
'2014': HmdaDataFile('hmda_2014_me_all-records_labels.zip', '50231', '2.36 MB'),
'2008': HmdaDataFile('hmda_2008_me_all-records_labels.zip', '74281', '3.56 MB'),
'2009': HmdaDataFile('hmda_2009_me_all-records_labels.zip', '88765', '4 MB'),
'2011': HmdaDataFile('hmda_2011_me_all-records_labels.zip', '65903', '3.22 MB'),
'2010': HmdaDataFile('hmda_2010_me_all-records_labels.zip', '75911', '3.76 MB'),
'2013': HmdaDataFile('hmda_2013_me_all-records_labels.zip', '71008', '3.35 MB'),
'2012': HmdaDataFile('hmda_2012_me_all-records_labels.zip', '79176', '3.7 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_me_originated-records_labels.zip', '33761', '1.53 MB'),
'2007': HmdaDataFile('hmda_2007_me_originated-records_labels.zip', '44333', '2.01 MB'),
'2017': HmdaDataFile('hmda_2017_me_originated-records_labels.zip', '31503', '929.28 KB'),
'2015': HmdaDataFile('hmda_2015_me_originated-records_labels.zip', '29602', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_me_originated-records_labels.zip', '25962', '1.19 MB'),
'2008': HmdaDataFile('hmda_2008_me_originated-records_labels.zip', '34638', '1.54 MB'),
'2009': HmdaDataFile('hmda_2009_me_originated-records_labels.zip', '45092', '1.88 MB'),
'2011': HmdaDataFile('hmda_2011_me_originated-records_labels.zip', '32181', '1.43 MB'),
'2010': HmdaDataFile('hmda_2010_me_originated-records_labels.zip', '37475', '1.73 MB'),
'2013': HmdaDataFile('hmda_2013_me_originated-records_labels.zip', '37720', '1.67 MB'),
'2012': HmdaDataFile('hmda_2012_me_originated-records_labels.zip', '41515', '1.82 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '25576', '725.36 KB'),
'2007': HmdaDataFile('hmda_2007_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '28469', '820.97 KB'),
'2017': HmdaDataFile('hmda_2017_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '23089', '468.44 KB'),
'2015': HmdaDataFile('hmda_2015_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '22073', '702.08 KB'),
'2014': HmdaDataFile('hmda_2014_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '18699', '527.54 KB'),
'2008': HmdaDataFile('hmda_2008_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '23318', '660.77 KB'),
'2009': HmdaDataFile('hmda_2009_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '35977', '975.94 KB'),
'2011': HmdaDataFile('hmda_2011_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '24858', '674.23 KB'),
'2010': HmdaDataFile('hmda_2010_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '29685', '841.65 KB'),
'2013': HmdaDataFile('hmda_2013_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '28892', '797.28 KB'),
'2012': HmdaDataFile('hmda_2012_me_first-lien-owner-occupied-1-4-family-records_codes.zip', '32810', '894.15 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_me_all-records_codes.zip', '64142', '1.89 MB'),
'2007': HmdaDataFile('hmda_2007_me_all-records_codes.zip', '102877', '3.1 MB'),
'2017': HmdaDataFile('hmda_2017_me_all-records_codes.zip', '58188', '1.16 MB'),
'2015': HmdaDataFile('hmda_2015_me_all-records_codes.zip', '56450', '1.88 MB'),
'2014': HmdaDataFile('hmda_2014_me_all-records_codes.zip', '50231', '1.46 MB'),
'2008': HmdaDataFile('hmda_2008_me_all-records_codes.zip', '74281', '2.32 MB'),
'2009': HmdaDataFile('hmda_2009_me_all-records_codes.zip', '88765', '2.65 MB'),
'2011': HmdaDataFile('hmda_2011_me_all-records_codes.zip', '65903', '2.01 MB'),
'2010': HmdaDataFile('hmda_2010_me_all-records_codes.zip', '75911', '2.35 MB'),
'2013': HmdaDataFile('hmda_2013_me_all-records_codes.zip', '71008', '2.07 MB'),
'2012': HmdaDataFile('hmda_2012_me_all-records_codes.zip', '79176', '2.29 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_me_originated-records_codes.zip', '33761', '987 KB'),
'2007': HmdaDataFile('hmda_2007_me_originated-records_codes.zip', '44333', '1.33 MB'),
'2017': HmdaDataFile('hmda_2017_me_originated-records_codes.zip', '31503', '654.38 KB'),
'2015': HmdaDataFile('hmda_2015_me_originated-records_codes.zip', '29602', '954.33 KB'),
'2014': HmdaDataFile('hmda_2014_me_originated-records_codes.zip', '25962', '765.27 KB'),
'2008': HmdaDataFile('hmda_2008_me_originated-records_codes.zip', '34638', '1.02 MB'),
'2009': HmdaDataFile('hmda_2009_me_originated-records_codes.zip', '45092', '1.26 MB'),
'2011': HmdaDataFile('hmda_2011_me_originated-records_codes.zip', '32181', '909.63 KB'),
'2010': HmdaDataFile('hmda_2010_me_originated-records_codes.zip', '37475', '1.11 MB'),
'2013': HmdaDataFile('hmda_2013_me_originated-records_codes.zip', '37720', '1.06 MB'),
'2012': HmdaDataFile('hmda_2012_me_originated-records_codes.zip', '41515', '1.15 MB')
}
}
},
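# md: Maryland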
'md': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '151314', '7.51 MB'),
'2007': HmdaDataFile('hmda_2007_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '189965', '9.14 MB'),
'2017': HmdaDataFile('hmda_2017_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '124206', '4 MB'),
'2015': HmdaDataFile('hmda_2015_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '131786', '7.26 MB'),
'2014': HmdaDataFile('hmda_2014_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '98965', '5.23 MB'),
'2008': HmdaDataFile('hmda_2008_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '133536', '6.34 MB'),
'2009': HmdaDataFile('hmda_2009_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '193122', '8.53 MB'),
'2011': HmdaDataFile('hmda_2011_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '142588', '7.11 MB'),
'2010': HmdaDataFile('hmda_2010_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '166872', '8.22 MB'),
'2013': HmdaDataFile('hmda_2013_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '161491', '8.05 MB'),
'2012': HmdaDataFile('hmda_2012_md_first-lien-owner-occupied-1-4-family-records_labels.zip', '195908', '9.85 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_md_all-records_labels.zip', '358958', '19.06 MB'),
'2007': HmdaDataFile('hmda_2007_md_all-records_labels.zip', '656616', '31.96 MB'),
'2017': HmdaDataFile('hmda_2017_md_all-records_labels.zip', '301879', '10.75 MB'),
'2015': HmdaDataFile('hmda_2015_md_all-records_labels.zip', '316012', '18.68 MB'),
'2014': HmdaDataFile('hmda_2014_md_all-records_labels.zip', '247561', '14.04 MB'),
'2008': HmdaDataFile('hmda_2008_md_all-records_labels.zip', '393039', '19.26 MB'),
'2009': HmdaDataFile('hmda_2009_md_all-records_labels.zip', '467697', '21.73 MB'),
'2011': HmdaDataFile('hmda_2011_md_all-records_labels.zip', '347645', '18.89 MB'),
'2010': HmdaDataFile('hmda_2010_md_all-records_labels.zip', '385128', '20.7 MB'),
'2013': HmdaDataFile('hmda_2013_md_all-records_labels.zip', '385383', '20.95 MB'),
'2012': HmdaDataFile('hmda_2012_md_all-records_labels.zip', '439566', '23.91 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_md_originated-records_labels.zip', '171556', '8.64 MB'),
'2007': HmdaDataFile('hmda_2007_md_originated-records_labels.zip', '261984', '12.73 MB'),
'2017': HmdaDataFile('hmda_2017_md_originated-records_labels.zip', '144610', '4.78 MB'),
'2015': HmdaDataFile('hmda_2015_md_originated-records_labels.zip', '152541', '8.53 MB'),
'2014': HmdaDataFile('hmda_2014_md_originated-records_labels.zip', '118429', '6.33 MB'),
'2008': HmdaDataFile('hmda_2008_md_originated-records_labels.zip', '162234', '7.83 MB'),
'2009': HmdaDataFile('hmda_2009_md_originated-records_labels.zip', '210794', '9.48 MB'),
'2011': HmdaDataFile('hmda_2011_md_originated-records_labels.zip', '159707', '8.13 MB'),
'2010': HmdaDataFile('hmda_2010_md_originated-records_labels.zip', '182102', '9.04 MB'),
'2013': HmdaDataFile('hmda_2013_md_originated-records_labels.zip', '187825', '9.46 MB'),
'2012': HmdaDataFile('hmda_2012_md_originated-records_labels.zip', '219387', '11.11 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '151314', '5.32 MB'),
'2007': HmdaDataFile('hmda_2007_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '189965', '6.47 MB'),
'2017': HmdaDataFile('hmda_2017_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '124206', '2.79 MB'),
'2015': HmdaDataFile('hmda_2015_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '131786', '5.19 MB'),
'2014': HmdaDataFile('hmda_2014_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '98965', '3.67 MB'),
'2008': HmdaDataFile('hmda_2008_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '133536', '4.46 MB'),
'2009': HmdaDataFile('hmda_2009_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '193122', '6.08 MB'),
'2011': HmdaDataFile('hmda_2011_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '142588', '4.91 MB'),
'2010': HmdaDataFile('hmda_2010_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '166872', '5.7 MB'),
'2013': HmdaDataFile('hmda_2013_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '161491', '5.68 MB'),
'2012': HmdaDataFile('hmda_2012_md_first-lien-owner-occupied-1-4-family-records_codes.zip', '195908', '6.88 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_md_all-records_codes.zip', '358958', '13.18 MB'),
'2007': HmdaDataFile('hmda_2007_md_all-records_codes.zip', '656616', '22.08 MB'),
'2017': HmdaDataFile('hmda_2017_md_all-records_codes.zip', '301879', '6.99 MB'),
'2015': HmdaDataFile('hmda_2015_md_all-records_codes.zip', '316012', '12.83 MB'),
'2014': HmdaDataFile('hmda_2014_md_all-records_codes.zip', '247561', '9.61 MB'),
'2008': HmdaDataFile('hmda_2008_md_all-records_codes.zip', '393039', '13.28 MB'),
'2009': HmdaDataFile('hmda_2009_md_all-records_codes.zip', '467697', '15.2 MB'),
'2011': HmdaDataFile('hmda_2011_md_all-records_codes.zip', '347645', '12.83 MB'),
'2010': HmdaDataFile('hmda_2010_md_all-records_codes.zip', '385128', '14.08 MB'),
'2013': HmdaDataFile('hmda_2013_md_all-records_codes.zip', '385383', '14.56 MB'),
'2012': HmdaDataFile('hmda_2012_md_all-records_codes.zip', '439566', '16.52 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_md_originated-records_codes.zip', '171556', '6.1 MB'),
'2007': HmdaDataFile('hmda_2007_md_originated-records_codes.zip', '261984', '9.04 MB'),
'2017': HmdaDataFile('hmda_2017_md_originated-records_codes.zip', '144610', '3.3 MB'),
'2015': HmdaDataFile('hmda_2015_md_originated-records_codes.zip', '152541', '6.06 MB'),
'2014': HmdaDataFile('hmda_2014_md_originated-records_codes.zip', '118429', '4.41 MB'),
'2008': HmdaDataFile('hmda_2008_md_originated-records_codes.zip', '162234', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_md_originated-records_codes.zip', '210794', '6.75 MB'),
'2011': HmdaDataFile('hmda_2011_md_originated-records_codes.zip', '159707', '5.59 MB'),
'2010': HmdaDataFile('hmda_2010_md_originated-records_codes.zip', '182102', '6.24 MB'),
'2013': HmdaDataFile('hmda_2013_md_originated-records_codes.zip', '187825', '6.64 MB'),
'2012': HmdaDataFile('hmda_2012_md_originated-records_codes.zip', '219387', '7.7 MB')
}
}
},
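# ma: Massachusetts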
'ma': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '168663', '8.43 MB'),
'2007': HmdaDataFile('hmda_2007_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '148532', '7.1 MB'),
'2017': HmdaDataFile('hmda_2017_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '129891', '4.16 MB'),
'2015': HmdaDataFile('hmda_2015_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '144027', '7.64 MB'),
'2014': HmdaDataFile('hmda_2014_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '109057', '5.41 MB'),
'2008': HmdaDataFile('hmda_2008_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '126855', '6.11 MB'),
'2009': HmdaDataFile('hmda_2009_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '229420', '10.04 MB'),
'2011': HmdaDataFile('hmda_2011_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '177890', '8.46 MB'),
'2010': HmdaDataFile('hmda_2010_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '212589', '10.01 MB'),
'2013': HmdaDataFile('hmda_2013_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '186856', '9.2 MB'),
'2012': HmdaDataFile('hmda_2012_ma_first-lien-owner-occupied-1-4-family-records_labels.zip', '252396', '12.13 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ma_all-records_labels.zip', '350131', '18.93 MB'),
'2007': HmdaDataFile('hmda_2007_ma_all-records_labels.zip', '507509', '24.85 MB'),
'2017': HmdaDataFile('hmda_2017_ma_all-records_labels.zip', '282546', '10.31 MB'),
'2015': HmdaDataFile('hmda_2015_ma_all-records_labels.zip', '299991', '17.01 MB'),
'2014': HmdaDataFile('hmda_2014_ma_all-records_labels.zip', '246533', '13.15 MB'),
'2008': HmdaDataFile('hmda_2008_ma_all-records_labels.zip', '337077', '17.11 MB'),
'2009': HmdaDataFile('hmda_2009_ma_all-records_labels.zip', '493549', '22.81 MB'),
'2011': HmdaDataFile('hmda_2011_ma_all-records_labels.zip', '400586', '20.86 MB'),
'2010': HmdaDataFile('hmda_2010_ma_all-records_labels.zip', '458768', '23.77 MB'),
'2013': HmdaDataFile('hmda_2013_ma_all-records_labels.zip', '405166', '21.77 MB'),
'2012': HmdaDataFile('hmda_2012_ma_all-records_labels.zip', '516205', '27.13 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ma_originated-records_labels.zip', '201756', '10.26 MB'),
'2007': HmdaDataFile('hmda_2007_ma_originated-records_labels.zip', '214170', '10.35 MB'),
'2017': HmdaDataFile('hmda_2017_ma_originated-records_labels.zip', '162474', '5.34 MB'),
'2015': HmdaDataFile('hmda_2015_ma_originated-records_labels.zip', '173355', '9.34 MB'),
'2014': HmdaDataFile('hmda_2014_ma_originated-records_labels.zip', '137873', '6.99 MB'),
'2008': HmdaDataFile('hmda_2008_ma_originated-records_labels.zip', '159312', '7.79 MB'),
'2009': HmdaDataFile('hmda_2009_ma_originated-records_labels.zip', '255679', '11.37 MB'),
'2011': HmdaDataFile('hmda_2011_ma_originated-records_labels.zip', '205164', '9.93 MB'),
'2010': HmdaDataFile('hmda_2010_ma_originated-records_labels.zip', '239023', '11.5 MB'),
'2013': HmdaDataFile('hmda_2013_ma_originated-records_labels.zip', '224809', '11.32 MB'),
'2012': HmdaDataFile('hmda_2012_ma_originated-records_labels.zip', '288584', '14 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '168663', '5.88 MB'),
'2007': HmdaDataFile('hmda_2007_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '148532', '5 MB'),
'2017': HmdaDataFile('hmda_2017_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '129891', '2.95 MB'),
'2015': HmdaDataFile('hmda_2015_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '144027', '5.53 MB'),
'2014': HmdaDataFile('hmda_2014_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '109057', '3.83 MB'),
'2008': HmdaDataFile('hmda_2008_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '126855', '4.32 MB'),
'2009': HmdaDataFile('hmda_2009_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '229420', '7.29 MB'),
'2011': HmdaDataFile('hmda_2011_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '177890', '5.8 MB'),
'2010': HmdaDataFile('hmda_2010_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '212589', '6.88 MB'),
'2013': HmdaDataFile('hmda_2013_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '186856', '6.35 MB'),
'2012': HmdaDataFile('hmda_2012_ma_first-lien-owner-occupied-1-4-family-records_codes.zip', '252396', '8.35 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ma_all-records_codes.zip', '350131', '13.03 MB'),
'2007': HmdaDataFile('hmda_2007_ma_all-records_codes.zip', '507509', '17.13 MB'),
'2017': HmdaDataFile('hmda_2017_ma_all-records_codes.zip', '282546', '6.92 MB'),
'2015': HmdaDataFile('hmda_2015_ma_all-records_codes.zip', '299991', '11.87 MB'),
'2014': HmdaDataFile('hmda_2014_ma_all-records_codes.zip', '246533', '9.05 MB'),
'2008': HmdaDataFile('hmda_2008_ma_all-records_codes.zip', '337077', '11.88 MB'),
'2009': HmdaDataFile('hmda_2009_ma_all-records_codes.zip', '493549', '16.23 MB'),
'2011': HmdaDataFile('hmda_2011_ma_all-records_codes.zip', '400586', '14.19 MB'),
'2010': HmdaDataFile('hmda_2010_ma_all-records_codes.zip', '458768', '16.27 MB'),
'2013': HmdaDataFile('hmda_2013_ma_all-records_codes.zip', '405166', '14.9 MB'),
'2012': HmdaDataFile('hmda_2012_ma_all-records_codes.zip', '516205', '18.65 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ma_originated-records_codes.zip', '201756', '7.12 MB'),
'2007': HmdaDataFile('hmda_2007_ma_originated-records_codes.zip', '214170', '7.34 MB'),
'2017': HmdaDataFile('hmda_2017_ma_originated-records_codes.zip', '162474', '3.73 MB'),
'2015': HmdaDataFile('hmda_2015_ma_originated-records_codes.zip', '173355', '6.7 MB'),
'2014': HmdaDataFile('hmda_2014_ma_originated-records_codes.zip', '137873', '4.9 MB'),
'2008': HmdaDataFile('hmda_2008_ma_originated-records_codes.zip', '159312', '5.51 MB'),
'2009': HmdaDataFile('hmda_2009_ma_originated-records_codes.zip', '255679', '8.23 MB'),
'2011': HmdaDataFile('hmda_2011_ma_originated-records_codes.zip', '205164', '6.77 MB'),
'2010': HmdaDataFile('hmda_2010_ma_originated-records_codes.zip', '239023', '7.91 MB'),
'2013': HmdaDataFile('hmda_2013_ma_originated-records_codes.zip', '224809', '7.79 MB'),
'2012': HmdaDataFile('hmda_2012_ma_originated-records_codes.zip', '288584', '9.58 MB')
}
}
},
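# oh: Ohio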
'oh': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '232207', '11.88 MB'),
'2007': HmdaDataFile('hmda_2007_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '224598', '11.06 MB'),
'2017': HmdaDataFile('hmda_2017_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '208362', '6.69 MB'),
'2015': HmdaDataFile('hmda_2015_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '204738', '11.72 MB'),
'2014': HmdaDataFile('hmda_2014_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '174084', '8.93 MB'),
'2008': HmdaDataFile('hmda_2008_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '186092', '9.03 MB'),
'2009': HmdaDataFile('hmda_2009_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '282249', '12.44 MB'),
'2011': HmdaDataFile('hmda_2011_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '219789', '10.95 MB'),
'2010': HmdaDataFile('hmda_2010_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '258240', '12.72 MB'),
'2013': HmdaDataFile('hmda_2013_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '267654', '13.76 MB'),
'2012': HmdaDataFile('hmda_2012_oh_first-lien-owner-occupied-1-4-family-records_labels.zip', '302970', '15.14 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_oh_all-records_labels.zip', '493271', '27.33 MB'),
'2007': HmdaDataFile('hmda_2007_oh_all-records_labels.zip', '774401', '39.17 MB'),
'2017': HmdaDataFile('hmda_2017_oh_all-records_labels.zip', '448269', '16.35 MB'),
'2015': HmdaDataFile('hmda_2015_oh_all-records_labels.zip', '439676', '27.18 MB'),
'2014': HmdaDataFile('hmda_2014_oh_all-records_labels.zip', '394459', '21.93 MB'),
'2008': HmdaDataFile('hmda_2008_oh_all-records_labels.zip', '533639', '27.2 MB'),
'2009': HmdaDataFile('hmda_2009_oh_all-records_labels.zip', '624555', '29.69 MB'),
'2011': HmdaDataFile('hmda_2011_oh_all-records_labels.zip', '489066', '26.92 MB'),
'2010': HmdaDataFile('hmda_2010_oh_all-records_labels.zip', '555119', '30.3 MB'),
'2013': HmdaDataFile('hmda_2013_oh_all-records_labels.zip', '578940', '32.56 MB'),
'2012': HmdaDataFile('hmda_2012_oh_all-records_labels.zip', '618867', '34.01 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_oh_originated-records_labels.zip', '262449', '13.71 MB'),
'2007': HmdaDataFile('hmda_2007_oh_originated-records_labels.zip', '313279', '15.77 MB'),
'2017': HmdaDataFile('hmda_2017_oh_originated-records_labels.zip', '241167', '7.96 MB'),
'2015': HmdaDataFile('hmda_2015_oh_originated-records_labels.zip', '235058', '13.65 MB'),
'2014': HmdaDataFile('hmda_2014_oh_originated-records_labels.zip', '203927', '10.71 MB'),
'2008': HmdaDataFile('hmda_2008_oh_originated-records_labels.zip', '231697', '11.5 MB'),
'2009': HmdaDataFile('hmda_2009_oh_originated-records_labels.zip', '309496', '13.92 MB'),
'2011': HmdaDataFile('hmda_2011_oh_originated-records_labels.zip', '245688', '12.6 MB'),
'2010': HmdaDataFile('hmda_2010_oh_originated-records_labels.zip', '283698', '14.38 MB'),
'2013': HmdaDataFile('hmda_2013_oh_originated-records_labels.zip', '306698', '15.95 MB'),
'2012': HmdaDataFile('hmda_2012_oh_originated-records_labels.zip', '336141', '16.99 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '232207', '8.28 MB'),
'2007': HmdaDataFile('hmda_2007_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '224598', '7.85 MB'),
'2017': HmdaDataFile('hmda_2017_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '208362', '4.84 MB'),
'2015': HmdaDataFile('hmda_2015_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '204738', '8.2 MB'),
'2014': HmdaDataFile('hmda_2014_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '174084', '6.26 MB'),
'2008': HmdaDataFile('hmda_2008_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '186092', '6.43 MB'),
'2009': HmdaDataFile('hmda_2009_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '282249', '9.02 MB'),
'2011': HmdaDataFile('hmda_2011_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '219789', '7.49 MB'),
'2010': HmdaDataFile('hmda_2010_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '258240', '8.73 MB'),
'2013': HmdaDataFile('hmda_2013_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '267654', '9.51 MB'),
'2012': HmdaDataFile('hmda_2012_oh_first-lien-owner-occupied-1-4-family-records_codes.zip', '302970', '10.48 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_oh_all-records_codes.zip', '493271', '18.52 MB'),
'2007': HmdaDataFile('hmda_2007_oh_all-records_codes.zip', '774401', '27.06 MB'),
'2017': HmdaDataFile('hmda_2017_oh_all-records_codes.zip', '448269', '10.89 MB'),
'2015': HmdaDataFile('hmda_2015_oh_all-records_codes.zip', '439676', '18.32 MB'),
'2014': HmdaDataFile('hmda_2014_oh_all-records_codes.zip', '394459', '14.89 MB'),
'2008': HmdaDataFile('hmda_2008_oh_all-records_codes.zip', '533639', '18.82 MB'),
'2009': HmdaDataFile('hmda_2009_oh_all-records_codes.zip', '624555', '20.88 MB'),
'2011': HmdaDataFile('hmda_2011_oh_all-records_codes.zip', '489066', '17.97 MB'),
'2010': HmdaDataFile('hmda_2010_oh_all-records_codes.zip', '555119', '20.35 MB'),
'2013': HmdaDataFile('hmda_2013_oh_all-records_codes.zip', '578940', '22.01 MB'),
'2012': HmdaDataFile('hmda_2012_oh_all-records_codes.zip', '618867', '23.02 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_oh_originated-records_codes.zip', '262449', '9.52 MB'),
'2007': HmdaDataFile('hmda_2007_oh_originated-records_codes.zip', '313279', '11.21 MB'),
'2017': HmdaDataFile('hmda_2017_oh_originated-records_codes.zip', '241167', '5.68 MB'),
'2015': HmdaDataFile('hmda_2015_oh_originated-records_codes.zip', '235058', '9.5 MB'),
'2014': HmdaDataFile('hmda_2014_oh_originated-records_codes.zip', '203927', '7.49 MB'),
'2008': HmdaDataFile('hmda_2008_oh_originated-records_codes.zip', '231697', '8.17 MB'),
'2009': HmdaDataFile('hmda_2009_oh_originated-records_codes.zip', '309496', '10.06 MB'),
'2011': HmdaDataFile('hmda_2011_oh_originated-records_codes.zip', '245688', '8.63 MB'),
'2010': HmdaDataFile('hmda_2010_oh_originated-records_codes.zip', '283698', '9.88 MB'),
'2013': HmdaDataFile('hmda_2013_oh_originated-records_codes.zip', '306698', '10.96 MB'),
'2012': HmdaDataFile('hmda_2012_oh_originated-records_codes.zip', '336141', '11.68 MB')
}
}
},
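# ut: Utah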
'ut': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '106374', '4.72 MB'),
'2007': HmdaDataFile('hmda_2007_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '94558', '4.04 MB'),
'2017': HmdaDataFile('hmda_2017_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '87920', '2.62 MB'),
'2015': HmdaDataFile('hmda_2015_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '88686', '4.42 MB'),
'2014': HmdaDataFile('hmda_2014_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '62864', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '83133', '3.43 MB'),
'2009': HmdaDataFile('hmda_2009_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '117282', '4.64 MB'),
'2011': HmdaDataFile('hmda_2011_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '66630', '2.88 MB'),
'2010': HmdaDataFile('hmda_2010_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '90322', '3.86 MB'),
'2013': HmdaDataFile('hmda_2013_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '82759', '3.71 MB'),
'2012': HmdaDataFile('hmda_2012_ut_first-lien-owner-occupied-1-4-family-records_labels.zip', '108573', '4.87 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ut_all-records_labels.zip', '227871', '11.21 MB'),
'2007': HmdaDataFile('hmda_2007_ut_all-records_labels.zip', '319327', '14.16 MB'),
'2017': HmdaDataFile('hmda_2017_ut_all-records_labels.zip', '198425', '6.62 MB'),
'2015': HmdaDataFile('hmda_2015_ut_all-records_labels.zip', '192509', '10.37 MB'),
'2014': HmdaDataFile('hmda_2014_ut_all-records_labels.zip', '144848', '7.1 MB'),
'2008': HmdaDataFile('hmda_2008_ut_all-records_labels.zip', '226654', '9.81 MB'),
'2009': HmdaDataFile('hmda_2009_ut_all-records_labels.zip', '279791', '11.41 MB'),
'2011': HmdaDataFile('hmda_2011_ut_all-records_labels.zip', '166439', '7.98 MB'),
'2010': HmdaDataFile('hmda_2010_ut_all-records_labels.zip', '212181', '10.08 MB'),
'2013': HmdaDataFile('hmda_2013_ut_all-records_labels.zip', '192653', '9.53 MB'),
'2012': HmdaDataFile('hmda_2012_ut_all-records_labels.zip', '230544', '11.39 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ut_originated-records_labels.zip', '126325', '5.77 MB'),
'2007': HmdaDataFile('hmda_2007_ut_originated-records_labels.zip', '136628', '5.96 MB'),
'2017': HmdaDataFile('hmda_2017_ut_originated-records_labels.zip', '108018', '3.29 MB'),
'2015': HmdaDataFile('hmda_2015_ut_originated-records_labels.zip', '105929', '5.36 MB'),
'2014': HmdaDataFile('hmda_2014_ut_originated-records_labels.zip', '76563', '3.49 MB'),
'2008': HmdaDataFile('hmda_2008_ut_originated-records_labels.zip', '98555', '4.16 MB'),
'2009': HmdaDataFile('hmda_2009_ut_originated-records_labels.zip', '127680', '5.13 MB'),
'2011': HmdaDataFile('hmda_2011_ut_originated-records_labels.zip', '77763', '3.46 MB'),
'2010': HmdaDataFile('hmda_2010_ut_originated-records_labels.zip', '101108', '4.4 MB'),
'2013': HmdaDataFile('hmda_2013_ut_originated-records_labels.zip', '99424', '4.53 MB'),
'2012': HmdaDataFile('hmda_2012_ut_originated-records_labels.zip', '125331', '5.73 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '106374', '3.15 MB'),
'2007': HmdaDataFile('hmda_2007_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '94558', '2.78 MB'),
'2017': HmdaDataFile('hmda_2017_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '87920', '1.85 MB'),
'2015': HmdaDataFile('hmda_2015_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '88686', '3.08 MB'),
'2014': HmdaDataFile('hmda_2014_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '62864', '1.88 MB'),
'2008': HmdaDataFile('hmda_2008_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '83133', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '117282', '3.31 MB'),
'2011': HmdaDataFile('hmda_2011_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '66630', '1.92 MB'),
'2010': HmdaDataFile('hmda_2010_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '90322', '2.56 MB'),
'2013': HmdaDataFile('hmda_2013_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '82759', '2.48 MB'),
'2012': HmdaDataFile('hmda_2012_ut_first-lien-owner-occupied-1-4-family-records_codes.zip', '108573', '3.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ut_all-records_codes.zip', '227871', '7.32 MB'),
'2007': HmdaDataFile('hmda_2007_ut_all-records_codes.zip', '319327', '9.65 MB'),
'2017': HmdaDataFile('hmda_2017_ut_all-records_codes.zip', '198425', '4.41 MB'),
'2015': HmdaDataFile('hmda_2015_ut_all-records_codes.zip', '192509', '6.96 MB'),
'2014': HmdaDataFile('hmda_2014_ut_all-records_codes.zip', '144848', '4.66 MB'),
'2008': HmdaDataFile('hmda_2008_ut_all-records_codes.zip', '226654', '6.76 MB'),
'2009': HmdaDataFile('hmda_2009_ut_all-records_codes.zip', '279791', '7.94 MB'),
'2011': HmdaDataFile('hmda_2011_ut_all-records_codes.zip', '166439', '5.23 MB'),
'2010': HmdaDataFile('hmda_2010_ut_all-records_codes.zip', '212181', '6.6 MB'),
'2013': HmdaDataFile('hmda_2013_ut_all-records_codes.zip', '192653', '6.29 MB'),
'2012': HmdaDataFile('hmda_2012_ut_all-records_codes.zip', '230544', '7.57 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ut_originated-records_codes.zip', '126325', '3.86 MB'),
'2007': HmdaDataFile('hmda_2007_ut_originated-records_codes.zip', '136628', '4.14 MB'),
'2017': HmdaDataFile('hmda_2017_ut_originated-records_codes.zip', '108018', '2.31 MB'),
'2015': HmdaDataFile('hmda_2015_ut_originated-records_codes.zip', '105929', '3.72 MB'),
'2014': HmdaDataFile('hmda_2014_ut_originated-records_codes.zip', '76563', '2.34 MB'),
'2008': HmdaDataFile('hmda_2008_ut_originated-records_codes.zip', '98555', '2.91 MB'),
'2009': HmdaDataFile('hmda_2009_ut_originated-records_codes.zip', '127680', '3.64 MB'),
'2011': HmdaDataFile('hmda_2011_ut_originated-records_codes.zip', '77763', '2.3 MB'),
'2010': HmdaDataFile('hmda_2010_ut_originated-records_codes.zip', '101108', '2.91 MB'),
'2013': HmdaDataFile('hmda_2013_ut_originated-records_codes.zip', '99424', '3.03 MB'),
'2012': HmdaDataFile('hmda_2012_ut_originated-records_codes.zip', '125331', '3.84 MB')
}
}
},
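# mo: Missouri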
'mo': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '139247', '6.75 MB'),
'2007': HmdaDataFile('hmda_2007_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '154268', '7.4 MB'),
'2017': HmdaDataFile('hmda_2017_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '118901', '3.83 MB'),
'2015': HmdaDataFile('hmda_2015_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '121654', '6.63 MB'),
'2014': HmdaDataFile('hmda_2014_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '95718', '4.8 MB'),
'2008': HmdaDataFile('hmda_2008_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '132777', '6.15 MB'),
'2009': HmdaDataFile('hmda_2009_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '186579', '8.07 MB'),
'2011': HmdaDataFile('hmda_2011_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '130134', '6.22 MB'),
'2010': HmdaDataFile('hmda_2010_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '158585', '7.57 MB'),
'2013': HmdaDataFile('hmda_2013_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '146638', '7.2 MB'),
'2012': HmdaDataFile('hmda_2012_mo_first-lien-owner-occupied-1-4-family-records_labels.zip', '175614', '8.39 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mo_all-records_labels.zip', '312237', '16.09 MB'),
'2007': HmdaDataFile('hmda_2007_mo_all-records_labels.zip', '531617', '25.33 MB'),
'2017': HmdaDataFile('hmda_2017_mo_all-records_labels.zip', '277843', '9.61 MB'),
'2015': HmdaDataFile('hmda_2015_mo_all-records_labels.zip', '276661', '16.28 MB'),
'2014': HmdaDataFile('hmda_2014_mo_all-records_labels.zip', '232023', '12.42 MB'),
'2008': HmdaDataFile('hmda_2008_mo_all-records_labels.zip', '379587', '17.83 MB'),
'2009': HmdaDataFile('hmda_2009_mo_all-records_labels.zip', '447918', '19.75 MB'),
'2011': HmdaDataFile('hmda_2011_mo_all-records_labels.zip', '309645', '15.9 MB'),
'2010': HmdaDataFile('hmda_2010_mo_all-records_labels.zip', '360738', '18.59 MB'),
'2013': HmdaDataFile('hmda_2013_mo_all-records_labels.zip', '347186', '18.19 MB'),
'2012': HmdaDataFile('hmda_2012_mo_all-records_labels.zip', '384551', '19.69 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mo_originated-records_labels.zip', '165943', '8.21 MB'),
'2007': HmdaDataFile('hmda_2007_mo_originated-records_labels.zip', '218490', '10.7 MB'),
'2017': HmdaDataFile('hmda_2017_mo_originated-records_labels.zip', '145419', '4.81 MB'),
'2015': HmdaDataFile('hmda_2015_mo_originated-records_labels.zip', '147519', '8.15 MB'),
'2014': HmdaDataFile('hmda_2014_mo_originated-records_labels.zip', '120463', '6.22 MB'),
'2008': HmdaDataFile('hmda_2008_mo_originated-records_labels.zip', '169405', '8.03 MB'),
'2009': HmdaDataFile('hmda_2009_mo_originated-records_labels.zip', '211924', '9.41 MB'),
'2011': HmdaDataFile('hmda_2011_mo_originated-records_labels.zip', '154134', '7.55 MB'),
'2010': HmdaDataFile('hmda_2010_mo_originated-records_labels.zip', '182546', '8.84 MB'),
'2013': HmdaDataFile('hmda_2013_mo_originated-records_labels.zip', '178486', '8.86 MB'),
'2012': HmdaDataFile('hmda_2012_mo_originated-records_labels.zip', '204935', '9.93 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '139247', '4.66 MB'),
'2007': HmdaDataFile('hmda_2007_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '154268', '5.21 MB'),
'2017': HmdaDataFile('hmda_2017_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '118901', '2.73 MB'),
'2015': HmdaDataFile('hmda_2015_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '121654', '4.64 MB'),
'2014': HmdaDataFile('hmda_2014_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '95718', '3.31 MB'),
'2008': HmdaDataFile('hmda_2008_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '132777', '4.34 MB'),
'2009': HmdaDataFile('hmda_2009_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '186579', '5.81 MB'),
'2011': HmdaDataFile('hmda_2011_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '130134', '4.23 MB'),
'2010': HmdaDataFile('hmda_2010_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '158585', '5.17 MB'),
'2013': HmdaDataFile('hmda_2013_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '146638', '4.98 MB'),
'2012': HmdaDataFile('hmda_2012_mo_first-lien-owner-occupied-1-4-family-records_codes.zip', '175614', '5.75 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mo_all-records_codes.zip', '312237', '10.71 MB'),
'2007': HmdaDataFile('hmda_2007_mo_all-records_codes.zip', '531617', '17.33 MB'),
'2017': HmdaDataFile('hmda_2017_mo_all-records_codes.zip', '277843', '6.33 MB'),
'2015': HmdaDataFile('hmda_2015_mo_all-records_codes.zip', '276661', '10.94 MB'),
'2014': HmdaDataFile('hmda_2014_mo_all-records_codes.zip', '232023', '8.21 MB'),
'2008': HmdaDataFile('hmda_2008_mo_all-records_codes.zip', '379587', '12.23 MB'),
'2009': HmdaDataFile('hmda_2009_mo_all-records_codes.zip', '447918', '13.74 MB'),
'2011': HmdaDataFile('hmda_2011_mo_all-records_codes.zip', '309645', '10.46 MB'),
'2010': HmdaDataFile('hmda_2010_mo_all-records_codes.zip', '360738', '12.3 MB'),
'2013': HmdaDataFile('hmda_2013_mo_all-records_codes.zip', '347186', '12.14 MB'),
'2012': HmdaDataFile('hmda_2012_mo_all-records_codes.zip', '384551', '13.07 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mo_originated-records_codes.zip', '165943', '5.62 MB'),
'2007': HmdaDataFile('hmda_2007_mo_originated-records_codes.zip', '218490', '7.52 MB'),
'2017': HmdaDataFile('hmda_2017_mo_originated-records_codes.zip', '145419', '3.37 MB'),
'2015': HmdaDataFile('hmda_2015_mo_originated-records_codes.zip', '147519', '5.65 MB'),
'2014': HmdaDataFile('hmda_2014_mo_originated-records_codes.zip', '120463', '4.26 MB'),
'2008': HmdaDataFile('hmda_2008_mo_originated-records_codes.zip', '169405', '5.64 MB'),
'2009': HmdaDataFile('hmda_2009_mo_originated-records_codes.zip', '211924', '6.73 MB'),
'2011': HmdaDataFile('hmda_2011_mo_originated-records_codes.zip', '154134', '5.1 MB'),
'2010': HmdaDataFile('hmda_2010_mo_originated-records_codes.zip', '182546', '5.99 MB'),
'2013': HmdaDataFile('hmda_2013_mo_originated-records_codes.zip', '178486', '6.06 MB'),
'2012': HmdaDataFile('hmda_2012_mo_originated-records_codes.zip', '204935', '6.74 MB')
}
}
},
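# mn: Minnesota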
'mn': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '150929', '7.76 MB'),
'2007': HmdaDataFile('hmda_2007_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '114958', '5.46 MB'),
'2017': HmdaDataFile('hmda_2017_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '127113', '3.8 MB'),
'2015': HmdaDataFile('hmda_2015_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '135112', '7.39 MB'),
'2014': HmdaDataFile('hmda_2014_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '100659', '5.19 MB'),
'2008': HmdaDataFile('hmda_2008_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '101137', '4.61 MB'),
'2009': HmdaDataFile('hmda_2009_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '172464', '7.33 MB'),
'2011': HmdaDataFile('hmda_2011_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '130723', '6.42 MB'),
'2010': HmdaDataFile('hmda_2010_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '155626', '7.58 MB'),
'2013': HmdaDataFile('hmda_2013_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '158942', '7.98 MB'),
'2012': HmdaDataFile('hmda_2012_mn_first-lien-owner-occupied-1-4-family-records_labels.zip', '195258', '9.69 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mn_all-records_labels.zip', '308571', '16.84 MB'),
'2007': HmdaDataFile('hmda_2007_mn_all-records_labels.zip', '396721', '18.97 MB'),
'2017': HmdaDataFile('hmda_2017_mn_all-records_labels.zip', '269551', '8.68 MB'),
'2015': HmdaDataFile('hmda_2015_mn_all-records_labels.zip', '280012', '16.44 MB'),
'2014': HmdaDataFile('hmda_2014_mn_all-records_labels.zip', '220146', '12.02 MB'),
'2008': HmdaDataFile('hmda_2008_mn_all-records_labels.zip', '272913', '12.87 MB'),
'2009': HmdaDataFile('hmda_2009_mn_all-records_labels.zip', '379860', '16.84 MB'),
'2011': HmdaDataFile('hmda_2011_mn_all-records_labels.zip', '282982', '14.91 MB'),
'2010': HmdaDataFile('hmda_2010_mn_all-records_labels.zip', '332542', '17.42 MB'),
'2013': HmdaDataFile('hmda_2013_mn_all-records_labels.zip', '332594', '17.76 MB'),
'2012': HmdaDataFile('hmda_2012_mn_all-records_labels.zip', '382934', '20.31 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mn_originated-records_labels.zip', '176922', '9.23 MB'),
'2007': HmdaDataFile('hmda_2007_mn_originated-records_labels.zip', '171217', '8.27 MB'),
'2017': HmdaDataFile('hmda_2017_mn_originated-records_labels.zip', '154164', '4.74 MB'),
'2015': HmdaDataFile('hmda_2015_mn_originated-records_labels.zip', '160605', '8.94 MB'),
'2014': HmdaDataFile('hmda_2014_mn_originated-records_labels.zip', '123374', '6.47 MB'),
'2008': HmdaDataFile('hmda_2008_mn_originated-records_labels.zip', '130815', '6.14 MB'),
'2009': HmdaDataFile('hmda_2009_mn_originated-records_labels.zip', '195958', '8.53 MB'),
'2011': HmdaDataFile('hmda_2011_mn_originated-records_labels.zip', '151782', '7.55 MB'),
'2010': HmdaDataFile('hmda_2010_mn_originated-records_labels.zip', '177556', '8.84 MB'),
'2013': HmdaDataFile('hmda_2013_mn_originated-records_labels.zip', '187475', '9.52 MB'),
'2012': HmdaDataFile('hmda_2012_mn_originated-records_labels.zip', '222116', '11.23 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '150929', '5.39 MB'),
'2007': HmdaDataFile('hmda_2007_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '114958', '3.87 MB'),
'2017': HmdaDataFile('hmda_2017_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '127113', '2.69 MB'),
'2015': HmdaDataFile('hmda_2015_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '135112', '5.14 MB'),
'2014': HmdaDataFile('hmda_2014_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '100659', '3.62 MB'),
'2008': HmdaDataFile('hmda_2008_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '101137', '3.28 MB'),
'2009': HmdaDataFile('hmda_2009_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '172464', '5.3 MB'),
'2011': HmdaDataFile('hmda_2011_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '130723', '4.45 MB'),
'2010': HmdaDataFile('hmda_2010_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '155626', '5.29 MB'),
'2013': HmdaDataFile('hmda_2013_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '158942', '5.58 MB'),
'2012': HmdaDataFile('hmda_2012_mn_first-lien-owner-occupied-1-4-family-records_codes.zip', '195258', '6.78 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mn_all-records_codes.zip', '308571', '11.31 MB'),
'2007': HmdaDataFile('hmda_2007_mn_all-records_codes.zip', '396721', '13.04 MB'),
'2017': HmdaDataFile('hmda_2017_mn_all-records_codes.zip', '269551', '5.62 MB'),
'2015': HmdaDataFile('hmda_2015_mn_all-records_codes.zip', '280012', '10.99 MB'),
'2014': HmdaDataFile('hmda_2014_mn_all-records_codes.zip', '220146', '8.1 MB'),
'2008': HmdaDataFile('hmda_2008_mn_all-records_codes.zip', '272913', '8.89 MB'),
'2009': HmdaDataFile('hmda_2009_mn_all-records_codes.zip', '379860', '11.74 MB'),
'2011': HmdaDataFile('hmda_2011_mn_all-records_codes.zip', '282982', '10.01 MB'),
'2010': HmdaDataFile('hmda_2010_mn_all-records_codes.zip', '332542', '11.73 MB'),
'2013': HmdaDataFile('hmda_2013_mn_all-records_codes.zip', '332594', '12.03 MB'),
'2012': HmdaDataFile('hmda_2012_mn_all-records_codes.zip', '382934', '13.81 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mn_originated-records_codes.zip', '176922', '6.37 MB'),
'2007': HmdaDataFile('hmda_2007_mn_originated-records_codes.zip', '171217', '5.84 MB'),
'2017': HmdaDataFile('hmda_2017_mn_originated-records_codes.zip', '154164', '3.32 MB'),
'2015': HmdaDataFile('hmda_2015_mn_originated-records_codes.zip', '160605', '6.15 MB'),
'2014': HmdaDataFile('hmda_2014_mn_originated-records_codes.zip', '123374', '4.48 MB'),
'2008': HmdaDataFile('hmda_2008_mn_originated-records_codes.zip', '130815', '4.36 MB'),
'2009': HmdaDataFile('hmda_2009_mn_originated-records_codes.zip', '195958', '6.13 MB'),
'2011': HmdaDataFile('hmda_2011_mn_originated-records_codes.zip', '151782', '5.2 MB'),
'2010': HmdaDataFile('hmda_2010_mn_originated-records_codes.zip', '177556', '6.12 MB'),
'2013': HmdaDataFile('hmda_2013_mn_originated-records_codes.zip', '187475', '6.6 MB'),
'2012': HmdaDataFile('hmda_2012_mn_originated-records_codes.zip', '222116', '7.82 MB')
}
}
},
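# mi: Michigan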
'mi': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '225509', '11.38 MB'),
'2007': HmdaDataFile('hmda_2007_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '202641', '10.01 MB'),
'2017': HmdaDataFile('hmda_2017_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '200696', '6.21 MB'),
'2015': HmdaDataFile('hmda_2015_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '198320', '11.13 MB'),
'2014': HmdaDataFile('hmda_2014_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '162424', '8.1 MB'),
'2008': HmdaDataFile('hmda_2008_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '149102', '7.25 MB'),
'2009': HmdaDataFile('hmda_2009_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '194235', '8.77 MB'),
'2011': HmdaDataFile('hmda_2011_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '168948', '8.4 MB'),
'2010': HmdaDataFile('hmda_2010_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '175445', '8.52 MB'),
'2013': HmdaDataFile('hmda_2013_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '239554', '11.94 MB'),
'2012': HmdaDataFile('hmda_2012_mi_first-lien-owner-occupied-1-4-family-records_labels.zip', '274203', '13.44 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mi_all-records_labels.zip', '470652', '25.31 MB'),
'2007': HmdaDataFile('hmda_2007_mi_all-records_labels.zip', '780713', '38.5 MB'),
'2017': HmdaDataFile('hmda_2017_mi_all-records_labels.zip', '437181', '14.82 MB'),
'2015': HmdaDataFile('hmda_2015_mi_all-records_labels.zip', '424672', '25.71 MB'),
'2014': HmdaDataFile('hmda_2014_mi_all-records_labels.zip', '361546', '19.18 MB'),
'2008': HmdaDataFile('hmda_2008_mi_all-records_labels.zip', '472702', '23.46 MB'),
'2009': HmdaDataFile('hmda_2009_mi_all-records_labels.zip', '504304', '23.62 MB'),
'2011': HmdaDataFile('hmda_2011_mi_all-records_labels.zip', '396764', '21.31 MB'),
'2010': HmdaDataFile('hmda_2010_mi_all-records_labels.zip', '419300', '22.02 MB'),
'2013': HmdaDataFile('hmda_2013_mi_all-records_labels.zip', '521030', '27.85 MB'),
'2012': HmdaDataFile('hmda_2012_mi_all-records_labels.zip', '573645', '30.22 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mi_originated-records_labels.zip', '262757', '13.58 MB'),
'2007': HmdaDataFile('hmda_2007_mi_originated-records_labels.zip', '294627', '14.81 MB'),
'2017': HmdaDataFile('hmda_2017_mi_originated-records_labels.zip', '241945', '7.74 MB'),
'2015': HmdaDataFile('hmda_2015_mi_originated-records_labels.zip', '233520', '13.34 MB'),
'2014': HmdaDataFile('hmda_2014_mi_originated-records_labels.zip', '194126', '9.91 MB'),
'2008': HmdaDataFile('hmda_2008_mi_originated-records_labels.zip', '191860', '9.52 MB'),
'2009': HmdaDataFile('hmda_2009_mi_originated-records_labels.zip', '224166', '10.41 MB'),
'2011': HmdaDataFile('hmda_2011_mi_originated-records_labels.zip', '196360', '10.06 MB'),
'2010': HmdaDataFile('hmda_2010_mi_originated-records_labels.zip', '202252', '10.19 MB'),
'2013': HmdaDataFile('hmda_2013_mi_originated-records_labels.zip', '280253', '14.18 MB'),
'2012': HmdaDataFile('hmda_2012_mi_originated-records_labels.zip', '312194', '15.55 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '225509', '7.89 MB'),
'2007': HmdaDataFile('hmda_2007_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '202641', '7.07 MB'),
'2017': HmdaDataFile('hmda_2017_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '200696', '4.39 MB'),
'2015': HmdaDataFile('hmda_2015_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '198320', '7.82 MB'),
'2014': HmdaDataFile('hmda_2014_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '162424', '5.66 MB'),
'2008': HmdaDataFile('hmda_2008_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '149102', '5.08 MB'),
'2009': HmdaDataFile('hmda_2009_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '194235', '6.2 MB'),
'2011': HmdaDataFile('hmda_2011_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '168948', '5.72 MB'),
'2010': HmdaDataFile('hmda_2010_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '175445', '5.8 MB'),
'2013': HmdaDataFile('hmda_2013_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '239554', '8.26 MB'),
'2012': HmdaDataFile('hmda_2012_mi_first-lien-owner-occupied-1-4-family-records_codes.zip', '274203', '9.28 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mi_all-records_codes.zip', '470652', '16.94 MB'),
'2007': HmdaDataFile('hmda_2007_mi_all-records_codes.zip', '780713', '26.45 MB'),
'2017': HmdaDataFile('hmda_2017_mi_all-records_codes.zip', '437181', '9.61 MB'),
'2015': HmdaDataFile('hmda_2015_mi_all-records_codes.zip', '424672', '17.31 MB'),
'2014': HmdaDataFile('hmda_2014_mi_all-records_codes.zip', '361546', '12.87 MB'),
'2008': HmdaDataFile('hmda_2008_mi_all-records_codes.zip', '472702', '16.04 MB'),
'2009': HmdaDataFile('hmda_2009_mi_all-records_codes.zip', '504304', '16.25 MB'),
'2011': HmdaDataFile('hmda_2011_mi_all-records_codes.zip', '396764', '14.07 MB'),
'2010': HmdaDataFile('hmda_2010_mi_all-records_codes.zip', '419300', '14.52 MB'),
'2013': HmdaDataFile('hmda_2013_mi_all-records_codes.zip', '521030', '18.59 MB'),
'2012': HmdaDataFile('hmda_2012_mi_all-records_codes.zip', '573645', '20.21 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mi_originated-records_codes.zip', '262757', '9.39 MB'),
'2007': HmdaDataFile('hmda_2007_mi_originated-records_codes.zip', '294627', '10.53 MB'),
'2017': HmdaDataFile('hmda_2017_mi_originated-records_codes.zip', '241945', '5.41 MB'),
'2015': HmdaDataFile('hmda_2015_mi_originated-records_codes.zip', '233520', '9.29 MB'),
'2014': HmdaDataFile('hmda_2014_mi_originated-records_codes.zip', '194126', '6.88 MB'),
'2008': HmdaDataFile('hmda_2008_mi_originated-records_codes.zip', '191860', '6.67 MB'),
'2009': HmdaDataFile('hmda_2009_mi_originated-records_codes.zip', '224166', '7.37 MB'),
'2011': HmdaDataFile('hmda_2011_mi_originated-records_codes.zip', '196360', '6.83 MB'),
'2010': HmdaDataFile('hmda_2010_mi_originated-records_codes.zip', '202252', '6.96 MB'),
'2013': HmdaDataFile('hmda_2013_mi_originated-records_codes.zip', '280253', '9.73 MB'),
'2012': HmdaDataFile('hmda_2012_mi_originated-records_codes.zip', '312194', '10.66 MB')
}
}
},
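# ri: Rhode Island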
'ri': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '22014', '1 MB'),
'2007': HmdaDataFile('hmda_2007_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24278', '1.09 MB'),
'2017': HmdaDataFile('hmda_2017_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '19545', '600.94 KB'),
'2015': HmdaDataFile('hmda_2015_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '19238', '896.87 KB'),
'2014': HmdaDataFile('hmda_2014_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '14179', '687.6 KB'),
'2008': HmdaDataFile('hmda_2008_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '18140', '802.68 KB'),
'2009': HmdaDataFile('hmda_2009_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '29449', '1.2 MB'),
'2011': HmdaDataFile('hmda_2011_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '20369', '903.7 KB'),
'2010': HmdaDataFile('hmda_2010_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24263', '1.09 MB'),
'2013': HmdaDataFile('hmda_2013_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '24398', '1.11 MB'),
'2012': HmdaDataFile('hmda_2012_ri_first-lien-owner-occupied-1-4-family-records_labels.zip', '28863', '1.3 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ri_all-records_labels.zip', '49775', '2.45 MB'),
'2007': HmdaDataFile('hmda_2007_ri_all-records_labels.zip', '88662', '4.02 MB'),
'2017': HmdaDataFile('hmda_2017_ri_all-records_labels.zip', '44038', '1.51 MB'),
'2015': HmdaDataFile('hmda_2015_ri_all-records_labels.zip', '43611', '2.22 MB'),
'2014': HmdaDataFile('hmda_2014_ri_all-records_labels.zip', '33941', '1.8 MB'),
'2008': HmdaDataFile('hmda_2008_ri_all-records_labels.zip', '51710', '2.4 MB'),
'2009': HmdaDataFile('hmda_2009_ri_all-records_labels.zip', '64057', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_ri_all-records_labels.zip', '48785', '2.39 MB'),
'2010': HmdaDataFile('hmda_2010_ri_all-records_labels.zip', '55842', '2.75 MB'),
'2013': HmdaDataFile('hmda_2013_ri_all-records_labels.zip', '52960', '2.62 MB'),
'2012': HmdaDataFile('hmda_2012_ri_all-records_labels.zip', '61996', '3.04 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ri_originated-records_labels.zip', '27005', '1.25 MB'),
'2007': HmdaDataFile('hmda_2007_ri_originated-records_labels.zip', '36923', '1.65 MB'),
'2017': HmdaDataFile('hmda_2017_ri_originated-records_labels.zip', '24598', '766.74 KB'),
'2015': HmdaDataFile('hmda_2015_ri_originated-records_labels.zip', '23923', '1.13 MB'),
'2014': HmdaDataFile('hmda_2014_ri_originated-records_labels.zip', '18005', '891.11 KB'),
'2008': HmdaDataFile('hmda_2008_ri_originated-records_labels.zip', '23931', '1.07 MB'),
'2009': HmdaDataFile('hmda_2009_ri_originated-records_labels.zip', '33549', '1.38 MB'),
'2011': HmdaDataFile('hmda_2011_ri_originated-records_labels.zip', '24337', '1.11 MB'),
'2010': HmdaDataFile('hmda_2010_ri_originated-records_labels.zip', '28439', '1.3 MB'),
'2013': HmdaDataFile('hmda_2013_ri_originated-records_labels.zip', '29485', '1.37 MB'),
'2012': HmdaDataFile('hmda_2012_ri_originated-records_labels.zip', '33911', '1.55 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '22014', '662.47 KB'),
'2007': HmdaDataFile('hmda_2007_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24278', '740.23 KB'),
'2017': HmdaDataFile('hmda_2017_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '19545', '422.08 KB'),
'2015': HmdaDataFile('hmda_2015_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '19238', '594.54 KB'),
'2014': HmdaDataFile('hmda_2014_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '14179', '455.38 KB'),
'2008': HmdaDataFile('hmda_2008_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '18140', '535.59 KB'),
'2009': HmdaDataFile('hmda_2009_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '29449', '820.78 KB'),
'2011': HmdaDataFile('hmda_2011_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '20369', '581.77 KB'),
'2010': HmdaDataFile('hmda_2010_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24263', '698.2 KB'),
'2013': HmdaDataFile('hmda_2013_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '24398', '725.42 KB'),
'2012': HmdaDataFile('hmda_2012_ri_first-lien-owner-occupied-1-4-family-records_codes.zip', '28863', '844.23 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ri_all-records_codes.zip', '49775', '1.57 MB'),
'2007': HmdaDataFile('hmda_2007_ri_all-records_codes.zip', '88662', '2.64 MB'),
'2017': HmdaDataFile('hmda_2017_ri_all-records_codes.zip', '44038', '997.28 KB'),
'2015': HmdaDataFile('hmda_2015_ri_all-records_codes.zip', '43611', '1.42 MB'),
'2014': HmdaDataFile('hmda_2014_ri_all-records_codes.zip', '33941', '1.14 MB'),
'2008': HmdaDataFile('hmda_2008_ri_all-records_codes.zip', '51710', '1.58 MB'),
'2009': HmdaDataFile('hmda_2009_ri_all-records_codes.zip', '64057', '1.89 MB'),
'2011': HmdaDataFile('hmda_2011_ri_all-records_codes.zip', '48785', '1.49 MB'),
'2010': HmdaDataFile('hmda_2010_ri_all-records_codes.zip', '55842', '1.73 MB'),
'2013': HmdaDataFile('hmda_2013_ri_all-records_codes.zip', '52960', '1.66 MB'),
'2012': HmdaDataFile('hmda_2012_ri_all-records_codes.zip', '61996', '1.93 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ri_originated-records_codes.zip', '27005', '818.97 KB'),
'2007': HmdaDataFile('hmda_2007_ri_originated-records_codes.zip', '36923', '1.12 MB'),
'2017': HmdaDataFile('hmda_2017_ri_originated-records_codes.zip', '24598', '532.25 KB'),
'2015': HmdaDataFile('hmda_2015_ri_originated-records_codes.zip', '23923', '744.4 KB'),
'2014': HmdaDataFile('hmda_2014_ri_originated-records_codes.zip', '18005', '586.22 KB'),
'2008': HmdaDataFile('hmda_2008_ri_originated-records_codes.zip', '23931', '716.77 KB'),
'2009': HmdaDataFile('hmda_2009_ri_originated-records_codes.zip', '33549', '938.52 KB'),
'2011': HmdaDataFile('hmda_2011_ri_originated-records_codes.zip', '24337', '705.92 KB'),
'2010': HmdaDataFile('hmda_2010_ri_originated-records_codes.zip', '28439', '835.63 KB'),
'2013': HmdaDataFile('hmda_2013_ri_originated-records_codes.zip', '29485', '889.58 KB'),
'2012': HmdaDataFile('hmda_2012_ri_originated-records_codes.zip', '33911', '1.01 MB')
}
}
},
'ks': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '57549', '2.68 MB'),
'2007': HmdaDataFile('hmda_2007_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '62611', '2.93 MB'),
'2017': HmdaDataFile('hmda_2017_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '50202', '1.64 MB'),
'2015': HmdaDataFile('hmda_2015_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '52685', '2.73 MB'),
'2014': HmdaDataFile('hmda_2014_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '43216', '2.18 MB'),
'2008': HmdaDataFile('hmda_2008_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '56265', '2.59 MB'),
'2009': HmdaDataFile('hmda_2009_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '78703', '3.32 MB'),
'2011': HmdaDataFile('hmda_2011_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '56542', '2.6 MB'),
'2010': HmdaDataFile('hmda_2010_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '67768', '3.07 MB'),
'2013': HmdaDataFile('hmda_2013_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '62047', '2.98 MB'),
'2012': HmdaDataFile('hmda_2012_ks_first-lien-owner-occupied-1-4-family-records_labels.zip', '73455', '3.41 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ks_all-records_labels.zip', '124922', '6.39 MB'),
'2007': HmdaDataFile('hmda_2007_ks_all-records_labels.zip', '194856', '9.65 MB'),
'2017': HmdaDataFile('hmda_2017_ks_all-records_labels.zip', '107563', '3.99 MB'),
'2015': HmdaDataFile('hmda_2015_ks_all-records_labels.zip', '113367', '6.35 MB'),
'2014': HmdaDataFile('hmda_2014_ks_all-records_labels.zip', '96245', '5.33 MB'),
'2008': HmdaDataFile('hmda_2008_ks_all-records_labels.zip', '146968', '7.27 MB'),
'2009': HmdaDataFile('hmda_2009_ks_all-records_labels.zip', '175095', '8.11 MB'),
'2011': HmdaDataFile('hmda_2011_ks_all-records_labels.zip', '126214', '6.44 MB'),
'2010': HmdaDataFile('hmda_2010_ks_all-records_labels.zip', '149351', '7.61 MB'),
'2013': HmdaDataFile('hmda_2013_ks_all-records_labels.zip', '134547', '7.17 MB'),
'2012': HmdaDataFile('hmda_2012_ks_all-records_labels.zip', '149627', '7.77 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ks_originated-records_labels.zip', '69335', '3.31 MB'),
'2007': HmdaDataFile('hmda_2007_ks_originated-records_labels.zip', '86578', '4.16 MB'),
'2017': HmdaDataFile('hmda_2017_ks_originated-records_labels.zip', '61197', '2.05 MB'),
'2015': HmdaDataFile('hmda_2015_ks_originated-records_labels.zip', '63448', '3.34 MB'),
'2014': HmdaDataFile('hmda_2014_ks_originated-records_labels.zip', '53984', '2.79 MB'),
'2008': HmdaDataFile('hmda_2008_ks_originated-records_labels.zip', '72280', '3.42 MB'),
'2009': HmdaDataFile('hmda_2009_ks_originated-records_labels.zip', '90077', '3.93 MB'),
'2011': HmdaDataFile('hmda_2011_ks_originated-records_labels.zip', '66876', '3.14 MB'),
'2010': HmdaDataFile('hmda_2010_ks_originated-records_labels.zip', '78256', '3.64 MB'),
'2013': HmdaDataFile('hmda_2013_ks_originated-records_labels.zip', '74582', '3.64 MB'),
'2012': HmdaDataFile('hmda_2012_ks_originated-records_labels.zip', '84924', '4.01 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '57549', '1.78 MB'),
'2007': HmdaDataFile('hmda_2007_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '62611', '2.02 MB'),
'2017': HmdaDataFile('hmda_2017_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '50202', '1.17 MB'),
'2015': HmdaDataFile('hmda_2015_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '52685', '1.86 MB'),
'2014': HmdaDataFile('hmda_2014_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '43216', '1.46 MB'),
'2008': HmdaDataFile('hmda_2008_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '56265', '1.79 MB'),
'2009': HmdaDataFile('hmda_2009_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '78703', '2.28 MB'),
'2011': HmdaDataFile('hmda_2011_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '56542', '1.71 MB'),
'2010': HmdaDataFile('hmda_2010_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '67768', '2.02 MB'),
'2013': HmdaDataFile('hmda_2013_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '62047', '1.98 MB'),
'2012': HmdaDataFile('hmda_2012_ks_first-lien-owner-occupied-1-4-family-records_codes.zip', '73455', '2.25 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ks_all-records_codes.zip', '124922', '4.16 MB'),
'2007': HmdaDataFile('hmda_2007_ks_all-records_codes.zip', '194856', '6.5 MB'),
'2017': HmdaDataFile('hmda_2017_ks_all-records_codes.zip', '107563', '2.69 MB'),
'2015': HmdaDataFile('hmda_2015_ks_all-records_codes.zip', '113367', '4.18 MB'),
'2014': HmdaDataFile('hmda_2014_ks_all-records_codes.zip', '96245', '3.49 MB'),
'2008': HmdaDataFile('hmda_2008_ks_all-records_codes.zip', '146968', '4.88 MB'),
'2009': HmdaDataFile('hmda_2009_ks_all-records_codes.zip', '175095', '5.5 MB'),
'2011': HmdaDataFile('hmda_2011_ks_all-records_codes.zip', '126214', '4.14 MB'),
'2010': HmdaDataFile('hmda_2010_ks_all-records_codes.zip', '149351', '4.91 MB'),
'2013': HmdaDataFile('hmda_2013_ks_all-records_codes.zip', '134547', '4.67 MB'),
'2012': HmdaDataFile('hmda_2012_ks_all-records_codes.zip', '149627', '5.06 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ks_originated-records_codes.zip', '69335', '2.18 MB'),
'2007': HmdaDataFile('hmda_2007_ks_originated-records_codes.zip', '86578', '2.85 MB'),
'2017': HmdaDataFile('hmda_2017_ks_originated-records_codes.zip', '61197', '1.44 MB'),
'2015': HmdaDataFile('hmda_2015_ks_originated-records_codes.zip', '63448', '2.26 MB'),
'2014': HmdaDataFile('hmda_2014_ks_originated-records_codes.zip', '53984', '1.86 MB'),
'2008': HmdaDataFile('hmda_2008_ks_originated-records_codes.zip', '72280', '2.34 MB'),
'2009': HmdaDataFile('hmda_2009_ks_originated-records_codes.zip', '90077', '2.69 MB'),
'2011': HmdaDataFile('hmda_2011_ks_originated-records_codes.zip', '66876', '2.05 MB'),
'2010': HmdaDataFile('hmda_2010_ks_originated-records_codes.zip', '78256', '2.38 MB'),
'2013': HmdaDataFile('hmda_2013_ks_originated-records_codes.zip', '74582', '2.39 MB'),
'2012': HmdaDataFile('hmda_2012_ks_originated-records_codes.zip', '84924', '2.62 MB')
}
}
},
'mt': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21366', '875.05 KB'),
'2007': HmdaDataFile('hmda_2007_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '20722', '828.42 KB'),
'2017': HmdaDataFile('hmda_2017_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '18983', '556.44 KB'),
'2015': HmdaDataFile('hmda_2015_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '19758', '888.25 KB'),
'2014': HmdaDataFile('hmda_2014_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '15841', '699.12 KB'),
'2008': HmdaDataFile('hmda_2008_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '19303', '733.23 KB'),
'2009': HmdaDataFile('hmda_2009_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '30035', '1.08 MB'),
'2011': HmdaDataFile('hmda_2011_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '18104', '695.47 KB'),
'2010': HmdaDataFile('hmda_2010_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '21854', '845.34 KB'),
'2013': HmdaDataFile('hmda_2013_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '23631', '959.77 KB'),
'2012': HmdaDataFile('hmda_2012_mt_first-lien-owner-occupied-1-4-family-records_labels.zip', '25225', '1 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mt_all-records_labels.zip', '50537', '2.27 MB'),
'2007': HmdaDataFile('hmda_2007_mt_all-records_labels.zip', '72952', '3 MB'),
'2017': HmdaDataFile('hmda_2017_mt_all-records_labels.zip', '45597', '1.44 MB'),
'2015': HmdaDataFile('hmda_2015_mt_all-records_labels.zip', '46419', '2.32 MB'),
'2014': HmdaDataFile('hmda_2014_mt_all-records_labels.zip', '38476', '1.85 MB'),
'2008': HmdaDataFile('hmda_2008_mt_all-records_labels.zip', '55795', '2.26 MB'),
'2009': HmdaDataFile('hmda_2009_mt_all-records_labels.zip', '72632', '2.81 MB'),
'2011': HmdaDataFile('hmda_2011_mt_all-records_labels.zip', '45730', '1.97 MB'),
'2010': HmdaDataFile('hmda_2010_mt_all-records_labels.zip', '54464', '2.34 MB'),
'2013': HmdaDataFile('hmda_2013_mt_all-records_labels.zip', '56588', '2.52 MB'),
'2012': HmdaDataFile('hmda_2012_mt_all-records_labels.zip', '57320', '2.52 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mt_originated-records_labels.zip', '26863', '1.12 MB'),
'2007': HmdaDataFile('hmda_2007_mt_originated-records_labels.zip', '31811', '1.27 MB'),
'2017': HmdaDataFile('hmda_2017_mt_originated-records_labels.zip', '24577', '727.45 KB'),
'2015': HmdaDataFile('hmda_2015_mt_originated-records_labels.zip', '25376', '1.17 MB'),
'2014': HmdaDataFile('hmda_2014_mt_originated-records_labels.zip', '21159', '958.54 KB'),
'2008': HmdaDataFile('hmda_2008_mt_originated-records_labels.zip', '26278', '1.02 MB'),
'2009': HmdaDataFile('hmda_2009_mt_originated-records_labels.zip', '36202', '1.33 MB'),
'2011': HmdaDataFile('hmda_2011_mt_originated-records_labels.zip', '23529', '929.24 KB'),
'2010': HmdaDataFile('hmda_2010_mt_originated-records_labels.zip', '27263', '1.08 MB'),
'2013': HmdaDataFile('hmda_2013_mt_originated-records_labels.zip', '31007', '1.28 MB'),
'2012': HmdaDataFile('hmda_2012_mt_originated-records_labels.zip', '31452', '1.28 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21366', '564.56 KB'),
'2007': HmdaDataFile('hmda_2007_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '20722', '538.06 KB'),
'2017': HmdaDataFile('hmda_2017_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '18983', '389.92 KB'),
'2015': HmdaDataFile('hmda_2015_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '19758', '575.24 KB'),
'2014': HmdaDataFile('hmda_2014_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '15841', '453.36 KB'),
'2008': HmdaDataFile('hmda_2008_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '19303', '479.86 KB'),
'2009': HmdaDataFile('hmda_2009_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '30035', '726.19 KB'),
'2011': HmdaDataFile('hmda_2011_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '18104', '448.42 KB'),
'2010': HmdaDataFile('hmda_2010_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '21854', '543.67 KB'),
'2013': HmdaDataFile('hmda_2013_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '23631', '616.38 KB'),
'2012': HmdaDataFile('hmda_2012_mt_first-lien-owner-occupied-1-4-family-records_codes.zip', '25225', '642.31 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_mt_all-records_codes.zip', '50537', '1.41 MB'),
'2007': HmdaDataFile('hmda_2007_mt_all-records_codes.zip', '72952', '1.92 MB'),
'2017': HmdaDataFile('hmda_2017_mt_all-records_codes.zip', '45597', '947.96 KB'),
'2015': HmdaDataFile('hmda_2015_mt_all-records_codes.zip', '46419', '1.45 MB'),
'2014': HmdaDataFile('hmda_2014_mt_all-records_codes.zip', '38476', '1.16 MB'),
'2008': HmdaDataFile('hmda_2008_mt_all-records_codes.zip', '55795', '1.44 MB'),
'2009': HmdaDataFile('hmda_2009_mt_all-records_codes.zip', '72632', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_mt_all-records_codes.zip', '45730', '1.22 MB'),
'2010': HmdaDataFile('hmda_2010_mt_all-records_codes.zip', '54464', '1.45 MB'),
'2013': HmdaDataFile('hmda_2013_mt_all-records_codes.zip', '56588', '1.56 MB'),
'2012': HmdaDataFile('hmda_2012_mt_all-records_codes.zip', '57320', '1.57 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_mt_originated-records_codes.zip', '26863', '722 KB'),
'2007': HmdaDataFile('hmda_2007_mt_originated-records_codes.zip', '31811', '828.36 KB'),
'2017': HmdaDataFile('hmda_2017_mt_originated-records_codes.zip', '24577', '501.64 KB'),
'2015': HmdaDataFile('hmda_2015_mt_originated-records_codes.zip', '25376', '750.19 KB'),
'2014': HmdaDataFile('hmda_2014_mt_originated-records_codes.zip', '21159', '615.26 KB'),
'2008': HmdaDataFile('hmda_2008_mt_originated-records_codes.zip', '26278', '663.11 KB'),
'2009': HmdaDataFile('hmda_2009_mt_originated-records_codes.zip', '36202', '887.48 KB'),
'2011': HmdaDataFile('hmda_2011_mt_originated-records_codes.zip', '23529', '592.46 KB'),
'2010': HmdaDataFile('hmda_2010_mt_originated-records_codes.zip', '27263', '689.27 KB'),
'2013': HmdaDataFile('hmda_2013_mt_originated-records_codes.zip', '31007', '815.5 KB'),
'2012': HmdaDataFile('hmda_2012_mt_originated-records_codes.zip', '31452', '815.03 KB')
}
}
},
'ms': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '37426', '1.74 MB'),
'2007': HmdaDataFile('hmda_2007_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '50509', '2.25 MB'),
'2017': HmdaDataFile('hmda_2017_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '36762', '1.08 MB'),
'2015': HmdaDataFile('hmda_2015_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '33087', '1.71 MB'),
'2014': HmdaDataFile('hmda_2014_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '29718', '1.39 MB'),
'2008': HmdaDataFile('hmda_2008_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '41719', '1.93 MB'),
'2009': HmdaDataFile('hmda_2009_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '46880', '1.96 MB'),
'2011': HmdaDataFile('hmda_2011_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '34477', '1.51 MB'),
'2010': HmdaDataFile('hmda_2010_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '39727', '1.77 MB'),
'2013': HmdaDataFile('hmda_2013_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '40639', '1.88 MB'),
'2012': HmdaDataFile('hmda_2012_ms_first-lien-owner-occupied-1-4-family-records_labels.zip', '43832', '1.98 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ms_all-records_labels.zip', '107199', '5.36 MB'),
'2007': HmdaDataFile('hmda_2007_ms_all-records_labels.zip', '173130', '7.83 MB'),
'2017': HmdaDataFile('hmda_2017_ms_all-records_labels.zip', '101384', '3.15 MB'),
'2015': HmdaDataFile('hmda_2015_ms_all-records_labels.zip', '95532', '5.35 MB'),
'2014': HmdaDataFile('hmda_2014_ms_all-records_labels.zip', '89193', '4.45 MB'),
'2008': HmdaDataFile('hmda_2008_ms_all-records_labels.zip', '136596', '6.41 MB'),
'2009': HmdaDataFile('hmda_2009_ms_all-records_labels.zip', '137988', '6 MB'),
'2011': HmdaDataFile('hmda_2011_ms_all-records_labels.zip', '106833', '5.1 MB'),
'2010': HmdaDataFile('hmda_2010_ms_all-records_labels.zip', '120079', '5.8 MB'),
'2013': HmdaDataFile('hmda_2013_ms_all-records_labels.zip', '115511', '5.75 MB'),
'2012': HmdaDataFile('hmda_2012_ms_all-records_labels.zip', '119816', '5.82 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ms_originated-records_labels.zip', '51684', '2.47 MB'),
'2007': HmdaDataFile('hmda_2007_ms_originated-records_labels.zip', '75757', '3.36 MB'),
'2017': HmdaDataFile('hmda_2017_ms_originated-records_labels.zip', '51038', '1.5 MB'),
'2015': HmdaDataFile('hmda_2015_ms_originated-records_labels.zip', '47435', '2.5 MB'),
'2014': HmdaDataFile('hmda_2014_ms_originated-records_labels.zip', '44303', '2.12 MB'),
'2008': HmdaDataFile('hmda_2008_ms_originated-records_labels.zip', '63435', '2.92 MB'),
'2009': HmdaDataFile('hmda_2009_ms_originated-records_labels.zip', '64926', '2.75 MB'),
'2011': HmdaDataFile('hmda_2011_ms_originated-records_labels.zip', '49962', '2.26 MB'),
'2010': HmdaDataFile('hmda_2010_ms_originated-records_labels.zip', '55509', '2.53 MB'),
'2013': HmdaDataFile('hmda_2013_ms_originated-records_labels.zip', '57688', '2.73 MB'),
'2012': HmdaDataFile('hmda_2012_ms_originated-records_labels.zip', '59972', '2.76 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '37426', '1.15 MB'),
'2007': HmdaDataFile('hmda_2007_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '50509', '1.5 MB'),
'2017': HmdaDataFile('hmda_2017_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '36762', '770.12 KB'),
'2015': HmdaDataFile('hmda_2015_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '33087', '1.14 MB'),
'2014': HmdaDataFile('hmda_2014_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '29718', '914.92 KB'),
'2008': HmdaDataFile('hmda_2008_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '41719', '1.3 MB'),
'2009': HmdaDataFile('hmda_2009_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '46880', '1.32 MB'),
'2011': HmdaDataFile('hmda_2011_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '34477', '986.29 KB'),
'2010': HmdaDataFile('hmda_2010_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '39727', '1.16 MB'),
'2013': HmdaDataFile('hmda_2013_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '40639', '1.23 MB'),
'2012': HmdaDataFile('hmda_2012_ms_first-lien-owner-occupied-1-4-family-records_codes.zip', '43832', '1.29 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ms_all-records_codes.zip', '107199', '3.37 MB'),
'2007': HmdaDataFile('hmda_2007_ms_all-records_codes.zip', '173130', '5.1 MB'),
'2017': HmdaDataFile('hmda_2017_ms_all-records_codes.zip', '101384', '2.07 MB'),
'2015': HmdaDataFile('hmda_2015_ms_all-records_codes.zip', '95532', '3.43 MB'),
'2014': HmdaDataFile('hmda_2014_ms_all-records_codes.zip', '89193', '2.8 MB'),
'2008': HmdaDataFile('hmda_2008_ms_all-records_codes.zip', '136596', '4.22 MB'),
'2009': HmdaDataFile('hmda_2009_ms_all-records_codes.zip', '137988', '3.96 MB'),
'2011': HmdaDataFile('hmda_2011_ms_all-records_codes.zip', '106833', '3.19 MB'),
'2010': HmdaDataFile('hmda_2010_ms_all-records_codes.zip', '120079', '3.65 MB'),
'2013': HmdaDataFile('hmda_2013_ms_all-records_codes.zip', '115511', '3.61 MB'),
'2012': HmdaDataFile('hmda_2012_ms_all-records_codes.zip', '119816', '3.68 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ms_originated-records_codes.zip', '51684', '1.61 MB'),
'2007': HmdaDataFile('hmda_2007_ms_originated-records_codes.zip', '75757', '2.24 MB'),
'2017': HmdaDataFile('hmda_2017_ms_originated-records_codes.zip', '51038', '1.05 MB'),
'2015': HmdaDataFile('hmda_2015_ms_originated-records_codes.zip', '47435', '1.65 MB'),
'2014': HmdaDataFile('hmda_2014_ms_originated-records_codes.zip', '44303', '1.37 MB'),
'2008': HmdaDataFile('hmda_2008_ms_originated-records_codes.zip', '63435', '1.96 MB'),
'2009': HmdaDataFile('hmda_2009_ms_originated-records_codes.zip', '64926', '1.84 MB'),
'2011': HmdaDataFile('hmda_2011_ms_originated-records_codes.zip', '49962', '1.46 MB'),
'2010': HmdaDataFile('hmda_2010_ms_originated-records_codes.zip', '55509', '1.64 MB'),
'2013': HmdaDataFile('hmda_2013_ms_originated-records_codes.zip', '57688', '1.76 MB'),
'2012': HmdaDataFile('hmda_2012_ms_originated-records_codes.zip', '59972', '1.79 MB')
}
}
},
'sc': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '108076', '5.05 MB'),
'2007': HmdaDataFile('hmda_2007_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '106873', '4.81 MB'),
'2017': HmdaDataFile('hmda_2017_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '100333', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '92167', '5.02 MB'),
'2014': HmdaDataFile('hmda_2014_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '73174', '3.73 MB'),
'2008': HmdaDataFile('hmda_2008_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '86677', '3.98 MB'),
'2009': HmdaDataFile('hmda_2009_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '110931', '4.77 MB'),
'2011': HmdaDataFile('hmda_2011_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '78145', '3.46 MB'),
'2010': HmdaDataFile('hmda_2010_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '85858', '3.92 MB'),
'2013': HmdaDataFile('hmda_2013_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '100643', '4.76 MB'),
'2012': HmdaDataFile('hmda_2012_sc_first-lien-owner-occupied-1-4-family-records_labels.zip', '106107', '5 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sc_all-records_labels.zip', '257644', '13.17 MB'),
'2007': HmdaDataFile('hmda_2007_sc_all-records_labels.zip', '383001', '17.86 MB'),
'2017': HmdaDataFile('hmda_2017_sc_all-records_labels.zip', '242772', '8.25 MB'),
'2015': HmdaDataFile('hmda_2015_sc_all-records_labels.zip', '225542', '13.41 MB'),
'2014': HmdaDataFile('hmda_2014_sc_all-records_labels.zip', '191197', '10.66 MB'),
'2008': HmdaDataFile('hmda_2008_sc_all-records_labels.zip', '271908', '13.26 MB'),
'2009': HmdaDataFile('hmda_2009_sc_all-records_labels.zip', '291014', '13.32 MB'),
'2011': HmdaDataFile('hmda_2011_sc_all-records_labels.zip', '218369', '11.01 MB'),
'2010': HmdaDataFile('hmda_2010_sc_all-records_labels.zip', '235957', '12.07 MB'),
'2013': HmdaDataFile('hmda_2013_sc_all-records_labels.zip', '259782', '13.73 MB'),
'2012': HmdaDataFile('hmda_2012_sc_all-records_labels.zip', '267040', '14.05 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sc_originated-records_labels.zip', '131134', '6.36 MB'),
'2007': HmdaDataFile('hmda_2007_sc_originated-records_labels.zip', '161777', '7.5 MB'),
'2017': HmdaDataFile('hmda_2017_sc_originated-records_labels.zip', '123971', '3.87 MB'),
'2015': HmdaDataFile('hmda_2015_sc_originated-records_labels.zip', '114336', '6.37 MB'),
'2014': HmdaDataFile('hmda_2014_sc_originated-records_labels.zip', '93412', '4.86 MB'),
'2008': HmdaDataFile('hmda_2008_sc_originated-records_labels.zip', '118458', '5.61 MB'),
'2009': HmdaDataFile('hmda_2009_sc_originated-records_labels.zip', '133057', '5.91 MB'),
'2011': HmdaDataFile('hmda_2011_sc_originated-records_labels.zip', '97461', '4.61 MB'),
'2010': HmdaDataFile('hmda_2010_sc_originated-records_labels.zip', '105626', '4.87 MB'),
'2013': HmdaDataFile('hmda_2013_sc_originated-records_labels.zip', '127479', '6.16 MB'),
'2012': HmdaDataFile('hmda_2012_sc_originated-records_labels.zip', '130453', '6.21 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '108076', '3.37 MB'),
'2007': HmdaDataFile('hmda_2007_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '106873', '3.2 MB'),
'2017': HmdaDataFile('hmda_2017_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '100333', '2.11 MB'),
'2015': HmdaDataFile('hmda_2015_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '92167', '3.46 MB'),
'2014': HmdaDataFile('hmda_2014_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '73174', '2.49 MB'),
'2008': HmdaDataFile('hmda_2008_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '86677', '2.67 MB'),
'2009': HmdaDataFile('hmda_2009_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '110931', '3.27 MB'),
'2011': HmdaDataFile('hmda_2011_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '78145', '2.24 MB'),
'2010': HmdaDataFile('hmda_2010_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '85858', '2.58 MB'),
'2013': HmdaDataFile('hmda_2013_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '100643', '3.16 MB'),
'2012': HmdaDataFile('hmda_2012_sc_first-lien-owner-occupied-1-4-family-records_codes.zip', '106107', '3.31 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sc_all-records_codes.zip', '257644', '8.51 MB'),
'2007': HmdaDataFile('hmda_2007_sc_all-records_codes.zip', '383001', '11.71 MB'),
'2017': HmdaDataFile('hmda_2017_sc_all-records_codes.zip', '242772', '5.31 MB'),
'2015': HmdaDataFile('hmda_2015_sc_all-records_codes.zip', '225542', '8.88 MB'),
'2014': HmdaDataFile('hmda_2014_sc_all-records_codes.zip', '191197', '6.91 MB'),
'2008': HmdaDataFile('hmda_2008_sc_all-records_codes.zip', '271908', '8.77 MB'),
'2009': HmdaDataFile('hmda_2009_sc_all-records_codes.zip', '291014', '8.92 MB'),
'2011': HmdaDataFile('hmda_2011_sc_all-records_codes.zip', '218369', '6.92 MB'),
'2010': HmdaDataFile('hmda_2010_sc_all-records_codes.zip', '235957', '7.67 MB'),
'2013': HmdaDataFile('hmda_2013_sc_all-records_codes.zip', '259782', '8.91 MB'),
'2012': HmdaDataFile('hmda_2012_sc_all-records_codes.zip', '267040', '9.12 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sc_originated-records_codes.zip', '131134', '4.24 MB'),
'2007': HmdaDataFile('hmda_2007_sc_originated-records_codes.zip', '161777', '5.03 MB'),
'2017': HmdaDataFile('hmda_2017_sc_originated-records_codes.zip', '123971', '2.67 MB'),
'2015': HmdaDataFile('hmda_2015_sc_originated-records_codes.zip', '114336', '4.36 MB'),
'2014': HmdaDataFile('hmda_2014_sc_originated-records_codes.zip', '93412', '3.22 MB'),
'2008': HmdaDataFile('hmda_2008_sc_originated-records_codes.zip', '118458', '3.78 MB'),
'2009': HmdaDataFile('hmda_2009_sc_originated-records_codes.zip', '133057', '4.05 MB'),
'2011': HmdaDataFile('hmda_2011_sc_originated-records_codes.zip', '97461', '2.99 MB'),
'2010': HmdaDataFile('hmda_2010_sc_originated-records_codes.zip', '105626', '3.17 MB'),
'2013': HmdaDataFile('hmda_2013_sc_originated-records_codes.zip', '127479', '4.06 MB'),
'2012': HmdaDataFile('hmda_2012_sc_originated-records_codes.zip', '130453', '4.07 MB')
}
}
},
'ky': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '81819', '3.99 MB'),
'2007': HmdaDataFile('hmda_2007_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '82043', '3.84 MB'),
'2017': HmdaDataFile('hmda_2017_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '72775', '2.34 MB'),
'2015': HmdaDataFile('hmda_2015_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '71716', '3.98 MB'),
'2014': HmdaDataFile('hmda_2014_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '59329', '3.1 MB'),
'2008': HmdaDataFile('hmda_2008_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '73163', '3.45 MB'),
'2009': HmdaDataFile('hmda_2009_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '103950', '4.54 MB'),
'2011': HmdaDataFile('hmda_2011_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '80580', '3.74 MB'),
'2010': HmdaDataFile('hmda_2010_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '91441', '4.28 MB'),
'2013': HmdaDataFile('hmda_2013_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '89932', '4.39 MB'),
'2012': HmdaDataFile('hmda_2012_ky_first-lien-owner-occupied-1-4-family-records_labels.zip', '105634', '5.06 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ky_all-records_labels.zip', '185587', '10.06 MB'),
'2007': HmdaDataFile('hmda_2007_ky_all-records_labels.zip', '285560', '14.05 MB'),
'2017': HmdaDataFile('hmda_2017_ky_all-records_labels.zip', '173004', '6.52 MB'),
'2015': HmdaDataFile('hmda_2015_ky_all-records_labels.zip', '167714', '10.04 MB'),
'2014': HmdaDataFile('hmda_2014_ky_all-records_labels.zip', '149317', '8.6 MB'),
'2008': HmdaDataFile('hmda_2008_ky_all-records_labels.zip', '215096', '10.84 MB'),
'2009': HmdaDataFile('hmda_2009_ky_all-records_labels.zip', '246427', '11.78 MB'),
'2011': HmdaDataFile('hmda_2011_ky_all-records_labels.zip', '203934', '10.77 MB'),
'2010': HmdaDataFile('hmda_2010_ky_all-records_labels.zip', '222486', '11.61 MB'),
'2013': HmdaDataFile('hmda_2013_ky_all-records_labels.zip', '215281', '11.75 MB'),
'2012': HmdaDataFile('hmda_2012_ky_all-records_labels.zip', '239015', '12.85 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ky_originated-records_labels.zip', '100105', '5.04 MB'),
'2007': HmdaDataFile('hmda_2007_ky_originated-records_labels.zip', '121278', '5.84 MB'),
'2017': HmdaDataFile('hmda_2017_ky_originated-records_labels.zip', '91096', '3.01 MB'),
'2015': HmdaDataFile('hmda_2015_ky_originated-records_labels.zip', '88714', '5.02 MB'),
'2014': HmdaDataFile('hmda_2014_ky_originated-records_labels.zip', '76520', '4.06 MB'),
'2008': HmdaDataFile('hmda_2008_ky_originated-records_labels.zip', '99394', '4.77 MB'),
'2009': HmdaDataFile('hmda_2009_ky_originated-records_labels.zip', '123485', '5.57 MB'),
'2011': HmdaDataFile('hmda_2011_ky_originated-records_labels.zip', '98794', '4.7 MB'),
'2010': HmdaDataFile('hmda_2010_ky_originated-records_labels.zip', '109716', '5.22 MB'),
'2013': HmdaDataFile('hmda_2013_ky_originated-records_labels.zip', '110912', '5.58 MB'),
'2012': HmdaDataFile('hmda_2012_ky_originated-records_labels.zip', '125050', '6.13 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '81819', '2.64 MB'),
'2007': HmdaDataFile('hmda_2007_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '82043', '2.59 MB'),
'2017': HmdaDataFile('hmda_2017_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '72775', '1.68 MB'),
'2015': HmdaDataFile('hmda_2015_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '71716', '2.68 MB'),
'2014': HmdaDataFile('hmda_2014_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '59329', '2.06 MB'),
'2008': HmdaDataFile('hmda_2008_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '73163', '2.34 MB'),
'2009': HmdaDataFile('hmda_2009_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '103950', '3.13 MB'),
'2011': HmdaDataFile('hmda_2011_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '80580', '2.43 MB'),
'2010': HmdaDataFile('hmda_2010_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '91441', '2.81 MB'),
'2013': HmdaDataFile('hmda_2013_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '89932', '2.91 MB'),
'2012': HmdaDataFile('hmda_2012_ky_first-lien-owner-occupied-1-4-family-records_codes.zip', '105634', '3.33 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_ky_all-records_codes.zip', '185587', '6.56 MB'),
'2007': HmdaDataFile('hmda_2007_ky_all-records_codes.zip', '285560', '9.35 MB'),
'2017': HmdaDataFile('hmda_2017_ky_all-records_codes.zip', '173004', '4.32 MB'),
'2015': HmdaDataFile('hmda_2015_ky_all-records_codes.zip', '167714', '6.5 MB'),
'2014': HmdaDataFile('hmda_2014_ky_all-records_codes.zip', '149317', '5.56 MB'),
'2008': HmdaDataFile('hmda_2008_ky_all-records_codes.zip', '215096', '7.24 MB'),
'2009': HmdaDataFile('hmda_2009_ky_all-records_codes.zip', '246427', '7.96 MB'),
'2011': HmdaDataFile('hmda_2011_ky_all-records_codes.zip', '203934', '6.87 MB'),
'2010': HmdaDataFile('hmda_2010_ky_all-records_codes.zip', '222486', '7.44 MB'),
'2013': HmdaDataFile('hmda_2013_ky_all-records_codes.zip', '215281', '7.63 MB'),
'2012': HmdaDataFile('hmda_2012_ky_all-records_codes.zip', '239015', '8.34 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_ky_originated-records_codes.zip', '100105', '3.34 MB'),
'2007': HmdaDataFile('hmda_2007_ky_originated-records_codes.zip', '121278', '3.97 MB'),
'2017': HmdaDataFile('hmda_2017_ky_originated-records_codes.zip', '91096', '2.12 MB'),
'2015': HmdaDataFile('hmda_2015_ky_originated-records_codes.zip', '88714', '3.36 MB'),
'2014': HmdaDataFile('hmda_2014_ky_originated-records_codes.zip', '76520', '2.67 MB'),
'2008': HmdaDataFile('hmda_2008_ky_originated-records_codes.zip', '99394', '3.22 MB'),
'2009': HmdaDataFile('hmda_2009_ky_originated-records_codes.zip', '123485', '3.84 MB'),
'2011': HmdaDataFile('hmda_2011_ky_originated-records_codes.zip', '98794', '3.04 MB'),
'2010': HmdaDataFile('hmda_2010_ky_originated-records_codes.zip', '109716', '3.39 MB'),
'2013': HmdaDataFile('hmda_2013_ky_originated-records_codes.zip', '110912', '3.68 MB'),
'2012': HmdaDataFile('hmda_2012_ky_originated-records_codes.zip', '125050', '4.01 MB')
}
}
},
'or': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '114167', '5.46 MB'),
'2007': HmdaDataFile('hmda_2007_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '101187', '4.59 MB'),
'2017': HmdaDataFile('hmda_2017_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '95129', '2.88 MB'),
'2015': HmdaDataFile('hmda_2015_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '97022', '5.17 MB'),
'2014': HmdaDataFile('hmda_2014_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '69787', '3.39 MB'),
'2008': HmdaDataFile('hmda_2008_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '77187', '3.56 MB'),
'2009': HmdaDataFile('hmda_2009_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '118063', '5.04 MB'),
'2011': HmdaDataFile('hmda_2011_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '80824', '3.93 MB'),
'2010': HmdaDataFile('hmda_2010_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '102166', '4.84 MB'),
'2013': HmdaDataFile('hmda_2013_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '102103', '4.92 MB'),
'2012': HmdaDataFile('hmda_2012_or_first-lien-owner-occupied-1-4-family-records_labels.zip', '119486', '5.65 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_or_all-records_labels.zip', '249739', '12.75 MB'),
'2007': HmdaDataFile('hmda_2007_or_all-records_labels.zip', '376732', '17.18 MB'),
'2017': HmdaDataFile('hmda_2017_or_all-records_labels.zip', '211344', '7.17 MB'),
'2015': HmdaDataFile('hmda_2015_or_all-records_labels.zip', '214365', '12.36 MB'),
'2014': HmdaDataFile('hmda_2014_or_all-records_labels.zip', '168582', '8.75 MB'),
'2008': HmdaDataFile('hmda_2008_or_all-records_labels.zip', '251125', '12 MB'),
'2009': HmdaDataFile('hmda_2009_or_all-records_labels.zip', '300552', '13.38 MB'),
'2011': HmdaDataFile('hmda_2011_or_all-records_labels.zip', '204085', '10.92 MB'),
'2010': HmdaDataFile('hmda_2010_or_all-records_labels.zip', '244799', '12.74 MB'),
'2013': HmdaDataFile('hmda_2013_or_all-records_labels.zip', '240614', '12.69 MB'),
'2012': HmdaDataFile('hmda_2012_or_all-records_labels.zip', '269285', '14.02 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_or_originated-records_labels.zip', '136083', '6.58 MB'),
'2007': HmdaDataFile('hmda_2007_or_originated-records_labels.zip', '151933', '7.06 MB'),
'2017': HmdaDataFile('hmda_2017_or_originated-records_labels.zip', '116700', '3.65 MB'),
'2015': HmdaDataFile('hmda_2015_or_originated-records_labels.zip', '117674', '6.39 MB'),
'2014': HmdaDataFile('hmda_2014_or_originated-records_labels.zip', '87626', '4.34 MB'),
'2008': HmdaDataFile('hmda_2008_or_originated-records_labels.zip', '97998', '4.65 MB'),
'2009': HmdaDataFile('hmda_2009_or_originated-records_labels.zip', '134377', '5.88 MB'),
'2011': HmdaDataFile('hmda_2011_or_originated-records_labels.zip', '98243', '4.9 MB'),
'2010': HmdaDataFile('hmda_2010_or_originated-records_labels.zip', '118373', '5.68 MB'),
'2013': HmdaDataFile('hmda_2013_or_originated-records_labels.zip', '128622', '6.28 MB'),
'2012': HmdaDataFile('hmda_2012_or_originated-records_labels.zip', '144891', '7.02 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '114167', '3.73 MB'),
'2007': HmdaDataFile('hmda_2007_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '101187', '3.17 MB'),
'2017': HmdaDataFile('hmda_2017_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '95129', '1.97 MB'),
'2015': HmdaDataFile('hmda_2015_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '97022', '3.6 MB'),
'2014': HmdaDataFile('hmda_2014_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '69787', '2.33 MB'),
'2008': HmdaDataFile('hmda_2008_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '77187', '2.47 MB'),
'2009': HmdaDataFile('hmda_2009_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '118063', '3.58 MB'),
'2011': HmdaDataFile('hmda_2011_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '80824', '2.65 MB'),
'2010': HmdaDataFile('hmda_2010_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '102166', '3.28 MB'),
'2013': HmdaDataFile('hmda_2013_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '102103', '3.39 MB'),
'2012': HmdaDataFile('hmda_2012_or_first-lien-owner-occupied-1-4-family-records_codes.zip', '119486', '3.83 MB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_or_all-records_codes.zip', '249739', '8.44 MB'),
'2007': HmdaDataFile('hmda_2007_or_all-records_codes.zip', '376732', '11.64 MB'),
'2017': HmdaDataFile('hmda_2017_or_all-records_codes.zip', '211344', '4.61 MB'),
'2015': HmdaDataFile('hmda_2015_or_all-records_codes.zip', '214365', '8.29 MB'),
'2014': HmdaDataFile('hmda_2014_or_all-records_codes.zip', '168582', '5.8 MB'),
'2008': HmdaDataFile('hmda_2008_or_all-records_codes.zip', '251125', '8.16 MB'),
'2009': HmdaDataFile('hmda_2009_or_all-records_codes.zip', '300552', '9.29 MB'),
'2011': HmdaDataFile('hmda_2011_or_all-records_codes.zip', '204085', '7.17 MB'),
'2010': HmdaDataFile('hmda_2010_or_all-records_codes.zip', '244799', '8.44 MB'),
'2013': HmdaDataFile('hmda_2013_or_all-records_codes.zip', '240614', '8.5 MB'),
'2012': HmdaDataFile('hmda_2012_or_all-records_codes.zip', '269285', '9.32 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_or_originated-records_codes.zip', '136083', '4.48 MB'),
'2007': HmdaDataFile('hmda_2007_or_originated-records_codes.zip', '151933', '4.92 MB'),
'2017': HmdaDataFile('hmda_2017_or_originated-records_codes.zip', '116700', '2.48 MB'),
'2015': HmdaDataFile('hmda_2015_or_originated-records_codes.zip', '117674', '4.43 MB'),
'2014': HmdaDataFile('hmda_2014_or_originated-records_codes.zip', '87626', '2.98 MB'),
'2008': HmdaDataFile('hmda_2008_or_originated-records_codes.zip', '97998', '3.24 MB'),
'2009': HmdaDataFile('hmda_2009_or_originated-records_codes.zip', '134377', '4.19 MB'),
'2011': HmdaDataFile('hmda_2011_or_originated-records_codes.zip', '98243', '3.29 MB'),
'2010': HmdaDataFile('hmda_2010_or_originated-records_codes.zip', '118373', '3.84 MB'),
'2013': HmdaDataFile('hmda_2013_or_originated-records_codes.zip', '128622', '4.29 MB'),
'2012': HmdaDataFile('hmda_2012_or_originated-records_codes.zip', '144891', '4.75 MB')
}
}
},
'sd': {
'labels': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17608', '715.46 KB'),
'2007': HmdaDataFile('hmda_2007_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15984', '612.26 KB'),
'2017': HmdaDataFile('hmda_2017_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15341', '394.73 KB'),
'2015': HmdaDataFile('hmda_2015_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '15835', '706.73 KB'),
'2014': HmdaDataFile('hmda_2014_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '12839', '526.79 KB'),
'2008': HmdaDataFile('hmda_2008_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '16258', '623.5 KB'),
'2009': HmdaDataFile('hmda_2009_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '24417', '866.74 KB'),
'2011': HmdaDataFile('hmda_2011_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '17828', '666.34 KB'),
'2010': HmdaDataFile('hmda_2010_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '20738', '786.61 KB'),
'2013': HmdaDataFile('hmda_2013_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '19523', '775.49 KB'),
'2012': HmdaDataFile('hmda_2012_sd_first-lien-owner-occupied-1-4-family-records_labels.zip', '24459', '981.4 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sd_all-records_labels.zip', '37648', '1.67 MB'),
'2007': HmdaDataFile('hmda_2007_sd_all-records_labels.zip', '47432', '1.97 MB'),
'2017': HmdaDataFile('hmda_2017_sd_all-records_labels.zip', '33167', '980.45 KB'),
'2015': HmdaDataFile('hmda_2015_sd_all-records_labels.zip', '35072', '1.72 MB'),
'2014': HmdaDataFile('hmda_2014_sd_all-records_labels.zip', '29763', '1.37 MB'),
'2008': HmdaDataFile('hmda_2008_sd_all-records_labels.zip', '41213', '1.78 MB'),
'2009': HmdaDataFile('hmda_2009_sd_all-records_labels.zip', '53033', '2.13 MB'),
'2011': HmdaDataFile('hmda_2011_sd_all-records_labels.zip', '38426', '1.67 MB'),
'2010': HmdaDataFile('hmda_2010_sd_all-records_labels.zip', '45150', '1.96 MB'),
'2013': HmdaDataFile('hmda_2013_sd_all-records_labels.zip', '43401', '1.95 MB'),
'2012': HmdaDataFile('hmda_2012_sd_all-records_labels.zip', '48753', '2.2 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sd_originated-records_labels.zip', '20937', '887.21 KB'),
'2007': HmdaDataFile('hmda_2007_sd_originated-records_labels.zip', '24091', '970 KB'),
'2017': HmdaDataFile('hmda_2017_sd_originated-records_labels.zip', '18871', '517.46 KB'),
'2015': HmdaDataFile('hmda_2015_sd_originated-records_labels.zip', '19418', '890.97 KB'),
'2014': HmdaDataFile('hmda_2014_sd_originated-records_labels.zip', '16136', '696.36 KB'),
'2008': HmdaDataFile('hmda_2008_sd_originated-records_labels.zip', '22824', '912.18 KB'),
'2009': HmdaDataFile('hmda_2009_sd_originated-records_labels.zip', '29867', '1.11 MB'),
'2011': HmdaDataFile('hmda_2011_sd_originated-records_labels.zip', '21818', '862.15 KB'),
'2010': HmdaDataFile('hmda_2010_sd_originated-records_labels.zip', '25068', '980.72 KB'),
'2013': HmdaDataFile('hmda_2013_sd_originated-records_labels.zip', '23684', '969.63 KB'),
'2012': HmdaDataFile('hmda_2012_sd_originated-records_labels.zip', '28651', '1.18 MB')
}
},
'codes': {
'first-lien-owner-occupied-1-4-family-records': {
'2016': HmdaDataFile('hmda_2016_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17608', '452.93 KB'),
'2007': HmdaDataFile('hmda_2007_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15984', '392.92 KB'),
'2017': HmdaDataFile('hmda_2017_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15341', '275.01 KB'),
'2015': HmdaDataFile('hmda_2015_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '15835', '442.1 KB'),
'2014': HmdaDataFile('hmda_2014_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '12839', '334.62 KB'),
'2008': HmdaDataFile('hmda_2008_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '16258', '411.49 KB'),
'2009': HmdaDataFile('hmda_2009_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '24417', '586.34 KB'),
'2011': HmdaDataFile('hmda_2011_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '17828', '429.4 KB'),
'2010': HmdaDataFile('hmda_2010_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '20738', '504.37 KB'),
'2013': HmdaDataFile('hmda_2013_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '19523', '492.06 KB'),
'2012': HmdaDataFile('hmda_2012_sd_first-lien-owner-occupied-1-4-family-records_codes.zip', '24459', '623.71 KB')
},
'all-records': {
'2016': HmdaDataFile('hmda_2016_sd_all-records_codes.zip', '37648', '1.02 MB'),
'2007': HmdaDataFile('hmda_2007_sd_all-records_codes.zip', '47432', '1.24 MB'),
'2017': HmdaDataFile('hmda_2017_sd_all-records_codes.zip', '33167', '657.41 KB'),
'2015': HmdaDataFile('hmda_2015_sd_all-records_codes.zip', '35072', '1.04 MB'),
'2014': HmdaDataFile('hmda_2014_sd_all-records_codes.zip', '29763', '835.84 KB'),
'2008': HmdaDataFile('hmda_2008_sd_all-records_codes.zip', '41213', '1.13 MB'),
'2009': HmdaDataFile('hmda_2009_sd_all-records_codes.zip', '53033', '1.39 MB'),
'2011': HmdaDataFile('hmda_2011_sd_all-records_codes.zip', '38426', '1.03 MB'),
'2010': HmdaDataFile('hmda_2010_sd_all-records_codes.zip', '45150', '1.22 MB'),
'2013': HmdaDataFile('hmda_2013_sd_all-records_codes.zip', '43401', '1.19 MB'),
'2012': HmdaDataFile('hmda_2012_sd_all-records_codes.zip', '48753', '1.35 MB')
},
'originated-records': {
'2016': HmdaDataFile('hmda_2016_sd_originated-records_codes.zip', '20937', '557.49 KB'),
'2007': HmdaDataFile('hmda_2007_sd_originated-records_codes.zip', '24091', '623.6 KB'),
'2017': HmdaDataFile('hmda_2017_sd_originated-records_codes.zip', '18871', '357.8 KB'),
'2015': HmdaDataFile('hmda_2015_sd_originated-records_codes.zip', '19418', '553.25 KB'),
'2014': HmdaDataFile('hmda_2014_sd_originated-records_codes.zip', '16136', '436.32 KB'),
'2008': HmdaDataFile('hmda_2008_sd_originated-records_codes.zip', '22824', '595.53 KB'),
'2009': HmdaDataFile('hmda_2009_sd_originated-records_codes.zip', '29867', '746.74 KB'),
'2011': HmdaDataFile('hmda_2011_sd_originated-records_codes.zip', '21818', '549.43 KB'),
'2010': HmdaDataFile('hmda_2010_sd_originated-records_codes.zip', '25068', '623.74 KB'),
'2013': HmdaDataFile('hmda_2013_sd_originated-records_codes.zip', '23684', '608.21 KB'),
'2012': HmdaDataFile('hmda_2012_sd_originated-records_codes.zip', '28651', '744.18 KB')
}
}
}
}
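# Lookup sketch (assumptions: the enclosing literal is bound to a name such
# as HMDA_DATA_FILES, and HmdaDataFile keeps its positional arguments as
# .filename, .num_records and .size -- neither name is confirmed in this
# file). The nesting is state -> 'labels'/'codes' -> record set -> year:
#
#   entry = HMDA_DATA_FILES['ri']['codes']['all-records']['2016']
#   entry.filename  # 'hmda_2016_ri_all-records_codes.zip'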
|
#!/usr/bin/python
# CALCULATION OF GC CONTENT (GENES)
import os
import logging
import argparse
import fastalib
from fileutils import safe_filename
def get_option_args():
args_parser = argparse.ArgumentParser(
description='Compute GC content for the FASTA files in a directory',
)
args_parser.add_argument(
'path',
help='Path in which to look for FASTA files; use . for the current working directory',
)
args_parser.add_argument(
'--tron',
dest='tron',
action='store_const',
const=True,
default=False,
help='Show a trace of activity (disabled by default)',
)
args = args_parser.parse_args()
logging.basicConfig(
level=logging.INFO if args.tron else logging.ERROR,
format='%(asctime)s %(levelname)s %(message)s',
)
return args
if __name__ == '__main__':
args = get_option_args()
genomes = [fn for fn in os.listdir(args.path) if fn.endswith(".faa")]
for filename in genomes:
if args.tron:
logging.info('Processing {}'.format(filename))
full_name = os.path.join(args.path, filename)
data = fastalib.read_fasta_file(full_name)
if args.tron:
logging.info('Generating output files')
num_outputs = 0
for key in data:
lines = data[key]
filename = safe_filename('result_id_{}.fasta'.format(key))
with open(filename, 'w') as f1:
for l in lines:
f1.write('{}\n'.format(l))
num_outputs += 1
g, a, t, c = fastalib.count_nucleotydes_gatc(lines)
filename = safe_filename('result_GC_{}.fasta'.format(key))
with open(filename, 'w') as f2:
f2.write('Guanine: {:d}\n'.format(g))
f2.write('Adenine: {:d}\n'.format(a))
f2.write('Thymine: {:d}\n'.format(t))
f2.write('Cytosine: {:d}\n'.format(c))
total = a + c + g + t
# Guard against empty sequences to avoid a ZeroDivisionError
p = round(float(c + g) / total, 9) if total else 0.0
f2.write('CG proportion: {:.9f}\n'.format(p))
num_outputs += 1
if args.tron:
logging.info('Finished: files processed {}, generated {}'.format(
len(genomes),
num_outputs,
))
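# For reference, a minimal sketch of the two fastalib helpers this script
# depends on (assumptions -- the real module is not included in this file):
#
#   def read_fasta_file(path):
#       """Return a mapping of sequence id -> list of lines for that record."""
#       ...
#
#   def count_nucleotydes_gatc(lines):
#       """Return (g, a, t, c) counts over the non-header sequence lines."""
#       seq = ''.join(l for l in lines if not l.startswith('>')).upper()
#       return (seq.count('G'), seq.count('A'),
#               seq.count('T'), seq.count('C'))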
|
'''
BGP Genie Ops Object Outputs for IOSXR.
'''
import xml.etree.ElementTree as ET
class BgpOutput(object):
############################################################################
# BGP INFO
############################################################################
ShowBgpInstances = {
"instance": {
"test1": {
"num_vrfs": 0,
"instance_id": 1,
"placed_grp": "bgp2_1",
"bgp_id": 333
},
"default": {
"num_vrfs": 2,
"instance_id": 3,
"address_families": [
"ipv4 unicast",
"vpnv4 unicast",
"ipv6 unicast",
"vpnv6 unicast"
],
"placed_grp": "bgp4_1",
"bgp_id": 100
},
"test": {
"num_vrfs": 0,
"instance_id": 0,
"placed_grp": "v4_routing",
"bgp_id": 333
},
"test2": {
"num_vrfs": 0,
"instance_id": 2,
"placed_grp": "bgp3_1",
"bgp_id": 333}}}
ShowPlacementProgramAll = {
'program':
{'rcp_fs':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'central-services',
'jid': '1168',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'bgp':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'v4-routing',
'jid': '1018',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'ospf':
{'instance':
{'1':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'v4-routing',
'jid': '1018',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'statsd_manager_g':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'netmgmt',
'jid': '1141',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'pim':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'mcast-routing',
'jid': '1158',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}},
'ipv6_local':
{'instance':
{'default':
{'active': '0/0/CPU0',
'active_state': 'RUNNING',
'group': 'v6-routing',
'jid': '1156',
'standby': 'NONE',
'standby_state': 'NOT_SPAWNED'}}}}
}
ShowBgpInstanceSessionGroupConfiguration = {
"default": {
"peer_session": {
"SG": {
"remote_as": 333,
"fall_over_bfd": True,
"password_text": "094F471A1A0A464058",
"holdtime": 30,
"transport_connection_mode": "active-only",
"ebgp_multihop_max_hop": 254,
"local_replace_as": True,
"ps_minimum_holdtime": 3,
"keepalive_interval": 10,
"shutdown": True,
"local_dual_as": True,
"local_no_prepend": True,
"ebgp_multihop_enable": True,
"suppress_four_byte_as_capability": True,
"local_as_as_no": 200,
"description": "SG_group",
"update_source": 'loopback0',
"disable_connected_check": True
}
}
}
}
ShowBgpInstanceAfGroupConfiguration = {
"instance": {
"default": {
"pp_name": {
"af_group": {
"address_family": "ipv4 unicast",
"default_originate": True,
"default_originate_route_map": "allpass",
"maximum_prefix_max_prefix_no": 429,
"maximum_prefix_threshold": 75,
"maximum_prefix_restart": 35,
"next_hop_self": True,
"route_map_name_in": "allpass",
"route_map_name_out": "allpass",
"route_reflector_client": True,
"send_community": "both",
"send_comm_ebgp": True,
"send_ext_comm_ebgp": True,
"soo": "100:1",
"soft_reconfiguration": "inbound always",
"allowas_in_as_number": 10,
"allowas_in": True,
"as_override": True
}
}
}
}
}
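# Usage sketch (assumption -- the test harness itself is not shown in this
# file): these class attributes are typically consumed by a Genie Ops unit
# test, where the device connection is mocked to return the raw CLI strings
# below and the learned ops structure is asserted against the parsed
# dictionaries above, along the lines of:
#
#   bgp = Bgp(device=mock_device)
#   bgp.learn()
#   self.assertEqual(bgp.info, BgpOutput.BgpInfo)  # attribute names assumed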
# =====================
# Process Detail Output
# =====================
# 'all all all'
ProcessAllOutput = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all all all process detail
BGP instance 0: 'default'
=========================
BGP Process Information:
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.4.1.1 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
Address family: VPNv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 0
RIB has not converged: version 0
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Maximum supported label-stack depth:
For IPv4 Nexthop: 0
For IPv6 Nexthop: 0
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Number Mem Used
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Remote RDs: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Local RDs: 2 160
Total Prefixes: 0 0
Total Paths: 0 0
Total Path-elems: 0 0
Imported Paths: 0 0
Total RDs: 2 160
Address family: VPNv6 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 0
RIB has not converged: version 0
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Maximum supported label-stack depth:
For IPv4 Nexthop: 0
For IPv6 Nexthop: 0
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Number Mem Used
Remote Prefixes: 0 0
Remote Paths: 0 0
Remote Path-elems: 0 0
Remote RDs: 0 0
Local Prefixes: 0 0
Local Paths: 0 0
Local RDs: 2 160
Total Prefixes: 0 0
Total Paths: 0 0
Total Path-elems: 0 0
Imported Paths: 0 0
Total RDs: 2 160
Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 2
Table version synced to RIB: 2
Table version acked by RIB: 2
IGP notification: IGPs notified
RIB has converged: version 4
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 2
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Total triggers: 0
Import Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.881 0 2 18
Total triggers: 3
RIB Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 4
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.882 1 2 4
Aug 18 12:00:08.881 1 2 6
Total triggers: 5
Update Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.885 2 2 4
Aug 18 12:00:08.885 2 2 18
Aug 18 12:00:08.884 2 2 18
Aug 18 11:55:08.888 1 2 3
Aug 18 11:55:08.883 1 2 9
Total triggers: 6
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 2
Table version synced to RIB: 2
Table version acked by RIB: 2
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 2
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Total triggers: 0
Import Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 0 2 19
Total triggers: 3
RIB Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 1 2 4
Aug 18 12:00:08.882 1 2 6
Total triggers: 4
Update Thread Aug 18 12:00:11.883 2 2 3
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.886 2 2 19
Aug 18 12:00:08.882 1 2 4
Aug 18 11:55:08.888 1 2 3
Aug 18 11:55:08.883 1 2 9
Total triggers: 6
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
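
# A minimal sketch (illustrative only) of how a raw capture such as
# ProcessAllOutput is normally fed to the parser under test. It assumes a
# ShowBgpInstanceProcessDetail class in genie.libs.parser.iosxr.show_bgp
# taking a ``vrf_type`` argument; adjust both to the parser actually tested.
def _example_parse_process_detail():
    from unittest.mock import Mock

    from genie.libs.parser.iosxr.show_bgp import \
        ShowBgpInstanceProcessDetail  # assumed import path

    device = Mock()
    # The parser calls device.execute() with the show command and parses
    # whatever the device returns -- here, the canned output above.
    device.execute = Mock(return_value=ProcessAllOutput)
    return ShowBgpInstanceProcessDetail(device=device).parse(vrf_type='all')
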
# 'all vrf all ipv4 unicast'
ProcessIpv4Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv4 unicast process detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP Process Information: VRF VRF1
BGP Route Distinguisher: 200:1
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF1 Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
IGP notification: IGPs notified
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
VRF: VRF2
---------
BGP Process Information: VRF VRF2
BGP Route Distinguisher: 200:2
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF2 Address family: IPv4 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
IGP notification: IGPs notified
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.887 3 3 18
Aug 18 12:00:08.885 0 3 18
Total triggers: 3
RIB Thread Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.885 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.887 3 3 8
Aug 18 12:00:08.887 3 3 18
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
# 'all vrf all ipv6 unicast'
ProcessIpv6Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv6 unicast process detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP Process Information: VRF VRF1
BGP Route Distinguisher: 200:1
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF1 Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
VRF: VRF2
---------
BGP Process Information: VRF VRF2
BGP Route Distinguisher: 200:2
BGP is operating in STANDALONE mode
Autonomous System number format: ASPLAIN
Autonomous System: 100
Router ID: 10.229.11.11 (manually configured)
Default Cluster ID: 10.4.1.1
Active Cluster IDs: 10.4.1.1
Fast external fallover enabled
Platform RLIMIT max: 2281701376 bytes
Maximum limit for BMP buffer size: 435 MB
Default value for BMP buffer size: 326 MB
Current limit for BMP buffer size: 326 MB
Current utilization of BMP buffer limit: 0 B
Neighbor logging is enabled
Enforce first AS enabled
iBGP to IGP redistribution enabled
Default local preference: 100
Default keepalive: 60
Non-stop routing is enabled
Update delay: 120
Generic scan interval: 60
BGP Speaker process: 3, Node: node0_RSP1_CPU0
Restart count: 32
Total Nbrs Estab/Cfg
Default VRFs: 1 0/3
Non-Default VRFs: 2 0/4
This VRF: 0/2
Sent Received
Updates: 0 0
Notifications: 0 0
Number Memory Used
Attributes: 0 0
AS Paths: 0 0
Communities: 0 0
Large Communities: 0 0
Extended communities: 0 0
PMSI Tunnel attr: 0 0
RIBRNH Tunnel attr: 0 0
PPMP attr: 0 0
Tunnel Encap attr: 0 0
PE distinguisher labels: 0 0
Route Reflector Entries: 0 0
Nexthop Entries: 27 10800
Alloc Free
Pool 200: 0 0
Pool 300: 1 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5000: 0 0
Pool 20000: 0 0
Message logging pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 500: 0 0
Pool 2200: 0 0
Pool 4500: 0 0
BMP pool summary:
Alloc Free
Pool 100: 0 0
Pool 200: 0 0
Pool 300: 0 0
Pool 400: 0 0
Pool 500: 0 0
Pool 600: 0 0
Pool 700: 0 0
Pool 800: 0 0
Pool 900: 0 0
Pool 1200: 0 0
Pool 2200: 0 0
Pool 3300: 0 0
Pool 4000: 0 0
Pool 4500: 0 0
Pool 5500: 0 0
Pool 6500: 0 0
Pool 7500: 0 0
Pool 8500: 0 0
Pool 10000: 0 0
Pool 20000: 0 0
VRF VRF2 Address family: IPv6 Unicast
Dampening is not enabled
Client reflection is not enabled in global config
Dynamic MED is Disabled
Dynamic MED interval : 10 minutes
Dynamic MED Timer : Not Running
Dynamic MED Periodic Timer : Not Running
Scan interval: 60
Total prefixes scanned: 0
Prefixes scanned per segment: 100000
Number of scan segments: 1
Nexthop resolution minimum prefix-length: 0 (not configured)
Main Table Version: 3
Table version synced to RIB: 3
Table version acked by RIB: 3
RIB has converged: version 2
RIB table prefix-limit reached ? [No], version 0
Permanent Network Unconfigured
State: Normal mode.
BGP Table Version: 3
Attribute download: Disabled
Label retention timer value 5 mins
Soft Reconfig Entries: 0
Table bit-field size : 1 Chunk element size : 3
Last 8 Triggers Ver Tbl Ver Trig TID
Label Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 4
Total triggers: 3
Import Thread Aug 18 12:00:11.883 3 3 3
Aug 18 12:00:08.890 3 3 19
Aug 18 12:00:08.882 0 3 19
Total triggers: 3
RIB Thread Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.883 1 3 8
Aug 18 12:00:08.882 1 3 6
Total triggers: 3
Update Thread Aug 18 12:00:11.883 3 3 8
Aug 18 12:00:08.890 3 3 8
Aug 18 12:00:08.890 3 3 19
Total triggers: 3
Allocated Freed
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
Number Mem Used
Prefixes: 0 0
Paths: 0 0
Path-elems: 0 0
BMP Prefixes: 0 0
BMP Paths: 0 0
'''
# =======================
# Neighbors Detail Output
# =======================
# 'all all all'
NeighborsAllOutput = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all all all neighbors detail
BGP instance 0: 'default'
=========================
BGP neighbor is 10.16.2.2
Remote AS 100, local AS 100, internal link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No route to multi-hop neighbor)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: VPNv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 2097152
Threshold for warning message 75%, restart interval 0 min
AIGP is enabled
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Send Multicast Attributes
For Address Family: VPNv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 1048576
Threshold for warning message 75%, restart interval 0 min
AIGP is enabled
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Send Multicast Attributes
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.16.2.2, Foreign port: 0
Last reset 00:00:00
'''
# 'all vrf all ipv4 unicast'
NeighborsIpv4Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv4 unicast neighbors detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP neighbor is 10.1.5.5, vrf VRF1
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 1048576
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.1.5.5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
VRF: VRF2
---------
BGP neighbor is 10.186.5.5, vrf VRF2
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv4 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Inbound soft reconfiguration allowed (override route-refresh)
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 495
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: 0.0.0.0, Local port: 0, IF Handle: 0x00000000
Foreign host: 10.186.5.5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
'''
# 'all vrf all ipv6 unicast'
NeighborsIpv6Output = '''\
RP/0/RSP1/CPU0:PE1#show bgp instance all vrf all ipv6 unicast neighbors detail
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP neighbor is 2001:db8:1:5::5, vrf VRF1
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 524288
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: ::, Local port: 0, IF Handle: 0x00000000
Foreign host: 2001:db8:1:5::5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
VRF: VRF2
---------
BGP neighbor is 2001:db8:20:1:5::5, vrf VRF2
Remote AS 200, local AS 100, external link
Remote router ID 0.0.0.0
Speaker ID 3
BGP state = Idle (No best local address found)
NSR State: None
Last read 00:00:00, Last read before reset 00:00:00
Hold time is 180, keepalive interval is 60 seconds
Configured hold time: 180, keepalive: 60, min acceptable hold time: 3
Last write 00:00:00, attempted 0, written 0
Second last write 00:00:00, attempted 0, written 0
Last write before reset 00:00:00, attempted 0, written 0
Second last write before reset 00:00:00, attempted 0, written 0
Last write pulse rcvd not set last full not set pulse count 0
Last write pulse rcvd before reset 00:00:00
Socket not armed for io, not armed for read, not armed for write
Last write thread event before reset 00:00:00, second last 00:00:00
Last KA expiry before reset 00:00:00, second last 00:00:00
Last KA error before reset 00:00:00, KA not sent 00:00:00
Last KA start before reset 00:00:00, second last 00:00:00
Precedence: internet
Non-stop routing is enabled
Entered Neighbor NSR TCP mode:
TCP Initial Sync : ---
TCP Initial Sync Phase Two : ---
TCP Initial Sync Done : ---
Enforcing first AS is enabled
Multi-protocol capability not received
Message stats:
InQ depth: 0, OutQ depth: 0
Last_Sent Sent Last_Rcvd Rcvd
Open: --- 0 --- 0
Notification: --- 0 --- 0
Update: --- 0 --- 0
Keepalive: --- 0 --- 0
Route_Refresh: --- 0 --- 0
Total: 0 0
Minimum time between advertisement runs is 0 secs
Inbound message logging enabled, 3 messages buffered
Outbound message logging enabled, 3 messages buffered
For Address Family: IPv6 Unicast
BGP neighbor version 0
Update group: 3.1 Filter-group: 3.0 No Refresh request being processed
Route refresh request: received 0, sent 0
Policy for incoming advertisements is all-pass
Policy for outgoing advertisements is all-pass
0 accepted prefixes, 0 are bestpaths
Exact no. of prefixes denied : 0.
Cumulative no. of prefixes denied: 0.
Prefix advertised 0, suppressed 0, withdrawn 0
Maximum prefixes allowed 524288
Threshold for warning message 75%, restart interval 0 min
An EoR was not received during read-only mode
Last ack version 1, Last synced ack version 0
Outstanding version objects: current 0, max 0
Additional-paths operation: None
Advertise routes with local-label via Unicast SAFI
Connections established 0; dropped 0
Local host: ::, Local port: 0, IF Handle: 0x00000000
Foreign host: 2001:db8:20:1:5::5, Foreign port: 0
Last reset 00:00:00
External BGP neighbor not directly connected.
'''
############################################################################
# BGP TABLE
############################################################################
# =============
# AllAll Output
# =============
# 'all all all'
InstanceAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF1)
*> 10.1.1.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 200:2 (default for vrf VRF2)
*> 10.1.1.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 300:1
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 40 prefixes, 55 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 32
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF1)
*> 2001:db8:cdc9:121::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:144::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:169::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:190::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:1b9::/64 2001:db8:1:5::5 2219 0 200 33299 51178 47751 {27016} e
* 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 200:2 (default for vrf VRF2)
*> 2001:db8:cdc9:121::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:144::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:169::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:190::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*> 2001:db8:cdc9:1b9::/64 2001:db8:20:1:5::5
2219 0 200 33299 51178 47751 {27016} e
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 300:1
*>i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:169::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:190::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 25 prefixes, 35 paths
'''
# 'all vrf all ipv4 unicast'
InstanceIpv4Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP VRF VRF1, state: Active
BGP Route Distinguisher: 200:1
VRF ID: 0x60000001
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000010 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF1)
*> 10.1.1.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.1.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 15 prefixes, 20 paths
VRF: VRF2
---------
BGP VRF VRF2, state: Active
BGP Route Distinguisher: 200:2
VRF ID: 0x60000002
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:2 (default for vrf VRF2)
*> 10.1.1.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.2.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.3.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.4.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*> 10.1.5.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
*>i10.205.1.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.3.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.4.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.205.5.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
*>i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.2.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.3.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.4.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
*>i10.169.5.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Processed 15 prefixes, 15 paths
'''
# 'all vrf all ipv6 unicast'
InstanceIpv6Output = '''\
'''
############################################################################
# BGP ROUTES PER PEER
############################################################################
# ==============
# Summary Output
# ==============
# 'all all all'
SummaryAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
BGP is operating in STANDALONE mode.
Process RcvTblVer bRIB/RIB LabelVer ImportVer SendTblVer StandbyVer
Speaker 47 47 47 47 47 0
Neighbor Spk AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down St/PfxRcd
10.16.2.2 0 100 11875 11874 47 0 0 1w1d 10
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 32
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
BGP is operating in STANDALONE mode.
Process RcvTblVer bRIB/RIB LabelVer ImportVer SendTblVer StandbyVer
Speaker 32 32 32 32 32 0
Neighbor Spk AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down St/PfxRcd
10.16.2.2 0 100 11875 11874 32 0 0 1w1d 5
'''
# 'all vrf all ipv4'
SummaryIpv4Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP VRF VRF1, state: Active
BGP Route Distinguisher: 200:1
VRF ID: 0x60000001
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000010 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP is operating in STANDALONE mode.
Process RcvTblVer bRIB/RIB LabelVer ImportVer SendTblVer StandbyVer
Speaker 47 47 47 47 47 0
Neighbor Spk AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down St/PfxRcd
10.1.5.5 0 200 11858 11864 47 0 0 1w1d 5
'''
# 'all vrf all ipv6'
SummaryIpv6Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF2
---------
BGP VRF VRF2, state: Active
BGP Route Distinguisher: 300:1
VRF ID: 0x50000002
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000010 RD version: 47
BGP main routing table version 47
BGP NSR Initial initsync version 5 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP is operating in STANDALONE mode.
Process RcvTblVer bRIB/RIB LabelVer ImportVer SendTblVer StandbyVer
Speaker 47 47 47 47 47 0
Neighbor Spk AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down St/PfxRcd
2001:db8:20:1:5::5
0 200 11858 11864 47 0 0 1w1d 5
'''
# ========================
# Advertised Routes Output
# ========================
# 'all all all'
AdvertisedAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
Network Next Hop From AS Path
Route Distinguisher: 200:1
10.1.1.0/24 10.4.1.1 10.186.5.5 200 33299 51178 47751 {27016}e
10.1.2.0/24 10.4.1.1 10.186.5.5 200 33299 51178 47751 {27016}e
Processed 2 prefixes, 2 paths
Address Family: VPNv6 Unicast
-----------------------------
Network Next Hop From AS Path
Route Distinguisher: 200:1
2001:db8:cdc9:121::/64 10.4.1.1 2001:db8:20:1:5::5
200 33299 51178 47751 {27017}e
2001:db8:cdc9:144::/64 10.4.1.1 2001:db8:20:1:5::5
200 33299 51178 47751 {27016}e
Processed 2 prefixes, 2 paths
'''
# ======================
# Received Routes Output
# ======================
# 'all all all'
ReceivedAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i10.9.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 10 prefixes, 10 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i2001:db8:31b9:1b9::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i2001:db8:a69:484::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
* i2001:db8:a69:4c9::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 10 prefixes, 10 paths
'''
# 'all vrf all ipv4 unicast'
ReceivedIpv4Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF1
---------
BGP VRF VRF1, state: Active
BGP Route Distinguisher: 200:2
VRF ID: 0x60000002
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 63
BGP main routing table version 63
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:1 (default for vrf VRF2)
* 10.1.1.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.1.2.0/24 10.186.5.5 2219 0 200 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
'''
# 'all vrf all ipv6 unicast'
ReceivedIpv6Output = '''\
BGP instance 0: 'default'
=========================
VRF: VRF2
---------
BGP VRF VRF2, state: Active
BGP Route Distinguisher: 200:3
VRF ID: 0x50000006
BGP router identifier 10.229.11.11, local AS number 100
Non-stop routing is enabled
BGP table state: Active
Table ID: 0xe0000011 RD version: 63
BGP main routing table version 63
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 200:2 (default for vrf VRF1)
* 10.34.1.0/24 10.196.5.5 2219 0 200 33299 51178 47751 {27016} e
* 10.34.2.0/24 10.196.5.5 2219 0 200 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
'''
# =============
# Routes Output
# =============
# 'all all all'
RoutesAllOutput = '''\
BGP instance 0: 'default'
=========================
Address Family: VPNv4 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i10.169.1.0/24 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i10.9.2.0/24 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 2 prefixes, 2 paths
Address Family: VPNv6 Unicast
-----------------------------
BGP router identifier 10.4.1.1, local AS number 100
BGP generic scan interval 60 secs
Non-stop routing is enabled
BGP table state: Active
Table ID: 0x0 RD version: 0
BGP main routing table version 43
BGP NSR Initial initsync version 11 (Reached)
BGP NSR/ISSU Sync-Group versions 0/0
BGP scan interval 60 secs
Status codes: s suppressed, d damped, h history, * valid, > best
i - internal, r RIB-failure, S stale, N Nexthop-discard
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 300:1
* i2001:db8:31b9:121::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
* i2001:db8:31b9:144::/64 10.64.4.4 2219 100 0 300 33299 51178 47751 {27016} e
Route Distinguisher: 400:1
* i2001:db8:a69:484::/64 10.64.4.4 2219 100 0 400 33299 51178 47751 {27016} e
Processed 3 prefixes, 3 paths
'''
# ==============
# BGP Ops Output
# ==============
BgpInfo = {
'instance':
{'default':
{'bgp_id': 100,
'peer_policy':
{'af_group':
{'allowas_in': True,
'allowas_in_as_number': 10,
'as_override': True,
'default_originate': True,
'default_originate_route_map': 'allpass',
'maximum_prefix_max_prefix_no': 429,
'maximum_prefix_restart': 35,
'maximum_prefix_threshold': 75,
'next_hop_self': True,
'route_map_name_in': 'allpass',
'route_map_name_out': 'allpass',
'route_reflector_client': True,
'send_community': 'both',
'soft_reconfiguration': 'inbound '
'always',
'soo': '100:1'}},
'peer_session':
{'SG':
{'description': 'SG_group',
'disable_connected_check': True,
'ebgp_multihop_enable': True,
'ebgp_multihop_max_hop': 254,
'fall_over_bfd': True,
'holdtime': 30,
'keepalive_interval': 10,
'local_as_as_no': 200,
'local_dual_as': True,
'local_no_prepend': True,
'local_replace_as': True,
'password_text': '094F471A1A0A464058',
'remote_as': 333,
'shutdown': True,
'suppress_four_byte_as_capability': True,
'transport_connection_mode': 'active-only',
'update_source': 'loopback0'}},
'protocol_state': 'RUNNING',
'vrf':
{'default':
{'neighbor':
{'10.16.2.2':
{'bgp_neighbor_counters':
{'messages':
{'received':
{'notifications': 0,
'updates': 0},
'sent':
{'notifications': 0,
'updates': 0}}},
'bgp_session_transport':
{'transport':
{'foreign_host': '0',
'foreign_port': '10.16.2.2',
'local_host': 'Loopback0',
'local_port': '0'}},
'holdtime': 180,
'remote_as': 100,
'session_state': 'idle'}},
'router_id': '10.4.1.1'}}},
'test':
{'bgp_id': 333},
'test1':
{'bgp_id': 333},
'test2':
{'bgp_id': 333}}}
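
# A rough sketch (illustrative only, heavier on assumptions) of how the
# golden ops structures BgpInfo and BgpTable are consumed: a Bgp ops object
# learns from a device whose show commands are stubbed with the canned
# outputs above, and its learned attributes are asserted against the golden
# dicts. The Bgp import path and the .info/.table attribute names follow
# genie.libs.ops conventions but are assumptions here.
def _example_learn_bgp_ops(device):
    from genie.libs.ops.bgp.iosxr.bgp import Bgp  # assumed import path

    bgp = Bgp(device=device)
    bgp.learn()  # issues the show commands and builds the ops structures
    assert bgp.info == BgpInfo
    assert bgp.table == BgpTable
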
BgpTable = {
'instance':
{'default':
{'vrf':
{'VRF1':
{'address_family':
{'vpnv4 unicast':
{'bgp_table_version': 47,
'local_as': 100},
'vpnv4 unicast RD 200:1':
{'default_vrf': 'vrf1',
'prefixes':
{'10.1.1.0/24':
{'index':
{1:
{'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.2.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.3.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.4.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.5.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:1'}}},
'VRF2': {'address_family': {'vpnv4 unicast': {'bgp_table_version': 47,
'local_as': 100},
'vpnv4 unicast RD 200:2': {'default_vrf': 'vrf2',
'prefixes': {'10.1.1.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.2.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.3.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.4.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.5.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:2'}}},
'default': {'address_family': {'vpnv4 unicast': {'bgp_table_version': 47,
'local_as': 100},
'vpnv4 unicast RD 200:1': {'default_vrf': 'vrf1',
'prefixes': {'10.1.1.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.2.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.3.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.4.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.1.5.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.1.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'},
2: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*',
'weight': '0'}}},
'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:1'},
'vpnv4 unicast RD 200:2': {'default_vrf': 'vrf2',
'prefixes': {'10.1.1.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.2.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.3.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.4.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.1.5.0/24': {'index': {1: {'metric': '2219',
'next_hop': '10.186.5.5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:2'},
'vpnv4 unicast RD 300:1': {'default_vrf': 'none',
'prefixes': {'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.169.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.169.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.169.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.169.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'route_distinguisher': '300:1'},
'vpnv4 unicast RD 400:1': {'default_vrf': 'none',
'prefixes': {'10.205.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.205.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.205.3.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.205.4.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'10.205.5.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'route_distinguisher': '400:1'},
'vpnv6 unicast': {'bgp_table_version': 32,
'local_as': 100},
'vpnv6 unicast RD 200:1': {'default_vrf': 'vrf1',
'prefixes': {'2001:db8:cdc9:144::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:169::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:190::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:1b9::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:121::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:169::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:190::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:1'},
'vpnv6 unicast RD 200:2': {'default_vrf': 'vrf2',
'prefixes': {'2001:db8:cdc9:144::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:169::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:190::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:1b9::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:cdc9:121::/64': {'index': {1: {'metric': '2219',
'next_hop': '2001:db8:20:1:5::5',
'origin_codes': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>',
'weight': '0'}}},
'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:169::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:190::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'}}}},
'route_distinguisher': '200:2'},
'vpnv6 unicast RD 300:1': {'default_vrf': 'none',
'prefixes': {'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:169::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:190::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*>i',
'weight': '0'},
2: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'route_distinguisher': '300:1'}}}}}}}
BgpRoutesPerPeer = {
'instance': {'default': {'vrf': {'VRF1': {'neighbor': {'10.1.5.5': {'address_family': {'vpnv4 unicast': {'input_queue': 0,
'msg_rcvd': 11858,
'msg_sent': 11864,
'output_queue': 0,
'route_distinguisher': '200:1',
'state_pfxrcd': '5',
'tbl_ver': 47,
'up_down': '1w1d'}},
'remote_as': 200}}},
'VRF2': {'neighbor': {'2001:db8:20:1:5::5': {'address_family': {'vpnv6 unicast': {'input_queue': 0,
'msg_rcvd': 11858,
'msg_sent': 11864,
'output_queue': 0,
'route_distinguisher': '300:1',
'state_pfxrcd': '5',
'tbl_ver': 47,
'up_down': '1w1d'}},
'remote_as': 200}}},
'default': {'neighbor': {'10.16.2.2': {'address_family': {'vpnv4 unicast': {'input_queue': 0,
'msg_rcvd': 11875,
'msg_sent': 11874,
'output_queue': 0,
'state_pfxrcd': '10',
'tbl_ver': 47,
'up_down': '1w1d'},
'vpnv4 unicast RD 200:1': {'advertised': {'10.1.1.0/24': {'index': {1: {'froms': '10.186.5.5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}},
'10.1.2.0/24': {'index': {1: {'froms': '10.186.5.5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}}}},
'vpnv4 unicast RD 300:1': {'received_routes': {'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'10.169.1.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv4 unicast RD 400:1': {'received_routes': {'10.9.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'10.9.2.0/24': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv6 unicast': {'input_queue': 0,
'msg_rcvd': 11875,
'msg_sent': 11874,
'output_queue': 0,
'state_pfxrcd': '5',
'tbl_ver': 32,
'up_down': '1w1d'},
'vpnv6 unicast RD 200:1': {'advertised': {'2001:db8:cdc9:144::/64': {'index': {1: {'froms': '2001:db8:20:1:5::5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27016}'}}},
'2001:db8:cdc9:121::/64': {'index': {1: {'froms': '2001:db8:20:1:5::5',
'next_hop': '10.4.1.1',
'origin_code': 'e',
'path': '200 '
'33299 '
'51178 '
'47751 '
'{27017}'}}}}},
'vpnv6 unicast RD 300:1': {'received_routes': {'2001:db8:31b9:1b9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'2001:db8:31b9:144::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:31b9:121::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '300 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}},
'vpnv6 unicast RD 400:1': {'received_routes': {'2001:db8:a69:4c9::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}},
'2001:db8:a69:484::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}},
'routes': {'2001:db8:a69:484::/64': {'index': {1: {'locprf': '100',
'metric': '2219',
'next_hop': '10.64.4.4',
'origin_codes': 'e',
'path': '400 '
'33299 '
'51178 '
'47751 '
'{27016}',
'status_codes': '*i',
'weight': '0'}}}}}},
'remote_as': 100}}}}}}}
# ============
# GET OPER RPC
# ============
class etree_holder():
def __init__(self):
self.data_ele = ET.fromstring('''
<data>
<bgp xmlns="http://openconfig.net/yang/bgp">
<global>
<config>
<as>100</as>
<router-id>10.4.1.1</router-id>
</config>
<state>
<as>100</as>
<router-id>10.4.1.1</router-id>
<total-paths>0</total-paths>
<total-prefixes>0</total-prefixes>
</state>
<afi-safis>
<afi-safi>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<config>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<enabled>true</enabled>
</config>
<state>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<enabled>true</enabled>
<total-paths>0</total-paths>
<total-prefixes>0</total-prefixes>
</state>
</afi-safi>
<afi-safi>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<config>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<enabled>true</enabled>
</config>
<state>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<enabled>true</enabled>
<total-paths>0</total-paths>
<total-prefixes>0</total-prefixes>
</state>
</afi-safi>
</afi-safis>
</global>
<neighbors>
<neighbor>
<neighbor-address>10.16.2.2</neighbor-address>
<config>
<neighbor-address>10.16.2.2</neighbor-address>
<peer-as>100</peer-as>
</config>
<state>
<neighbor-address>10.16.2.2</neighbor-address>
<peer-as>100</peer-as>
<queues>
<input>0</input>
<output>0</output>
</queues>
<session-state>IDLE</session-state>
<messages>
<sent>
<NOTIFICATION>0</NOTIFICATION>
<UPDATE>0</UPDATE>
</sent>
<received>
<NOTIFICATION>0</NOTIFICATION>
<UPDATE>0</UPDATE>
</received>
</messages>
</state>
<transport>
<config>
<local-address>Loopback0</local-address>
</config>
<state>
<local-address>Loopback0</local-address>
<local-port>0</local-port>
<remote-address>10.16.2.2</remote-address>
<remote-port>0</remote-port>
</state>
</transport>
<afi-safis>
<afi-safi>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<config>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<enabled>true</enabled>
</config>
<state>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv4-unicast</afi-safi-name>
<enabled>true</enabled>
<active>false</active>
<prefixes>
<received>0</received>
<sent>0</sent>
</prefixes>
</state>
</afi-safi>
<afi-safi>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<config>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<enabled>true</enabled>
</config>
<state>
<afi-safi-name xmlns:idx="http://openconfig.net/yang/bgp-types">idx:l3vpn-ipv6-unicast</afi-safi-name>
<enabled>true</enabled>
<active>false</active>
<prefixes>
<received>0</received>
<sent>0</sent>
</prefixes>
</state>
</afi-safi>
</afi-safis>
<timers>
<state>
<negotiated-hold-time>180</negotiated-hold-time>
</state>
</timers>
<graceful-restart>
<state>
<peer-restart-time>120</peer-restart-time>
</state>
</graceful-restart>
</neighbor>
</neighbors>
</bgp>
</data>
''')
yang_output = etree_holder()
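# yang_output stands in for the etree reply of a live GET operation, letting
# tests exercise the OpenConfig BGP model parsing without a device connection.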
|
import PIL.Image, PIL.ImageDraw
from roomEditor import RoomEditor, ObjectHorizontal, ObjectVertical, ObjectWarp
class RenderedMap:
WALL_UP = 0x01
WALL_DOWN = 0x02
WALL_LEFT = 0x04
WALL_RIGHT = 0x08
def __init__(self, floor_object, overworld=False):
self.objects = {}
self.overworld = overworld
for y in range(8):
for x in range(10):
self.objects[(x, y)] = floor_object
def addWalls(self, flags):
for x in range(0, 10):
if flags & RenderedMap.WALL_UP:
self.placeObject(x, 0, 0x21)
if flags & RenderedMap.WALL_DOWN:
self.placeObject(x, 7, 0x22)
for y in range(0, 8):
if flags & RenderedMap.WALL_LEFT:
self.placeObject(0, y, 0x23)
if flags & RenderedMap.WALL_RIGHT:
self.placeObject(9, y, 0x24)
if flags & RenderedMap.WALL_LEFT and flags & RenderedMap.WALL_UP:
self.placeObject(0, 0, 0x25)
if flags & RenderedMap.WALL_RIGHT and flags & RenderedMap.WALL_UP:
self.placeObject(9, 0, 0x26)
if flags & RenderedMap.WALL_LEFT and flags & RenderedMap.WALL_DOWN:
self.placeObject(0, 7, 0x27)
if flags & RenderedMap.WALL_RIGHT and flags & RenderedMap.WALL_DOWN:
self.placeObject(9, 7, 0x28)
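        # Example: addWalls(RenderedMap.WALL_UP | RenderedMap.WALL_LEFT) draws the
        # top and left wall rows plus the matching corner tile at (0, 0).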
def placeObject(self, x, y, type_id):
if self.overworld:
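            # 0xF5 is a 2x2 "macro" object: each corner checks what is already
            # placed there and swaps in a merged tile where two copies overlap.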
if type_id == 0xF5:
if self.getObject(x, y) in (0x28, 0x83, 0x90):
self.placeObject(x, y, 0x29)
else:
self.placeObject(x, y, 0x25)
if self.getObject(x + 1, y) in (0x27, 0x82, 0x90):
self.placeObject(x + 1, y, 0x2A)
else:
self.placeObject(x + 1, y, 0x26)
if self.getObject(x, y + 1) in (0x26, 0x2A):
self.placeObject(x, y + 1, 0x2A)
elif self.getObject(x, y + 1) == 0x90:
self.placeObject(x, y + 1, 0x82)
else:
self.placeObject(x, y + 1, 0x27)
if self.getObject(x + 1, y + 1) in (0x25, 0x29):
self.placeObject(x + 1, y + 1, 0x29)
elif self.getObject(x + 1, y + 1) == 0x90:
self.placeObject(x + 1, y + 1, 0x83)
else:
self.placeObject(x + 1, y + 1, 0x28)
elif type_id == 0xF6: # two door house
self.placeObject(x + 0, y, 0x55)
self.placeObject(x + 1, y, 0x5A)
self.placeObject(x + 2, y, 0x5A)
self.placeObject(x + 3, y, 0x5A)
self.placeObject(x + 4, y, 0x56)
self.placeObject(x + 0, y + 1, 0x57)
self.placeObject(x + 1, y + 1, 0x59)
self.placeObject(x + 2, y + 1, 0x59)
self.placeObject(x + 3, y + 1, 0x59)
self.placeObject(x + 4, y + 1, 0x58)
self.placeObject(x + 0, y + 2, 0x5B)
self.placeObject(x + 1, y + 2, 0xE2)
self.placeObject(x + 2, y + 2, 0x5B)
self.placeObject(x + 3, y + 2, 0xE2)
self.placeObject(x + 4, y + 2, 0x5B)
elif type_id == 0xF7: # large house
self.placeObject(x + 0, y, 0x55)
self.placeObject(x + 1, y, 0x5A)
self.placeObject(x + 2, y, 0x56)
self.placeObject(x + 0, y + 1, 0x57)
self.placeObject(x + 1, y + 1, 0x59)
self.placeObject(x + 2, y + 1, 0x58)
self.placeObject(x + 0, y + 2, 0x5B)
self.placeObject(x + 1, y + 2, 0xE2)
self.placeObject(x + 2, y + 2, 0x5B)
elif type_id == 0xF8: # catfish
self.placeObject(x + 0, y, 0xB6)
self.placeObject(x + 1, y, 0xB7)
self.placeObject(x + 2, y, 0x66)
self.placeObject(x + 0, y + 1, 0x67)
self.placeObject(x + 1, y + 1, 0xE3)
self.placeObject(x + 2, y + 1, 0x68)
elif type_id == 0xF9: # palace door
self.placeObject(x + 0, y, 0xA4)
self.placeObject(x + 1, y, 0xA5)
self.placeObject(x + 2, y, 0xA6)
self.placeObject(x + 0, y + 1, 0xA7)
self.placeObject(x + 1, y + 1, 0xE3)
self.placeObject(x + 2, y + 1, 0xA8)
elif type_id == 0xFA: # stone pig head
self.placeObject(x + 0, y, 0xBB)
self.placeObject(x + 1, y, 0xBC)
self.placeObject(x + 0, y + 1, 0xBD)
self.placeObject(x + 1, y + 1, 0xBE)
elif type_id == 0xFB: # palmtree
if x == 15:
self.placeObject(x + 1, y + 1, 0xB7)
self.placeObject(x + 1, y + 2, 0xCE)
else:
self.placeObject(x + 0, y, 0xB6)
self.placeObject(x + 0, y + 1, 0xCD)
self.placeObject(x + 1, y + 0, 0xB7)
self.placeObject(x + 1, y + 1, 0xCE)
elif type_id == 0xFC: # square "hill with hole" (seen near lvl4 entrance)
self.placeObject(x + 0, y, 0x2B)
self.placeObject(x + 1, y, 0x2C)
self.placeObject(x + 2, y, 0x2D)
self.placeObject(x + 0, y + 1, 0x37)
self.placeObject(x + 1, y + 1, 0xE8)
self.placeObject(x + 2, y + 1, 0x38)
self.placeObject(x - 1, y + 2, 0x0A)
self.placeObject(x + 0, y + 2, 0x33)
self.placeObject(x + 1, y + 2, 0x2F)
self.placeObject(x + 2, y + 2, 0x34)
self.placeObject(x + 0, y + 3, 0x0A)
self.placeObject(x + 1, y + 3, 0x0A)
self.placeObject(x + 2, y + 3, 0x0A)
self.placeObject(x + 3, y + 3, 0x0A)
elif type_id == 0xFD: # small house
self.placeObject(x + 0, y, 0x52)
self.placeObject(x + 1, y, 0x52)
self.placeObject(x + 2, y, 0x52)
self.placeObject(x + 0, y + 1, 0x5B)
self.placeObject(x + 1, y + 1, 0xE2)
self.placeObject(x + 2, y + 1, 0x5B)
else:
self.objects[(x & 15), (y & 15)] = type_id
else:
if type_id == 0xEC: # key door
self.placeObject(x, y, 0x2D)
self.placeObject(x + 1, y, 0x2E)
elif type_id == 0xED:
self.placeObject(x, y, 0x2F)
self.placeObject(x + 1, y, 0x30)
elif type_id == 0xEE:
self.placeObject(x, y, 0x31)
self.placeObject(x, y + 1, 0x32)
elif type_id == 0xEF:
self.placeObject(x, y, 0x33)
self.placeObject(x, y + 1, 0x34)
elif type_id == 0xF0: # closed door
self.placeObject(x, y, 0x35)
self.placeObject(x + 1, y, 0x36)
elif type_id == 0xF1:
self.placeObject(x, y, 0x37)
self.placeObject(x + 1, y, 0x38)
elif type_id == 0xF2:
self.placeObject(x, y, 0x39)
self.placeObject(x, y + 1, 0x3A)
elif type_id == 0xF3:
self.placeObject(x, y, 0x3B)
self.placeObject(x, y + 1, 0x3C)
elif type_id == 0xF4: # open door
self.placeObject(x, y, 0x43)
self.placeObject(x + 1, y, 0x44)
elif type_id == 0xF5:
self.placeObject(x, y, 0x8C)
self.placeObject(x + 1, y, 0x08)
elif type_id == 0xF6:
self.placeObject(x, y, 0x09)
self.placeObject(x, y + 1, 0x0A)
elif type_id == 0xF7:
self.placeObject(x, y, 0x0B)
self.placeObject(x, y + 1, 0x0C)
elif type_id == 0xF8: # boss door
self.placeObject(x, y, 0xA4)
self.placeObject(x + 1, y, 0xA5)
elif type_id == 0xF9: # stairs door
self.placeObject(x, y, 0xAF)
self.placeObject(x + 1, y, 0xB0)
elif type_id == 0xFA: # flipwall
self.placeObject(x, y, 0xB1)
self.placeObject(x + 1, y, 0xB2)
elif type_id == 0xFB: # one way arrow
self.placeObject(x, y, 0x45)
self.placeObject(x + 1, y, 0x46)
elif type_id == 0xFC: # entrance
self.placeObject(x + 0, y, 0xB3)
self.placeObject(x + 1, y, 0xB4)
self.placeObject(x + 2, y, 0xB4)
self.placeObject(x + 3, y, 0xB5)
self.placeObject(x + 0, y + 1, 0xB6)
self.placeObject(x + 1, y + 1, 0xB7)
self.placeObject(x + 2, y + 1, 0xB8)
self.placeObject(x + 3, y + 1, 0xB9)
self.placeObject(x + 0, y + 2, 0xBA)
self.placeObject(x + 1, y + 2, 0xBB)
self.placeObject(x + 2, y + 2, 0xBC)
self.placeObject(x + 3, y + 2, 0xBD)
elif type_id == 0xFD: # entrance
self.placeObject(x, y, 0xC1)
self.placeObject(x + 1, y, 0xC2)
else:
self.objects[(x & 15), (y & 15)] = type_id
def getObject(self, x, y):
return self.objects.get(((x & 15), (y & 15)), None)
class MapExport:
def __init__(self, rom):
self.__rom = rom
self.__tiles = {
0x0C: self.getTiles(0x0C),
0x0D: self.getTiles(0x0D),
0x0F: self.getTiles(0x0F),
0x12: self.getTiles(0x12),
}
self.__room_map_info = {}
f = open("test.html", "wt")
result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
for n in range(0x100):
x = n % 0x10
y = n // 0x10
result.paste(self.exportRoom(n), (x * 20 * 8, y * 16 * 8))
result.save("overworld.png")
f.write("<img src='overworld.png'><br><br>")
self.exportMetaTiles(f, "metatiles_main.png", 0x0F, 0, lambda n: n >= 32 and (n < 0x6C or n >= 0x70))
for n in (0x1A, 0x1C, 0x1E, 0x20, 0x22, 0x24, 0x26, 0x28, 0x2A, 0x2C, 0x2E, 0x30, 0x32, 0x34, 0x36, 0x38, 0x3A, 0x3C, 0x3E):
self.exportMetaTiles(f, "metatiles_%02x.png" % (n), n, 0, lambda n: n < 32)
for n in range(2, 17):
self.exportMetaTiles(f, "metatiles_anim_%02x.png" % (n), 0x0F, n, lambda n: n >= 0x6C and n < 0x70)
        for n in (0, 1, 2, 3, 4, 5, 6, 7, 10, 11):
addr = 0x0220 + n * 8 * 8
result = PIL.Image.new("L", (8 * 20 * 8, 8 * 16 * 8))
for y in range(8):
for x in range(8):
room = rom.banks[0x14][addr] + 0x100
if n > 5:
room += 0x100
if n == 11:
room += 0x100
addr += 1
if (room & 0xFF) == 0 and (n != 11 or x != 1 or y != 3): # ignore room nr 0, except on a very specific spot in the color dungeon.
continue
self.__room_map_info[room] = (x, y, n)
result.paste(self.exportRoom(room), (x * 20 * 8, y * 16 * 8))
result.save("dungeon_%d.png" % (n))
f.write("<img src='dungeon_%d.png'><br><br>" % (n))
result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
for n in range(0x100):
if n + 0x100 in self.__room_map_info:
continue
x = n % 0x10
y = n // 0x10
result.paste(self.exportRoom(n + 0x100), (x * 20 * 8, y * 16 * 8))
result.save("caves1.png")
f.write("<img src='caves1.png'><br><br>")
result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
for n in range(0x0FF):
if n + 0x200 in self.__room_map_info:
continue
x = n % 0x10
y = n // 0x10
result.paste(self.exportRoom(n + 0x200), (x * 20 * 8, y * 16 * 8))
result.save("caves2.png")
f.write("<img src='caves2.png'>")
f.close()
def exportMetaTiles(self, f, name, main_set, animation_set, condition_func):
condition = lambda n: condition_func(n) and (n < 0x80 or n >= 0xF0)
metatile_info_offset = self.__rom.banks[0x1A].find(b'\x7C\x7C\x7C\x7C\x7D\x7D\x7D\x7D')
metatile_info = self.__rom.banks[0x1A][metatile_info_offset:metatile_info_offset + 0x100 * 4]
result = PIL.Image.new("L", (16 * 16, 16 * 16))
sub_tileset_offset = main_set * 0x10
tilemap = self.__tiles[0x0f][sub_tileset_offset:sub_tileset_offset+0x20]
tilemap += self.__tiles[0x0c][0x120:0x180]
tilemap += self.__tiles[0x0c][0x080:0x100]
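        # Pick the 4-tile animation strip for this animation_set; the address
        # table mirrors the animation_id mapping used in exportRoom() below.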
addr = (0x000, 0x000, 0x2B0, 0x2C0, 0x2D0, 0x2E0, 0x2F0, 0x2D0, 0x300, 0x310, 0x320, 0x2A0, 0x330, 0x350, 0x360, 0x340, 0x370)[animation_set]
tilemap[0x6C:0x70] = self.__tiles[0x0c][addr:addr+4]
for x in range(16):
for y in range(16):
obj = x + y * 16
if condition(metatile_info[obj*4+0]):
result.paste(tilemap[metatile_info[obj*4+0]], (x*16+0, y*16+0))
if condition(metatile_info[obj*4+1]):
result.paste(tilemap[metatile_info[obj*4+1]], (x*16+8, y*16+0))
if condition(metatile_info[obj*4+2]):
result.paste(tilemap[metatile_info[obj*4+2]], (x*16+0, y*16+8))
if condition(metatile_info[obj*4+3]):
result.paste(tilemap[metatile_info[obj*4+3]], (x*16+8, y*16+8))
result.save(name)
f.write("%s<br><img src='%s'><br><br>" % (name, name))
def exportRoom(self, room_nr):
re = RoomEditor(self.__rom, room_nr)
if room_nr < 0x100:
tile_info_offset = self.__rom.banks[0x1A].find(b'\x7C\x7C\x7C\x7C\x7D\x7D\x7D\x7D')
tile_info = self.__rom.banks[0x1A][tile_info_offset:tile_info_offset + 0x100 * 4]
else:
tile_info_offset = self.__rom.banks[0x08].find(b'\x7F\x7F\x7F\x7F\x7E\x7E\x7E\x7E')
tile_info = self.__rom.banks[0x08][tile_info_offset:tile_info_offset+0x100*4]
if room_nr >= 0x100:
rendered_map = RenderedMap(re.floor_object & 0x0F)
else:
rendered_map = RenderedMap(re.floor_object, True)
def objHSize(type_id):
if type_id == 0xF5:
return 2
return 1
def objVSize(type_id):
if type_id == 0xF5:
return 2
return 1
if room_nr >= 0x100:
if re.floor_object & 0xF0 == 0x00:
rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
if re.floor_object & 0xF0 == 0x10:
rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_DOWN)
if re.floor_object & 0xF0 == 0x20:
rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
if re.floor_object & 0xF0 == 0x30:
rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP)
if re.floor_object & 0xF0 == 0x40:
rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
if re.floor_object & 0xF0 == 0x50:
rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_DOWN)
if re.floor_object & 0xF0 == 0x60:
rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_DOWN)
if re.floor_object & 0xF0 == 0x70:
rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP)
if re.floor_object & 0xF0 == 0x80:
rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_UP)
for obj in re.objects:
if isinstance(obj, ObjectWarp):
pass
elif isinstance(obj, ObjectHorizontal):
for n in range(0, obj.count):
rendered_map.placeObject(obj.x + n * objHSize(obj.type_id), obj.y, obj.type_id)
elif isinstance(obj, ObjectVertical):
for n in range(0, obj.count):
rendered_map.placeObject(obj.x, obj.y + n * objVSize(obj.type_id), obj.type_id)
else:
rendered_map.placeObject(obj.x, obj.y, obj.type_id)
tiles = [0] * 20 * 16
for y in range(8):
for x in range(10):
obj = rendered_map.objects[(x, y)]
tiles[x*2 + y*2*20] = tile_info[obj*4]
tiles[x*2+1 + y*2*20] = tile_info[obj*4+1]
tiles[x*2 + (y*2+1)*20] = tile_info[obj*4+2]
tiles[x*2+1 + (y*2+1)*20] = tile_info[obj*4+3]
if room_nr < 0x100:
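            # Overworld rooms share one tileset id per 2x2 block of rooms,
            # looked up from a table in bank 0x20.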
sub_tileset_offset = self.__rom.banks[0x20][0x2E73 + (room_nr & 0x0F) // 2 + ((room_nr >> 5) * 8)] << 4
tilemap = self.__tiles[0x0f][sub_tileset_offset:sub_tileset_offset+0x20]
tilemap += self.__tiles[0x0c][0x120:0x180]
tilemap += self.__tiles[0x0c][0x080:0x100]
else:
# TODO: The whole indoor tileset loading seems complex...
            tileset_nr = self.__rom.banks[0x20][0x2EB3 + room_nr - 0x100]
tilemap = [None] * 0x100
tilemap[0x20:0x80] = self.__tiles[0x0D][0x000:0x060]
if tileset_nr != 0xFF:
tilemap[0x00:0x10] = self.__tiles[0x0D][0x100 + tileset_nr * 0x10:0x110 + tileset_nr * 0x10]
tilemap[0x10:0x20] = self.__tiles[0x0D][0x210:0x220]
tilemap[0xF0:0x100] = self.__tiles[0x12][0x380:0x390]
            # Map animation_id to the base address of its 4-tile strip; this
            # mirrors the table in exportMetaTiles() above.
            anim_addr = {
                2: 0x2B0, 3: 0x2C0, 4: 0x2D0, 5: 0x2E0, 6: 0x2F0, 7: 0x2D0,
                8: 0x300, 9: 0x310, 10: 0x320, 11: 0x2A0, 12: 0x330,
                13: 0x350, 14: 0x360, 15: 0x340, 16: 0x370,
            }
            addr = anim_addr.get(re.animation_id)
            if addr is None:
                print(hex(room_nr), re.animation_id)
                addr = 0x000
tilemap[0x6C:0x70] = self.__tiles[0x0c][addr:addr+4]
assert len(tilemap) == 0x100
result = PIL.Image.new('L', (8 * 20, 8 * 16))
draw = PIL.ImageDraw.Draw(result)
for y in range(16):
for x in range(20):
tile = tilemap[tiles[x+y*20]]
if tile is not None:
result.paste(tile, (x * 8, y * 8))
        warp_pos = []  # positions of warp tiles; collected but currently unused
for y in range(8):
for x in range(10):
if rendered_map.objects[(x, y)] in (0xE1, 0xE2, 0xE3, 0xBA, 0xD5, 0xA8, 0xBE, 0xCB):
warp_pos.append((x, y))
for x, y, type_id in re.entities:
draw.rectangle([(x * 16, y * 16), (x * 16 + 15, y * 16 + 15)], outline=0)
draw.text((x * 16 + 3, y * 16 + 2), "%02X" % (type_id))
y = 8
for obj in re.objects:
if isinstance(obj, ObjectWarp):
draw.text((8, y), "W%d:%02x:%03x:%d,%d" % (obj.warp_type, obj.map_nr, obj.room, obj.target_x, obj.target_y))
y += 16
return result
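    # Game Boy graphics are stored 2bpp: each 8x8 tile takes 16 bytes, two per
    # row, with `a` holding the low bitplane and `b` the high bitplane of that
    # row. getTiles() decodes every tile in a bank into an 8x8 greyscale PIL
    # image, mapping the four colour indices onto the grey levels 0x3F..0xFF.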
def getTiles(self, bank_nr):
bank = self.__rom.banks[bank_nr]
buffer = bytearray(b'\x00' * 16 * 16)
result = []
for n in range(0, len(bank), 16):
for y in range(8):
a = bank[n + y * 2]
b = bank[n + y * 2 + 1]
for x in range(8):
v = 0x3F
if not a & (0x80 >> x):
v |= 0x40
if not b & (0x80 >> x):
v |= 0x80
buffer[x+y*8] = v
result.append(PIL.Image.frombytes('L', (8, 8), bytes(buffer)))
return result
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .layer_function_generator import generate_layer_fn
__activations__ = [
'sigmoid',
'logsigmoid',
'exp',
'tanh',
'tanh_shrink',
'softshrink',
'sqrt',
'abs',
'ceil',
'floor',
'cos',
'sin',
'round',
'reciprocal',
'square',
'softplus',
'softsign',
'brelu',
'leaky_relu',
'soft_relu',
'elu',
'relu6',
'pow',
'stanh',
'hard_sigmoid',
'swish',
]
__all__ = [
'mean',
'mul',
'scale',
'sigmoid_cross_entropy_with_logits',
'elementwise_add',
'elementwise_div',
'elementwise_sub',
'elementwise_mul',
'elementwise_max',
'elementwise_min',
'elementwise_pow',
'clip',
'clip_by_norm',
'logical_and',
'logical_or',
'logical_xor',
'logical_not',
'uniform_random_batch_size_like',
'gaussian_random',
'gaussian_random_batch_size_like',
'scatter',
'sum',
'slice',
'shape',
'maxout',
] + __activations__
for _OP in set(__all__):
globals()[_OP] = generate_layer_fn(_OP)
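# Each name above is bound to a wrapper generated from the operator definition
# of the same name, so callers can invoke e.g. fluid.layers.mean(...) like a
# hand-written layer function. The helpers below wrap operators whose optional
# attributes need explicit keyword handling.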
__all__ += ["uniform_random"]
_uniform_random_ = generate_layer_fn('uniform_random')
def uniform_random(shape, dtype=None, min=None, max=None, seed=None):
    # Copy locals() before binding anything new (iterating the live locals()
    # mapping while assigning mutates it mid-loop), keeping only the arguments
    # the caller actually supplied.
    kwargs = {name: val for name, val in locals().copy().items() if val is not None}
    return _uniform_random_(**kwargs)
uniform_random.__doc__ = _uniform_random_.__doc__ + """
Examples:
>>> result = fluid.layers.uniform_random(shape=[32, 784])
"""
__all__ += ['hard_shrink']
_hard_shrink_ = generate_layer_fn('hard_shrink')
def hard_shrink(x, threshold=None):
    # Keep only the arguments the caller actually supplied.
    kwargs = {name: val for name, val in locals().copy().items() if val is not None}
    return _hard_shrink_(**kwargs)
hard_shrink.__doc__ = _hard_shrink_.__doc__ + """
Examples:
>>> data = fluid.layers.data(name="input", shape=[784])
>>> result = fluid.layers.hard_shrink(x=data, threshold=0.3)
"""
__all__ += ['cumsum']
_cum_sum_ = generate_layer_fn('cumsum')
def cumsum(x, axis=None, exclusive=None, reverse=None):
    # Keep only the arguments the caller actually supplied.
    kwargs = {name: val for name, val in locals().copy().items() if val is not None}
    return _cum_sum_(**kwargs)
cumsum.__doc__ = _cum_sum_.__doc__ + """
Examples:
>>> data = fluid.layers.data(name="input", shape=[32, 784])
>>> result = fluid.layers.cumsum(data, axis=0)
"""
__all__ += ['thresholded_relu']
_thresholded_relu_ = generate_layer_fn('thresholded_relu')
def thresholded_relu(x, threshold=None):
    # Keep only the arguments the caller actually supplied.
    kwargs = {name: val for name, val in locals().copy().items() if val is not None}
    return _thresholded_relu_(**kwargs)
thresholded_relu.__doc__ = _thresholded_relu_.__doc__ + """
Examples:
>>> data = fluid.layers.data(name="input", shape=[1])
>>> result = fluid.layers.thresholded_relu(data, threshold=0.4)
"""
|
#!/usr/bin/python
import os
import sys
import time
from configparser import ConfigParser
from plexapi.server import PlexServer
config_object = ConfigParser()
config_object.read("/config/config.ini")
server = config_object["PLEXSERVER"]
options = config_object["OPTIONS"]
baseurl = (server["PLEX_URL"])
token = (server["TOKEN"])
plex = PlexServer(baseurl, token)
plexlibrary = (server["FILMSLIBRARY"])
films = plex.library.section(plexlibrary)
ppath = (server["PLEXPATH"])
mpath = (server["MOUNTEDPATH"])
xdays = int(options["CHECK_FILES_HISTORY"])
xsize = 100000000
now = time.time()
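# For every film in the library: translate the Plex-side path to the locally
# mounted path, then run ffmpeg's decoder over any sufficiently large file
# modified within the last CHECK_FILES_HISTORY days and report decode errors.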
for i in films.search():
    movie_dir = os.path.dirname(i.media[0].parts[0].file.replace(ppath, mpath))
    for root, dirs, files in os.walk(movie_dir):
for name in files:
filename = os.path.join(root, name)
if os.stat(filename).st_mtime > now - (xdays * 86400):
if os.stat(filename).st_size > xsize:
print('checking', i.title)
command = "ffmpeg -v error -i \"" + filename + "\" -c:v rawvideo -map 0:1 -f null - 2>&1"
output = os.popen(command).read()
print(output)
                    if 'error' not in output.lower():
print(i.title, 'is OK!')
else:
print('Oh Bugger!', filename, 'is completely buggered')
|
import logging
from blspy import PrivateKey
from mint.cmds.init_funcs import check_keys
from mint.util.keychain import Keychain
from pathlib import Path
from typing import Any, Dict, List, Optional, cast
# Commands that are handled by the KeychainServer
keychain_commands = [
"add_private_key",
"check_keys",
"delete_all_keys",
"delete_key_by_fingerprint",
"get_all_private_keys",
"get_first_private_key",
"get_key_for_fingerprint",
]
log = logging.getLogger(__name__)
KEYCHAIN_ERR_KEYERROR = "key error"
KEYCHAIN_ERR_LOCKED = "keyring is locked"
KEYCHAIN_ERR_NO_KEYS = "no keys present"
KEYCHAIN_ERR_MALFORMED_REQUEST = "malformed request"
class KeychainServer:
"""
Implements a remote keychain service for clients to perform key operations on
"""
def __init__(self):
self._default_keychain = Keychain()
self._alt_keychains = {}
def get_keychain_for_request(self, request: Dict[str, Any]):
"""
Keychain instances can have user and service strings associated with them.
The keychain backends ultimately point to the same data stores, but the user
and service strings are used to partition those data stores. We attempt to
maintain a mapping of user/service pairs to their corresponding Keychain.
"""
keychain = None
user = request.get("kc_user", self._default_keychain.user)
service = request.get("kc_service", self._default_keychain.service)
if user == self._default_keychain.user and service == self._default_keychain.service:
keychain = self._default_keychain
else:
key = (user or "unnamed") + (service or "")
if key in self._alt_keychains:
keychain = self._alt_keychains[key]
else:
keychain = Keychain(user=user, service=service)
self._alt_keychains[key] = keychain
return keychain
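        # Example (illustrative): repeated requests carrying the same kc_user
        # and kc_service resolve to the same Keychain instance, so
        #   server.get_keychain_for_request({"kc_user": "u", "kc_service": "s"})
        # returns the cached object on the second call.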
async def handle_command(self, command, data) -> Dict[str, Any]:
if command == "add_private_key":
return await self.add_private_key(cast(Dict[str, Any], data))
elif command == "check_keys":
return await self.check_keys(cast(Dict[str, Any], data))
elif command == "delete_all_keys":
return await self.delete_all_keys(cast(Dict[str, Any], data))
elif command == "delete_key_by_fingerprint":
return await self.delete_key_by_fingerprint(cast(Dict[str, Any], data))
elif command == "get_all_private_keys":
return await self.get_all_private_keys(cast(Dict[str, Any], data))
elif command == "get_first_private_key":
return await self.get_first_private_key(cast(Dict[str, Any], data))
elif command == "get_key_for_fingerprint":
return await self.get_key_for_fingerprint(cast(Dict[str, Any], data))
return {}
async def add_private_key(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
mnemonic = request.get("mnemonic", None)
passphrase = request.get("passphrase", None)
if mnemonic is None or passphrase is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing mnemonic and/or passphrase"},
}
try:
self.get_keychain_for_request(request).add_private_key(mnemonic, passphrase)
except KeyError as e:
return {
"success": False,
"error": KEYCHAIN_ERR_KEYERROR,
"error_details": {"message": f"The word '{e.args[0]}' is incorrect.'", "word": e.args[0]},
}
return {"success": True}
async def check_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
root_path = request.get("root_path", None)
if root_path is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing root_path"},
}
check_keys(Path(root_path))
return {"success": True}
async def delete_all_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
self.get_keychain_for_request(request).delete_all_keys()
return {"success": True}
async def delete_key_by_fingerprint(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
fingerprint = request.get("fingerprint", None)
if fingerprint is None:
return {
"success": False,
"error": KEYCHAIN_ERR_MALFORMED_REQUEST,
"error_details": {"message": "missing fingerprint"},
}
self.get_keychain_for_request(request).delete_key_by_fingerprint(fingerprint)
return {"success": True}
async def get_all_private_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
all_keys: List[Dict[str, Any]] = []
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
private_keys = self.get_keychain_for_request(request).get_all_private_keys()
for sk, entropy in private_keys:
all_keys.append({"pk": bytes(sk.get_g1()).hex(), "entropy": entropy.hex()})
return {"success": True, "private_keys": all_keys}
async def get_first_private_key(self, request: Dict[str, Any]) -> Dict[str, Any]:
key: Dict[str, Any] = {}
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
sk_ent = self.get_keychain_for_request(request).get_first_private_key()
if sk_ent is None:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
pk_str = bytes(sk_ent[0].get_g1()).hex()
ent_str = sk_ent[1].hex()
key = {"pk": pk_str, "entropy": ent_str}
return {"success": True, "private_key": key}
async def get_key_for_fingerprint(self, request: Dict[str, Any]) -> Dict[str, Any]:
if self.get_keychain_for_request(request).is_keyring_locked():
return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
private_keys = self.get_keychain_for_request(request).get_all_private_keys()
if len(private_keys) == 0:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
fingerprint = request.get("fingerprint", None)
private_key: Optional[PrivateKey] = None
entropy: Optional[bytes] = None
if fingerprint is not None:
            for sk, ent in private_keys:
                if sk.get_g1().get_fingerprint() == fingerprint:
                    private_key = sk
                    entropy = ent
                    break
else:
private_key, entropy = private_keys[0]
if not private_key or not entropy:
return {"success": False, "error": KEYCHAIN_ERR_NO_KEYS}
else:
return {"success": True, "pk": bytes(private_key.get_g1()).hex(), "entropy": entropy.hex()}
|
# Importing packages
import shutil
import os
import csv
# Preprocessing the first dataset: map Pokemon names to types
file = "./Pokemon/data/pokemon_types_names.csv"
with open(file, 'r') as f:
    reader = csv.reader(f)
    next(reader, None)  # Skip the header
    pkm_dict1 = {rows[1]: rows[2] for rows in reader}
for pkm_name, pkm_type in pkm_dict1.items():
    source = './Pokemon/pokemon-generation-one/{pkm_name}/'.format(pkm_name=pkm_name)
    dest = './Pokemon/dataset/{pkm_type}'.format(pkm_type=pkm_type)
    if not os.path.exists(dest):
        os.makedirs(dest)
    for f in os.listdir(source):
        shutil.copy(source + f, dest)
# Preprocessing the second dataset: map Pokedex numbers to types
file = "./Pokemon/data/pokemon_types.csv"
with open(file, 'r') as f:
    reader = csv.reader(f)
    next(reader, None)  # Skip the header
    pkm_dict2 = {rows[0]: rows[1] for rows in reader}
for pkm_no, pkm_type in pkm_dict2.items():
source = './Pokemon/pokemon-images-dataset/{pkm_no}.png'.format(pkm_no=pkm_no)
dest = './Pokemon/dataset/{pkm_type}'.format(pkm_type=pkm_type)
if not os.path.exists(dest):
os.makedirs(dest)
shutil.copy(source, dest)
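# A small sanity-check sketch (not part of the original script; assumes the
# directory layout built above): count the images copied into each type folder.
dataset_root = './Pokemon/dataset'
for pkm_type in sorted(os.listdir(dataset_root)):
    type_dir = os.path.join(dataset_root, pkm_type)
    print('{0}: {1} images'.format(pkm_type, len(os.listdir(type_dir))))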
|
# Copyright 2020-present Kensho Technologies, LLC.
"""Implementing client-side grpc interceptors"""
import functools
import json
import backoff
import grpc
import prometheus_client
CLIENTSIDE_METRICS_HISTO = prometheus_client.Histogram(
"clientside_grpc_endpoint",
"Response time histogram for grpc endpoints from the client-side",
labelnames=("client_name", "server_name", "service", "endpoint"),
)
CLIENTSIDE_ERROR_COUNTER = prometheus_client.Counter(
"clientside_grpc_endpoint_error",
"Clientside exception counts for grpc methods",
labelnames=("client_name", "server_name", "service", "endpoint", "exception"),
)
GRPC_RENDEZVOUS_ERROR = "_Rendezvous"
def get_service_and_method_from_url(method_url):
"""Extract service and method names from the method url string.
Returns strings that are applicable as prometheus metrics and/or labels.
Args:
method_url: string
Returns:
tuple(service_name, method_name)
"""
name_parts = method_url.split("/")
if len(name_parts) != 3 or name_parts[0] != "" or name_parts[1] == "" or name_parts[2] == "":
raise AssertionError("Invalid method name: {}".format(method_url))
return (name_parts[1].replace(".", "_"), name_parts[2].replace(".", "_"))
class GRPCClientGeneralInterceptor(
grpc.UnaryUnaryClientInterceptor,
grpc.StreamUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamStreamClientInterceptor,
):
"""General GRPC client interceptor that intercepts all functions."""
def __init__(self, decorator_fn):
"""Initialize interceptor with a factory function producing decorators."""
super(GRPCClientGeneralInterceptor, self).__init__()
self._decorator_fn = decorator_fn
def _intercept_call(self, continuation, client_call_details, request_or_iterator):
"""Interceptor implementation."""
metadata = _get_metadata_map_from_client_details(client_call_details)
decorator = self._decorator_fn(client_call_details.method, metadata)
if not decorator:
handler = continuation
else:
handler = decorator(continuation)
return handler(client_call_details, request_or_iterator)
def intercept_unary_unary(self, continuation, client_call_details, request):
"""Intercept unary-unary."""
return self._intercept_call(continuation, client_call_details, request)
def intercept_stream_unary(self, continuation, client_call_details, request_iterator):
"""Intercept stream-unary."""
return self._intercept_call(continuation, client_call_details, request_iterator)
def intercept_unary_stream(self, continuation, client_call_details, request):
"""Intercept unary-stream."""
return self._intercept_call(continuation, client_call_details, request)
def intercept_stream_stream(self, continuation, client_call_details, request_iterator):
"""Intercept stream-stream."""
return self._intercept_call(continuation, client_call_details, request_iterator)
class GRPCClientUnaryOutputInterceptor(
grpc.UnaryUnaryClientInterceptor, grpc.StreamUnaryClientInterceptor
):
"""GRPC interceptor that makes intercepts only unary-output grpcs."""
def __init__(self, decorator_fn):
"""Initialize interceptor with a factory function producing decorators."""
super(GRPCClientUnaryOutputInterceptor, self).__init__()
self._decorator_fn = decorator_fn
def _intercept_call(self, continuation, client_call_details, request_or_iterator):
"""Interceptor implementation"""
metadata = _get_metadata_map_from_client_details(client_call_details)
decorator = self._decorator_fn(client_call_details.method, metadata)
if not decorator:
handler = continuation
else:
handler = decorator(continuation)
return handler(client_call_details, request_or_iterator)
def intercept_unary_unary(self, continuation, client_call_details, request):
"""Intercept unary-unary."""
return self._intercept_call(continuation, client_call_details, request)
def intercept_stream_unary(self, continuation, client_call_details, request_iterator):
"""Intercept stream-unary."""
return self._intercept_call(continuation, client_call_details, request_iterator)
class GRPCClientMiddleware(object):
"""Base class for GRPC client-side middleware.
    GRPCMiddleware implementations must provide a get_decorator method:
        def get_decorator(self, method_name, metadata)
    which takes a string method name and a dict of rpc leading metadata, and
    returns a decorator that can be applied to the underlying rpc method.
Additionally:
__init__ is guaranteed to be called before the server is started.
get_interceptors(self) will be called to retrieve all GRPC interceptors
necessary for the middleware. Users may extend this method to include
additional interceptors.
"""
def __init__(self, client_label, server_label, interceptor_class):
"""Initialize"""
super(GRPCClientMiddleware, self).__init__()
self._server_label = server_label
self._client_label = client_label
self._interceptor_class = interceptor_class
@property
def server_label(self):
"""Get server label."""
return self._server_label
@property
def client_label(self):
"""Get client label."""
return self._client_label
def get_interceptors(self):
"""Get a list of interceptors needed by the middleware."""
return [self._interceptor_class(self.get_decorator)]
class ClientSideMetricsMiddleware(GRPCClientMiddleware):
"""GRPC middleware that captures prometheus metrics."""
def __init__(self, client_label, server_label):
"""Initialize"""
super(ClientSideMetricsMiddleware, self).__init__(
client_label, server_label, GRPCClientGeneralInterceptor
)
class Timer(object):
"""Decorator that wraps a function in a prometheus histogram."""
def __init__(self, histogram):
"""Initializes with the histogram object."""
self._histogram = histogram
def __call__(self, fn):
"""Wrap a method with a histogram."""
@functools.wraps(fn)
def wrap(request, context):
"""Inner wrapper."""
with self._histogram.time():
return fn(request, context)
return wrap
def get_decorator(self, method_name, _):
"""Normalize metric name and return decorator that captures metrics."""
service_label, endpoint_label = get_service_and_method_from_url(method_name)
return self.Timer(
CLIENTSIDE_METRICS_HISTO.labels(
client_name=self.client_label,
server_name=self.server_label,
service=service_label,
endpoint=endpoint_label,
)
)
class ClientSideExceptionCountMiddleware(GRPCClientMiddleware):
"""GRPC middleware that captures prometheus metrics for unary outputs."""
def __init__(self, client_label, server_label):
"""Initialize"""
super(ClientSideExceptionCountMiddleware, self).__init__(
client_label, server_label, GRPCClientUnaryOutputInterceptor
)
class Counter(object):
"""Decorator that wraps a function in a exception counter."""
def __init__(self, counter, client_name, server_name, service, endpoint):
"""Initializes with the counter object."""
self._counter = counter
self._client_name = client_name
self._server_name = server_name
self._service = service
self._endpoint = endpoint
def __call__(self, fn):
"""Wrap a method with an exception counter."""
@functools.wraps(fn)
def wrap(request, context):
"""Inner wrapper."""
r = fn(request, context)
if r.exception():
# If we get a Rendezvous error, we want some more information about the type
# of error we are getting. For example, a GRPC timeout error will be labelled as
# exception "_Rendezvous: <StatusCode.DEADLINE_EXCEEDED: 4>". All errors can be
# found at https://grpc.github.io/grpc/python/grpc.html#grpc-status-code
if type(r.exception()).__name__ == GRPC_RENDEZVOUS_ERROR:
exception = GRPC_RENDEZVOUS_ERROR + ": " + repr(r.exception().code())
# No guarantees of status code for other errors--only report error type.
else:
exception = type(r.exception()).__name__
self._counter.labels(
client_name=self._client_name,
server_name=self._server_name,
service=self._service,
endpoint=self._endpoint,
exception=exception,
).inc()
return r
return wrap
def get_decorator(self, method_name, _):
"""Normalize method name and return decorator that captures exceptions"""
service_label, endpoint_label = get_service_and_method_from_url(method_name)
return self.Counter(
CLIENTSIDE_ERROR_COUNTER,
self.client_label,
self.server_label,
service_label,
endpoint_label,
)
class ClientExceptionTranslationMiddlewareUnaryOutput(GRPCClientMiddleware):
"""Translate client exception"""
def __init__(self, client_label, server_label, code_to_exception_class_func):
"""Initialize"""
super(ClientExceptionTranslationMiddlewareUnaryOutput, self).__init__(
client_label, server_label, GRPCClientUnaryOutputInterceptor
)
self._code_to_exception_class_func = code_to_exception_class_func
class Translator(object):
"""Decorator that wraps a function in a exception translator"""
def __init__(self, code_to_exception_class_func):
"""Initializes with the counter object"""
self._code_to_exception_class_func = code_to_exception_class_func
def __call__(self, fn):
"""Wrap a method with an exception counter"""
@functools.wraps(fn)
def wrap(request, context):
"""Execute a function, if an exception is raised, change its type if necessary"""
try:
result = fn(request, context)
if result.code() is grpc.StatusCode.OK:
return result
else:
raise result
except grpc.RpcError as exc:
raise_exception_from_grpc_exception(self._code_to_exception_class_func, exc)
return wrap
def get_decorator(self, method_name, _):
"""Return exception translator decorator"""
return self.Translator(self._code_to_exception_class_func)
class ClientRetryingMiddlewareUnaryOutput(GRPCClientMiddleware):
"""Translate client exception"""
def __init__(self, client_label, server_label, exceptions_to_retry, max_retries):
"""Initialize"""
super(ClientRetryingMiddlewareUnaryOutput, self).__init__(
client_label, server_label, GRPCClientUnaryOutputInterceptor
)
self._exceptions_to_retry = exceptions_to_retry
self._max_retries = max_retries
class Retrier(object):
"""Decorator that wraps a function in a exception translator"""
def __init__(self, exceptions_to_retry, max_retries):
"""Initializes with the counter object"""
self._exceptions_to_retry = exceptions_to_retry
self._max_retries = max_retries
def __call__(self, fn):
"""Wrap a method with an exception counter"""
return backoff.on_exception(backoff.expo, self._exceptions_to_retry, self._max_retries)(
fn
)
def get_decorator(self, method_name, _):
"""Return exception translator decorator"""
return self.Retrier(self._exceptions_to_retry, self._max_retries)
def raise_exception_from_grpc_exception(code_to_exception_class_func, exc):
"""Raise exception from exc, translating with code_to_exception_class_func"""
code = None
details = "[]" # Details are expected to be jsondeserializable
if exc.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
raise TimeoutError()
elif exc.code() == grpc.StatusCode.UNIMPLEMENTED:
raise NotImplementedError()
elif exc.code() == grpc.StatusCode.UNAVAILABLE:
raise ConnectionRefusedError()
for key, value in exc.trailing_metadata():
if key == "error_code":
try:
code = int(value)
except (TypeError, ValueError):
pass
elif key == "error_details":
details = value
if code_to_exception_class_func:
exception_class = code_to_exception_class_func(code)
if exception_class:
exception_args = json.loads(details)
raise exception_class(*exception_args)
raise exc
def _get_metadata_map_from_client_details(client_call_details):
"""Get metadata key->value map from client_call_details"""
metadata = {metadatum[0]: metadatum[1] for metadatum in (client_call_details.metadata or [])}
return metadata
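# A minimal wiring sketch (the target address and labels are made up): build
# interceptors from the middlewares above and wrap a client channel with them.
if __name__ == "__main__":
    middlewares = [
        ClientSideMetricsMiddleware("example-client", "example-server"),
        ClientSideExceptionCountMiddleware("example-client", "example-server"),
    ]
    interceptors = [
        interceptor
        for middleware in middlewares
        for interceptor in middleware.get_interceptors()
    ]
    # grpc.intercept_channel composes the interceptors around a base channel.
    channel = grpc.intercept_channel(
        grpc.insecure_channel("localhost:50051"), *interceptors
    )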
|
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import copy
import urllib
import subprocess
from flask import current_app
from string import ascii_letters
from flask import request, make_response
from flask_restful.reqparse import Argument
from flask_restful.reqparse import RequestParser
from contextlib import contextmanager
from manager_rest import manager_exceptions, config
from manager_rest.constants import REST_SERVICE_NAME
from manager_rest.storage.models_states import VisibilityState
try:
from cloudify_premium.ha import node_status
except ImportError:
node_status = {'initialized': False}
states_except_private = copy.deepcopy(VisibilityState.STATES)
states_except_private.remove('private')
VISIBILITY_EXCEPT_PRIVATE = states_except_private
@contextmanager
def skip_nested_marshalling():
request.__skip_marshalling = True
yield
delattr(request, '__skip_marshalling')
def get_json_and_verify_params(params=None):
params = params or []
if request.content_type != 'application/json':
raise manager_exceptions.UnsupportedContentTypeError(
'Content type must be application/json')
request_dict = request.json
is_params_dict = isinstance(params, dict)
def is_optional(param_name):
return is_params_dict and params[param_name].get('optional', False)
def check_type(param_name):
return is_params_dict and params[param_name].get('type', None)
for param in params:
if param not in request_dict:
if is_optional(param):
continue
raise manager_exceptions.BadParametersError(
'Missing {0} in json request body'.format(param))
param_type = check_type(param)
if param_type and not isinstance(request_dict[param], param_type):
raise manager_exceptions.BadParametersError(
'{0} parameter is expected to be of type {1} but is of type '
'{2}'.format(param,
param_type.__name__,
type(request_dict[param]).__name__))
return request_dict
def get_args_and_verify_arguments(arguments):
request_parser = RequestParser()
for argument in arguments:
argument.location = 'args'
request_parser.args.append(argument)
return request_parser.parse_args()
def verify_and_convert_bool(attribute_name, str_bool):
if isinstance(str_bool, bool):
return str_bool
if str_bool.lower() == 'true':
return True
if str_bool.lower() == 'false':
return False
raise manager_exceptions.BadParametersError(
'{0} must be <true/false>, got {1}'.format(attribute_name, str_bool))
def convert_to_int(value):
try:
return int(value)
except Exception:
raise manager_exceptions.BadParametersError(
'invalid parameter, should be int, got: {0}'.format(value))
def make_streaming_response(res_id, res_path, content_length, archive_type):
response = make_response()
response.headers['Content-Description'] = 'File Transfer'
response.headers['Cache-Control'] = 'no-cache'
response.headers['Content-Type'] = 'application/octet-stream'
response.headers['Content-Disposition'] = \
'attachment; filename={0}.{1}'.format(res_id, archive_type)
response.headers['Content-Length'] = content_length
response.headers['X-Accel-Redirect'] = res_path
response.headers['X-Accel-Buffering'] = 'yes'
return response
def set_restart_task(delay=1):
current_app.logger.info('Restarting the rest service')
cmd = 'sleep {0}; sudo systemctl restart {1}' \
.format(delay, REST_SERVICE_NAME)
subprocess.Popen(cmd, shell=True)
def validate_inputs(input_dict):
for input_name, input_value in input_dict.iteritems():
prefix = 'The `{0}` argument'.format(input_name)
if not input_value:
raise manager_exceptions.BadParametersError(
'{0} is empty'.format(prefix)
)
if len(input_value) > 256:
raise manager_exceptions.BadParametersError(
'{0} is too long. Maximum allowed length is 256 '
'characters'.format(prefix)
)
# urllib.quote changes all chars except alphanumeric chars and _-.
quoted_value = urllib.quote(input_value, safe='')
if quoted_value != input_value:
raise manager_exceptions.BadParametersError(
'{0} contains illegal characters. Only letters, digits and the'
' characters "-", "." and "_" are allowed'.format(prefix)
)
if input_value[0] not in ascii_letters:
raise manager_exceptions.BadParametersError(
'{0} must begin with a letter'.format(prefix)
)
def validate_and_decode_password(password):
if not password:
raise manager_exceptions.BadParametersError('The password is empty')
if len(password) > 256:
raise manager_exceptions.BadParametersError(
'The password is too long. Maximum allowed length is 256 '
'characters'
)
if len(password) < 5:
raise manager_exceptions.BadParametersError(
'The password is too short. Minimum allowed length is 5 '
'characters'
)
return password
def is_clustered():
return node_status.get('initialized')
def verify_role(role_name, is_system_role=False):
"""Make sure that role name is present in the system.
:param role_name: Role name to validate against database content.
:param is_system_role: True if system_role, False if tenant_role
:raises: BadParametersError when role is not found in the system or is
not from the right type
"""
expected_role_type = 'system_role' if is_system_role else 'tenant_role'
# Get role by name
role = next(
(
r
for r in config.instance.authorization_roles
if r['name'] == role_name
),
None
)
# Role not found
if role is None:
valid_roles = [
r['name']
for r in config.instance.authorization_roles
if r['type'] in (expected_role_type, 'any')
]
raise manager_exceptions.BadParametersError(
'Invalid role: `{0}`. Valid {1} roles are: {2}'
.format(role_name, expected_role_type, valid_roles)
)
# Role type doesn't match
if role['type'] not in (expected_role_type, 'any'):
raise manager_exceptions.BadParametersError(
'Role `{0}` is a {1} and cannot be assigned as a {2}'
.format(role_name, role['type'], expected_role_type)
)
def request_use_all_tenants():
return verify_and_convert_bool('all_tenants',
request.args.get('_all_tenants', False))
def get_visibility_parameter(optional=False,
is_argument=False,
valid_values=VISIBILITY_EXCEPT_PRIVATE):
if is_argument:
args = get_args_and_verify_arguments(
[Argument('visibility', type=unicode, default=None)]
)
visibility = args.visibility
else:
request_dict = get_json_and_verify_params({
'visibility': {'optional': optional, 'type': unicode}
})
visibility = request_dict.get('visibility', None)
if visibility is not None and visibility not in valid_values:
raise manager_exceptions.BadParametersError(
"Invalid visibility: `{0}`. Valid visibility's values are: {1}"
.format(visibility, valid_values)
)
return visibility
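# A hedged usage sketch (the handler below is hypothetical): declaring
# required and optional JSON body parameters for get_json_and_verify_params.
def _example_create_handler():
    request_dict = get_json_and_verify_params({
        'name': {'type': unicode},                 # required string
        'description': {'optional': True},         # optional, any type
        'count': {'optional': True, 'type': int},  # optional integer
    })
    return request_dict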
|
"""Ensemble plan manually split by type moode/theme."""
import json
from dbispipeline.evaluators import FixedSplitEvaluator
from dbispipeline.evaluators import ModelCallbackWrapper
import numpy as np
from sklearn.pipeline import Pipeline
from mediaeval2021 import common
from mediaeval2021.dataloaders.melspectrograms import MelSpectPickleLoader
from mediaeval2021.models.ensemble import Ensemble
from mediaeval2021.models.wrapper import TorchWrapper
dataloader = MelSpectPickleLoader('data/mediaeval2020/melspect_1366.pickle')
label_splits = [
np.arange(0, 14, 1),
np.arange(14, 28, 1),
np.arange(28, 42, 1),
np.arange(42, 56, 1),
]
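# The 56 mood/theme labels are partitioned into four contiguous blocks of 14;
# judging by the label_splits argument, the Ensemble below fits one copy of
# the base estimator per block.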
pipeline = Pipeline([
('model',
Ensemble(
base_estimator=TorchWrapper(
model_name='CNN',
dataloader=dataloader,
batch_size=64,
),
label_splits=label_splits,
epochs=100,
)),
])
evaluator = ModelCallbackWrapper(
FixedSplitEvaluator(**common.fixed_split_params()),
lambda model: common.store_prediction(model, dataloader),
)
result_handlers = [
lambda results: print(json.dumps(results, indent=4)),
]
|
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
from gdbclientutils import *
class TestGDBRemoteLoad(GDBRemoteTestBase):
def setUp(self):
super(TestGDBRemoteLoad, self).setUp()
self._initial_platform = lldb.DBG.GetSelectedPlatform()
def tearDown(self):
lldb.DBG.SetSelectedPlatform(self._initial_platform)
super(TestGDBRemoteLoad, self).tearDown()
def test_module_load_address(self):
"""Test that setting the load address of a module uses virtual addresses"""
target = self.createTarget("a.yaml")
process = self.connect(target)
module = target.GetModuleAtIndex(0)
self.assertTrue(module.IsValid())
self.assertTrue(target.SetModuleLoadAddress(module, 0).Success())
address = target.ResolveLoadAddress(0x2001)
self.assertTrue(address.IsValid())
self.assertEqual(".data", address.GetSection().GetName())
def test_ram_load(self):
"""Test loading an object file to a target's ram"""
target = self.createTarget("a.yaml")
process = self.connect(target)
self.dbg.HandleCommand("target modules load -l -s0")
self.assertPacketLogContains([
"M1000,4:c3c3c3c3",
"M1004,2:3232"
])
@skipIfXmlSupportMissing
def test_flash_load(self):
"""Test loading an object file to a target's flash memory"""
class Responder(MockGDBServerResponder):
def qSupported(self, client_supported):
return "PacketSize=3fff;QStartNoAckMode+;qXfer:memory-map:read+"
def qXferRead(self, obj, annex, offset, length):
if obj == "memory-map":
return (self.MEMORY_MAP[offset:offset + length],
offset + length < len(self.MEMORY_MAP))
return None, False
def other(self, packet):
if packet[0:11] == "vFlashErase":
return "OK"
if packet[0:11] == "vFlashWrite":
return "OK"
if packet == "vFlashDone":
return "OK"
return ""
MEMORY_MAP = """<?xml version="1.0"?>
<memory-map>
<memory type="ram" start="0x0" length="0x1000"/>
<memory type="flash" start="0x1000" length="0x1000">
<property name="blocksize">0x100</property>
</memory>
<memory type="ram" start="0x2000" length="0x1D400"/>
</memory-map>
"""
self.server.responder = Responder()
target = self.createTarget("a.yaml")
process = self.connect(target)
self.dbg.HandleCommand("target modules load -l -s0")
self.assertPacketLogContains([
"vFlashErase:1000,100",
"vFlashWrite:1000:\xc3\xc3\xc3\xc3",
"vFlashWrite:1004:\x32\x32",
"vFlashDone"
])
|
"""createbulk
This module illustrates how to connect to MySQL and load bulk data in python
using the mysql-connector-python library.
"""
import mysql.connector
from faker import Faker
# Create a connection to MySQL
db = mysql.connector.connect(option_files="../sql-user/my.ini")
cursor = db.cursor()
# Create Faker instance
fake = Faker()
# Define the query template and the parameters to submit with it
sql = """INSERT INTO
dataengineering.people (name, age, street, city, state, zip, lng, lat)
VALUES
(%(name)s, %(age)s, %(street)s, %(city)s, %(state)s, %(zip)s, %(lng)s, %(lat)s);
"""
params = [
{'name': fake.name(),
'age': fake.random_int(min=18, max=80, step=1),
'street': fake.street_address(),
'city': fake.city(),
'state': fake.state(),
'zip': fake.zipcode(),
'lng': fake.longitude(),
'lat': fake.latitude(),
}
for _ in range(1000)
]
# Execute queries
cursor.executemany(sql, params)
print("Row count: {0}".format(cursor.rowcount))
db.commit()
db.close()
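# A quick verification sketch (assumes the table and config file above are
# still available): re-open the connection and count the committed rows.
db = mysql.connector.connect(option_files="../sql-user/my.ini")
cursor = db.cursor()
cursor.execute("SELECT COUNT(*) FROM dataengineering.people")
print("Row count in table: {0}".format(cursor.fetchone()[0]))
db.close()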
|
# -*- coding: utf-8 -*-
import os
import pickle
from autobahn.twisted.util import sleep
from mdstudio.deferred.chainable import chainable
from mdstudio.component.session import ComponentSession
from mdstudio.runner import main
from mdstudio_workflow import Workflow
class LIEPredictionWorkflow(ComponentSession):
"""
This workflow will perform a binding affinity prediction for CYP 1A2 with
applicability domain analysis using the Linear Interaction Energy (LIE)
method as described in:
Capoferri L, Verkade-Vreeker MCA, Buitenhuis D, Commandeur JNM, Pastor M,
Vermeulen NPE, et al. (2015) "Linear Interaction Energy Based Prediction
of Cytochrome P450 1A2 Binding Affinities with Reliability Estimation."
PLoS ONE 10(11): e0142232. https://doi.org/10.1371/journal.pone.0142232
The workflow uses data from the pre-calibrated CYP1A2 model created using
the eTOX ALLIES Linear Interaction Energy pipeline (liemodel parameter).
Pre-calculated molecular dynamics trajectory LIE energy values are
available for bound and unbound ligand cases (bound_trajectory,
unbound_trajectory respectively)
"""
def authorize_request(self, uri, claims):
"""
Microservice specific authorization method.
Will always be called when the service first tries to register with the
broker. It returns True (= authorized) by default.
"""
return True
@chainable
def on_run(self):
# Ligand to make prediction for
ligand = 'O1[C@@H](CCC1=O)CCC'
ligand_format = 'smi'
liemodel = os.path.join(os.getcwd(), '1A2_model')
# CYP1A2 pre-calibrated model
modelpicklefile = os.path.join(liemodel, 'params.pkl')
        with open(modelpicklefile, 'rb') as pkl:
            modelfile = pickle.load(pkl)
unbound_trajectory = os.path.join(os.getcwd(), "unbound_trajectory.ene")
bound_trajectory = [os.path.join(os.getcwd(), "bound_trajectory.ene")]
decompose_files = [os.path.join(os.getcwd(), "decompose_dataframe.ene")]
# Build Workflow
wf = Workflow(project_dir='./lie_prediction')
wf.task_runner = self
# STAGE 5. PYLIE FILTERING, AD ANALYSIS AND BINDING-AFFINITY PREDICTION
# Collect Gromacs bound and unbound MD energy trajectories in a dataframe
t18 = wf.add_task('Create mdframe',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.collect_energy_trajectories')
t18.set_input(unbound_trajectory=unbound_trajectory,
bound_trajectory=bound_trajectory,
lie_vdw_header="Ligand-Ligenv-vdw",
lie_ele_header="Ligand-Ligenv-ele")
# Determine stable regions in MDFrame and filter
t19 = wf.add_task('Detect stable regions',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.filter_stable_trajectory')
t19.set_input(do_plot=True,
minlength=45,
workdir='/tmp/mdstudio/lie_pylie')
wf.connect_task(t18.nid, t19.nid, 'mdframe')
# Extract average LIE energy values from the trajectory
t20 = wf.add_task('LIE averages',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.calculate_lie_average')
wf.connect_task(t19.nid, t20.nid, filtered_mdframe='mdframe')
# Calculate dG using pre-calibrated model parameters
t21 = wf.add_task('Calc dG',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.liedeltag')
t21.set_input(alpha_beta_gamma=modelfile['LIE']['params'])
wf.connect_task(t20.nid, t21.nid, 'averaged', averaged='dataframe')
# Applicability domain: 1. Tanimoto similarity with training set
        t22 = wf.add_task('AD1 tanimoto similarity',
task_type='WampTask',
uri='mdgroup.mdstudio_structures.endpoint.chemical_similarity')
t22.set_input(test_set=[ligand], mol_format=ligand_format, reference_set=modelfile['AD']['Tanimoto']['smi'],
ci_cutoff=modelfile['AD']['Tanimoto']['Furthest'])
wf.connect_task(t18.nid, t22.nid)
# Applicability domain: 2. residue decomposition
t23 = wf.add_task('AD2 residue decomposition',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.adan_residue_decomp',
inline_files=False)
t23.set_input(model_pkl=modelpicklefile, decompose_files=decompose_files)
wf.connect_task(t18.nid, t23.nid)
# Applicability domain: 3. deltaG energy range
t24 = wf.add_task('AD3 dene yrange',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.adan_dene_yrange')
t24.set_input(ymin=modelfile['AD']['Yrange']['min'],
ymax=modelfile['AD']['Yrange']['max'])
wf.connect_task(t21.nid, t24.nid, 'liedeltag_file', liedeltag_file='dataframe')
# Applicability domain: 4. deltaG energy distribution
t25 = wf.add_task('AD4 dene distribution',
task_type='WampTask',
uri='mdgroup.lie_pylie.endpoint.adan_dene')
t25.set_input(model_pkl=modelpicklefile,
center=list(modelfile['AD']['Dene']['Xmean']),
ci_cutoff=modelfile['AD']['Dene']['Maxdist'])
wf.connect_task(t21.nid, t25.nid, 'liedeltag_file', liedeltag_file='dataframe')
wf.run()
while wf.is_running:
yield sleep(1)
if __name__ == "__main__":
main(LIEPredictionWorkflow, auto_reconnect=False, daily_log=False)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. py:currentmodule:: test_direct_metric_tensor
:synopsis: Tests for the module :py:mod:`direct_metric_tensor`
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
Tests for the module :py:mod:`direct_metric_tensor`.
"""
###############################################################################
# Copyright 2017 Hendrix Demers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Standard library modules.
import unittest
# Third party modules.
# Local modules.
# Project modules.
import electrondiffraction.crystallography.direct_metric_tensor as direct_metric_tensor
# Globals and constants variables.
class Test_direct_metric_tensor(unittest.TestCase):
"""
    TestCase class for the module :py:mod:`direct_metric_tensor`.
"""
def setUp(self):
"""
Setup method.
"""
unittest.TestCase.setUp(self)
def tearDown(self):
"""
Teardown method.
"""
unittest.TestCase.tearDown(self)
def testSkeleton(self):
"""
First test to check if the testcase is working with the testing framework.
"""
#self.fail("Test if the testcase is working.")
        self.assertTrue(True)
if __name__ == '__main__': # pragma: no cover
import nose
nose.runmodule()
|
import setuptools
setuptools.setup(name="librespot",
version="0.0.1",
description="Open Source Spotify Client",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="kokarare1212",
url="https://github.com/kokarare1212/librespot-python",
license="Apache-2.0",
packages=setuptools.find_packages("."),
install_requires=[
"defusedxml", "protobuf", "pycryptodomex", "pyogg",
"requests", "websocket-client", "zeroconf"
],
classifiers=[
"Development Status :: 1 - Planning",
"License :: OSI Approved :: Apache Software License",
"Topic :: Multimedia :: Sound/Audio"
])
|
# BSD 3-Clause License
# Copyright (c) 2017, Federico T.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Sparse inverse covariance selection over time via ADMM.
More information can be found in the paper linked at:
https://arxiv.org/abs/1703.01958
"""
from __future__ import division
import warnings
import numpy as np
from scipy import linalg
from six.moves import map, range, zip
from sklearn.covariance import empirical_covariance, log_likelihood
from sklearn.utils.extmath import squared_norm
from sklearn.utils.validation import check_X_y
from regain.covariance.graphical_lasso_ import GraphicalLasso, logl
from regain.norm import l1_od_norm
from regain.prox import prox_logdet, soft_thresholding
from regain.update_rules import update_rho
from regain.utils import convergence, error_norm_time
from regain.validation import check_norm_prox
def loss(S, K, n_samples=None):
"""Loss function for time-varying graphical lasso."""
if n_samples is None:
n_samples = np.ones(S.shape[0])
return sum(
-ni * logl(emp_cov, precision)
for emp_cov, precision, ni in zip(S, K, n_samples))
def objective(n_samples, S, K, Z_0, Z_1, Z_2, alpha, beta, psi):
"""Objective function for time-varying graphical lasso."""
obj = loss(S, K, n_samples=n_samples)
if isinstance(alpha, np.ndarray):
obj += sum(l1_od_norm(a * z) for a, z in zip(alpha, Z_0))
else:
obj += alpha * sum(map(l1_od_norm, Z_0))
if isinstance(beta, np.ndarray):
obj += sum(b[0][0] * m for b, m in zip(beta, map(psi, Z_2 - Z_1)))
else:
obj += beta * sum(map(psi, Z_2 - Z_1))
return obj
def init_precision(emp_cov, mode='empirical'):
if isinstance(mode, np.ndarray):
return mode.copy()
if mode == 'empirical':
n_times, _, n_features = emp_cov.shape
covariance_ = emp_cov.copy()
covariance_ *= 0.95
K = np.empty_like(emp_cov)
for i, (c, e) in enumerate(zip(covariance_, emp_cov)):
c.flat[::n_features + 1] = e.flat[::n_features + 1]
K[i] = linalg.pinvh(c)
    elif mode == 'zeros':
        K = np.zeros_like(emp_cov)
    else:
        raise ValueError("Unknown initialization mode: {}".format(mode))
    return K
def time_graphical_lasso(
emp_cov, alpha=0.01, rho=1, beta=1, max_iter=100, n_samples=None,
verbose=False, psi='laplacian', tol=1e-4, rtol=1e-4,
return_history=False, return_n_iter=True, mode='admm',
compute_objective=True, stop_at=None, stop_when=1e-4,
update_rho_options=None, init='empirical'):
"""Time-varying graphical lasso solver.
Solves the following problem via ADMM:
min sum_{i=1}^T -n_i log_likelihood(S_i, K_i) + alpha*||K_i||_{od,1}
+ beta sum_{i=2}^T Psi(K_i - K_{i-1})
where S_i = (1/n_i) X_i^T \times X_i is the empirical covariance of data
matrix X (training observations by features).
Parameters
----------
    emp_cov : ndarray, shape (n_times, n_features, n_features)
        Empirical covariance of data at each time point.
alpha, beta : float, optional
Regularisation parameter.
rho : float, optional
Augmented Lagrangian parameter.
max_iter : int, optional
Maximum number of iterations.
n_samples : ndarray
Number of samples available for each time point.
tol : float, optional
Absolute tolerance for convergence.
rtol : float, optional
Relative tolerance for convergence.
return_history : bool, optional
Return the history of computed values.
return_n_iter : bool, optional
Return the number of iteration before convergence.
verbose : bool, default False
Print info at each iteration.
update_rho_options : dict, optional
Arguments for the rho update.
See regain.update_rules.update_rho function for more information.
compute_objective : bool, default True
Choose to compute the objective value.
    init : {'empirical', 'zeros', ndarray}
Choose how to initialize the precision matrix, with the inverse
empirical covariance, zero matrix or precomputed.
Returns
-------
K : numpy.array, 3-dimensional (T x d x d)
Solution to the problem for each time t=1...T .
history : list
If return_history, then also a structure that contains the
objective value, the primal and dual residual norms, and tolerances
for the primal and dual residual norms at each iteration.
"""
psi, prox_psi, psi_node_penalty = check_norm_prox(psi)
Z_0 = init_precision(emp_cov, mode=init)
Z_1 = Z_0.copy()[:-1] # np.zeros_like(emp_cov)[:-1]
Z_2 = Z_0.copy()[1:] # np.zeros_like(emp_cov)[1:]
U_0 = np.zeros_like(Z_0)
U_1 = np.zeros_like(Z_1)
U_2 = np.zeros_like(Z_2)
Z_0_old = np.zeros_like(Z_0)
Z_1_old = np.zeros_like(Z_1)
Z_2_old = np.zeros_like(Z_2)
    # divisor for consensus variables; the first and last time points
    # take part in one fewer consensus constraint
divisor = np.full(emp_cov.shape[0], 3, dtype=float)
divisor[0] -= 1
divisor[-1] -= 1
if n_samples is None:
n_samples = np.ones(emp_cov.shape[0])
checks = [
convergence(
obj=objective(
n_samples, emp_cov, Z_0, Z_0, Z_1, Z_2, alpha, beta, psi))
]
for iteration_ in range(max_iter):
# update K
A = Z_0 - U_0
A[:-1] += Z_1 - U_1
A[1:] += Z_2 - U_2
A /= divisor[:, None, None]
# soft_thresholding_ = partial(soft_thresholding, lamda=alpha / rho)
# K = np.array(map(soft_thresholding_, A))
A += A.transpose(0, 2, 1)
A /= 2.
A *= -rho * divisor[:, None, None] / n_samples[:, None, None]
A += emp_cov
K = np.array(
[
prox_logdet(a, lamda=ni / (rho * div))
for a, div, ni in zip(A, divisor, n_samples)
])
# update Z_0
A = K + U_0
A += A.transpose(0, 2, 1)
A /= 2.
Z_0 = soft_thresholding(A, lamda=alpha / rho)
# other Zs
A_1 = K[:-1] + U_1
A_2 = K[1:] + U_2
if not psi_node_penalty:
prox_e = prox_psi(A_2 - A_1, lamda=2. * beta / rho)
Z_1 = .5 * (A_1 + A_2 - prox_e)
Z_2 = .5 * (A_1 + A_2 + prox_e)
else:
Z_1, Z_2 = prox_psi(
np.concatenate((A_1, A_2), axis=1), lamda=.5 * beta / rho,
rho=rho, tol=tol, rtol=rtol, max_iter=max_iter)
# update residuals
U_0 += K - Z_0
U_1 += K[:-1] - Z_1
U_2 += K[1:] - Z_2
# diagnostics, reporting, termination checks
rnorm = np.sqrt(
squared_norm(K - Z_0) + squared_norm(K[:-1] - Z_1) +
squared_norm(K[1:] - Z_2))
snorm = rho * np.sqrt(
squared_norm(Z_0 - Z_0_old) + squared_norm(Z_1 - Z_1_old) +
squared_norm(Z_2 - Z_2_old))
obj = objective(
n_samples, emp_cov, Z_0, K, Z_1, Z_2, alpha, beta, psi) \
if compute_objective else np.nan
# if np.isinf(obj):
# Z_0 = Z_0_old
# break
check = convergence(
obj=obj,
rnorm=rnorm,
snorm=snorm,
e_pri=np.sqrt(K.size + 2 * Z_1.size) * tol + rtol * max(
np.sqrt(
squared_norm(Z_0) + squared_norm(Z_1) + squared_norm(Z_2)),
np.sqrt(
squared_norm(K) + squared_norm(K[:-1]) +
squared_norm(K[1:]))),
e_dual=np.sqrt(K.size + 2 * Z_1.size) * tol + rtol * rho *
np.sqrt(squared_norm(U_0) + squared_norm(U_1) + squared_norm(U_2)),
# precision=Z_0.copy()
)
Z_0_old = Z_0.copy()
Z_1_old = Z_1.copy()
Z_2_old = Z_2.copy()
if verbose:
print(
"obj: %.4f, rnorm: %.4f, snorm: %.4f,"
"eps_pri: %.4f, eps_dual: %.4f" % check[:5])
checks.append(check)
if stop_at is not None:
if abs(check.obj - stop_at) / abs(stop_at) < stop_when:
break
if check.rnorm <= check.e_pri and check.snorm <= check.e_dual:
break
rho_new = update_rho(
rho, rnorm, snorm, iteration=iteration_,
**(update_rho_options or {}))
# scaled dual variables should be also rescaled
U_0 *= rho / rho_new
U_1 *= rho / rho_new
U_2 *= rho / rho_new
rho = rho_new
# assert is_pos_def(Z_0)
else:
warnings.warn("Objective did not converge.")
covariance_ = np.array([linalg.pinvh(x) for x in Z_0])
return_list = [Z_0, covariance_]
if return_history:
return_list.append(checks)
if return_n_iter:
return_list.append(iteration_ + 1)
return return_list
class TimeGraphicalLasso(GraphicalLasso):
"""Sparse inverse covariance estimation with an l1-penalized estimator.
Parameters
----------
alpha : positive float, default 0.01
Regularization parameter for precision matrix. The higher alpha,
the more regularization, the sparser the inverse covariance.
beta : positive float, default 1
Regularization parameter to constrain precision matrices in time.
The higher beta, the more regularization,
and consecutive precision matrices in time are more similar.
psi : {'laplacian', 'l1', 'l2', 'linf', 'node'}, default 'laplacian'
Type of norm to enforce for consecutive precision matrices in time.
rho : positive float, default 1
Augmented Lagrangian parameter.
    over_relax : positive float, default 1
Over-relaxation parameter (typically between 1.0 and 1.8).
tol : positive float, default 1e-4
Absolute tolerance to declare convergence.
rtol : positive float, default 1e-4
Relative tolerance to declare convergence.
max_iter : integer, default 100
The maximum number of iterations.
verbose : boolean, default False
If verbose is True, the objective function, rnorm and snorm are
printed at each iteration.
assume_centered : boolean, default False
If True, data are not centered before computation.
Useful when working with data whose mean is almost, but not exactly
zero.
If False, data are centered before computation.
time_on_axis : {'first', 'last'}, default 'first'
If data have time as the last dimension, set this to 'last'.
Useful to use scikit-learn functions as train_test_split.
update_rho_options : dict, default None
Options for the update of rho. See `update_rho` function for details.
compute_objective : boolean, default True
Choose if compute the objective function during iterations
(only useful if `verbose=True`).
init : {'empirical', 'zeros', ndarray}, default 'empirical'
        How to initialise the inverse covariance matrix. The default is to
        take the empirical covariance and invert it.
Attributes
----------
covariance_ : array-like, shape (n_times, n_features, n_features)
Estimated covariance matrix
precision_ : array-like, shape (n_times, n_features, n_features)
Estimated precision matrix.
n_iter_ : int
Number of iterations run.
"""
def __init__(
self, alpha=0.01, beta=1., mode='admm', rho=1., tol=1e-4,
rtol=1e-4, psi='laplacian', max_iter=100, verbose=False,
assume_centered=False, return_history=False,
update_rho_options=None, compute_objective=True, stop_at=None,
stop_when=1e-4, suppress_warn_list=False, init='empirical'):
super(TimeGraphicalLasso, self).__init__(
alpha=alpha, rho=rho, tol=tol, rtol=rtol, max_iter=max_iter,
verbose=verbose, assume_centered=assume_centered, mode=mode,
update_rho_options=update_rho_options,
compute_objective=compute_objective, init=init)
self.beta = beta
self.psi = psi
self.return_history = return_history
self.stop_at = stop_at
self.stop_when = stop_when
self.suppress_warn_list = suppress_warn_list
def get_observed_precision(self):
"""Getter for the observed precision matrix.
Returns
-------
precision_ : array-like,
The precision matrix associated to the current covariance object.
"""
return self.get_precision()
def _fit(self, emp_cov, n_samples):
"""Fit the TimeGraphicalLasso model to X.
Parameters
----------
emp_cov : ndarray, shape (n_time, n_features, n_features)
Empirical covariance of data.
"""
out = time_graphical_lasso(
emp_cov, alpha=self.alpha, rho=self.rho, beta=self.beta,
mode=self.mode, n_samples=n_samples, tol=self.tol, rtol=self.rtol,
psi=self.psi, max_iter=self.max_iter, verbose=self.verbose,
return_n_iter=True, return_history=self.return_history,
update_rho_options=self.update_rho_options,
compute_objective=self.compute_objective, stop_at=self.stop_at,
stop_when=self.stop_when, init=self.init)
if self.return_history:
self.precision_, self.covariance_, self.history_, self.n_iter_ = \
out
else:
self.precision_, self.covariance_, self.n_iter_ = out
return self
def fit(self, X, y):
"""Fit the TimeGraphicalLasso model to X.
Parameters
----------
X : ndarray, shape = (n_samples * n_times, n_dimensions)
Data matrix.
y : ndarray, shape = (n_times,)
Indicate the temporal belonging of each sample.
"""
# Covariance does not make sense for a single feature
X, y = check_X_y(
X, y, accept_sparse=False, dtype=np.float64, order="C",
ensure_min_features=2, estimator=self)
n_dimensions = X.shape[1]
self.classes_, n_samples = np.unique(y, return_counts=True)
n_times = self.classes_.size
# n_samples = np.array([x.shape[0] for x in X])
if self.assume_centered:
self.location_ = np.zeros((n_times, n_dimensions))
else:
self.location_ = np.array(
[X[y == cl].mean(0) for cl in self.classes_])
emp_cov = np.array(
[
empirical_covariance(
X[y == cl], assume_centered=self.assume_centered)
for cl in self.classes_
])
return self._fit(emp_cov, n_samples)
def score(self, X, y):
"""Computes the log-likelihood of a Gaussian data set with
`self.covariance_` as an estimator of its covariance matrix.
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Test data of which we compute the likelihood, where n_samples is
the number of samples and n_features is the number of features.
            X is assumed to be drawn from the same distribution as
            the data used in fit (including centering).
y : array-like, shape = (n_samples,)
Class of samples.
Returns
-------
res : float
The likelihood of the data set with `self.covariance_` as an
estimator of its covariance matrix.
"""
# Covariance does not make sense for a single feature
X, y = check_X_y(
X, y, accept_sparse=False, dtype=np.float64, order="C",
ensure_min_features=2, estimator=self)
# compute empirical covariance of the test set
test_cov = np.array(
[
empirical_covariance(
X[y == cl] - self.location_[i], assume_centered=True)
for i, cl in enumerate(self.classes_)
])
res = sum(
X[y == cl].shape[0] * log_likelihood(S, K) for S, K, cl in zip(
test_cov, self.get_observed_precision(), self.classes_))
return res
def error_norm(
self, comp_cov, norm='frobenius', scaling=True, squared=True):
"""Compute the Mean Squared Error between two covariance estimators.
(In the sense of the Frobenius norm).
Parameters
----------
comp_cov : array-like, shape = [n_features, n_features]
The covariance to compare with.
norm : str
The type of norm used to compute the error. Available error types:
- 'frobenius' (default): sqrt(tr(A^t.A))
            - 'spectral': sqrt(max(eigenvalues(A^t.A)))
where A is the error ``(comp_cov - self.covariance_)``.
scaling : bool
If True (default), the squared error norm is divided by n_features.
If False, the squared error norm is not rescaled.
squared : bool
Whether to compute the squared error norm or the error norm.
If True (default), the squared error norm is returned.
If False, the error norm is returned.
Returns
-------
The Mean Squared Error (in the sense of the Frobenius norm) between
`self` and `comp_cov` covariance estimators.
"""
return error_norm_time(
self.covariance_, comp_cov, norm=norm, scaling=scaling,
squared=squared)
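# A minimal usage sketch on synthetic data (all names below are local to this
# example): samples are labelled by time point through y.
if __name__ == "__main__":
    np.random.seed(0)
    n_times, n_per_time, n_features = 3, 50, 5
    X = np.random.randn(n_times * n_per_time, n_features)
    y = np.repeat(np.arange(n_times), n_per_time)
    tgl = TimeGraphicalLasso(alpha=0.1, beta=1., max_iter=50).fit(X, y)
    print(tgl.precision_.shape)  # (n_times, n_features, n_features)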
|
# -*- coding: utf-8 -*-
"""
Causal graph for the random utility model in Ben-Akiva et al. (2002).
References
----------
Ben-Akiva, Moshe, Joan Walker, Adriana T. Bernardino, Dinesh A. Gopinath,
Taka Morikawa, and Amalia Polydoropoulou. "Integration of choice and latent
variable models." Perpetual motion: Travel behaviour research opportunities and
application challenges (2002): 431-470.
"""
import graphviz
RUM_GRAPH = graphviz.Digraph("Random Utility Maximization")
# Add all nodes to the graph
# Use square nodes for observed variables and circular nodes for unobserved
RUM_GRAPH.node("X", "Explanatory Variables", shape="box")
RUM_GRAPH.node("U", "Utility", shape="ellipse")
RUM_GRAPH.node("C", "Choice", shape="box")
# Create the graphical chain
RUM_GRAPH.edge("X", "U")
RUM_GRAPH.edge("U", "C")
|
import numpy as np
import pandas as pd
import pytest
from pytest import importorskip
from evalml.model_family import ModelFamily
from evalml.pipelines.components import ProphetRegressor
from evalml.problem_types import ProblemTypes
prophet = importorskip("prophet", reason="Skipping test because prophet not installed")
def test_model_family():
assert ProphetRegressor.model_family == ModelFamily.PROPHET
def test_cmdstanpy_backend():
m = prophet.Prophet(stan_backend="CMDSTANPY")
assert m.stan_backend.get_type() == "CMDSTANPY"
def test_problem_types():
assert set(ProphetRegressor.supported_problem_types) == {
ProblemTypes.TIME_SERIES_REGRESSION
}
def test_init_with_other_params():
clf = ProphetRegressor(
daily_seasonality=True,
mcmc_samples=5,
interval_width=0.8,
uncertainty_samples=0,
)
assert clf.parameters == {
"changepoint_prior_scale": 0.05,
"daily_seasonality": True,
"date_index": None,
"holidays_prior_scale": 10,
"interval_width": 0.8,
"mcmc_samples": 5,
"seasonality_mode": "additive",
"seasonality_prior_scale": 10,
"uncertainty_samples": 0,
"stan_backend": "CMDSTANPY",
}
def test_feature_importance(ts_data):
X, y = ts_data
clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
clf.fit(X, y)
    assert np.array_equal(clf.feature_importance, np.zeros(1))
def test_get_params(ts_data):
clf = ProphetRegressor()
assert clf.get_params() == {
"changepoint_prior_scale": 0.05,
"date_index": None,
"seasonality_prior_scale": 10,
"holidays_prior_scale": 10,
"seasonality_mode": "additive",
"stan_backend": "CMDSTANPY",
}
def test_fit_predict_ts_with_X_index(ts_data):
X, y = ts_data
assert isinstance(X.index, pd.DatetimeIndex)
p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="ds")
p_clf.fit(prophet_df)
y_pred_p = p_clf.predict(prophet_df)["yhat"]
clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
clf.fit(X, y)
y_pred = clf.predict(X)
    assert np.array_equal(y_pred_p.values, y_pred.values)
def test_fit_predict_ts_with_y_index(ts_data):
X, y = ts_data
X = X.reset_index(drop=True)
assert isinstance(y.index, pd.DatetimeIndex)
p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="ds")
p_clf.fit(prophet_df)
y_pred_p = p_clf.predict(prophet_df)["yhat"]
clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
clf.fit(X, y)
y_pred = clf.predict(X, y)
    assert np.array_equal(y_pred_p.values, y_pred.values)
def test_fit_predict_ts_no_X(ts_data):
y = pd.Series(
range(1, 32), name="dates", index=pd.date_range("2020-10-01", "2020-10-31")
)
p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
prophet_df = ProphetRegressor.build_prophet_df(
X=pd.DataFrame(), y=y, date_column="ds"
)
p_clf.fit(prophet_df)
y_pred_p = p_clf.predict(prophet_df)["yhat"]
clf = ProphetRegressor(uncertainty_samples=False, changepoint_prior_scale=2.0)
clf.fit(X=None, y=y)
y_pred = clf.predict(X=None, y=y)
    assert np.array_equal(y_pred_p.values, y_pred.values)
def test_fit_predict_date_col(ts_data):
X = pd.DataFrame(
{
"features": range(100),
"these_dates": pd.date_range("1/1/21", periods=100),
"more_dates": pd.date_range("7/4/1987", periods=100),
}
)
y = pd.Series(np.random.randint(1, 5, 100), name="y")
clf = ProphetRegressor(
date_index="these_dates", uncertainty_samples=False, changepoint_prior_scale=2.0
)
clf.fit(X, y)
y_pred = clf.predict(X)
p_clf = prophet.Prophet(uncertainty_samples=False, changepoint_prior_scale=2.0)
prophet_df = ProphetRegressor.build_prophet_df(X=X, y=y, date_column="these_dates")
p_clf.fit(prophet_df)
y_pred_p = p_clf.predict(prophet_df)["yhat"]
    assert np.array_equal(y_pred_p.values, y_pred.values)
def test_fit_predict_no_date_col_or_index(ts_data):
X, y = ts_data
X = X.reset_index(drop=True)
y = y.reset_index(drop=True)
assert not isinstance(X.index, pd.DatetimeIndex)
assert not isinstance(y.index, pd.DatetimeIndex)
clf = ProphetRegressor()
with pytest.raises(
ValueError,
match="Prophet estimator requires input data X to have a datetime column",
):
clf.fit(X, y)
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify SConsignFile() when used with dbhash.
"""
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
try:
import dbm.bsd
except ImportError:
test.skip_test('No dbhash in this version of Python; skipping test.\n')
test.subdir('subdir')
test.write('build.py', r"""
import sys
contents = open(sys.argv[2], 'rb').read()
file = open(sys.argv[1], 'wb')
file.write(contents)
file.close()
sys.exit(0)
""")
#
test.write('SConstruct', """
import sys
import dbhash
SConsignFile('.sconsign', dbhash)
B = Builder(action = r'%(_python_)s build.py $TARGETS $SOURCES')
env = Environment(BUILDERS = { 'B' : B })
env.B(target = 'f1.out', source = 'f1.in')
env.B(target = 'f2.out', source = 'f2.in')
env.B(target = 'subdir/f3.out', source = 'subdir/f3.in')
env.B(target = 'subdir/f4.out', source = 'subdir/f4.in')
""" % locals())
test.write('f1.in', "f1.in\n")
test.write('f2.in', "f2.in\n")
test.write(['subdir', 'f3.in'], "subdir/f3.in\n")
test.write(['subdir', 'f4.in'], "subdir/f4.in\n")
test.run()
test.must_exist(test.workpath('.sconsign'))
test.must_not_exist(test.workpath('.sconsign.dblite'))
test.must_not_exist(test.workpath('subdir', '.sconsign'))
test.must_not_exist(test.workpath('subdir', '.sconsign.dblite'))
test.must_match('f1.out', "f1.in\n")
test.must_match('f2.out', "f2.in\n")
test.must_match(['subdir', 'f3.out'], "subdir/f3.in\n")
test.must_match(['subdir', 'f4.out'], "subdir/f4.in\n")
test.up_to_date(arguments = '.')
test.must_exist(test.workpath('.sconsign'))
test.must_not_exist(test.workpath('.sconsign.dblite'))
test.must_not_exist(test.workpath('subdir', '.sconsign'))
test.must_not_exist(test.workpath('subdir', '.sconsign.dblite'))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
"""
Distance functions
==================
Distance functions measure closeness of observed and sampled data. This
module implements various commonly used distance functions for ABC, featuring
a few advanced concepts.
For custom distance functions, either pass a plain function to ABCSMC or
subclass the pyabc.Distance class.
"""
from .base import (
Distance,
NoDistance,
IdentityFakeDistance,
AcceptAllDistance,
SimpleFunctionDistance,
to_distance)
from .distance import (
PNormDistance,
AdaptivePNormDistance,
ZScoreDistance,
PCADistance,
MinMaxDistance,
PercentileDistance,
RangeEstimatorDistance,
DistanceWithMeasureList)
from .scales import (
median_absolute_deviation,
mean_absolute_deviation,
standard_deviation,
bias,
root_mean_square_deviation,
median_absolute_deviation_to_observation,
mean_absolute_deviation_to_observation,
combined_median_absolute_deviation,
combined_mean_absolute_deviation,
standard_deviation_to_observation)
__all__ = [
# base
"Distance",
"NoDistance",
"IdentityFakeDistance",
"AcceptAllDistance",
"SimpleFunctionDistance",
"to_distance",
# distance
"PNormDistance",
"AdaptivePNormDistance",
"ZScoreDistance",
"PCADistance",
"MinMaxDistance",
"PercentileDistance",
"RangeEstimatorDistance",
"DistanceWithMeasureList",
# scales
"median_absolute_deviation",
"mean_absolute_deviation",
"standard_deviation",
"bias",
"root_mean_square_deviation",
"median_absolute_deviation_to_observation",
"mean_absolute_deviation_to_observation",
"combined_median_absolute_deviation",
"combined_mean_absolute_deviation",
"standard_deviation_to_observation"
]
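# A minimal custom-distance sketch, following the module docstring above: a
# plain function over summary-statistics dicts can be passed to ABCSMC and is
# wrapped via to_distance / SimpleFunctionDistance. The key "data" is a
# hypothetical summary-statistic name.
def example_distance(x, x_0):
    """Absolute difference between simulated and observed summary statistics."""
    return abs(x["data"] - x_0["data"])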
|
__author__ = 'efelix'
# ----------------------------------------------------------------------------------------------------------------------
from rdkit.Chem import AllChem
from beaker import app
from bottle import request
from beaker.core_apps.D2Coords.impl import _ctab22D, _smiles22D, _is3D
from beaker.utils.io import _parseFlag
# ----------------------------------------------------------------------------------------------------------------------
def ctab22DView(data, params):
kwargs = dict()
kwargs['loadMol'] = _parseFlag(params.get('loadMol', True))
kwargs['useRDKitChemistry'] = _parseFlag(params.get('useRDKitChemistry', False))
return _ctab22D(data, **kwargs)
# ----------------------------------------------------------------------------------------------------------------------
def is3DView(data, params):
kwargs = dict()
kwargs['loadMol'] = _parseFlag(params.get('loadMol', True))
kwargs['useRDKitChemistry'] = _parseFlag(params.get('useRDKitChemistry', False))
return _is3D(data, **kwargs)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/ctab22D', method=['OPTIONS', 'POST'], name="ctab22D")
def ctab22D():
"""
Generate 2D coordinates for a molecule using Schrodinger's coordgen.
CTAB is either single molfile or SDF file.
cURL examples:
curl -X POST --data-binary @no_coords.mol ${BEAKER_ROOT_URL}ctab22D
curl -X POST -F "file=@no_coords.mol" ${BEAKER_ROOT_URL}ctab22D
"""
data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
return ctab22DView(data, request.params)
# ----------------------------------------------------------------------------------------------------------------------
def smiles22DView(data, params):
kwargs = dict()
kwargs['computeCoords'] = False
kwargs['delimiter'] = params.get('delimiter', ' ')
kwargs['smilesColumn'] = int(params.get('smilesColumn', 0))
kwargs['nameColumn'] = int(params.get('nameColumn', 1))
kwargs['sanitize'] = _parseFlag(params.get('sanitize', True))
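    # If the caller did not specify titleLine, infer it from the payload:
    # treat the first line as a header only when it starts with "SMILES Name".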
if params.get('titleLine') is None and not data.startswith(b'SMILES Name'):
kwargs['titleLine'] = False
else:
kwargs['titleLine'] = _parseFlag(params.get('titleLine', True))
return _smiles22D(data, **kwargs)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/smiles22D', method=['OPTIONS', 'POST'], name="smiles22D")
def smiles22D():
"""
Generate 2D coordinates from SMILES using Schrodinger's coordgen.
    Input is a SMILES file, with or without a header line.
cURL examples:
curl -X POST --data-binary @aspirin_with_header.smi ${BEAKER_ROOT_URL}smiles22D
curl -X POST -F "file=@aspirin_with_header.smi" ${BEAKER_ROOT_URL}smiles22D
curl -X POST --data-binary @aspirin_no_header.smi ${BEAKER_ROOT_URL}smiles22D
curl -X POST -F "file=@aspirin_no_header.smi" ${BEAKER_ROOT_URL}smiles22D
"""
data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
return smiles22DView(data, request.params)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/is3D', method=['OPTIONS', 'POST'], name="is3D")
def is3D():
"""
    Check if the molecule has any 3D coordinates.
CTAB is either single molfile or SDF file.
cURL examples:
    curl -X POST --data-binary @no_coords.mol ${BEAKER_ROOT_URL}is3D
    curl -X POST -F "file=@no_coords.mol" ${BEAKER_ROOT_URL}is3D
"""
data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
return is3DView(data, request.params)
# ----------------------------------------------------------------------------------------------------------------------
|
if __name__ == "__main__":
    import os
    import sys
    import subprocess
    import XMLReduction
    # Usage: python2 main.py name.pdf 8.5 font
    #
    # The font argument is needed to filter out text that matches the target in
    # every other regard except font; fonts that match up to a trailing
    # "+<number>" suffix must still be accepted.
    # Handles IndexError and IOError (see the except clause below).
    #
    # Notes from debugging:
    # - There is a chicken-and-egg problem: the size is needed to extract the
    #   text, but the XML is needed to determine the size.
    # - Originally the abstract's font and size were being inspected instead of
    #   the body text's; the abstract itself was extracted well regardless.
    # - Font matching did not work as hoped, so it is currently always true.
    # - The paragraph following "recipients in each discipline (7)." was being
    #   dropped: a delta R^2 near the end of its block used 9pt and 5pt fonts,
    #   so the entire block was rejected. Fix: accept a block when a large
    #   majority (90%, perhaps 99%) of it matches the target font and size.
    # - That fix is in place, plus process_textbox stops once failures
    #   outnumber passes; all wanted text is now included and it runs fast.
    # - Remaining issue: a small amount of unwanted text still appears. A debug
    #   variant of xml_reduce prints each character's bbox (effectively a
    #   unique id); it traced a stray "l" on line 138 of sample2.txt to a
    #   valid, non-self-closing textbox containing a single text tag. As the
    #   "first" text tag it is exempt from the font/size match, so it slipped
    #   through. Fix: have process_textbox require a minimum number of text
    #   tags before accepting a textbox.
    # - Testing the Eagly article: mostly works, but a textbox paragraph that
    #   should appear just before the paragraph starting on line 875 was
    #   rejected. Running xml_reduce on that textbox alone (via the
    #   interpreter) accepts it, so earlier state must matter; this needs to be
    #   replicated to rule out a fluke.
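    # A minimal sketch (hypothetical helper, not the actual XMLReduction API)
    # of the majority-vote heuristic described above: accept a block when at
    # least `threshold` of its text tags match the target font and size.
    def _accept_block(tag_matches, threshold=0.90):
        if not tag_matches:
            return False
        passes = sum(1 for match in tag_matches if match)
        return passes / float(len(tag_matches)) >= threshold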
    try:
        if not sys.argv[1].endswith(".pdf"): sys.exit()
        pdf_file_name = sys.argv[1]
        xml_file_name = pdf_file_name[:(len(pdf_file_name) - 4)] + ".xml"
        pdfminer_args = ["./bin/pdf2txt.py", "-o", xml_file_name,
                         pdf_file_name]
        # TODO: check the return code and handle pdf2txt failures
        subprocess.call(pdfminer_args)
        # TODO: handle the error if the open does not succeed
        file = open(xml_file_name, "r")
        # TODO: handle the error if the size argument is not a number
        size = float(sys.argv[2])
        font = sys.argv[3]
        text = XMLReduction.xml_reduce(file, size, font)
        #text = XMLReduction.xml_reduce_debug(file, size, font)
        print text
        #os.remove(xml_file_name)  # commented out while fixing XMLReduction
    except (IndexError, IOError):
        print "Please provide a .pdf file to be reduced, the size of the \
text to be extracted, and its font name."
|
import numpy as np
from simtk import openmm, unit
from simtk.openmm import app
from openmmtools.testsystems import WaterBox
from openmmtools.integrators import ExternalPerturbationLangevinIntegrator
print('OpenMM version: ', openmm.version.full_version)
# Using one CPU thread
import os
os.environ['OPENMM_CPU_THREADS'] = '1'
# Long range method
nonbonded_method = 'CutoffPeriodic'
# Creating a waterbox
wbox = WaterBox(box_edge=21.0*unit.angstrom , nonbondedMethod=getattr(app, nonbonded_method))
wbox.system.addForce(openmm.MonteCarloBarostat(1*unit.atmospheres, 300*unit.kelvin))
# Extracting the nonbonded force (looked up by type rather than by a fixed index)
non_bonded_force = next(f for f in wbox.system.getForces() if isinstance(f, openmm.NonbondedForce))
# The integrator to perform the equilibrium dynamics
integrator = ExternalPerturbationLangevinIntegrator(temperature=300*unit.kelvin, collision_rate=50.0 / unit.picosecond, timestep=1.0 * unit.femtosecond)
# Creating the context
platform = openmm.Platform.getPlatformByName('CPU')
context = openmm.Context(wbox.system, integrator, platform)
context.setPositions(wbox.positions)
# Running some equilibrium dynamics
integrator.step(100)
# The number of NCMC type iterations and NCMC steps per iteration.
niterations = 20
ncmc_steps = 10
internal_work = np.zeros(niterations)
external_work = np.zeros(niterations)
# Whether to call updateParametersInContext. If True, the assertion below will fail.
update_parameters = True
# A model of NCMC without perturbation but using updateParametersInContext
for i in range(niterations):
#integrator.reset_protocol_work()
#integrator.setGlobalVariableByName('first_step',0)
integrator.setGlobalVariableByName('protocol_work',0)
for s in range(ncmc_steps):
integrator.step(1)
initial_external_energy = context.getState(getEnergy=True).getPotentialEnergy() / unit.kilojoule_per_mole
###---- Not perturbing the system but updating parameters anyway----###
if update_parameters:
non_bonded_force.updateParametersInContext(context)
final_external_energy = context.getState(getEnergy=True).getPotentialEnergy() / unit.kilojoule_per_mole
integrator.step(1)
internal_work[i] = integrator.getGlobalVariableByName('protocol_work')
external_work[i] = final_external_energy - initial_external_energy
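# With no actual perturbation, the work accumulated internally by the integrator
# ('protocol_work') should agree with the externally measured energy change; per
# the comment above, calling updateParametersInContext is expected to break this
# agreement and make the assertion fail.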
assert np.all(np.abs(internal_work - external_work) < 1E-5)
|
#
# Copyright (c) Microsoft Corporation. All Rights Reserved.
#
"""
Use DOT to layout a graph for cytoscape.js
TODO: add support for middle points in edges
"""
from __future__ import division
from collections import deque, defaultdict
import platform
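# Note: 'True or' below short-circuits the platform check, so the bundled
# ivy_graphviz binding is always used, regardless of platform.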
if True or platform.system() == 'Windows':
from ivy_graphviz import AGraph
else:
from pygraphviz import AGraph
from ivy_utils import topological_sort
import ivy_utils as iu
# import pygraphviz
def cubic_bezier_point(p0, p1, p2, p3, t):
"""
https://en.wikipedia.org/wiki/B%C3%A9zier_curve#Cubic_B.C3.A9zier_curves
"""
a = (1.0 - t)**3
b = 3.0 * t * (1.0 - t)**2
c = 3.0 * t**2 * (1.0 - t)
d = t**3
return {
"x": a * p0["x"] + b * p1["x"] + c * p2["x"] + d * p3["x"],
"y": a * p0["y"] + b * p1["y"] + c * p2["y"] + d * p3["y"],
}
def square_distance_to_segment(p, p1, p2):
v0 = (p["x"] - p1["x"], p["y"] - p1["y"])
v1 = (p2["x"] - p1["x"], p2["y"] - p1["y"])
v0sq = v0[0] * v0[0] + v0[1] * v0[1]
v1sq = v1[0] * v1[0] + v1[1] * v1[1]
prod = v0[0] * v1[0] + v0[1] * v1[1]
v2sq = prod * prod / v1sq
if prod < 0:
return v0sq
elif v2sq < v1sq:
return v0sq - v2sq
else:
v3 = (v0[0] - v1[0], v0[1] - v1[1])
return v3[0] * v3[0] + v3[1] * v3[1]
def approximate_cubic_bezier(p0, p1, p2, p3, threshold=1.0, limit=1024):
"""
    Return a series of points whose segments approximate the given
bezier curve
"""
threshold_squared = threshold ** 2
points = { # dict mapping t values to points
0.0: p0,
1.0: p3,
}
to_check = deque([(0.0, 1.0)])
while len(to_check) > 0 and len(points) < limit:
l, r = to_check.popleft()
pl = points[l]
pr = points[r]
m = (l + r) / 2.0
pm = cubic_bezier_point(p0, p1, p2, p3, m)
if square_distance_to_segment(pm, pl, pr) > threshold_squared:
points[m] = pm
to_check.append((l, m))
to_check.append((m, r))
return [points[t] for t in sorted(points.keys())]
def get_approximation_points(bspline):
"""
    Return a series of points whose segments approximate the given
bspline
"""
result = []
for i in range(0, len(bspline) - 3, 3):
result.extend(approximate_cubic_bezier(
bspline[i], bspline[i+1], bspline[i+2], bspline[i+3],
threshold=4.0,
limit=100,
)[:-1])
result.append(bspline[-1])
return result
def _to_position(st):
global y_origin
sp = st.split(',')
assert len(sp) == 2, st
return {
"x": float(sp[0]),
"y": y_origin-float(sp[1]),
}
def _to_edge_position(st):
"""
http://www.graphviz.org/doc/info/attrs.html#k:splineType
"""
sp = st.split()
result = {}
if sp[0].startswith('e,'):
result["arrowend"] = _to_position(sp[0][2:])
sp = sp[1:]
if sp[0].startswith('s,'):
result["arrowstart"] = _to_position(sp[0][2:])
sp = sp[1:]
result["bspline"] = [_to_position(x) for x in sp]
result["approxpoints"] = get_approximation_points(result["bspline"])
# print "approxpoints: ", len(result["approxpoints"])
return result
def _to_coord_list(st):
""" create a sequence of positions from a dot-generated string """
nums = st.split(',')
pairs = [','.join((nums[2*i],nums[2*i+1])) for i in range(len(nums)//2)]
return map(_to_position,pairs)
def dot_layout(cy_elements,edge_labels=False,subgraph_boxes=False,node_gt=None):
"""
Get a CyElements object and augment it (in-place) with positions,
widths, heights, and spline data from a dot based layout.
edge_labels is true if labels should appear on edges
subgraph_boxes is true if boxes should be drawn around subgraphs
Returns the object.
"""
elements = cy_elements.elements
# g = AGraph(directed=True, strict=False)
g = AGraph(directed=True, strict=False, forcelabels=True)
# make transitive relations appear top to bottom
elements = list(elements)
nodes_by_id = dict(
(e["data"]["id"], e)
for e in elements if e["group"] == "nodes"
)
order = [
(nodes_by_id[e["data"]["source"]], nodes_by_id[e["data"]["target"]])
for e in elements if
e["group"] == "edges" and
"transitive" in e["data"] and
e["data"]["transitive"]
]
elements = topological_sort(elements, order, lambda e: e["data"]["id"])
# get the node id's and stable sort them by cluster
# the idea here is to convert the graph into a dag by sorting
# the nodes, then reversing the back edges. In particular, we try to make
# all the edges between two clusters go in the same direction so clustering
# doesn't result in horizontal edges, which dot renders badly.
sorted_nodes = [e["data"]["id"] for e in elements if e["group"] == "nodes"]
sorted_nodes = sorted(enumerate(sorted_nodes),key = lambda x: (nodes_by_id[x[1]]["data"]["cluster"],x[0]))
sorted_nodes = [y for idx,y in sorted_nodes]
node_key = dict((id,idx) for idx,id in enumerate(sorted_nodes))
if node_gt is None:
        node_gt = lambda x, y: False
else:
node_gt = lambda x,y: node_key[x] > node_key[y]
# add nodes to the graph
for e in elements:
if e["group"] == "nodes" and e["classes"] != 'non_existing':
g.add_node(e["data"]["id"], label=e["data"]["label"].replace('\n', '\\n'))
# TODO: remove this, it's specific to leader_demo
weight = {
'reach': 10,
'le': 10,
'id': 1,
}
constraint = {
'pending': False,
}
# add edges to the graph
for e in elements:
if e["group"] == "edges":
# kwargs = {'weight': weight.get(e["data"]["obj"], 0)},
kwargs = {'label':e["data"]["label"]} if edge_labels else {}
if node_gt(e["data"]["source"],e["data"]["target"]):
g.add_edge(
e["data"]["target"],
e["data"]["source"],
e["data"]["id"],
dir = 'back',
**kwargs
#constraint=constraint.get(e["data"]["obj"], True),
)
else:
g.add_edge(
e["data"]["source"],
e["data"]["target"],
e["data"]["id"],
**kwargs
#constraint=constraint.get(e["data"]["obj"], True),
)
# add clusters
clusters = defaultdict(list)
for e in elements:
if e["group"] == "nodes" and e["data"]["cluster"] is not None and e["classes"] != 'non_existing':
clusters[e["data"]["cluster"]].append(e["data"]["id"])
for i, k in enumerate(sorted(clusters.keys())):
g.add_subgraph(
name='cluster_{}'.format(i),
nbunch=clusters[k],
rank='min',
)
# now get positions, heights, widths, and bsplines
g.layout(prog='dot')
# get the y origin. we want the top left of the graph to be a
# fixed coordinate (hopefully (0,0)) so the graph doesn't jump when
    # its height changes. Unfortunately, pygraphviz has a bug and gives
# the wrong bbox, so we compute the max y coord.
# bbox = pygraphviz.graphviz.agget(g.handle,'bb')
global y_origin
y_origin = 0.0
for n in g.nodes():
top = float(n.attr['pos'].split(',')[1]) + float(n.attr['height'])/2
if top > y_origin:
y_origin = top
if subgraph_boxes:
for sg in g.subgraphs():
top = float(sg.graph_attr['bb'].split(',')[3])
if top > y_origin:
y_origin = top
for e in elements:
if e["group"] == "nodes" and e["classes"] != 'non_existing':
attr = g.get_node(e["data"]["id"]).attr
e["position"] = _to_position(attr['pos'])
e["data"]["width"] = 72 * float(attr['width'])
e["data"]["height"] = 72 * float(attr['height'])
elif e["group"] == "edges":
if node_gt(e["data"]["source"],e["data"]["target"]):
attr = g.get_edge(e["data"]["target"], e["data"]["source"], e["data"]["id"]).attr
pos = attr['pos']
pe = pos.split()
ppe = pe[1:]
ppe.reverse()
pos = ' '.join([pe[0].replace('s','e')] + ppe)
else:
attr = g.get_edge(e["data"]["source"], e["data"]["target"], e["data"]["id"]).attr
pos = attr['pos']
e["data"].update(_to_edge_position(pos))
if edge_labels and e["data"]["label"] != '':
e["data"]["lp"] = _to_position(attr['lp'])
# g.draw('g.png')
if subgraph_boxes:
for sg in g.subgraphs():
box = cy_elements.add_shape(sg.name,classes='subgraphs')
coords = _to_coord_list(sg.graph_attr['bb'])
box["data"]["coords"] = coords
return cy_elements
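if __name__ == "__main__":
    # Minimal smoke test (inputs are illustrative): flatten one cubic bezier
    # into line segments and check that the endpoints are preserved.
    demo = approximate_cubic_bezier(
        {"x": 0.0, "y": 0.0}, {"x": 0.0, "y": 100.0},
        {"x": 100.0, "y": 100.0}, {"x": 100.0, "y": 0.0},
        threshold=1.0,
    )
    assert demo[0] == {"x": 0.0, "y": 0.0}
    assert demo[-1] == {"x": 100.0, "y": 0.0}
    print("approximated with %d points" % len(demo))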
|
import sys
sys.path.append('..')
from data_object.word_data_object import WordDataObject
from service.word import Word
from service.language import Language
from utils.print import ppp
# random word list options
language = Language(Language.ENGLISH)
qwerty_difficulty_rank = {
'min': 0,
'max': 10000
}
frequency_rank = {
'min': 0,
'max': 10000
}
length = {
'min': 0,
'max': 100
}
substring = 'ba'
limit = 10
word_list = Word.get_random_list(
language=language,
qwerty_difficulty_rank=qwerty_difficulty_rank,
frequency_rank=frequency_rank,
length=length,
substring=substring,
limit=limit
)
ppp(word_list)
for wordDO in word_list:
ppp(wordDO.to_dict())
ppp("length: {0}".format(len(word_list)))
|
import base64
import os
import shutil
import string
import tempfile
import unittest
from datetime import timedelta
from http import cookies
from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import (
SessionStore as CacheDBSession,
)
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import (
SessionStore as CookieSession,
)
from django.contrib.sessions.exceptions import InvalidSessionKey
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sessions.models import Session
from django.contrib.sessions.serializers import (
JSONSerializer, PickleSerializer,
)
from django.core import management
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.http import HttpResponse
from django.test import (
RequestFactory, TestCase, ignore_warnings, override_settings,
)
from django.test.utils import patch_logger
from django.utils import timezone
from .models import SessionStore as CustomDatabaseSession
class SessionTestsMixin:
# This does not inherit from TestCase to avoid any tests being run with this
# class, which wouldn't work, and to allow different TestCase subclasses to
# be used.
backend = None # subclasses must specify
def setUp(self):
self.session = self.backend()
def tearDown(self):
# NB: be careful to delete any sessions created; stale sessions fill up
# the /tmp (with some backends) and eventually overwhelm it after lots
# of runs (think buildbots)
self.session.delete()
def test_new_session(self):
self.assertIs(self.session.modified, False)
self.assertIs(self.session.accessed, False)
def test_get_empty(self):
self.assertIsNone(self.session.get('cat'))
def test_store(self):
self.session['cat'] = "dog"
self.assertIs(self.session.modified, True)
self.assertEqual(self.session.pop('cat'), 'dog')
def test_pop(self):
self.session['some key'] = 'exists'
# Need to reset these to pretend we haven't accessed it:
        self.session.accessed = False
        self.session.modified = False
self.assertEqual(self.session.pop('some key'), 'exists')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
self.assertIsNone(self.session.get('some key'))
def test_pop_default(self):
self.assertEqual(self.session.pop('some key', 'does not exist'),
'does not exist')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_pop_default_named_argument(self):
self.assertEqual(self.session.pop('some key', default='does not exist'), 'does not exist')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_pop_no_default_keyerror_raised(self):
with self.assertRaises(KeyError):
self.session.pop('some key')
def test_setdefault(self):
self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
def test_update(self):
self.session.update({'update key': 1})
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
self.assertEqual(self.session.get('update key', None), 1)
def test_has_key(self):
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertIn('some key', self.session)
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_values(self):
self.assertEqual(list(self.session.values()), [])
self.assertIs(self.session.accessed, True)
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.values()), [1])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_keys(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.keys()), ['x'])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_items(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_clear(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.session.clear()
self.assertEqual(list(self.session.items()), [])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
def test_save(self):
self.session.save()
self.assertIs(self.session.exists(self.session.session_key), True)
def test_delete(self):
self.session.save()
self.session.delete(self.session.session_key)
self.assertIs(self.session.exists(self.session.session_key), False)
def test_flush(self):
self.session['foo'] = 'bar'
self.session.save()
prev_key = self.session.session_key
self.session.flush()
self.assertIs(self.session.exists(prev_key), False)
self.assertNotEqual(self.session.session_key, prev_key)
self.assertIsNone(self.session.session_key)
self.assertIs(self.session.modified, True)
self.assertIs(self.session.accessed, True)
def test_cycle(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_key = self.session.session_key
prev_data = list(self.session.items())
self.session.cycle_key()
self.assertIs(self.session.exists(prev_key), False)
self.assertNotEqual(self.session.session_key, prev_key)
self.assertEqual(list(self.session.items()), prev_data)
def test_cycle_with_no_session_cache(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_data = self.session.items()
self.session = self.backend(self.session.session_key)
self.assertIs(hasattr(self.session, '_session_cache'), False)
self.session.cycle_key()
self.assertCountEqual(self.session.items(), prev_data)
def test_save_doesnt_clear_data(self):
self.session['a'] = 'b'
self.session.save()
self.assertEqual(self.session['a'], 'b')
def test_invalid_key(self):
# Submitting an invalid session key (either by guessing, or if the db has
# removed the key) results in a new key being generated.
try:
session = self.backend('1')
session.save()
self.assertNotEqual(session.session_key, '1')
self.assertIsNone(session.get('cat'))
session.delete()
finally:
# Some backends leave a stale cache entry for the invalid
# session key; make sure that entry is manually deleted
session.delete('1')
def test_session_key_empty_string_invalid(self):
"""Falsey values (Such as an empty string) are rejected."""
self.session._session_key = ''
self.assertIsNone(self.session.session_key)
def test_session_key_too_short_invalid(self):
"""Strings shorter than 8 characters are rejected."""
self.session._session_key = '1234567'
self.assertIsNone(self.session.session_key)
def test_session_key_valid_string_saved(self):
"""Strings of length 8 and up are accepted and stored."""
self.session._session_key = '12345678'
self.assertEqual(self.session.session_key, '12345678')
def test_session_key_is_read_only(self):
def set_session_key(session):
session.session_key = session._get_new_session_key()
with self.assertRaises(AttributeError):
set_session_key(self.session)
# Custom session expiry
def test_default_expiry(self):
# A normal session has a max age equal to settings
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
# So does a custom session with an idle expiration time of 0 (but it'll
# expire at browser close)
self.session.set_expiry(0)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_custom_expiry_seconds(self):
modification = timezone.now()
self.session.set_expiry(10)
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_timedelta(self):
modification = timezone.now()
# Mock timezone.now, because set_expiry calls it on this code path.
original_now = timezone.now
try:
timezone.now = lambda: modification
self.session.set_expiry(timedelta(seconds=10))
finally:
timezone.now = original_now
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_datetime(self):
modification = timezone.now()
self.session.set_expiry(modification + timedelta(seconds=10))
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_reset(self):
self.session.set_expiry(None)
self.session.set_expiry(10)
self.session.set_expiry(None)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_get_expire_at_browser_close(self):
# Tests get_expire_at_browser_close with different settings and different
# set_expiry calls
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
self.session.set_expiry(10)
self.assertIs(self.session.get_expire_at_browser_close(), False)
self.session.set_expiry(0)
self.assertIs(self.session.get_expire_at_browser_close(), True)
self.session.set_expiry(None)
self.assertIs(self.session.get_expire_at_browser_close(), False)
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
self.session.set_expiry(10)
self.assertIs(self.session.get_expire_at_browser_close(), False)
self.session.set_expiry(0)
self.assertIs(self.session.get_expire_at_browser_close(), True)
self.session.set_expiry(None)
self.assertIs(self.session.get_expire_at_browser_close(), True)
def test_decode(self):
# Ensure we can decode what we encode
data = {'a test key': 'a test value'}
encoded = self.session.encode(data)
self.assertEqual(self.session.decode(encoded), data)
def test_decode_failure_logged_to_security(self):
bad_encode = base64.b64encode(b'flaskdj:alkdjf')
with patch_logger('django.security.SuspiciousSession', 'warning') as calls:
self.assertEqual({}, self.session.decode(bad_encode))
# check that the failed decode is logged
self.assertEqual(len(calls), 1)
self.assertIn('corrupted', calls[0])
def test_actual_expiry(self):
# this doesn't work with JSONSerializer (serializing timedelta)
with override_settings(SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer'):
self.session = self.backend() # reinitialize after overriding settings
# Regression test for #19200
old_session_key = None
new_session_key = None
try:
self.session['foo'] = 'bar'
self.session.set_expiry(-timedelta(seconds=10))
self.session.save()
old_session_key = self.session.session_key
# With an expiry date in the past, the session expires instantly.
new_session = self.backend(self.session.session_key)
new_session_key = new_session.session_key
self.assertNotIn('foo', new_session)
finally:
self.session.delete(old_session_key)
self.session.delete(new_session_key)
def test_session_load_does_not_create_record(self):
"""
Loading an unknown session key does not create a session record.
        Creating session records on load is a DoS vulnerability.
"""
session = self.backend('someunknownkey')
session.load()
self.assertIsNone(session.session_key)
self.assertIs(session.exists(session.session_key), False)
# provided unknown key was cycled, not reused
self.assertNotEqual(session.session_key, 'someunknownkey')
def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
"""
Sessions shouldn't be resurrected by a concurrent request.
"""
# Create new session.
s1 = self.backend()
s1['test_data'] = 'value1'
s1.save(must_create=True)
# Logout in another context.
s2 = self.backend(s1.session_key)
s2.delete()
# Modify session in first context.
s1['test_data'] = 'value2'
with self.assertRaises(UpdateError):
# This should throw an exception as the session is deleted, not
# resurrect the session.
s1.save()
self.assertEqual(s1.load(), {})
class DatabaseSessionTests(SessionTestsMixin, TestCase):
backend = DatabaseSession
session_engine = 'django.contrib.sessions.backends.db'
@property
def model(self):
return self.backend.get_model_class()
def test_session_str(self):
"Session repr should be the session key."
self.session['x'] = 1
self.session.save()
session_key = self.session.session_key
s = self.model.objects.get(session_key=session_key)
self.assertEqual(str(s), session_key)
def test_session_get_decoded(self):
"""
        Test that Session.get_decoded retrieves data stored
        the normal way
"""
self.session['x'] = 1
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
self.assertEqual(s.get_decoded(), {'x': 1})
def test_sessionmanager_save(self):
"""
Test SessionManager.save method
"""
# Create a session
self.session['y'] = 1
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
# Change it
self.model.objects.save(s.session_key, {'y': 2}, s.expire_date)
# Clear cache, so that it will be retrieved from DB
del self.session._session_cache
self.assertEqual(self.session['y'], 2)
def test_clearsessions_command(self):
"""
Test clearsessions command for clearing expired sessions.
"""
self.assertEqual(0, self.model.objects.count())
# One object in the future
self.session['foo'] = 'bar'
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session['foo'] = 'bar'
other_session.set_expiry(-3600)
other_session.save()
# Two sessions are in the database before clearsessions...
self.assertEqual(2, self.model.objects.count())
with override_settings(SESSION_ENGINE=self.session_engine):
management.call_command('clearsessions')
# ... and one is deleted.
self.assertEqual(1, self.model.objects.count())
@override_settings(USE_TZ=True)
class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests):
pass
class CustomDatabaseSessionTests(DatabaseSessionTests):
backend = CustomDatabaseSession
session_engine = 'sessions_tests.models'
def test_extra_session_field(self):
# Set the account ID to be picked up by a custom session storage
# and saved to a custom session model database column.
self.session['_auth_user_id'] = 42
self.session.save()
# Make sure that the customized create_model_instance() was called.
s = self.model.objects.get(session_key=self.session.session_key)
self.assertEqual(s.account_id, 42)
# Make the session "anonymous".
self.session.pop('_auth_user_id')
self.session.save()
# Make sure that save() on an existing session did the right job.
s = self.model.objects.get(session_key=self.session.session_key)
self.assertIsNone(s.account_id)
class CacheDBSessionTests(SessionTestsMixin, TestCase):
backend = CacheDBSession
def test_exists_searches_cache_first(self):
self.session.save()
with self.assertNumQueries(0):
self.assertIs(self.session.exists(self.session.session_key), True)
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
@override_settings(SESSION_CACHE_ALIAS='sessions')
def test_non_default_cache(self):
        # Refs #21000 -- the CacheDB backend should respect SESSION_CACHE_ALIAS.
with self.assertRaises(InvalidCacheBackendError):
self.backend()
@override_settings(USE_TZ=True)
class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests):
pass
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class FileSessionTests(SessionTestsMixin, unittest.TestCase):
backend = FileSession
def setUp(self):
# Do file session tests in an isolated directory, and kill it after we're done.
self.original_session_file_path = settings.SESSION_FILE_PATH
self.temp_session_store = settings.SESSION_FILE_PATH = tempfile.mkdtemp()
# Reset the file session backend's internal caches
if hasattr(self.backend, '_storage_path'):
del self.backend._storage_path
super().setUp()
def tearDown(self):
super().tearDown()
settings.SESSION_FILE_PATH = self.original_session_file_path
shutil.rmtree(self.temp_session_store)
@override_settings(
SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer")
def test_configuration_check(self):
del self.backend._storage_path
# Make sure the file backend checks for a good storage dir
with self.assertRaises(ImproperlyConfigured):
self.backend()
def test_invalid_key_backslash(self):
# Ensure we don't allow directory-traversal.
# This is tested directly on _key_to_file, as load() will swallow
# a SuspiciousOperation in the same way as an IOError - by creating
# a new session, making it unclear whether the slashes were detected.
with self.assertRaises(InvalidSessionKey):
self.backend()._key_to_file("a\\b\\c")
def test_invalid_key_forwardslash(self):
# Ensure we don't allow directory-traversal
with self.assertRaises(InvalidSessionKey):
self.backend()._key_to_file("a/b/c")
@override_settings(
SESSION_ENGINE="django.contrib.sessions.backends.file",
SESSION_COOKIE_AGE=0,
)
def test_clearsessions_command(self):
"""
Test clearsessions command for clearing expired sessions.
"""
storage_path = self.backend._get_storage_path()
file_prefix = settings.SESSION_COOKIE_NAME
def count_sessions():
return len([
session_file for session_file in os.listdir(storage_path)
if session_file.startswith(file_prefix)
])
self.assertEqual(0, count_sessions())
# One object in the future
self.session['foo'] = 'bar'
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session['foo'] = 'bar'
other_session.set_expiry(-3600)
other_session.save()
# One object in the present without an expiry (should be deleted since
# its modification time + SESSION_COOKIE_AGE will be in the past when
# clearsessions runs).
other_session2 = self.backend()
other_session2['foo'] = 'bar'
other_session2.save()
# Three sessions are in the filesystem before clearsessions...
self.assertEqual(3, count_sessions())
management.call_command('clearsessions')
# ... and two are deleted.
self.assertEqual(1, count_sessions())
class CacheSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CacheSession
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
def test_default_cache(self):
self.session.save()
self.assertIsNotNone(caches['default'].get(self.session.cache_key))
@override_settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'sessions': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'session',
},
}, SESSION_CACHE_ALIAS='sessions')
def test_non_default_cache(self):
# Re-initialize the session backend to make use of overridden settings.
self.session = self.backend()
self.session.save()
self.assertIsNone(caches['default'].get(self.session.cache_key))
self.assertIsNotNone(caches['sessions'].get(self.session.cache_key))
def test_create_and_save(self):
self.session = self.backend()
self.session.create()
self.session.save()
self.assertIsNotNone(caches['default'].get(self.session.cache_key))
class SessionMiddlewareTests(TestCase):
@override_settings(SESSION_COOKIE_SECURE=True)
def test_secure_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
        # Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['secure'], True)
@override_settings(SESSION_COOKIE_HTTPONLY=True)
def test_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
        # Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], True)
self.assertIn(
cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
@override_settings(SESSION_COOKIE_HTTPONLY=False)
def test_no_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
        # Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], '')
self.assertNotIn(
cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
def test_session_save_on_500(self):
request = RequestFactory().get('/')
response = HttpResponse('Horrible error')
response.status_code = 500
middleware = SessionMiddleware()
        # Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The value wasn't saved above.
self.assertNotIn('hello', request.session.load())
def test_session_update_error_redirect(self):
path = '/foo/'
request = RequestFactory().get(path)
response = HttpResponse()
middleware = SessionMiddleware()
request.session = DatabaseSession()
request.session.save(must_create=True)
request.session.delete()
msg = (
"The request's session was deleted before the request completed. "
"The user may have logged out in a concurrent request, for example."
)
with self.assertRaisesMessage(SuspiciousOperation, msg):
# Handle the response through the middleware. It will try to save
# the deleted session which will cause an UpdateError that's caught
# and raised as a SuspiciousOperation.
middleware.process_response(request, response)
def test_session_delete_on_end(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The cookie was deleted, not recreated.
# A deleted cookie header looks like:
# Set-Cookie: sessionid=; expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/
self.assertEqual(
'Set-Cookie: {}=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; '
'Max-Age=0; Path=/'.format(
settings.SESSION_COOKIE_NAME,
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
@override_settings(SESSION_COOKIE_DOMAIN='.example.local', SESSION_COOKIE_PATH='/example/')
def test_session_delete_on_end_with_custom_domain_and_path(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The cookie was deleted, not recreated.
# A deleted cookie header with a custom domain and path looks like:
# Set-Cookie: sessionid=; Domain=.example.local;
# expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0;
# Path=/example/
self.assertEqual(
'Set-Cookie: {}=""; Domain=.example.local; expires=Thu, '
'01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/example/'.format(
settings.SESSION_COOKIE_NAME,
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
def test_flush_empty_without_session_cookie_doesnt_set_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# A cookie should not be set.
self.assertEqual(response.cookies, {})
# The session is accessed so "Vary: Cookie" should be set.
self.assertEqual(response['Vary'], 'Cookie')
def test_empty_session_saved(self):
"""
If a session is emptied of data but still has a key, it should still
be updated.
"""
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Set a session key and some data.
middleware.process_request(request)
request.session['foo'] = 'bar'
# Handle the response through the middleware.
response = middleware.process_response(request, response)
self.assertEqual(tuple(request.session.items()), (('foo', 'bar'),))
# A cookie should be set, along with Vary: Cookie.
self.assertIn(
'Set-Cookie: sessionid=%s' % request.session.session_key,
str(response.cookies)
)
self.assertEqual(response['Vary'], 'Cookie')
# Empty the session data.
del request.session['foo']
# Handle the response through the middleware.
response = HttpResponse('Session test')
response = middleware.process_response(request, response)
self.assertEqual(dict(request.session.values()), {})
session = Session.objects.get(session_key=request.session.session_key)
self.assertEqual(session.get_decoded(), {})
# While the session is empty, it hasn't been flushed so a cookie should
# still be set, along with Vary: Cookie.
self.assertGreater(len(request.session.session_key), 8)
self.assertIn(
'Set-Cookie: sessionid=%s' % request.session.session_key,
str(response.cookies)
)
self.assertEqual(response['Vary'], 'Cookie')
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class CookieSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CookieSession
def test_save(self):
"""
This test tested exists() in the other session backends, but that
doesn't make sense for us.
"""
pass
def test_cycle(self):
"""
This test tested cycle_key() which would create a new session
key for the same session data. But we can't invalidate previously
signed cookies (other than letting them expire naturally) so
testing for this behavior is meaningless.
"""
pass
@unittest.expectedFailure
def test_actual_expiry(self):
# The cookie backend doesn't handle non-default expiry dates, see #19201
super().test_actual_expiry()
def test_unpickling_exception(self):
# signed_cookies backend should handle unpickle exceptions gracefully
# by creating a new session
self.assertEqual(self.session.serializer, JSONSerializer)
self.session.save()
self.session.serializer = PickleSerializer
self.session.load()
@unittest.skip("Cookie backend doesn't have an external store to create records in.")
def test_session_load_does_not_create_record(self):
pass
@unittest.skip("CookieSession is stored in the client and there is no way to query it.")
def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
pass
|
# -*- python -*-
# Copyright (C) 2009-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/NOBACKUP/sqa10/sqa/build-bspv410/build-toolchain/build-2016-04-13/toolchain/nds32le-elf-mculib-v3/share/gcc-4.9.3/python'
libdir = '/NOBACKUP/sqa10/sqa/build-bspv410/build-toolchain/build-2016-04-13/toolchain/nds32le-elf-mculib-v3/nds32le-elf/lib'
# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
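    # Worked example with the paths above: after stripping the common prefix
    # (everything through 'nds32le-elf-mculib-v3/'), libdir is
    # 'nds32le-elf/lib' -- two components, so dotdots is '../../' -- and
    # pythondir is 'share/gcc-4.9.3/python'; the sys.path entry therefore
    # becomes <objfile dir>/../../share/gcc-4.9.3/python.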
objfile = gdb.current_objfile ().filename
dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
if not dir_ in sys.path:
sys.path.insert(0, dir_)
# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
|
import pytest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
@pytest.fixture(scope="session")
def driver(request):
    wd = webdriver.Chrome()
request.addfinalizer(wd.quit)
return wd
def test_admin_login(driver):
driver.get("http://localhost/litecart/admin")
driver.find_element_by_name("username").send_keys("admin")
driver.find_element_by_name("password").send_keys("123456")
driver.find_element_by_name("login").click()
WebDriverWait(driver,10).until(EC.title_is("My Store"))
    print('Sleep for 5 seconds before closing the browser, for debugging')
time.sleep(5)
|
from parsimonious.grammar import Grammar
from .. import html_paths
def read_html_path(string):
path_node = _grammar.parse(string)
return read_html_path_node(path_node)
def read_html_path_node(path_node):
if path_node.children[0].expr_name == "ignore":
return html_paths.ignore
elif path_node.children[0].children:
return _read_html_path_elements_node(path_node.children[0].children[0])
else:
return html_paths.empty
def _read_html_path_elements_node(path_node):
elements = [
_read_element_node(child)
for child in _repeated_children_with_separator(path_node, has_whitespace=True)
]
return html_paths.path(elements)
def _read_element_node(node):
tag_names = _read_tag_names_node(node.children[0])
class_names = _read_class_names_node(node.children[1])
fresh = _read_fresh_node(node.children[2])
return html_paths.element(tag_names, class_names=class_names, fresh=fresh)
def _read_tag_names_node(node):
return [
child.text
for child in _repeated_children_with_separator(node, has_whitespace=False)
]
def _read_class_names_node(class_names_node):
return [
_read_class_name_node(node)
for node in class_names_node.children
]
def _read_class_name_node(node):
return node.children[1].text
def _read_fresh_node(node):
return len(node.children) > 0
def _repeated_children_with_separator(node, has_whitespace):
yield node.children[0]
if has_whitespace:
sequence_node_index = 3
else:
sequence_node_index = 1
sequence_node = node.children[1]
for child in sequence_node.children:
yield child.children[sequence_node_index]
grammar_text = r"""
html_path = ignore / html_path_elements?
ignore = "!"
html_path_elements = element (whitespace* ">" whitespace* element)*
element = tag_names class_name* fresh?
tag_names = identifier ("|" identifier)*
class_name = "." identifier
fresh = ":fresh"
identifier = ~"[A-Z0-9]+"i
whitespace = ~"\s"*
"""
_grammar = Grammar(grammar_text)
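if __name__ == "__main__":
    # Illustrative parses (assumes the html_paths element/path API imported
    # above): "p.note:fresh" is a single fresh <p> element with class "note";
    # "!" is the ignore marker.
    print(read_html_path("p.note:fresh"))
    print(read_html_path("!"))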
|
from . import git
import os
from .utils import errordie, mkpath, msg
def _trim(lines):
stripped = [line.strip() for line in lines]
return [line for line in stripped if line and not line.startswith('#')]
def _git_destname(repository):
git_folder = repository.rsplit('/', 1)[1]
if git_folder.endswith('.git'):
return git_folder[:-4]
else:
return git_folder
class Repo(object):
def __init__(self, repository, prefix):
self._repository = repository
self._prefix = prefix
@classmethod
def parse(cls, line):
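        # Expected line format: "<prefix> <repository-url>", separated by a
        # single space; as_line() below writes "." as the prefix for
        # repositories that live at the top level.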
parts = line.split(' ', 1)
if len(parts) == 2:
repository = parts[1]
prefix = parts[0]
else:
errordie('Invalid repository file line: {}'.format(line))
return cls(repository, prefix)
def _group_folder(self, folder):
if self._prefix:
return os.path.join(folder, self._prefix)
else:
return folder
def clone(self, folder):
group_folder = self._group_folder(folder)
mkpath(group_folder)
git_folder = _git_destname(self._repository)
destination = os.path.join(group_folder, git_folder)
if os.path.exists(destination):
msg('IN %s SKIPPING %s' % (self._prefix, git_folder))
return
msg('IN %s CLONING %s' % (self._prefix, git_folder))
git.clone_or_die(self._repository, destination)
def fast_forward(self, folder):
group_folder = self._group_folder(folder)
git_folder = _git_destname(self._repository)
destination = os.path.join(group_folder, git_folder)
if not os.path.exists(destination):
errordie('Can\'t fast forward missing repository: {}'.format(destination))
msg('IN %s FAST FORWARDING %s' % (self._prefix, git_folder))
git.fast_forward_or_die(destination)
def as_line(self):
if self._prefix:
return '{} {}'.format(self._prefix, self._repository)
else:
return '. {}'.format(self._repository)
def __eq__(self, other):
if isinstance(other, Repo):
return (
self._repository == other._repository
and
self._prefix == other._prefix
)
return False
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return self.as_line()
class ReposFile(object):
def __init__(self, stored_file):
self._stored_file = stored_file
self._repositories = [
Repo.parse(line)
for line in _trim(stored_file.readlines())]
def clone(self, folder):
for repo in self._repositories:
repo.clone(folder)
def fast_forward(self, folder):
for repo in self._repositories:
repo.fast_forward(folder)
def add(self, repo):
for existing in self._repositories:
if repo == existing:
errordie('Duplicate entry {}'.format(repo))
self._repositories.append(repo)
def save(self):
lines = [repo.as_line()+'\n' for repo in self._repositories]
self._stored_file.writelines(sorted(lines))
class RepoSet(object):
def __init__(self, name, folder, stored_file):
self._name = name
self._reposfile = ReposFile(stored_file)
self._folder = folder
def clone(self):
msg('CLONING SET {}'.format(self._name))
self._reposfile.clone(self._folder)
def fast_forward(self):
msg('FAST FORWARD IN SET {}'.format(self._name))
self._reposfile.fast_forward(self._folder)
def add_and_clone(self, repository, prefix):
msg('IN SET {}'.format(self._name))
repo = Repo(repository=repository, prefix=prefix)
self._reposfile.add(repo)
self._reposfile.save()
repo.clone(self._folder)
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TaskIdRange(Model):
"""
A range of task ids that a task can depend on. All tasks with ids in the
range must complete successfully before the dependent task can be
scheduled.
:param start: The first task id in the range.
:type start: int
:param end: The last task id in the range.
:type end: int
"""
_validation = {
'start': {'required': True},
'end': {'required': True},
}
_attribute_map = {
'start': {'key': 'start', 'type': 'int'},
'end': {'key': 'end', 'type': 'int'},
}
def __init__(self, start, end):
self.start = start
self.end = end
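# Illustrative use (hypothetical task ids): a task depending on this range can
# only be scheduled once tasks 1 through 10 have all completed successfully.
#   depends_on = TaskIdRange(start=1, end=10)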
|
'''
Module that retrieves and packs ADMS input info.
'''
import rdflib
from pyproj import Proj, transform
import requests
import math
import sys
import os
import rdflib.plugins.sparql.results.jsonresults as jsresult
from collections import namedtuple
from admsSrc import admsSrc
from admsPolygon import Polygon
import cobbling
class admsInputDataRetriever(object):
BDN = namedtuple('BDN', ['BldNumBuildings','BldName','BldType','BldX','BldY','BldHeight', 'BldLength', 'BldWidth', 'BldAngle'])
OPT = namedtuple('OPT', ['OptNumOutputs','OptPolName','OptInclude','OptShortOrLong', 'OptSamplingTime','OptSamplingTimeUnits','OptCondition','OptNumPercentiles','OptNumExceedences','OptPercentiles','OptExceedences','OptUnits','OptGroupsOrSource','OptAllSources','OptNumGroups','OptIncludedGroups','OptIncludedSource','OptCreateComprehensiveFile'])
    def __init__(self, topnode, bdnnode=None, range=None, pollutants=['so2'], srcLimit=5, bdnLimit=25, filterSrc=False):
        '''constructor
        inputs:
        range - user input range {'xmin', 'xmax', 'ymin', 'ymax'}; the actual range is the min of the user range and the region envelope (e.g. Jurong Island)
        topnode - uri of the topnode from which to begin the search within the tree structure
        filterSrc - if False, search all children under topnode for sources; if True, use topnode as the source list directly
        bdnnode - top/collection node of the buildings
        pollutants - pollutants to test
        srcLimit - limit on the number of sources; the actual number might be fewer
        bdnLimit - limit on the number of buildings; the actual number might be fewer
        '''
self.address = None
self.pollutants = pollutants
self.topnode = topnode
self.bdnnode = bdnnode
self.srcLimit = srcLimit
self.bdnLimit = bdnLimit
        self.filterSrc = filterSrc
self.range = self.getRange(range)
print(self.range)
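    # Illustrative construction (all values hypothetical):
    #   retriever = admsInputDataRetriever(
    #       topnode='<uri-of-top-node>',
    #       range={'xmin': 0, 'xmax': 500, 'ymin': 0, 'ymax': 500},
    #       pollutants=['so2'], srcLimit=5, bdnLimit=25)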
def getRange(self, userrange):
'''
        Define the range from topnode info and user-given parameters
returns (xRange, yRange)
'''
if not self.filterSrc:
return ((userrange['xmin'], userrange['xmax']), (userrange['ymin'],userrange['ymax']))
self.connectDB(self.topnode)#connect to db
qx = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
SELECT ?value
WHERE {
?co space_and_time_extended:hasProjectedCoordinate_x ?upper.
?upper sys:hasValue ?v.
?v sys:numericalValue ?value .
}
""")
qy = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
SELECT ?value
WHERE {
?co space_and_time_extended:hasProjectedCoordinate_y ?upper.
?upper sys:hasValue ?v.
?v sys:numericalValue ?value .
}
""")
#extract bounds data
xs = tuple(row['value'] for row in qx)
ys = tuple(row['value'] for row in qy)
xRange = (xs[0].toPython(),xs[1].toPython()) if xs[0]<xs[1] else (xs[1].toPython(),xs[0].toPython())
yRange = (ys[0].toPython(),ys[1].toPython()) if ys[0]<ys[1] else (ys[1].toPython(),ys[0].toPython())
#todo: provide GIS-specific EPSG code in future and do conversion if needed
#if user specified range, compare
if userrange is not None:
xRange = (min(xRange[0], userrange['xmin']), max(xRange[1], userrange['xmax']))
yRange = (min(yRange[0], userrange['ymin']), max(yRange[1], userrange['ymax']))
print('xrange: {} - {}'.format(*xRange))
print('yrange: {} - {}'.format(*yRange))
return(xRange, yRange)
def filterSource(self):
'''filter the sources from the tree starting at topnode, within the range and with user-set content
returns: list of source uris
'''
xRange, yRange = self.range
self.connectDB(self.topnode)#connect to db
#query for children uris from topnode
#todo: future kb: type check :emission type
qChildren = self.query(
"""
PREFIX Eco-industrialPark: <http://www.theworldavatar.com/OntoEIP/Eco-industrialPark.owl#>
SELECT ?child
WHERE {{
?o Eco-industrialPark:hasIRI ?child .
}}
LIMIT {0}
""".format(self.srcLimit)
)
###query each children to get coordinates
uris = list(row["child"].strip() for row in qChildren)
###todo: add this test file, delete in future
#uris.append("http://www.theworldavatar.com/TankID_1574.owl#TankID_1574")
filtered = []
print(uris)
for uri in uris:
print("connecting: {:s}".format(uri))
self.connectDB(uri)
qstr ='''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX material: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/material.owl#>
SELECT DISTINCT ?x ?y ?content
WHERE {{
<{0!s}> space_and_time_extended:hasGISCoordinateSystem ?o.
?o space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?vx.
?vx sys:numericalValue ?x .
<{0!s}> sys:hasContent ?contentE .
?contentE material:intrinsicCharacteristics ?chemsp.
?chemsp sys:containsDirectly ?content.
OPTIONAL{{
?o space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?vy.
?vy sys:numericalValue ?y .
}}
}}
'''.format(uri)
#print (qstr)
coordQresults = self.query(qstr)
##filter children within range
for row in coordQresults:
x,y,content = float(row['x'].toPython()), float(row['y'].toPython()), row['content'].toPython()
#print("{},{},{}".format(x, y, content))
if x - xRange[0]>0 and x - xRange[1] < 0 and y - yRange[0] > 0 and y - yRange[1]<0 and content in self.pollutants:
filtered.append(uri)
print('add to filtered {}'.format(uri))
break
return filtered
def getSrcData(self):
'''get all sourced data :
returns: data object
'''
filtered = None
if not self.filterSrc:
filtered = self.filterSource()
else:
filtered = self.topnode
s = set()#make a set of substance to query later
result = []
for uri in filtered:
print("connecting: {:s}".format(uri))
self.connectDB(uri)
qdata = self.query(
"""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX plant:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_realization/plant.owl#>
PREFIX topology:<http://www.theworldavatar.com/OntoCAPE/meta_model/topology/topology.owl#>
PREFIX behavior: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_behavior/behavior.owl#>
PREFIX chemical_process_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/chemical_process_system.owl#>
PREFIX phase_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/phase_system/phase_system.owl#>
PREFIX material: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/material.owl#>
SELECT ?o ?height ?diameter ?content ?x ?y ?velocity ?massflow ?temp
WHERE {{
?o plant:hasHeight ?he.
?he sys:numericalValue ?height .
?o plant:hasInsideDiameter ?de .
?de sys:numericalValue ?diameter.
?o sys:hasContent ?contentE .
?contentE material:intrinsicCharacteristics ?chemsp.
?chemsp sys:containsDirectly ?content.
?contentE material:thermodynamicBehavior ?phase.
?phase phase_system:has_temperature ?tempE.
?tempE sys:hasValue ?vte.
?vte sys:numericalValue ?temp .
?o space_and_time_extended:hasGISCoordinateSystem ?coe .
?coe space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?coe space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
?stream topology:leaves ?o.
?stream chemical_process_system:refersToGeneralizedAmount ?ga.
?ga sys:hasSubsystem ?ma.
?ma sys:hasProperty ?ve.
?ve a behavior:Velocity .
?ve sys:hasValue ?vv.
?vv sys:numericalValue ?velocity.
?ma sys:hasProperty ?me.
?me a behavior:ConvectiveMassFlowrate .
?me sys:hasValue ?mv.
?mv sys:numericalValue ?massflow.
}}
LIMIT 1
""")
for row in qdata:
s.add(row['content'].toPython())
result.append(row.asdict())
print("FILTERED :")
print(result)
#use this if we need to query substances separately
#query substance for substance-related data
cMap = {}
#hard-coding for now until there is a better solution
self.connectDB("http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl")
template = """
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX sub:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl#>
PREFIX behavior: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/chemical_process_system/CPS_behavior/behavior.owl#>
PREFIX phase_system:<http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/phase_system/phase_system.owl#>
SELECT DISTINCT ?o ?hc ?d ?mw
WHERE {{
<{0}> behavior:hasHeatCapacityRatio ?hce. #heatcapacity ratio
?hce sys:hasValue ?hcv .
?hcv sys:numericalValue ?hc.
<{0}> phase_system:has_density ?de . #density
?de sys:hasValue ?dv .
?dv sys:numericalValue ?d.
<{0}> sys:hasProperty ?mwe. #molecular weight
?mwe a sub:MolecularWeight .
?mwe sys:hasValue ?mwv .
?mwv sys:numericalValue ?mw.
}}
LIMIT 1
"""
for sub in s:
print (sub)
print (template.format(sub))
sdata = self.query(template.format(sub))
for row in sdata:
cMap[sub] = row
break
packed = []
for src in result:
subData = cMap[src['content'].toPython()].asdict()
newSrc = admsSrc(SrcName = src['o'].toPython(), SrcHeight = src['height'].toPython(), SrcDiameter = src['diameter'].toPython(),SrcVertVeloc = src['velocity'].toPython(), SrcPolEmissionRate = src['massflow'].toPython(), SrcPollutants = self.polIRI2Name(src['content'].toPython()),SrcTemperature = src['temp'].toPython(), SrcX1 = src['x'].toPython(), SrcY1 = src['y'].toPython(), SrcMolWeight = subData['mw'].toPython(), SrcDensity = subData['d'].toPython(), SrcSpecHeatCap = subData['hc'].toPython())
packed.append(newSrc)
return packed
def getBdnData(self):
self.connectDB(self.bdnnode, connectType = 'endpoint')
bdns = self.filterBdnEnvelope()
if len(bdns) == 0: #range is smaller than any envelope,
#then we have to filter individual buildings
#todo: in this case, should we filter by a calculated centroid, or a crude one with ground x, y? going with x, y first
bdns = self.filterBdns(bdns)
if len(bdns) == 0:
raise Exception('no bdn within range')
print ('Found {0} bdn within range, they are '.format(len(bdns)))
result = list((zip(*[self.getMetrics(bld) for bld in bdns])))
print (result)
newBdn = self.BDN(len(bdns), *result)
return newBdn
def filterBdnEnvelope(self):
'''
Get all buildings within range by comparing range with envelope
return list of building url
'''
xRange, yRange = self.range
qb =self.query('''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX citygml: <http://www.theworldavatar.com/CityGMLOntology.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
SELECT distinct ?bdn
WHERE{{
?cityM a citygml:CityModelType.
?cityM citygml:boundedBy ?envelope .
?envelope a citygml:EnvelopeType. # get all envelopes
?envelope citygml:upperCornerPoint ?upoint. # get bounds of envelope
?upoint space_and_time_extended:hasGISCoordinateSystem ?uco.
?uco space_and_time_extended:hasProjectedCoordinate_x ?uxe.
?uxe sys:hasValue ?uxv.
?uxv sys:numericalValue ?ux.
?uco space_and_time_extended:hasProjectedCoordinate_y ?uye.
?uye sys:hasValue ?uyv.
?uyv sys:numericalValue ?uy.
?envelope citygml:lowerCornerPoint ?lpoint.
?lpoint space_and_time_extended:hasGISCoordinateSystem ?lco.
?lco space_and_time_extended:hasProjectedCoordinate_x ?lxe.
?lxe sys:hasValue ?lxv.
?lxv sys:numericalValue ?lx.
?lco space_and_time_extended:hasProjectedCoordinate_y ?lye.
?lye sys:hasValue ?lyv.
?lyv sys:numericalValue ?ly.
?cityM citygml:cityObjectMember ?bdn . #get bdn belongs to filterd envelope
Filter(xsd:double(?ly) > "{1}"^^xsd:double && xsd:double(?uy) < "{2}"^^xsd:double && xsd:double(?lx) > "{3}"^^xsd:double && xsd:double(?ux) < "{4}"^^xsd:double) #filter envelope within range
}}
LIMIT {0} #limit of building num
'''.format(self.bdnLimit, *yRange, *xRange))
########todo: in future delete stub data
return tuple(row['bdn'] for row in qb)
#todo
def filterBdns(self, bdns):
'''
filter individual building to see if they are within range
get all uris where every x and y in its ground is within range(maybe count ?)
'''
xRange, yRange = self.range
qstr = '''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
#PREFIX citygml:<file:/D:/citygmllearn/citygmlhandmade.owl#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
SELECT distinct ?bdn
WHERE {{
{{ #case1:building has no parts
?bdn a citygml:BuildingType.
?bdn citygml:boundedBy ?g. #building boundBy surface
?g a citygml:GroundSurfaceType. # surface is a ground
?g citygml:lod2MultiSurface ?ms. #ground has lod2multisurface ms
?ms citygml:surfaceMember ?pol. #ms has member polygon
?pol citygml:exterior ?lring. # polygon exterior is linear ring
?lring sys:contains ?po. # linear ring consists of points
?po space_and_time_extended:hasGISCoordinateSystem ?co. #point has coordinate system cs
?co space_and_time_extended:hasProjectedCoordinate_x ?xe. #[extract cs to get x,y,z value]
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
}} UNION {{ #case 2:
?bdn a citygml:BuildingType. #bdns that consists of part
?bdn citygml:consistsOfBuildingPart ?part.
?part a citygml:BuildingPartType.
?part citygml:boundedBy ?g.
?g a citygml:GroundSurfaceType.
?g citygml:lod2MultiSurface ?ms.
?ms citygml:surfaceMember ?pol.
?pol citygml:exterior ?lring.
?lring sys:contains ?po.
?po space_and_time_extended:hasGISCoordinateSystem ?co.
?co space_and_time_extended:hasProjectedCoordinate_x ?xe.
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
}}
filter(xsd:double(?y) > "{1}"^^xsd:double && xsd:double(?y) < "{2}"^^xsd:double && xsd:double(?x) > "{3}"^^xsd:double && xsd:double(?x) < "{4}"^^xsd:double)
}}
LIMIT {0} #limit of building num
'''.format(self.bdnLimit, *yRange, *xRange)
qre = self.query(qstr)
return tuple(row['bdn'] for row in qre)
def getBdnVertices(self, nodeuri):
'''get all buildings data
returns: data object
'''
#todo: modify query to get raw data,then pass to converter
qData= self.query('''
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT ?x ?y ?z
WHERE {{
<{0}> citygml:boundedBy ?g. #building/part IRI boundBy surface
?g a citygml:GroundSurfaceType. # surface is a ground
?g citygml:lod2MultiSurface ?ms. #ground has lod2multisurface ms
?ms citygml:surfaceMember ?pol. #ms has member polygon
?pol citygml:exterior ?lring. # polygon exterior is linear ring
?lring sys:contains ?po. # linear ring consists of points
?po space_and_time_extended:hasGISCoordinateSystem ?co. #point has coordinate system cs
?co space_and_time_extended:hasProjectedCoordinate_x ?xe. #[extract cs to get x,y,z value]
?xe sys:hasValue ?xv.
?xv sys:numericalValue ?x.
?co space_and_time_extended:hasProjectedCoordinate_y ?ye.
?ye sys:hasValue ?yv.
?yv sys:numericalValue ?y.
?co space_and_time_extended:hasProjectedCoordinate_z ?ze.
?ze sys:hasValue ?zv.
?zv sys:numericalValue ?z.
}}
'''.format(nodeuri))
#query for roof max and ground min
qHeight = self.query("""
PREFIX sys: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/upper_level/system.owl#>
PREFIX space_and_time_extended: <http://www.theworldavatar.com/OntoCAPE/OntoCAPE/supporting_concepts/space_and_time/space_and_time_extended.owl#>
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT (MIN(?z) AS ?min) (MAX(?zr) AS ?max) #select min of ground z values, max of roof z values
WHERE {{
<{0}> citygml:boundedBy ?g. #building/part IRI boundBy surface
?g a citygml:GroundSurfaceType. # surface is a GROUND
?g citygml:lod2MultiSurface ?ms. #[select all coordi z value for ground surface]
?ms citygml:surfaceMember ?pol.
?pol citygml:exterior ?lring.
?lring sys:contains ?po.
?po space_and_time_extended:hasGISCoordinateSystem ?co.
?co space_and_time_extended:hasProjectedCoordinate_z ?ze.
?ze sys:hasValue ?zv.
?zv sys:numericalValue ?z.
<{0}> citygml:boundedBy ?gr. #building/part IRI boundBy surface
?gr a citygml:RoofSurfaceType. # surface is a ROOF
?gr citygml:lod2MultiSurface ?msr. #[select all coordi z value for roof surface]
?msr citygml:surfaceMember ?polr.
?polr citygml:exterior ?lringr.
?lringr sys:contains ?por.
?por space_and_time_extended:hasGISCoordinateSystem ?cor.
?cor space_and_time_extended:hasProjectedCoordinate_z ?zer.
?zer sys:hasValue ?zvr.
?zvr sys:numericalValue ?zr.
}} GROUP BY ?g # group by each ground IRI
""".format(nodeuri))
# define coordi convert function : building kb ---> adms
Bdn2ADMSCoordC = defineCoordConvert('epsg:28992','epsg:32648')
#float(row['min'].toPython()), float(row['max'].toPython())
zlimit = tuple( Bdn2ADMSCoordC(float(row['min'].toPython()), float(row['max'].toPython())) for row in qHeight )[0]
return ( list(Bdn2ADMSCoordC(float(row['x'].toPython()), float(row['y'].toPython())) for row in qData), zlimit)
def getMetrics(self, nodeuri):
base = None
if self.hasBuildingPart(nodeuri):
print('{0} has building part'.format(nodeuri))
#get list of building part
bparts = list(row['p'] for row in self.query(
''' PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
SELECT ?p
WHERE
{
?b citygml:consistsOfBuildingPart ?p.
}'''))
#get metrics for each part
polygons = tuple( Polygon(*self.getBdnVertices(uri)) for uri in bparts)
#get centroid for pols
base = Polygon.combineBaseMulti(polygons)
else: # no building part
print('{0} NOT has building part'.format(nodeuri))
verticesNHeight = self.getBdnVertices(nodeuri)
#print(verticesHeight)
base = Polygon(*verticesNHeight)
#todo pack return result
#('BldName','BldType','BldX','BldY','BldHeight', 'BldLength', 'BldWidth', 'BldAngle')
print ((nodeuri.toPython(), base.type, base.centroid[0], base.centroid[1], base.height, base.length, base.width, base.angle) )
#todo: coordinate conversion for centroid!!!
return (uri2name(nodeuri.toPython()), base.type, base.centroid[0], base.centroid[1], base.height, base.length, base.width, base.angle)
#calculate centroid
#choose shape
#calculate angle
#probably for the best if we construct a type of polygon instead?
def hasBuildingPart(self, nodeuri):
#self.connectDB(nodeuri)
print('checking if building part for :{0}'.format(nodeuri))
qData = self.query('''
PREFIX citygml:<http://www.theworldavatar.com/CityGMLOntology.owl#>
ASK
{{
<{0}> citygml:consistsOfBuildingPart ?p
}}
'''.format(nodeuri))
qData, = tuple(qData)
return qData
def coreBdn2Src(self):
'''calculate main building for each src
'''
#compare src coords to each bdn
for src in self.rawSrc:
closed, dClosed, first = None, sys.maxsize, True
print('find closest bdn for src: '+src.SrcName+" with x: "+str(src.SrcX1) +" y: "+str(src.SrcY1))
for i in range(len(self.rawBdn.BldX)):
#print('bdn x: ' +str( self.rawBdn.BldX[i]))
dx, dy = self.rawBdn.BldX[i] - src.SrcX1, self.rawBdn.BldY[i] - src.SrcY1
d = dx * dx + dy * dy
if first:
dClosed = d
closed = self.rawBdn.BldName[i]
first = False
#print('d:{0} dclosed:{1}'.format(d, dClosed))
if d - dClosed < 0:
closed = self.rawBdn.BldName[i]
dClosed = d
print('new smallest distance: '+str(dClosed))
if closed is not None:
src.setMainBuilding(closed)
else: #err handling: something is wrong if no closest building is found, just throw
raise Exception('no closest building found for src: '+src.SrcName)
def getOpt(self, PolNames, SrcNames):
numPol = len(PolNames)
return self.OPT(numPol,PolNames, [1]*numPol,[0]*numPol,[1]*numPol,[3]*numPol,[0]*numPol,[0]*numPol,[0]*numPol,[0]*80,[0]*80,['ug/m3']*4,1,0,1,"Grouptank001",SrcNames,0)
def polIRI2Name(self, polIRI):
substances = {'http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl#chlorine':'Cl2'}
if polIRI in substances.keys():
print('Found: '+ substances[polIRI])
return substances[polIRI]
else:
print ('Not found !!!!')
raise Exception('This substance is not defined!!!!')
def getWeather(self):
'''
get weather data,
for now we trigger a python script to write the .met file directly
'''
#finish writing met
metLoc= r"test.met"
cobbling.run(meteo_data = metLoc)
#pointing to met in apl
return os.path.realpath(metLoc)
def get(self):
'''main function, get all related info for adms
returns: complete data for adms
'''
#get all src data
self.rawSrc = self.getSrcData()
##todo: think about this case: when nothing is found, where should we handle it?
##then nothing should be written; catch this exception and mute it in the main function
if self.rawSrc is None:
raise Exception("no src found matching the requirements")
#get all building data
self.rawBdn = self.getBdnData()
#print('raw building: ')
#print(self.rawBdn)
rawOpt = self.getOpt(self.pollutants, [s.SrcName for s in self.rawSrc])
self.coreBdn2Src()
#for debugging; in future, define __repr__ for this data type, it does not auto-print for objects
for src in self.rawSrc:
print(src)
met = self.getWeather()
return {'Src': self.rawSrc, 'Bdn': self.rawBdn, 'Opt': rawOpt, 'Met': met}
def queryEndpoint(self, querystr):
print('requesting @ '+self.address+" with query:")
#print(querystr)
resp = requests.get(self.address, params = {'query':querystr}, timeout = 1500, headers = {'user-agent': 'my-app/0.0.1'})
print(resp.json())
qres = jsresult.JSONResult(resp.json())#json decoded
print(qres)
return qres
def queryLocalGraph(self, querystr):
qres = self.g.query(querystr)
return qres
def Uri2Local(self, uri):
'''replace a uri with a local address
inputs:
uri - uri to be changed
returns: string - local address
'''
return uri.replace("http://www.jparksimulator.com", config.root).replace("http://www.theworldavatar.com", config.root)
def remote2local(self, func):
'''decorator to change connection function to local connection by replacing given iri to local address
'''
def functionWrapper(self, address):
address = self.Uri2Local(address)
return func(self, address)
return functionWrapper
def connectDB(self, address, connectType = 'parse'):
'''connect to db anyhow (we use rdflib graph parse now)
'''
def connectDBActual( address):
'''
Actual method to connect to db
'''
#obsolete: use rdflib locally
self.address = address
if connectType == 'parse':
self.g = rdflib.Graph()#comment out in future
self.g.parse(address)#comment out in future
self.qmethodMap = {'parse': self.queryLocalGraph, 'endpoint':self.queryEndpoint}
if not sameGraph(address, self.address):
print ('parsing graph: '+ address)
if connectType not in self.qmethodMap:
raise Exception('db connection method not defined')
#self.connectType = connectType
self.query = self.qmethodMap[connectType]
connectDBActual(address)
def sameGraph(uri1, uri2):
def trimloc(uri):
if uri is None:
return None
else:
return uri.split('#')[0]
return trimloc(uri1) == trimloc(uri2)
def defineCoordConvert(inCode, outCode):
inProj = Proj(init=inCode)
outProj = Proj(init=outCode)
def coordConvert(x,y):
return transform(inProj, outProj, x, y)
return coordConvert
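# --- usage sketch -------------------------------------------------------------
# defineCoordConvert closes over a pyproj projection pair; getBdnVertices()
# above uses it to move building coordinates from the source CRS (epsg:28992)
# into the ADMS CRS (epsg:32648). The sample point below is made up purely for
# illustration.
def _demoCoordConvert():
    bdn2adms = defineCoordConvert('epsg:28992', 'epsg:32648')
    x, y = bdn2adms(80000.0, 450000.0)
    return x, y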
def uri2name(uri):
return uri.split('#')[1]
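# --- usage sketch --------------------------------------------------------------
# All IRIs and range values below are assumptions for illustration only; the
# chlorine IRI mirrors the polIRI2Name mapping above.
if __name__ == '__main__':
    retriever = admsInputDataRetriever(
        topnode='http://www.theworldavatar.com/JurongIsland.owl#JurongIsland',
        bdnnode='http://www.theworldavatar.com/citygml',
        range={'xmin': 0, 'xmax': 99999, 'ymin': 0, 'ymax': 99999},
        pollutants=['http://www.theworldavatar.com/OntoCAPE/OntoCAPE/material/substance/substance.owl#chlorine'],
    )
    adms_input = retriever.get()
    print(adms_input['Met'])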
|
""" command line options, ini-file and conftest.py processing. """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import copy
import inspect
import os
import shlex
import sys
import types
import warnings
import py
import six
from pkg_resources import parse_version
from pluggy import HookimplMarker
from pluggy import HookspecMarker
from pluggy import PluginManager
import _pytest._code
import _pytest.assertion
import _pytest.hookspec # the extension point definitions
from .exceptions import PrintHelp
from .exceptions import UsageError
from .findpaths import determine_setup
from .findpaths import exists
from _pytest._code import ExceptionInfo
from _pytest._code import filter_traceback
from _pytest.compat import lru_cache
from _pytest.compat import safe_str
from _pytest.outcomes import Skipped
hookimpl = HookimplMarker("pytest")
hookspec = HookspecMarker("pytest")
class ConftestImportFailure(Exception):
def __init__(self, path, excinfo):
Exception.__init__(self, path, excinfo)
self.path = path
self.excinfo = excinfo
def main(args=None, plugins=None):
""" return exit code, after performing an in-process test run.
:arg args: list of command line arguments.
:arg plugins: list of plugin objects to be auto-registered during
initialization.
"""
from _pytest.main import EXIT_USAGEERROR
try:
try:
config = _prepareconfig(args, plugins)
except ConftestImportFailure as e:
exc_info = ExceptionInfo(e.excinfo)
tw = py.io.TerminalWriter(sys.stderr)
tw.line(
"ImportError while loading conftest '{e.path}'.".format(e=e), red=True
)
exc_info.traceback = exc_info.traceback.filter(filter_traceback)
exc_repr = (
exc_info.getrepr(style="short", chain=False)
if exc_info.traceback
else exc_info.exconly()
)
formatted_tb = safe_str(exc_repr)
for line in formatted_tb.splitlines():
tw.line(line.rstrip(), red=True)
return 4
else:
try:
return config.hook.pytest_cmdline_main(config=config)
finally:
config._ensure_unconfigure()
except UsageError as e:
tw = py.io.TerminalWriter(sys.stderr)
for msg in e.args:
tw.line("ERROR: {}\n".format(msg), red=True)
return EXIT_USAGEERROR
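# --- usage sketch ---------------------------------------------------------------
# main() is what pytest.main() dispatches to: it performs an in-process test run
# and returns an exit code instead of raising SystemExit. The path and option
# below are illustrative.
def _demo_main():
    exit_code = main(["-q", "tests/"])  # equivalent to running `pytest -q tests/`
    sys.exit(exit_code)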
class cmdline(object): # compatibility namespace
main = staticmethod(main)
def filename_arg(path, optname):
""" Argparse type validator for filename arguments.
:path: path of filename
:optname: name of the option
"""
if os.path.isdir(path):
raise UsageError("{} must be a filename, given: {}".format(optname, path))
return path
def directory_arg(path, optname):
"""Argparse type validator for directory arguments.
:path: path of directory
:optname: name of the option
"""
if not os.path.isdir(path):
raise UsageError("{} must be a directory, given: {}".format(optname, path))
return path
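# --- usage sketch ---------------------------------------------------------------
# These validators plug into argparse via functools.partial so that the option
# name appears in the UsageError message; the option wired up below is an
# assumption for demonstration.
def _demo_validator_wiring():
    import argparse
    import functools

    demo_parser = argparse.ArgumentParser()
    demo_parser.add_argument(
        "--basetemp",
        type=functools.partial(directory_arg, optname="--basetemp"),
    )
    return demo_parser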
default_plugins = (
"mark",
"main",
"terminal",
"runner",
"python",
"fixtures",
"debugging",
"unittest",
"capture",
"skipping",
"tmpdir",
"monkeypatch",
"recwarn",
"pastebin",
"helpconfig",
"nose",
"assertion",
"junitxml",
"resultlog",
"doctest",
"cacheprovider",
"freeze_support",
"setuponly",
"setupplan",
"stepwise",
"warnings",
"logging",
)
builtin_plugins = set(default_plugins)
builtin_plugins.add("pytester")
def get_config():
# subsequent calls to main will create a fresh instance
pluginmanager = PytestPluginManager()
config = Config(pluginmanager)
for spec in default_plugins:
pluginmanager.import_plugin(spec)
return config
def get_plugin_manager():
"""
Obtain a new instance of the
:py:class:`_pytest.config.PytestPluginManager`, with default plugins
already loaded.
This function can be used by integrations with other tools, like hooking
into pytest to run tests in an IDE.
"""
return get_config().pluginmanager
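# --- usage sketch ---------------------------------------------------------------
# The integration flow described above: obtain a plugin manager with the default
# plugins already loaded and register an extra plugin object before driving
# pytest. The plugin class here is an illustrative assumption.
class _DemoReversePlugin(object):
    def pytest_collection_modifyitems(self, items):
        items.reverse()  # e.g. run tests in reverse collection order

def _demo_plugin_manager():
    pluginmanager = get_plugin_manager()
    pluginmanager.register(_DemoReversePlugin())
    return pluginmanager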
def _prepareconfig(args=None, plugins=None):
warning = None
if args is None:
args = sys.argv[1:]
elif isinstance(args, py.path.local):
args = [str(args)]
elif not isinstance(args, (tuple, list)):
if not isinstance(args, str):
raise ValueError("not a string or argument list: %r" % (args,))
args = shlex.split(args, posix=sys.platform != "win32")
from _pytest import deprecated
warning = deprecated.MAIN_STR_ARGS
config = get_config()
pluginmanager = config.pluginmanager
try:
if plugins:
for plugin in plugins:
if isinstance(plugin, six.string_types):
pluginmanager.consider_pluginarg(plugin)
else:
pluginmanager.register(plugin)
if warning:
from _pytest.warnings import _issue_config_warning
_issue_config_warning(warning, config=config, stacklevel=4)
return pluginmanager.hook.pytest_cmdline_parse(
pluginmanager=pluginmanager, args=args
)
except BaseException:
config._ensure_unconfigure()
raise
class PytestPluginManager(PluginManager):
"""
Overwrites :py:class:`pluggy.PluginManager <pluggy.PluginManager>` to add pytest-specific
functionality:
* loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and
``pytest_plugins`` global variables found in plugins being loaded;
* ``conftest.py`` loading during start-up;
"""
def __init__(self):
super(PytestPluginManager, self).__init__("pytest")
self._conftest_plugins = set()
# state related to local conftest plugins
self._dirpath2confmods = {}
self._conftestpath2mod = {}
self._confcutdir = None
self._noconftest = False
self._duplicatepaths = set()
self.add_hookspecs(_pytest.hookspec)
self.register(self)
if os.environ.get("PYTEST_DEBUG"):
err = sys.stderr
encoding = getattr(err, "encoding", "utf8")
try:
err = py.io.dupfile(err, encoding=encoding)
except Exception:
pass
self.trace.root.setwriter(err.write)
self.enable_tracing()
# Config._consider_importhook will set a real object if required.
self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
# Used to know when we are importing conftests after the pytest_configure stage
self._configured = False
def addhooks(self, module_or_class):
"""
.. deprecated:: 2.8
Use :py:meth:`pluggy.PluginManager.add_hookspecs <PluginManager.add_hookspecs>`
instead.
"""
warning = dict(
code="I2",
fslocation=_pytest._code.getfslineno(sys._getframe(1)),
nodeid=None,
message="use pluginmanager.add_hookspecs instead of "
"deprecated addhooks() method.",
)
self._warn(warning)
return self.add_hookspecs(module_or_class)
def parse_hookimpl_opts(self, plugin, name):
# pytest hooks are always prefixed with pytest_
# so we avoid accessing possibly non-readable attributes
# (see issue #1073)
if not name.startswith("pytest_"):
return
# ignore some historic special names which can not be hooks anyway
if name == "pytest_plugins" or name.startswith("pytest_funcarg__"):
return
method = getattr(plugin, name)
opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name)
# consider only actual functions for hooks (#3775)
if not inspect.isroutine(method):
return
# collect unmarked hooks as long as they have the `pytest_' prefix
if opts is None and name.startswith("pytest_"):
opts = {}
if opts is not None:
for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"):
opts.setdefault(name, hasattr(method, name))
return opts
def parse_hookspec_opts(self, module_or_class, name):
opts = super(PytestPluginManager, self).parse_hookspec_opts(
module_or_class, name
)
if opts is None:
method = getattr(module_or_class, name)
if name.startswith("pytest_"):
opts = {
"firstresult": hasattr(method, "firstresult"),
"historic": hasattr(method, "historic"),
}
return opts
def register(self, plugin, name=None):
if name in ["pytest_catchlog", "pytest_capturelog"]:
self._warn(
"{} plugin has been merged into the core, "
"please remove it from your requirements.".format(
name.replace("_", "-")
)
)
return
ret = super(PytestPluginManager, self).register(plugin, name)
if ret:
self.hook.pytest_plugin_registered.call_historic(
kwargs=dict(plugin=plugin, manager=self)
)
if isinstance(plugin, types.ModuleType):
self.consider_module(plugin)
return ret
def getplugin(self, name):
# support deprecated naming because plugins (xdist e.g.) use it
return self.get_plugin(name)
def hasplugin(self, name):
"""Return True if the plugin with the given name is registered."""
return bool(self.get_plugin(name))
def pytest_configure(self, config):
# XXX now that the pluginmanager exposes hookimpl(tryfirst...)
# we should remove tryfirst/trylast as markers
config.addinivalue_line(
"markers",
"tryfirst: mark a hook implementation function such that the "
"plugin machinery will try to call it first/as early as possible.",
)
config.addinivalue_line(
"markers",
"trylast: mark a hook implementation function such that the "
"plugin machinery will try to call it last/as late as possible.",
)
self._configured = True
def _warn(self, message):
kwargs = (
message
if isinstance(message, dict)
else {"code": "I1", "message": message, "fslocation": None, "nodeid": None}
)
self.hook.pytest_logwarning.call_historic(kwargs=kwargs)
#
# internal API for local conftest plugin handling
#
def _set_initial_conftests(self, namespace):
""" load initial conftest files given a preparsed "namespace".
As conftest files may add their own command line options
which have arguments ('--my-opt somepath') we might get some
false positives. All builtin and 3rd party plugins will have
been loaded, however, so common options will not confuse our logic
here.
"""
current = py.path.local()
self._confcutdir = (
current.join(namespace.confcutdir, abs=True)
if namespace.confcutdir
else None
)
self._noconftest = namespace.noconftest
self._using_pyargs = namespace.pyargs
testpaths = namespace.file_or_dir
foundanchor = False
for path in testpaths:
path = str(path)
# remove node-id syntax
i = path.find("::")
if i != -1:
path = path[:i]
anchor = current.join(path, abs=1)
if exists(anchor): # we found some file object
self._try_load_conftest(anchor)
foundanchor = True
if not foundanchor:
self._try_load_conftest(current)
def _try_load_conftest(self, anchor):
self._getconftestmodules(anchor)
# let's also consider test* subdirs
if anchor.check(dir=1):
for x in anchor.listdir("test*"):
if x.check(dir=1):
self._getconftestmodules(x)
@lru_cache(maxsize=128)
def _getconftestmodules(self, path):
if self._noconftest:
return []
if path.isfile():
directory = path.dirpath()
else:
directory = path
if six.PY2: # py2 is not using lru_cache.
try:
return self._dirpath2confmods[directory]
except KeyError:
pass
# XXX these days we may rather want to use config.rootdir
# and allow users to opt into looking into the rootdir parent
# directories instead of requiring to specify confcutdir
clist = []
for parent in directory.realpath().parts():
if self._confcutdir and self._confcutdir.relto(parent):
continue
conftestpath = parent.join("conftest.py")
if conftestpath.isfile():
mod = self._importconftest(conftestpath)
clist.append(mod)
self._dirpath2confmods[directory] = clist
return clist
def _rget_with_confmod(self, name, path):
modules = self._getconftestmodules(path)
for mod in reversed(modules):
try:
return mod, getattr(mod, name)
except AttributeError:
continue
raise KeyError(name)
def _importconftest(self, conftestpath):
try:
return self._conftestpath2mod[conftestpath]
except KeyError:
pkgpath = conftestpath.pypkgpath()
if pkgpath is None:
_ensure_removed_sysmodule(conftestpath.purebasename)
try:
mod = conftestpath.pyimport()
if (
hasattr(mod, "pytest_plugins")
and self._configured
and not self._using_pyargs
):
from _pytest.deprecated import (
PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST
)
warnings.warn_explicit(
PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST,
category=None,
filename=str(conftestpath),
lineno=0,
)
except Exception:
raise ConftestImportFailure(conftestpath, sys.exc_info())
self._conftest_plugins.add(mod)
self._conftestpath2mod[conftestpath] = mod
dirpath = conftestpath.dirpath()
if dirpath in self._dirpath2confmods:
for path, mods in self._dirpath2confmods.items():
if path and path.relto(dirpath) or path == dirpath:
assert mod not in mods
mods.append(mod)
self.trace("loaded conftestmodule %r" % (mod))
self.consider_conftest(mod)
return mod
#
# API for bootstrapping plugin loading
#
#
def consider_preparse(self, args):
for opt1, opt2 in zip(args, args[1:]):
if opt1 == "-p":
self.consider_pluginarg(opt2)
def consider_pluginarg(self, arg):
if arg.startswith("no:"):
name = arg[3:]
# PR #4304 : remove stepwise if cacheprovider is blocked
if name == "cacheprovider":
self.set_blocked("stepwise")
self.set_blocked("pytest_stepwise")
self.set_blocked(name)
if not name.startswith("pytest_"):
self.set_blocked("pytest_" + name)
else:
self.import_plugin(arg)
def consider_conftest(self, conftestmodule):
self.register(conftestmodule, name=conftestmodule.__file__)
def consider_env(self):
self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
def consider_module(self, mod):
self._import_plugin_specs(getattr(mod, "pytest_plugins", []))
def _import_plugin_specs(self, spec):
plugins = _get_plugin_specs_as_list(spec)
for import_spec in plugins:
self.import_plugin(import_spec)
def import_plugin(self, modname):
# most often modname refers to builtin modules, e.g. "pytester",
# "terminal" or "capture". Those plugins are registered under their
# basename for historic purposes but must be imported with the
# _pytest prefix.
assert isinstance(modname, (six.text_type, str)), (
"module name as text required, got %r" % modname
)
modname = str(modname)
if self.is_blocked(modname) or self.get_plugin(modname) is not None:
return
if modname in builtin_plugins:
importspec = "_pytest." + modname
else:
importspec = modname
self.rewrite_hook.mark_rewrite(importspec)
try:
__import__(importspec)
except ImportError as e:
new_exc_type = ImportError
new_exc_message = 'Error importing plugin "%s": %s' % (
modname,
safe_str(e.args[0]),
)
new_exc = new_exc_type(new_exc_message)
six.reraise(new_exc_type, new_exc, sys.exc_info()[2])
except Skipped as e:
self._warn("skipped plugin %r: %s" % ((modname, e.msg)))
else:
mod = sys.modules[importspec]
self.register(mod, modname)
def _get_plugin_specs_as_list(specs):
"""
Parses a list of "plugin specs" and returns a list of plugin names.
Plugin specs can be given as a list of strings separated by "," or already as a list/tuple in
which case it is returned as a list. Specs can also be `None` in which case an
empty list is returned.
"""
if specs is not None:
if isinstance(specs, str):
specs = specs.split(",") if specs else []
if not isinstance(specs, (list, tuple)):
raise UsageError(
"Plugin specs must be a ','-separated string or a "
"list/tuple of strings for plugin names. Given: %r" % specs
)
return list(specs)
return []
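# --- behavior sketch -------------------------------------------------------------
# A condensed illustration of the accepted spec shapes; anything else (e.g. a
# dict) raises UsageError.
def _demo_plugin_specs():
    assert _get_plugin_specs_as_list(None) == []
    assert _get_plugin_specs_as_list("") == []
    assert _get_plugin_specs_as_list("a,b") == ["a", "b"]
    assert _get_plugin_specs_as_list(("a", "b")) == ["a", "b"]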
def _ensure_removed_sysmodule(modname):
try:
del sys.modules[modname]
except KeyError:
pass
class Notset(object):
def __repr__(self):
return "<NOTSET>"
notset = Notset()
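# --- behavior sketch -------------------------------------------------------------
# The notset sentinel lets Config.getoption() below distinguish "caller supplied
# no default" from an explicit default of None; a condensed illustration of that
# pattern:
def _demo_sentinel(options, name, default=notset):
    try:
        return options[name]
    except KeyError:
        if default is not notset:
            return default
        raise ValueError("no option named %r" % (name,))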
def _iter_rewritable_modules(package_files):
for fn in package_files:
is_simple_module = "/" not in fn and fn.endswith(".py")
is_package = fn.count("/") == 1 and fn.endswith("__init__.py")
if is_simple_module:
module_name, _ = os.path.splitext(fn)
yield module_name
elif is_package:
package_name = os.path.dirname(fn)
yield package_name
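# --- behavior sketch -------------------------------------------------------------
# Only top-level modules and single-level packages are considered rewritable;
# nested files and non-Python files are skipped.
def _demo_iter_rewritable():
    files = ["myplugin.py", "pkg/__init__.py", "pkg/deep/other.py", "data.txt"]
    assert list(_iter_rewritable_modules(files)) == ["myplugin", "pkg"]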
class Config(object):
""" access to configuration values, pluginmanager and plugin hooks. """
def __init__(self, pluginmanager):
#: access to command line option as attributes.
#: (deprecated), use :py:func:`getoption() <_pytest.config.Config.getoption>` instead
self.option = argparse.Namespace()
from .argparsing import Parser, FILE_OR_DIR
_a = FILE_OR_DIR
self._parser = Parser(
usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
processopt=self._processopt,
)
#: a pluginmanager instance
self.pluginmanager = pluginmanager
self.trace = self.pluginmanager.trace.root.get("config")
self.hook = self.pluginmanager.hook
self._inicache = {}
self._override_ini = ()
self._opt2dest = {}
self._cleanup = []
self._warn = self.pluginmanager._warn
self.pluginmanager.register(self, "pytestconfig")
self._configured = False
def do_setns(dic):
import pytest
setns(pytest, dic)
self.hook.pytest_namespace.call_historic(do_setns, {})
self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
def add_cleanup(self, func):
""" Add a function to be called when the config object gets out of
use (usually coinciding with pytest_unconfigure)."""
self._cleanup.append(func)
def _do_configure(self):
assert not self._configured
self._configured = True
self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
def _ensure_unconfigure(self):
if self._configured:
self._configured = False
self.hook.pytest_unconfigure(config=self)
self.hook.pytest_configure._call_history = []
while self._cleanup:
fin = self._cleanup.pop()
fin()
def warn(self, code, message, fslocation=None, nodeid=None):
"""
.. deprecated:: 3.8
Use :py:func:`warnings.warn` or :py:func:`warnings.warn_explicit` directly instead.
Generate a warning for this test session.
"""
from _pytest.warning_types import RemovedInPytest4Warning
if isinstance(fslocation, (tuple, list)) and len(fslocation) > 2:
filename, lineno = fslocation[:2]
else:
filename = "unknown file"
lineno = 0
msg = "config.warn has been deprecated, use warnings.warn instead"
if nodeid:
msg = "{}: {}".format(nodeid, msg)
warnings.warn_explicit(
RemovedInPytest4Warning(msg),
category=None,
filename=filename,
lineno=lineno,
)
self.hook.pytest_logwarning.call_historic(
kwargs=dict(
code=code, message=message, fslocation=fslocation, nodeid=nodeid
)
)
def get_terminal_writer(self):
return self.pluginmanager.get_plugin("terminalreporter")._tw
def pytest_cmdline_parse(self, pluginmanager, args):
# REF1 assert self == pluginmanager.config, (self, pluginmanager.config)
self.parse(args)
return self
def notify_exception(self, excinfo, option=None):
if option and option.fulltrace:
style = "long"
else:
style = "native"
excrepr = excinfo.getrepr(
funcargs=True, showlocals=getattr(option, "showlocals", False), style=style
)
res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo)
if not any(res):
for line in str(excrepr).split("\n"):
sys.stderr.write("INTERNALERROR> %s\n" % line)
sys.stderr.flush()
def cwd_relative_nodeid(self, nodeid):
# nodeid's are relative to the rootpath, compute relative to cwd
if self.invocation_dir != self.rootdir:
fullpath = self.rootdir.join(nodeid)
nodeid = self.invocation_dir.bestrelpath(fullpath)
return nodeid
@classmethod
def fromdictargs(cls, option_dict, args):
""" constructor useable for subprocesses. """
config = get_config()
config.option.__dict__.update(option_dict)
config.parse(args, addopts=False)
for x in config.option.plugins:
config.pluginmanager.consider_pluginarg(x)
return config
def _processopt(self, opt):
for name in opt._short_opts + opt._long_opts:
self._opt2dest[name] = opt.dest
if hasattr(opt, "default") and opt.dest:
if not hasattr(self.option, opt.dest):
setattr(self.option, opt.dest, opt.default)
@hookimpl(trylast=True)
def pytest_load_initial_conftests(self, early_config):
self.pluginmanager._set_initial_conftests(early_config.known_args_namespace)
def _initini(self, args):
ns, unknown_args = self._parser.parse_known_and_unknown_args(
args, namespace=copy.copy(self.option)
)
r = determine_setup(
ns.inifilename,
ns.file_or_dir + unknown_args,
rootdir_cmd_arg=ns.rootdir or None,
config=self,
)
self.rootdir, self.inifile, self.inicfg = r
self._parser.extra_info["rootdir"] = self.rootdir
self._parser.extra_info["inifile"] = self.inifile
self.invocation_dir = py.path.local()
self._parser.addini("addopts", "extra command line options", "args")
self._parser.addini("minversion", "minimally required pytest version")
self._override_ini = ns.override_ini or ()
def _consider_importhook(self, args):
"""Install the PEP 302 import hook if using assertion rewriting.
Needs to parse the --assert=<mode> option from the commandline
and find all the installed plugins to mark them for rewriting
by the importhook.
"""
ns, unknown_args = self._parser.parse_known_and_unknown_args(args)
mode = ns.assertmode
if mode == "rewrite":
try:
hook = _pytest.assertion.install_importhook(self)
except SystemError:
mode = "plain"
else:
self._mark_plugins_for_rewrite(hook)
_warn_about_missing_assertion(mode)
def _mark_plugins_for_rewrite(self, hook):
"""
Given an importhook, mark for rewrite any top-level
modules or packages in the distribution package for
all pytest plugins.
"""
import pkg_resources
self.pluginmanager.rewrite_hook = hook
if os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"):
# We don't autoload from setuptools entry points, no need to continue.
return
# 'RECORD' available for plugins installed normally (pip install)
# 'SOURCES.txt' available for plugins installed in dev mode (pip install -e)
# for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa
# so it shouldn't be an issue
metadata_files = "RECORD", "SOURCES.txt"
package_files = (
entry.split(",")[0]
for entrypoint in pkg_resources.iter_entry_points("pytest11")
for metadata in metadata_files
for entry in entrypoint.dist._get_metadata(metadata)
)
for name in _iter_rewritable_modules(package_files):
hook.mark_rewrite(name)
def _validate_args(self, args):
"""Validate known args."""
self._parser.parse_known_and_unknown_args(
args, namespace=copy.copy(self.option)
)
return args
def _preparse(self, args, addopts=True):
if addopts:
env_addopts = os.environ.get("PYTEST_ADDOPTS", "")
if len(env_addopts):
args[:] = self._validate_args(shlex.split(env_addopts)) + args
self._initini(args)
if addopts:
args[:] = self._validate_args(self.getini("addopts")) + args
self._checkversion()
self._consider_importhook(args)
self.pluginmanager.consider_preparse(args)
if not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"):
# Don't autoload from setuptools entry point. Only explicitly specified
# plugins are going to be loaded.
self.pluginmanager.load_setuptools_entrypoints("pytest11")
self.pluginmanager.consider_env()
self.known_args_namespace = ns = self._parser.parse_known_args(
args, namespace=copy.copy(self.option)
)
if self.known_args_namespace.confcutdir is None and self.inifile:
confcutdir = py.path.local(self.inifile).dirname
self.known_args_namespace.confcutdir = confcutdir
try:
self.hook.pytest_load_initial_conftests(
early_config=self, args=args, parser=self._parser
)
except ConftestImportFailure:
e = sys.exc_info()[1]
if ns.help or ns.version:
# we don't want to prevent --help/--version to work
# so just let is pass and print a warning at the end
self._warn("could not load initial conftests (%s)\n" % e.path)
else:
raise
def _checkversion(self):
import pytest
minver = self.inicfg.get("minversion", None)
if minver:
if parse_version(minver) > parse_version(pytest.__version__):
raise pytest.UsageError(
"%s:%d: requires pytest-%s, actual pytest-%s'"
% (
self.inicfg.config.path,
self.inicfg.lineof("minversion"),
minver,
pytest.__version__,
)
)
def parse(self, args, addopts=True):
# parse given cmdline arguments into this config object.
assert not hasattr(
self, "args"
), "can only parse cmdline args at most once per Config object"
self._origargs = args
self.hook.pytest_addhooks.call_historic(
kwargs=dict(pluginmanager=self.pluginmanager)
)
self._preparse(args, addopts=addopts)
# XXX deprecated hook:
self.hook.pytest_cmdline_preparse(config=self, args=args)
self._parser.after_preparse = True
try:
args = self._parser.parse_setoption(
args, self.option, namespace=self.option
)
if not args:
if self.invocation_dir == self.rootdir:
args = self.getini("testpaths")
if not args:
args = [str(self.invocation_dir)]
self.args = args
except PrintHelp:
pass
def addinivalue_line(self, name, line):
""" add a line to an ini-file option. The option must have been
declared but might not yet be set in which case the line becomes the
first line in its value. """
x = self.getini(name)
assert isinstance(x, list)
x.append(line) # modifies the cached list inline
def getini(self, name):
""" return configuration value from an :ref:`ini file <inifiles>`. If the
specified name hasn't been registered through a prior
:py:func:`parser.addini <_pytest.config.Parser.addini>`
call (usually from a plugin), a ValueError is raised. """
try:
return self._inicache[name]
except KeyError:
self._inicache[name] = val = self._getini(name)
return val
def _getini(self, name):
try:
description, type, default = self._parser._inidict[name]
except KeyError:
raise ValueError("unknown configuration value: %r" % (name,))
value = self._get_override_ini_value(name)
if value is None:
try:
value = self.inicfg[name]
except KeyError:
if default is not None:
return default
if type is None:
return ""
return []
if type == "pathlist":
dp = py.path.local(self.inicfg.config.path).dirpath()
values = []
for relpath in shlex.split(value):
values.append(dp.join(relpath, abs=True))
return values
elif type == "args":
return shlex.split(value)
elif type == "linelist":
return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
elif type == "bool":
return bool(_strtobool(value.strip()))
else:
assert type is None
return value
def _getconftest_pathlist(self, name, path):
try:
mod, relroots = self.pluginmanager._rget_with_confmod(name, path)
except KeyError:
return None
modpath = py.path.local(mod.__file__).dirpath()
values = []
for relroot in relroots:
if not isinstance(relroot, py.path.local):
relroot = relroot.replace("/", py.path.local.sep)
relroot = modpath.join(relroot, abs=True)
values.append(relroot)
return values
def _get_override_ini_value(self, name):
value = None
# override_ini is a list of "ini=value" options
# always use the last item if multiple values are set for same ini-name,
# e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2
for ini_config in self._override_ini:
try:
key, user_ini_value = ini_config.split("=", 1)
except ValueError:
raise UsageError("-o/--override-ini expects option=value style.")
else:
if key == name:
value = user_ini_value
return value
def getoption(self, name, default=notset, skip=False):
""" return command line option value.
:arg name: name of the option. You may also specify
the literal ``--OPT`` option instead of the "dest" option name.
:arg default: default value if no option of that name exists.
:arg skip: if True raise pytest.skip if option does not exist
or has a None value.
"""
name = self._opt2dest.get(name, name)
try:
val = getattr(self.option, name)
if val is None and skip:
raise AttributeError(name)
return val
except AttributeError:
if default is not notset:
return default
if skip:
import pytest
pytest.skip("no %r option found" % (name,))
raise ValueError("no option named %r" % (name,))
def getvalue(self, name, path=None):
""" (deprecated, use getoption()) """
return self.getoption(name)
def getvalueorskip(self, name, path=None):
""" (deprecated, use getoption(skip=True)) """
return self.getoption(name, skip=True)
def _assertion_supported():
try:
assert False
except AssertionError:
return True
else:
return False
def _warn_about_missing_assertion(mode):
if not _assertion_supported():
if mode == "plain":
sys.stderr.write(
"WARNING: ASSERTIONS ARE NOT EXECUTED"
" and FAILING TESTS WILL PASS. Are you"
" using python -O?"
)
else:
sys.stderr.write(
"WARNING: assertions not in test modules or"
" plugins will be ignored"
" because assert statements are not executed "
"by the underlying Python interpreter "
"(are you using python -O?)\n"
)
def setns(obj, dic):
import pytest
for name, value in dic.items():
if isinstance(value, dict):
mod = getattr(obj, name, None)
if mod is None:
modname = "pytest.%s" % name
mod = types.ModuleType(modname)
sys.modules[modname] = mod
mod.__all__ = []
setattr(obj, name, mod)
obj.__all__.append(name)
setns(mod, value)
else:
setattr(obj, name, value)
obj.__all__.append(name)
# if obj != pytest:
# pytest.__all__.append(name)
setattr(pytest, name, value)
def create_terminal_writer(config, *args, **kwargs):
"""Create a TerminalWriter instance configured according to the options
in the config object. All code which requires a TerminalWriter object
and has access to a config object should use this function.
"""
tw = py.io.TerminalWriter(*args, **kwargs)
if config.option.color == "yes":
tw.hasmarkup = True
if config.option.color == "no":
tw.hasmarkup = False
return tw
def _strtobool(val):
"""Convert a string representation of truth to true (1) or false (0).
True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
'val' is anything else.
.. note:: copied from distutils.util
"""
val = val.lower()
if val in ("y", "yes", "t", "true", "on", "1"):
return 1
elif val in ("n", "no", "f", "false", "off", "0"):
return 0
else:
raise ValueError("invalid truth value %r" % (val,))
|
"""Unit test package for codeforces2html."""
|
# Import Built-Ins
import logging
import json
import time
import ssl
import hashlib
import hmac
from multiprocessing import Queue
from threading import Thread, Event, Timer
from collections import OrderedDict
# Import Third-Party
import websocket
# Import Homebrew
# Init Logging Facilities
log = logging.getLogger(__name__)
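# --- auth payload sketch --------------------------------------------------------
# A standalone version of the authentication payload assembled in
# WebSocketConnection.send(auth=True) below: 'AUTH' + nonce is signed with
# HMAC-SHA384 over the API secret, matching the Bitfinex v2 websocket auth
# scheme used by this module.
def build_auth_payload(api_key, secret):
    nonce = str(int(time.time() * 10000000))
    auth_string = 'AUTH' + nonce
    auth_sig = hmac.new(secret.encode(), auth_string.encode(),
                        hashlib.sha384).hexdigest()
    return {'event': 'auth', 'apiKey': api_key, 'authSig': auth_sig,
            'authPayload': auth_string, 'authNonce': nonce}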
class WebSocketConnection(Thread):
"""Websocket Connection Thread
Inspired heavily by ekulyk's PythonPusherClient Connection Class
https://github.com/ekulyk/PythonPusherClient/blob/master/pusherclient/connection.py
It handles all low-level system messages, such as reconnects, and the
pausing and resuming of activity.
"""
def __init__(self, *args, url=None, timeout=None, sslopt=None,
reconnect_interval=None, log_level=None, **kwargs):
"""Initialize a WebSocketConnection Instance.
:param data_q: Queue(), connection to the Client Class
:param args: args for Thread.__init__()
:param url: websocket address, defaults to v2 websocket.
:param timeout: timeout for connection; defaults to 10s
:param reconnect_interval: interval at which to try reconnecting;
defaults to 10s.
:param log_level: logging level for the connection Logger. Defaults to
logging.INFO.
:param kwargs: kwargs for Thread.__ini__()
"""
# Queue used to pass data up to BTFX client
self.q = Queue()
# Connection Settings
self.socket = None
self.url = url if url else 'wss://api.bitfinex.com/ws/2'
self.sslopt = sslopt if sslopt else {}
# Dict to store all subscribe commands for reconnects
self.channel_configs = OrderedDict()
# Connection Handling Attributes
self.connected = Event()
self.disconnect_called = Event()
self.reconnect_required = Event()
self.reconnect_interval = reconnect_interval if reconnect_interval else 10
self.paused = Event()
# Setup Timer attributes
# Tracks API Connection & Responses
self.ping_timer = None
self.ping_interval = 120
# Tracks Websocket Connection
self.connection_timer = None
self.connection_timeout = timeout if timeout else 10
# Tracks responses from send_ping()
self.pong_timer = None
self.pong_received = False
self.pong_timeout = 30
self.log = logging.getLogger(self.__module__)
if log_level == logging.DEBUG:
websocket.enableTrace(True)
self.log.setLevel(level=log_level if log_level else logging.INFO)
# Call init of Thread and pass remaining args and kwargs
Thread.__init__(self)
self.daemon = True
def disconnect(self):
"""Disconnects from the websocket connection and joins the Thread.
:return:
"""
self.log.debug("disconnect(): Disconnecting from API..")
self.reconnect_required.clear()
self.disconnect_called.set()
if self.socket:
self.socket.close()
self.join(timeout=1)
def reconnect(self):
"""Issues a reconnection by setting the reconnect_required event.
:return:
"""
# Reconnect attempt at self.reconnect_interval
self.log.debug("reconnect(): Initialzion reconnect sequence..")
self.connected.clear()
self.reconnect_required.set()
if self.socket:
self.socket.close()
def _connect(self):
"""Creates a websocket connection.
:return:
"""
self.log.debug("_connect(): Initializing Connection..")
self.socket = websocket.WebSocketApp(
self.url,
on_open=self._on_open,
on_message=self._on_message,
on_error=self._on_error,
on_close=self._on_close
)
if 'ca_certs' not in self.sslopt.keys():
ssl_defaults = ssl.get_default_verify_paths()
self.sslopt['ca_certs'] = ssl_defaults.cafile
self.log.debug("_connect(): Starting Connection..")
self.socket.run_forever(sslopt=self.sslopt)
while self.reconnect_required.is_set():
if not self.disconnect_called.is_set():
self.log.info("Attempting to connect again in %s seconds."
% self.reconnect_interval)
self.state = "unavailable"
time.sleep(self.reconnect_interval)
# We need to set this flag since closing the socket will
# set it to False
self.socket.keep_running = True
self.socket.run_forever(sslopt=self.sslopt)
def run(self):
"""Main method of Thread.
:return:
"""
self.log.debug("run(): Starting up..")
self._connect()
def _on_message(self, ws, message):
"""Handles and passes received data to the appropriate handlers.
:return:
"""
self._stop_timers()
raw, received_at = message, time.time()
self.log.debug("_on_message(): Received new message %s at %s",
raw, received_at)
try:
data = json.loads(raw)
except json.JSONDecodeError:
# Something is wrong with this data; log and discard
self.log.error("_on_message(): Could not decode JSON payload: %s", raw)
return
# Handle data
if isinstance(data, dict):
# This is a system message
self._system_handler(data, received_at)
else:
# This is a list of data
if data[1] == 'hb':
self._heartbeat_handler()
else:
self._data_handler(data, received_at)
# We've received data, reset timers
self._start_timers()
def _on_close(self, ws, *args):
self.log.info("Connection closed")
self.connected.clear()
self._stop_timers()
def _on_open(self, ws):
self.log.info("Connection opened")
self.connected.set()
self.send_ping()
self._start_timers()
if self.reconnect_required.is_set():
self.log.info("_on_open(): Connection reconnected, re-subscribing..")
self._resubscribe(soft=False)
def _on_error(self, ws, error):
self.log.info("Connection Error - %s", error)
self.reconnect_required.set()
self.connected.clear()
def _stop_timers(self):
"""Stops ping, pong and connection timers.
:return:
"""
if self.ping_timer:
self.ping_timer.cancel()
if self.connection_timer:
self.connection_timer.cancel()
if self.pong_timer:
self.pong_timer.cancel()
self.log.debug("_stop_timers(): Timers stopped.")
def _start_timers(self):
"""Resets and starts timers for API data and connection.
:return:
"""
self.log.debug("_start_timers(): Resetting timers..")
self._stop_timers()
# Sends a ping at ping_interval to see if API still responding
self.ping_timer = Timer(self.ping_interval, self.send_ping)
self.ping_timer.start()
# Automatically reconnect if we didn't receive data
self.connection_timer = Timer(self.connection_timeout,
self._connection_timed_out)
self.connection_timer.start()
def send_ping(self):
"""Sends a ping message to the API and starts pong timers.
:return:
"""
self.log.debug("send_ping(): Sending ping to API..")
self.socket.send(json.dumps({'event': 'ping'}))
self.pong_timer = Timer(self.pong_timeout, self._check_pong)
self.pong_timer.start()
def _check_pong(self):
"""Checks if a Pong message was received.
:return:
"""
self.pong_timer.cancel()
if self.pong_received:
self.log.debug("_check_pong(): Pong received in time.")
self.pong_received = False
else:
# reconnect
self.log.debug("_check_pong(): Pong not received in time."
"Issuing reconnect..")
self.reconnect()
def send(self, api_key=None, secret=None, list_data=None, auth=False, **kwargs):
"""Sends the given Payload to the API via the websocket connection.
:param kwargs: payload parameters as key=value pairs
:return:
"""
if auth:
nonce = str(int(time.time() * 10000000))
auth_string = 'AUTH' + nonce
auth_sig = hmac.new(secret.encode(), auth_string.encode(),
hashlib.sha384).hexdigest()
payload = {'event': 'auth', 'apiKey': api_key, 'authSig': auth_sig,
'authPayload': auth_string, 'authNonce': nonce}
payload = json.dumps(payload)
elif list_data:
payload = json.dumps(list_data)
else:
payload = json.dumps(kwargs)
self.log.debug("send(): Sending payload to API: %s", payload)
try:
self.socket.send(payload)
except websocket.WebSocketConnectionClosedException:
self.log.error("send(): Did not send out payload %s - client not connected. ", kwargs)
def pass_to_client(self, event, data, *args):
"""Passes data up to the client via a Queue().
:param event:
:param data:
:param args:
:return:
"""
self.q.put((event, data, *args))
def _connection_timed_out(self):
"""Issues a reconnection if the connection timed out.
:return:
"""
self.log.debug("_connection_timed_out(): Fired! Issuing reconnect..")
self.reconnect()
def _pause(self):
"""Pauses the connection.
:return:
"""
self.log.debug("_pause(): Setting paused() Flag!")
self.paused.set()
def _unpause(self):
"""Unpauses the connection.
        Sends a message up to the client that it should re-subscribe to all
        channels.
:return:
"""
self.log.debug("_unpause(): Clearing paused() Flag!")
self.paused.clear()
self.log.debug("_unpause(): Re-subscribing softly..")
self._resubscribe(soft=True)
def _heartbeat_handler(self):
"""Handles heartbeat messages.
:return:
"""
# Restart our timers since we received some data
self.log.debug("_heartbeat_handler(): Received a heart beat "
"from connection!")
self._start_timers()
def _pong_handler(self):
"""Handle a pong response.
:return:
"""
# We received a Pong response to our Ping!
self.log.debug("_pong_handler(): Received a Pong message!")
self.pong_received = True
def _system_handler(self, data, ts):
"""Distributes system messages to the appropriate handler.
        System messages are everything that arrives as a dict;
        heartbeat lists are handled separately in _on_message().
:param data:
:param ts:
:return:
"""
self.log.debug("_system_handler(): Received a system message: %s", data)
# Unpack the data
event = data.pop('event')
if event == 'pong':
self.log.debug("_system_handler(): Distributing %s to _pong_handler..",
data)
self._pong_handler()
elif event == 'info':
self.log.debug("_system_handler(): Distributing %s to _info_handler..",
data)
self._info_handler(data)
elif event == 'error':
self.log.debug("_system_handler(): Distributing %s to _error_handler..",
data)
self._error_handler(data)
elif event in ('subscribed', 'unsubscribed', 'conf', 'auth', 'unauth'):
self.log.debug("_system_handler(): Distributing %s to "
"_response_handler..", data)
self._response_handler(event, data, ts)
else:
self.log.error("Unhandled event: %s, data: %s", event, data)
def _response_handler(self, event, data, ts):
"""Handles responses to (un)subscribe and conf commands.
Passes data up to client.
:param data:
:param ts:
:return:
"""
self.log.debug("_response_handler(): Passing %s to client..", data)
self.pass_to_client(event, data, ts)
def _info_handler(self, data):
"""
        Handles INFO messages from the API and issues the relevant actions.
        :param data:
        """
info_message = {'20051': 'Stop/Restart websocket server '
'(please try to reconnect)',
'20060': 'Refreshing data from the trading engine; '
                                 'please pause any activity.',
'20061': 'Done refreshing data from the trading engine.'
' Re-subscription advised.'}
codes = {'20051': self.reconnect, '20060': self._pause,
'20061': self._unpause}
if 'version' in data:
self.log.info("API version: %i", data['version'])
return
try:
self.log.info(info_message[data['code']])
codes[data['code']]()
except KeyError as e:
self.log.exception(e)
self.log.error("Unknown Info code %s!", data['code'])
raise
def _error_handler(self, data):
"""
        Handles error messages and logs them accordingly.
        :param data:
        """
errors = {10000: 'Unknown event',
10001: 'Generic error',
10008: 'Concurrency error',
10020: 'Request parameters error',
10050: 'Configuration setup failed',
10100: 'Failed authentication',
10111: 'Error in authentication request payload',
10112: 'Error in authentication request signature',
10113: 'Error in authentication request encryption',
10114: 'Error in authentication request nonce',
10200: 'Error in un-authentication request',
10300: 'Subscription Failed (generic)',
10301: 'Already Subscribed',
10302: 'Unknown channel',
10400: 'Subscription Failed (generic)',
10401: 'Not subscribed',
11000: 'Not ready, try again later',
20000: 'User is invalid!'
}
try:
self.log.error(errors[data['code']])
except KeyError:
self.log.error("Received unknown error Code in message %s! "
"Reconnecting..", data)
def _data_handler(self, data, ts):
"""Handles data messages by passing them up to the client.
:param data:
:param ts:
:return:
"""
# Pass the data up to the Client
self.log.debug("_data_handler(): Passing %s to client..",
data)
self.pass_to_client('data', data, ts)
def _resubscribe(self, soft=False):
"""Resubscribes to all channels found in self.channel_configs.
:param soft: if True, unsubscribes first.
:return: None
"""
q_list = []
while True:
try:
                identifier, q = self.channel_configs.popitem(last=soft)
except KeyError:
break
q_list.append((identifier, q.copy()))
if identifier == 'auth':
self.send(**q, auth=True)
continue
if soft:
q['event'] = 'unsubscribe'
self.send(**q)
# Resubscribe for soft start.
if soft:
for identifier, q in reversed(q_list):
self.channel_configs[identifier] = q
self.send(**q)
else:
for identifier, q in q_list:
self.channel_configs[identifier] = q
|
# -*- coding: utf-8 -*-
# DISCLAIMER
# This code file is forked and adapted from https://github.com/tezignlab/RippleNet-TF2/blob/master/tools/load_data.py, which is under an MIT license.
""" Utilities for data loading for RippleNet. """
# import libraries
import os
import numpy as np
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Tuple
# import custom code
from src.util.logger import setup_logging
from src.util.caching import create_cache, load_cache
from src.config import FILENAME_RATINGS_FINAL_TXT, FILENAME_RATINGS_FINAL_NPY, FILENAME_KG_FINAL_TXT, FILENAME_KG_FINAL_NPY, FILENAME_TRAIN_RATINGS, FILENAME_USER_HISTORY_DICT
from src.config import FILENAME_TEST_RATINGS, FILENAME_TEST_RATINGS_RANDOM, FILENAME_TEST_RATINGS_NO_TFIDF, FILENAME_TEST_RATINGS_NO_WORD2VEC, FILENAME_TEST_RATINGS_NO_TRANSFORMER
class LoadData:
def __init__(self, args):
self.args = args
self.logger = setup_logging(name=__file__, log_level='info')
def load_data(self) -> Tuple[np.ndarray, np.ndarray, int, int, Dict[int, List[Tuple[int, int, int]]]]:
"""
Loads and returns the data needed in RippleNet.
Returns:
- :obj:`np.ndarray`:
Training set of ratings.
- :obj:`np.ndarray`:
Test set of ratings.
- :obj:`int`:
Number of entities.
- :obj:`int`:
Number of relations.
- :obj:`Dict[int, List[Tuple[int, int, int]]]`:
Ripple sets of each user.
"""
train_data, test_data, user_history_dict = self.load_rating()
n_entity, n_relation, kg = self.load_kg()
ripple_set = self.get_ripple_set(kg, user_history_dict)
return train_data, test_data, n_entity, n_relation, ripple_set
def get_test_file(self, test_set_type: str) -> Path:
"""
Retrieves the filepath of a test set given its type.
Args:
test_set_type (:obj:`str`):
The type of test set.
Returns:
:obj:`Path`:
The filepath of the test set.
"""
test_set_type2file = {
'complete': FILENAME_TEST_RATINGS,
'random': FILENAME_TEST_RATINGS_RANDOM,
'no_tfidf_ratings': FILENAME_TEST_RATINGS_NO_TFIDF,
'no_word2vec_ratings': FILENAME_TEST_RATINGS_NO_WORD2VEC,
'no_transformer_ratings': FILENAME_TEST_RATINGS_NO_TRANSFORMER
}
return test_set_type2file[test_set_type]
def load_rating(self) -> Tuple[np.ndarray, np.ndarray, Dict[int, List[int]]]:
"""
        Loads the cached training data, test data, and user history if they exist.
        Otherwise, it loads the raw user ratings, processes them into the training set, test set, and user history, and caches the results to disk.
Returns:
- :obj:`np.ndarray`:
Training set of ratings.
- :obj:`np.ndarray`:
Test set of ratings.
- :obj:`Dict[int, List[int]]`:
User history dictionary.
"""
self.logger.info('Reading rating file.')
test_file = self.get_test_file(self.args.test_set)
if os.path.exists(FILENAME_TRAIN_RATINGS) and os.path.exists(test_file) and os.path.exists(FILENAME_USER_HISTORY_DICT):
self.logger.info('Loading training and test data.')
train_data = np.load(FILENAME_TRAIN_RATINGS)
test_data = np.load(test_file)
user_history_dict = load_cache(FILENAME_USER_HISTORY_DICT)
self.logger.info(f'Size training data: {train_data.shape}.')
self.logger.info(f'Size test data: {test_data.shape}.')
else:
# Read rating file
if os.path.exists(FILENAME_RATINGS_FINAL_NPY):
rating_np = np.load(FILENAME_RATINGS_FINAL_NPY)
else:
rating_np = np.loadtxt(FILENAME_RATINGS_FINAL_TXT, dtype=np.int32)
np.save(FILENAME_RATINGS_FINAL_NPY, rating_np)
# Split dataset
self.logger.info('Splitting dataset.')
test_ratio = 0.2
n_ratings = rating_np.shape[0]
test_indices = np.random.choice(n_ratings, size=int(n_ratings * test_ratio), replace=False)
train_indices = set(range(n_ratings)) - set(test_indices)
# Traverse training data, only keeping the users with positive ratings
user_history_dict = dict()
for i in train_indices:
user = rating_np[i][0]
item = rating_np[i][1]
rating = rating_np[i][2]
if rating == 1:
if user not in user_history_dict:
user_history_dict[user] = []
user_history_dict[user].append(item)
train_indices = [i for i in train_indices if rating_np[i][0] in user_history_dict]
test_indices = [i for i in test_indices if rating_np[i][0] in user_history_dict]
train_data = rating_np[train_indices]
test_data = rating_np[test_indices]
self.logger.info(f'Size training data: {train_data.shape}.')
self.logger.info(f'Size test data: {test_data.shape}.')
# Cache test and train data
np.save(FILENAME_TRAIN_RATINGS, train_data)
np.save(FILENAME_TEST_RATINGS, test_data)
create_cache(user_history_dict, FILENAME_USER_HISTORY_DICT)
self.logger.info('Finished.\n')
return train_data, test_data, user_history_dict
def load_kg(self) -> Tuple[int, int, Dict[int, List[Tuple[int, int]]]]:
"""
Loads the knowledge graph if already cached as :obj:`np.ndarray`, otherwise it constructs it from the text file.
Returns:
- :obj:`int`:
Number of entities.
- :obj:`int`:
Number of relations.
- :obj:`Dict[int, List[Tuple[int, int]]]`:
The knowledge graph as a dictionary which maps each head entity to a tuple of the form (tail, relation).
"""
self.logger.info('Reading KG file.')
# Reading KG file
if os.path.exists(FILENAME_KG_FINAL_NPY):
kg_np = np.load(FILENAME_KG_FINAL_NPY)
else:
kg_np = np.loadtxt(FILENAME_KG_FINAL_TXT, dtype=np.int32)
np.save(FILENAME_KG_FINAL_NPY, kg_np)
n_entity = len(set(kg_np[:, 0]) | set(kg_np[:, 2]))
n_relation = len(set(kg_np[:, 1]))
self.logger.info('Constructing knowledge graph.')
kg = defaultdict(list)
for head, relation, tail in kg_np:
kg[head].append((tail, relation))
self.logger.info('Finished.\n')
return n_entity, n_relation, kg
def get_ripple_set(self, kg: Dict[int, List[Tuple[int, int]]], user_history_dict: Dict[int, List[int]]) -> Dict[int, List[Tuple[int, int, int]]]:
"""
Creates the ripple set for each user.
Args:
kg (:obj:`Dict[int, List[Tuple[int, int]]]`):
The knowledge graph as a dictionary which maps each head entity to a tuple of the form (tail, relation).
user_history_dict (:obj:`Dict[int, List[int]]`):
User history dictionary.
Returns:
:obj:`Dict[int, List[Tuple[int, int, int]]]`:
Ripple sets of each user.
"""
self.logger.info('Constructing ripple set.')
# user -> [(hop_0_heads, hop_0_relations, hop_0_tails), (hop_1_heads, hop_1_relations, hop_1_tails), ...]
ripple_set = defaultdict(list)
for user in user_history_dict:
for h in range(self.args.n_hop):
memories_h = []
memories_r = []
memories_t = []
if h == 0:
tails_of_last_hop = user_history_dict[user]
else:
tails_of_last_hop = ripple_set[user][-1][2]
for entity in tails_of_last_hop:
for tail_and_relation in kg[entity]:
memories_h.append(entity)
memories_r.append(tail_and_relation[1])
memories_t.append(tail_and_relation[0])
"""
If the current ripple set of the given user is empty, we simply copy the ripple set of the last hop here
This won't happen for h = 0, because only the items that appear in the KG have been selected.
"""
if len(memories_h) == 0:
ripple_set[user].append(ripple_set[user][-1])
else:
# Sample a fixed-size 1-hop memory for each user
replace = len(memories_h) < self.args.n_memory
indices = np.random.choice(len(memories_h), size=self.args.n_memory, replace=replace)
memories_h = [memories_h[i] for i in indices]
memories_r = [memories_r[i] for i in indices]
memories_t = [memories_t[i] for i in indices]
ripple_set[user].append((memories_h, memories_r, memories_t))
self.logger.info('Finished.\n')
return ripple_set
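# A minimal usage sketch (the argument names mirror the attributes accessed
# above; the values are assumptions and the configured data files must exist):
if __name__ == '__main__':
    import argparse
    args = argparse.Namespace(test_set='complete', n_hop=2, n_memory=32)
    loader = LoadData(args)
    train_data, test_data, n_entity, n_relation, ripple_set = loader.load_data()
    print(f'{n_entity} entities, {n_relation} relations, ripple sets for {len(ripple_set)} users.')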
|
import csv
import glob
import pandas as pd
files = glob.glob("data/*.txt")
names = {}
for file in files:
    with open(file) as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        for row in reader:
            name = row[0]
            sex = row[1]
            number = int(row[2])  # frequencies are counts; cast so they can be summed
            if name not in names:
                names[name] = {}
                names[name]['freq'] = number
                names[name]['sex'] = sex
                names[name]['count'] = len(name)  # name length, used to filter below
            else:
                names[name]['freq'] += number
df = pd.DataFrame.from_dict(names, orient='index')
with open("dict/names",'w') as f:
for v in df[df['count']==5].index.values:
f.write(v + '\n')
|
from typing import Optional
from functools import partial
import torch
from torch.nn.modules.loss import _Loss
from ._functional import focal_loss_with_logits
from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE
__all__ = ["FocalLoss"]
class FocalLoss(_Loss):
def __init__(
self,
mode: str,
alpha: Optional[float] = None,
gamma: Optional[float] = 2.,
ignore_index: Optional[int] = None,
reduction: Optional[str] = "mean",
normalized: bool = False,
reduced_threshold: Optional[float] = None,
):
"""Compute Focal loss
Args:
mode: Loss mode 'binary', 'multiclass' or 'multilabel'
alpha: Prior probability of having positive value in target.
            gamma: Power factor for dampening weight (focal strength).
ignore_index: If not None, targets may contain values to be ignored.
Target values equal to ignore_index will be ignored from loss computation.
normalized: Compute normalized focal loss (https://arxiv.org/pdf/1909.07829.pdf).
reduced_threshold: Switch to reduced focal loss. Note, when using this mode you should use `reduction="sum"`.
Shape
- **y_pred** - torch.Tensor of shape (N, C, H, W)
- **y_true** - torch.Tensor of shape (N, H, W) or (N, C, H, W)
Reference
https://github.com/BloodAxe/pytorch-toolbelt
"""
assert mode in {BINARY_MODE, MULTILABEL_MODE, MULTICLASS_MODE}
super().__init__()
self.mode = mode
self.ignore_index = ignore_index
self.focal_loss_fn = partial(
focal_loss_with_logits,
alpha=alpha,
gamma=gamma,
reduced_threshold=reduced_threshold,
reduction=reduction,
normalized=normalized,
)
def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor:
if self.mode in {BINARY_MODE, MULTILABEL_MODE}:
y_true = y_true.view(-1)
y_pred = y_pred.view(-1)
if self.ignore_index is not None:
# Filter predictions with ignore label from loss computation
not_ignored = y_true != self.ignore_index
y_pred = y_pred[not_ignored]
y_true = y_true[not_ignored]
loss = self.focal_loss_fn(y_pred, y_true)
elif self.mode == MULTICLASS_MODE:
num_classes = y_pred.size(1)
loss = 0
# Filter anchors with -1 label from loss computation
if self.ignore_index is not None:
not_ignored = y_true != self.ignore_index
for cls in range(num_classes):
cls_y_true = (y_true == cls).long()
cls_y_pred = y_pred[:, cls, ...]
if self.ignore_index is not None:
cls_y_true = cls_y_true[not_ignored]
cls_y_pred = cls_y_pred[not_ignored]
loss += self.focal_loss_fn(cls_y_pred, cls_y_true)
return loss
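# A minimal self-check sketch using the constants imported above
# (the shapes follow the docstring; the values are random):
if __name__ == "__main__":
    criterion = FocalLoss(mode=BINARY_MODE, gamma=2.0)
    y_pred = torch.randn(4, 1, 8, 8)                    # raw logits
    y_true = torch.randint(0, 2, (4, 1, 8, 8)).float()  # binary targets
    print(criterion(y_pred, y_true))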
|
from typing import Generic, TypeVar
CQ = TypeVar('CQ', covariant=True)
class CommandQueryBase(Generic[CQ]):
    def __init__(self) -> None:  # __init__ returns None; CQ parameterizes the class, not the constructor
pass
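# Example subclass binding the covariant type parameter (hypothetical):
# class UserQuery(CommandQueryBase[str]):
#     pass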
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import requests
import sys
import traceback
from bs4 import BeautifulSoup as Soup
parser = argparse.ArgumentParser(
description='Get github repo from go import path.')
parser.add_argument(
'go_dependency_list_file',
nargs='?',
default='dep.txt',
help=
'File path of a golang dependency list file, one line has a dependency name. '
+'(default: %(default)s)',
)
parser.add_argument(
'-o',
'--output',
dest='output_file',
nargs='?',
default='repo.txt',
help=
'Output file with one line per resolved github repo. Format: org/repo. (default: %(default)s)',
)
parser.add_argument(
'--manual-dep-repo-mapping',
dest='manual_dep_repo_mapping_file',
nargs='?',
default='dep_repo.manual.csv',
help=
'Optional dependency to repo mapping maintained manually for dependencies we cannot '
+'automatically resolve. Format: each line has dependency import name and its github repo '
+'separated by comma. Like, "upper.io/db.v3,upper/db". Note: github/upper/db is the repo. '
+'(default: %(default)s)'
)
args = parser.parse_args()
protocol = 'https://'
godoc_base = 'godoc.org/'
github_base = 'github.com/'
gopkg_base = 'gopkg.in/'
def github_link_to_repo(repo):
'''
    Removes any extra subfolders from a github repo path, keeping only org/repo.
'''
if len(repo.split('/')) > 2:
print('repo {} has subfolder'.format(repo), file=sys.stderr)
repo = '/'.join(repo.split('/')[:2])
assert len(repo.split(
'/')) == 2, 'repo name should be org/repo, but is {}'.format(repo)
return repo
def get_github_repo(url):
'''
Tries to resolve github repo from a github url.
Returns org/repo format github repo string.
'''
if url.startswith(protocol):
url = url[len(protocol):]
if not url.startswith(github_base):
raise Exception('Package url is not github: {}'.format(url))
github_repo = url[len(github_base):]
github_repo = github_link_to_repo(github_repo)
if github_repo[-1] == '/':
github_repo = github_repo[:-1]
return github_repo
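# e.g. get_github_repo('https://github.com/org/repo/sub/dir') returns 'org/repo';
# the subfolder is stripped by github_link_to_repo() above.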
def fetch_github_uri_from_godoc(url):
'''
Tries to resolve github repo from godoc website.
Implementation: Godoc is a standard way for a lot of golang libraries to
host its documentation. Godoc page usually has a link on top left with
github repo url. This function crawls godoc page for the library and finds
the github url there. If the link there isn't a github url, it throws an
exception.
'''
full_url = protocol + godoc_base + url
print('fetching godoc {}'.format(full_url), file=sys.stderr)
response = requests.get(full_url)
    assert response.ok, 'fetching {} failed with {} {}'.format(
        full_url, response.status_code, response.reason)
soup = Soup(response.text, features="html.parser")
navs = soup.select('#x-projnav')
if len(navs) != 1:
raise Exception(
'#x-projnav should occur exactly once, but {} found for {}'.format(len(navs), url))
nav = navs[0]
package_name = nav.select_one('span').contents[0]
assert package_name == url, 'fetched package name should be the same'
link = nav.select_one('a').attrs.get('href')
return get_github_repo(link)
def fetch_gopkg_uri(url):
'''
Tries to resolve github repo for gopkg libraries.
Implementation: gopkg library page has a button with text 'Source code', its
url is usually the corresponding github repo. Throws an exception if the url
found is not github.
'''
response = requests.get(protocol + url)
assert response.ok, 'fetching {} failed with {} {}'.format(
url, response.status_code, response.reason)
soup = Soup(response.text, features="html.parser")
def is_source_code_link(link):
return link.getText().find('Source Code') >= 0
source_code_links = list(filter(is_source_code_link, soup.select('a')))
assert len(
source_code_links) == 1, 'Expect exactly one source code link found'
link = source_code_links[0].attrs.get('href')
return get_github_repo(link)
def get_github_repo_for_dep(dep):
'''
Tries to resolve github repo by three ways:
1. fetch gopkg website
2. parse from github url
3. fetch godoc website
'''
print('Fetching github uri for {}'.format(dep), file=sys.stderr)
repo = None
if dep.startswith(gopkg_base):
print('Try fetching {} from gopkg'.format(dep), file=sys.stderr)
repo = fetch_gopkg_uri(dep)
elif dep.startswith(github_base):
print('{} is already github'.format(dep), file=sys.stderr)
repo = get_github_repo(dep)
else:
print('Try fetching {} repo from godoc'.format(dep), file=sys.stderr)
repo = fetch_github_uri_from_godoc(dep)
return repo
def main():
with open(args.go_dependency_list_file,
'r') as dep_file, open(args.output_file, 'w') as output_file:
mappings = {}
try:
with open(args.manual_dep_repo_mapping_file, 'r') as dep_repo_mapping_file:
for line in dep_repo_mapping_file:
mapping = line.strip().split(',')
assert len(mapping) == 2
[dep, repo] = mapping
mappings[dep] = repo
except Exception: # pylint: disable=broad-except
print('ignore manual_dep_repo_mapping_file', file=sys.stderr)
deps = [line.strip() for line in dep_file]
repo_seen = set()
dep_succeeded = []
# Dependencies that we couldn't resolve their github repos.
dep_failed = []
for dep in deps:
try:
# Get dep's repo from manually maintained mapping first.
repo = mappings.get(dep)
if repo is not None:
print('repo of {} is already configured to {}'.format(dep, repo), file=sys.stderr)
else:
# Try to resolve if not found
repo = get_github_repo_for_dep(dep)
if repo in repo_seen:
print('repo {} is seen more than once'.format(repo), file=sys.stderr)
else:
repo_seen.add(repo)
print(repo, file=output_file)
dep_succeeded.append(dep)
except Exception as e: # pylint: disable=broad-except
print('[failed]', e, file=sys.stderr)
traceback.print_exc(file=sys.stderr)
dep_failed.append(dep)
print()
print((
'Successfully resolved github repo for {} dependencies and saved to {}. '
+'Failed to resolve {} dependencies.'
).format(len(dep_succeeded), args.output_file, len(dep_failed)),
file=sys.stderr)
if dep_failed:
print('We failed to resolve the following dependencies:', file=sys.stderr)
for dep in dep_failed:
print(dep, file=sys.stderr)
if __name__ == '__main__':
main()
|
from django.contrib.sites.models import Site
from django.db import models
class Article(models.Model):
sites = models.ManyToManyField(Site)
headline = models.CharField(max_length=100)
publications = models.ManyToManyField("model_package.Publication", null=True, blank=True,)
class Meta:
app_label = 'model_package'
|
'''
@author: Xu Yan
@file: ModelNet.py
@time: 2021/3/19 15:51
'''
import os
import numpy as np
import warnings
import pickle
from tqdm import tqdm
from torch.utils.data import Dataset
warnings.filterwarnings('ignore')
def pc_normalize(pc):
centroid = np.mean(pc, axis=0)
pc = pc - centroid
m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
pc = pc / m
return pc
def farthest_point_sample(point, npoint):
"""
Input:
        point: pointcloud data, [N, D]
        npoint: number of samples
    Return:
        point: sampled pointcloud, [npoint, D]
"""
N, D = point.shape
xyz = point[:,:3]
centroids = np.zeros((npoint,))
distance = np.ones((N,)) * 1e10
farthest = np.random.randint(0, N)
for i in range(npoint):
centroids[i] = farthest
centroid = xyz[farthest, :]
dist = np.sum((xyz - centroid) ** 2, -1)
mask = dist < distance
distance[mask] = dist[mask]
farthest = np.argmax(distance, -1)
point = point[centroids.astype(np.int32)]
return point
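# Shape sanity check for farthest_point_sample (hypothetical standalone usage):
# pts = np.random.rand(100, 6)                      # xyz + normals
# assert farthest_point_sample(pts, 4).shape == (4, 6)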
class ModelNetDataLoader(Dataset):
def __init__(self, root, args, split='train', process_data=False):
self.root = root
self.npoints = args.num_point
self.process_data = process_data
self.uniform = args.use_uniform_sample
self.use_normals = args.use_normals
self.num_category = args.num_category
if self.num_category == 10:
self.catfile = os.path.join(self.root, 'modelnet10_shape_names.txt')
else:
self.catfile = os.path.join(self.root, 'modelnet40_shape_names.txt')
self.cat = [line.rstrip() for line in open(self.catfile)]
self.classes = dict(zip(self.cat, range(len(self.cat))))
shape_ids = {}
if self.num_category == 10:
shape_ids['train'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet10_train.txt'))]
shape_ids['test'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet10_test.txt'))]
else:
shape_ids['train'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_train.txt'))]
shape_ids['test'] = [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_test.txt'))]
assert (split == 'train' or split == 'test')
shape_names = ['_'.join(x.split('_')[0:-1]) for x in shape_ids[split]]
self.datapath = [(shape_names[i], os.path.join(self.root, shape_names[i], shape_ids[split][i]) + '.txt') for i
in range(len(shape_ids[split]))]
print('The size of %s data is %d' % (split, len(self.datapath)))
if self.uniform:
self.save_path = os.path.join(root, 'modelnet%d_%s_%dpts_fps.dat' % (self.num_category, split, self.npoints))
else:
self.save_path = os.path.join(root, 'modelnet%d_%s_%dpts.dat' % (self.num_category, split, self.npoints))
if self.process_data:
if not os.path.exists(self.save_path):
                print('Processing data %s (only runs the first time)...' % self.save_path)
self.list_of_points = [None] * len(self.datapath)
self.list_of_labels = [None] * len(self.datapath)
for index in tqdm(range(len(self.datapath)), total=len(self.datapath)):
fn = self.datapath[index]
cls = self.classes[self.datapath[index][0]]
cls = np.array([cls]).astype(np.int32)
point_set = np.genfromtxt(fn[1], delimiter=',').astype(np.float32)
if self.uniform:
point_set = farthest_point_sample(point_set, self.npoints)
else:
point_set = point_set[0:self.npoints, :]
self.list_of_points[index] = point_set
self.list_of_labels[index] = cls
with open(self.save_path, 'wb') as f:
pickle.dump([self.list_of_points, self.list_of_labels], f)
else:
print('Load processed data from %s...' % self.save_path)
with open(self.save_path, 'rb') as f:
self.list_of_points, self.list_of_labels = pickle.load(f)
def __len__(self):
return len(self.datapath)
def _get_item(self, index):
if self.process_data:
point_set, label = self.list_of_points[index], self.list_of_labels[index]
else:
fn = self.datapath[index]
cls = self.classes[self.datapath[index][0]]
label = np.array([cls]).astype(np.int32)
point_set = np.loadtxt(fn[1], delimiter=',').astype(np.float32)
if self.uniform:
point_set = farthest_point_sample(point_set, self.npoints)
else:
point_set = point_set[0:self.npoints, :]
point_set[:, 0:3] = pc_normalize(point_set[:, 0:3])
if not self.use_normals:
point_set = point_set[:, 0:3]
return point_set, label[0]
def __getitem__(self, index):
return self._get_item(index)
if __name__ == '__main__':
    import argparse
    import torch
    # Minimal argument namespace so the loader can run standalone (the values are assumptions)
    args = argparse.Namespace(num_point=1024, use_uniform_sample=False,
                              use_normals=False, num_category=40)
    data = ModelNetDataLoader('/data/modelnet40_normal_resampled/', args, split='train')
    DataLoader = torch.utils.data.DataLoader(data, batch_size=12, shuffle=True)
    for point, label in DataLoader:
        print(point.shape)
        print(label.shape)
|
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import gufunc_sampler as gs
# Mandelbrot set
rows = np.linspace(-1.25, 1.25, 1250)
cols = np.linspace(-2, 0.5, 1250)
mandel_set = gs.mandelbrot(cols + 1.j * rows[:, np.newaxis], 50)
plt.subplot(121)
plt.imshow(mandel_set, interpolation='nearest', cmap='hot')
# Point in polygon
points = np.random.rand(1000, 2)
triangle = np.random.rand(5, 2)
inside = gs.point_in_polygon(triangle[:, 0], triangle[:, 1],
points[:, 0], points[:, 1])
points_in = points[inside]
points_out = points[~inside]
plt.subplot(122, aspect='equal')
triangle = np.vstack((triangle, triangle[0]))
plt.plot(triangle[:, 0], triangle[:, 1], 'k-')
plt.plot(points_in[:, 0], points_in[:, 1], 'bx')
plt.plot(points_out[:, 0], points_out[:, 1], 'rx')
plt.show()  # display both panels
|
import os.path
from django.conf import settings
from django.core.management.base import BaseCommand
from sheerlike.indexer import index
LOCATION = os.environ.get('SHEER_LOCATION', os.getcwd())
ELASTICSEARCH_HOSTS = settings.SHEER_ELASTICSEARCH_SERVER
ELASTICSEARCH_INDEX = settings.SHEER_ELASTICSEARCH_INDEX
class Command(BaseCommand):
help = "Run the classic 'sheer' indexer"
def add_arguments(self, parser):
parser.add_argument('--reindex', '-r', action="store_true",
help="Recreate the index and reindex all content.")
parser.add_argument('--processors', '-p', nargs='*',
help='Content processors to index.')
parser.add_argument(
'--elasticsearch',
'-e',
default=ELASTICSEARCH_HOSTS,
help=("Elasticsearch host:port pairs. Separate hosts with commas. "
"Default is localhost:9200. You can also set the "
"SHEER_ELASTICSEARCH_HOSTS environment variable."))
parser.add_argument(
'--index',
'-i',
default=ELASTICSEARCH_INDEX,
help=("Elasticsearch index name. Default is 'content'. You can "
"also set the SHEER_ELASTICSEARCH_INDEX environment "
"variable."))
def handle(self, *args, **options):
index(args, options)
|
""""A system check for testing integration of various libraries with mozetl.
This sub-module will print out relevant version info. It will also read data
from `main_summary` and print basic statistics to verify that the system is
correctly set-up.
"""
import sys
import click
import logging
from datetime import datetime, timedelta
from pyspark.sql import SparkSession
from mozetl.utils import (
format_as_submission_date,
format_spark_path,
stop_session_safely,
)
logging.basicConfig(level=logging.DEBUG)
@click.command()
@click.option("--local/--no-local", default=False)
@click.option(
"--submission-date-s3",
type=str,
default=format_as_submission_date(datetime.now() - timedelta(2)),
)
@click.option("--input-bucket", type=str, default="telemetry-parquet")
@click.option("--input-prefix", type=str, default="main_summary/v4")
@click.option("--output-bucket", type=str, default="telemetry-test-bucket")
@click.option("--output-prefix", type=str, default="mozetl_system_check")
def main(
local, submission_date_s3, input_bucket, input_prefix, output_bucket, output_prefix
):
# print argument information
for k, v in locals().items():
print("{}: {}".format(k, v))
print("Python version: {}".format(sys.version_info))
spark = SparkSession.builder.getOrCreate()
print("Spark version: {}".format(spark.version))
# run a basic count over a sample of `main_summary` from 2 days ago
if not local:
ds_nodash = submission_date_s3
input_path = format_spark_path(input_bucket, input_prefix)
output_path = format_spark_path(output_bucket, output_prefix)
print(
"Reading data for {ds_nodash} from {input_path} and writing to {output_path}".format(
ds_nodash=ds_nodash, input_path=input_path, output_path=output_path
)
)
path = "{}/submission_date_s3={}/sample_id={}".format(input_path, ds_nodash, 1)
subset = spark.read.parquet(path)
print("Saw {} documents".format(subset.count()))
summary = subset.select(
"memory_mb", "cpu_cores", "subsession_length"
).describe()
summary.show()
summary.write.parquet(
output_path + "/submission_date_s3={}/".format(ds_nodash), mode="overwrite"
)
stop_session_safely(spark)
print("Done!")
|
import bw_projects
def test_version():
assert bw_projects.__version__
|
DEBUG=on
SECRET_KEY=CHANGE_ME!!!!
ALLOWED_HOSTS=,
DATABASE_URL=postgres://{{cookiecutter.db_user}}:{{cookiecutter.db_pwd}}@db/{{cookiecutter.db_name}}
EMAIL_HOST_USER={{cookiecutter.author_email}}
EMAIL_PWD=email_pwd
EMAIL_ADMIN={{cookiecutter.author_email}}
|
import datetime
# from sqlalchemy import Column, Integer, String
import hashlib
from server.extensions import Base, argon2, bcrypt
class User(Base):
__table__ = Base.metadata.tables["users"]
__table_args__ = {"autoload": True}
# Attribute names to help out with functions
# id = Column(Integer, primary_key=True, unique=True)
# ip_address = Column(String(64))
# username = Column(String(64), index=True, unique=True)
# email = Column(String(120), index=True, unique=True)
# password = Column(String(240))
# activation_selector = Column(String(120))#Unique
# activation_code = Column(String(120))
# forgotten_password_selector = Column(String(120))#Unique
# forgotten_password_code = Column(String(120))
# forgotten_password_time = Column(String(120))
# remember_selector = Column(String(120))#Unique
# remember_code = Column(String(120))
# created_on = Column(String(120))
# last_login = Column(String(120))
# active = Column(String(120))
# first_name = Column(String(120))
# last_name = Column(String(120))
# company = Column(String(120))
# phone = Column(String(120))
# country = Column(String(120))
# image = Column(String(120))
# bio = Column(String(240))
# core = Column(String(240))
# external_source = Column(String(120))
# external_id = Column(String(120))
# session_hash = Column(String(120))# session hash is API key
# password_hash = Column(String(120))
def set_password(self, password):
self.password = argon2.generate_password_hash(password)
def check_password(self, passwd):
"""
Check if the passwordhash is in Argon2 or Bcrypt(old) format
Resets the password hash to argon2 format if stored in bcrypt
Returns value for login route
"""
try:
if bcrypt.check_password_hash(self.password, passwd):
bpass = True
except ValueError as error:
print(error)
bpass = False
if argon2.check_password_hash(self.password, passwd):
return True
elif not argon2.check_password_hash(self.password, passwd) and not bpass:
return False
elif not argon2.check_password_hash(self.password, passwd) and bpass:
self.set_password(passwd)
return True
def update_bio(self, new_bio):
self.bio = new_bio
def update_email(self, email):
self.email = email
def update_first_name(self, first_name):
self.first_name = first_name
def update_last_name(self, last_name):
self.last_name = last_name
def update_forgotten_code(self, code):
self.forgotten_password_code = code
def update_activation_code(self, code):
self.activation_code = code
def update_activation(self):
self.active = "1"
print("user activated successfully")
def update_forgotten_time(self, time):
self.forgotten_password_time = time
def set_session_hash(self):
timestamp = datetime.datetime.now()
timestamp1 = timestamp.strftime("%Y-%m-%d %H:%M:%S")
md5_digest = hashlib.md5(timestamp1.encode()).hexdigest()
self.session_hash = md5_digest
def update_image_address(self, path):
self.image = path
def __repr__(self):
return "<User {}>".format(self.username)
class UserGroups(Base):
__table__ = Base.metadata.tables["users_groups"]
__table_args__ = {"autoload": True}
def set_group(self):
self.group_id = 2
print('group updated')
def __repr__(self):
return "<User {}>".format(self.username)
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from gcloud import err_code
from gcloud.taskflow3.models import TaskFlowInstance
from gcloud.tests.mock import * # noqa
from gcloud.tests.mock_settings import * # noqa
class GetNodeDetailTestCase(TestCase):
def test_node_does_not_exist(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.has_node = MagicMock(return_value=False)
detail = taskflow.get_node_detail(node_id="node_id", username="username")
self.assertFalse(detail["result"])
self.assertEqual(detail["code"], err_code.REQUEST_PARAM_INVALID.code)
def test_get_node_data_err(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.engine_ver = 2
taskflow.has_node = MagicMock(return_value=True)
dispatcher = MagicMock()
get_node_data_return = {"result": False}
dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, MagicMock(return_value=dispatcher)):
detail = taskflow.get_node_detail(node_id="node_id", username="username", project_id="project_id")
self.assertEqual(detail, get_node_data_return)
def test_get_node_detail_err(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.engine_ver = 2
taskflow.has_node = MagicMock(return_value=True)
dispatcher = MagicMock()
get_node_data_return = {"result": True, "data": {}}
get_node_detail_return = {"result": False}
dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, MagicMock(return_value=dispatcher)):
detail = taskflow.get_node_detail(node_id="node_id", username="username", project_id="project_id")
self.assertEqual(detail, get_node_detail_return)
def test_include_data_is_false(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.engine_ver = 2
taskflow.has_node = MagicMock(return_value=True)
dispatcher = MagicMock()
get_node_data_return = {"result": True, "data": {}}
get_node_detail_return = {"result": True, "data": {}}
dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
dispatcher_init = MagicMock(return_value=dispatcher)
node_id = "node_id"
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 1
include_data = False
with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, dispatcher_init):
detail = taskflow.get_node_detail(
node_id=node_id,
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
include_data=include_data,
)
dispatcher_init.assert_called_once_with(engine_ver=taskflow.engine_ver, node_id=node_id, taskflow_id=1)
dispatcher.get_node_data.assert_not_called()
dispatcher.get_node_detail.assert_called_once_with(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
pipeline_instance=taskflow.pipeline_instance,
loop=loop,
)
self.assertEqual(detail, {"code": 0, "data": {}, "message": "", "result": True})
def test_success(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.engine_ver = 2
taskflow.has_node = MagicMock(return_value=True)
dispatcher = MagicMock()
get_node_data_return = {"result": True, "data": {"data": "data"}}
get_node_detail_return = {"result": True, "data": {"detail": "detail"}}
dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
dispatcher_init = MagicMock(return_value=dispatcher)
node_id = "node_id"
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 1
include_data = True
with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, dispatcher_init):
detail = taskflow.get_node_detail(
node_id=node_id,
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
include_data=include_data,
project_id="project_id",
)
dispatcher_init.assert_called_once_with(engine_ver=taskflow.engine_ver, node_id=node_id, taskflow_id=1)
dispatcher.get_node_data.assert_called_once_with(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
pipeline_instance=taskflow.pipeline_instance,
loop=loop,
project_id="project_id",
)
dispatcher.get_node_detail.assert_called_once_with(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
pipeline_instance=taskflow.pipeline_instance,
loop=loop,
)
self.assertEqual(
detail, {"code": 0, "data": {"data": "data", "detail": "detail"}, "message": "", "result": True}
)
|
import vlc
instance = vlc.Instance()
class VLCPlayer:
    def __init__(self):
        self.player = instance.media_player_new()
        self.player.audio_output_set("Scaletempo")
        self.length = 0
    def set_position(self, position):
        self.player.set_position(position / self.get_length())
    def get_position(self):
        return self.player.get_position()
    def get_length(self):
        self.length = self.player.get_length() / 1000  # VLC reports length in milliseconds
        return self.length
    def is_playing(self):
        return self.player.get_state() == vlc.State.Playing
    def set_tempo(self, tempo):
        self.player.set_rate(tempo)
    def get_tempo(self):
        return self.player.get_rate()
    def play(self):
        self.player.play()
    def stop(self):
        self.player.stop()
    def pause(self):
        self.player.pause()
    def load_song(self, filename):
        media = instance.media_new(filename)
        self.player.set_media(media)
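# A minimal usage sketch (the file path is hypothetical):
# player = VLCPlayer()
# player.load_song("song.mp3")
# player.play()
# player.set_tempo(1.25)  # 25% faster; Scaletempo keeps the pitch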
|
import torch
from torch import nn
from d2l import torch as d2l
BATCH_SIZE, LR, NUM_EPOCHS = 256, 0.1, 10
ACTIVATE_FUNCS = [nn.ReLU(), nn.Sigmoid(), nn.Tanh()]
def init_weights(m):
if type(m) == nn.Linear:
nn.init.normal_(m.weight, std=0.01)
if __name__ == "__main__":
    for i in range(len(ACTIVATE_FUNCS)):
net = nn.Sequential(
nn.Flatten(),
nn.Linear(784, 256),
ACTIVATE_FUNCS[i],
nn.Linear(256, 10),
)
net.apply(init_weights)
loss = nn.CrossEntropyLoss()
trainer = torch.optim.SGD(net.parameters(), lr=LR)
train_iter, test_iter = d2l.load_data_fashion_mnist(BATCH_SIZE)
d2l.train_ch3(net, train_iter, test_iter, loss, NUM_EPOCHS, trainer)
print("Train Accuracy", d2l.evaluate_accuracy(net, train_iter))
print("Test Accuracy", d2l.evaluate_accuracy(net, test_iter))
|
import asyncio
import json
import random
import subprocess
import discord
import discord.ext
import requests
from discord.ext import commands
import os
from botlibrary.utils import get_variable
from botlibrary import constants
# assign constant variables
constants.assignVariables()
VERSION = constants.VERSION
bot_prefix = constants.bot_prefix
client = commands.Bot(command_prefix=bot_prefix, intents=discord.Intents.all())
def tokenchecker():
riotapi = constants.lol_token
base_riot_url = "https://euw1.api.riotgames.com/lol/summoner/v4/summoners/by-name/DCGALAXY?api_key="
rioturl = base_riot_url + riotapi
response = requests.get(rioturl)
if response.status_code == 200:
pass
else:
print("Der Riot-API Key hat nicht funktioniert :((")
print(
"Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf "
"https://developer.riotgames.com/api-status/ nach ob es nicht vllt an Riot selber liegt")
riotnotworkingexe = input("Willst du trotzdem starten? (j/n): ")
if riotnotworkingexe == "j":
pass
else:
raise Exception("Der Riot-API Key hat nicht funktioniert.")
osuapi = constants.osu_token
base_osu_url = "https://osu.ppy.sh/api/get_user_best?u=Aftersh0ock&k="
osuurl = base_osu_url + osuapi
osuresponse = requests.get(osuurl)
if osuresponse.status_code == 200:
pass
else:
print("Der Osu-API Key hat nicht funktioniert :((")
print(
"Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf https://status.ppy.sh nach ob es nicht vllt an Osu selber liegt")
osunotworkingexe = input("Willst du trotzdem starten? (j/n): ")
if osunotworkingexe == "j":
pass
else:
raise Exception("Der Osu-API Key hat nicht funktioniert.")
token = constants.bot_token
headers = {
"Authorization": "Bot " + token
}
response = requests.get('https://discordapp.com/api/v8/auth/login', headers=headers)
if response.status_code == 200:
pass
else:
raise Exception("Der Discord Bot Token funktioniert nicht!")
ipdata = constants.ipdata_token
baseipurl = "https://api.ipdata.co/8.8.8.8"
ipurl = baseipurl + "?api-key=" + ipdata
ipresponse = requests.get(ipurl)
if ipresponse.status_code == 200:
pass
else:
print("Der IPData-API Key hat nicht funktioniert :((")
print(
"Bitte checke ob der Key in der config.json richtig gesetzt ist und schau auf https://status.ipdata.co nach ob es nicht vllt an Osu selber liegt")
ipdatanotworkingexe = input("Willst du trotzdem starten? (j/n): ")
if ipdatanotworkingexe == "j":
pass
else:
raise Exception("Der IPData Key hat nicht funktioniert.")
tokenchecker()
@client.event
async def on_ready():
print("Yess der bot läuft :)".format(client))
print("Du hast derzeit Release " + str(VERSION) + " installiert")
print("Du bist eingeloggt als {0.user} auf discord.py Version {1}".format(client, discord.__version__))
if os.path.exists("config/mysql.json"):
print("MySQL-Logging ist AKTIVIERT")
else:
print("MySQL-Logging ist DEAKTIVIERT")
print("Der Bot ist zurzeit auf folgenden " + str(len(client.guilds)) + " Servern:")
for guild in client.guilds:
print("- " + str(guild.name))
client.loop.create_task(status_task())
async def status_task():
while True:
await client.change_presence(activity=discord.Game("https://git.io/antonsbot"),
status=discord.Status.online)
await asyncio.sleep(60)
        await client.change_presence(
            activity=discord.Game(bot_prefix + "corona on " + str(len(client.guilds)) + " servers"))
        await asyncio.sleep(60)
        await client.change_presence(activity=discord.Game("a hot game with the stepsister"))
        await asyncio.sleep(5)
        await client.change_presence(
            activity=discord.Activity(type=discord.ActivityType.watching, name="your messages"))
await asyncio.sleep(60)
"""
##############################################################################################################################################################
Ole rewrite paradise
##############################################################################################################################################################
"""
def owner_only(func):
async def wrapper(self, *args, **kwargs):
ctx = get_variable('ctx')
info = await client.application_info()
if ctx.author.id == info.owner.id:
return await func(self, *args, **kwargs)
else:
await ctx.channel.send("Error, only the bot owner can use this command!")
return wrapper
@client.command(name="reload")
@owner_only
async def reload_cog(ctx, cogName):
try:
await unload_cog(ctx, cogName)
await load_cog(ctx, cogName)
except Exception as e:
await ctx.channel.send(f"Während dem versuch die Erweiterung {cogName} neu zu laden ist etwas schiefgelaufen!")
@client.command(name="unload")
@owner_only
async def unload_cog(ctx, cogName):
try:
client.unload_extension(f"cogs.{cogName}")
await ctx.channel.send(f"Erfolgreich erweiterung {cogName} entladen!")
except Exception as e:
await ctx.channel.send(f"Fehler, entweder ist die erweiterung schong entladen, oder sie wurde nicht gefunden!")
@client.command(name="load")
@owner_only
async def load_cog(ctx, cogName):
try:
client.load_extension(f"cogs.{cogName}")
await ctx.channel.send(f"Erfolgreich erweiterung {cogName} geladen!")
except Exception as e:
await ctx.channel.send(f"Fehler, entweder ist die erweiterung schon geladen, oder sie wurde nicht gefunden.")
with open('config/config.json', 'r') as f:
json_stuff = json.load(f)
token = json_stuff["token"]
# load cogs
for filename in os.listdir("./cogs"):
if filename.endswith(".py"):
client.load_extension(f"cogs.{filename[:-3]}")
# run bot
client.run(token)
|
import pika
import sys
connection = pika.BlockingConnection(
pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='task_queue', durable=True)
message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(
exchange='',
routing_key='task_queue',
body=message,
properties=pika.BasicProperties(
delivery_mode=2, # make message persistent
))
print(" [x] Sent %r" % message)
connection.close()
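# A minimal matching consumer sketch (mirrors the RabbitMQ "work queues"
# tutorial; the callback body is an assumption). Run it as a separate script:
#
#   def callback(ch, method, properties, body):
#       print(" [x] Received %r" % body.decode())
#       ch.basic_ack(delivery_tag=method.delivery_tag)  # ack so the task is not re-queued
#
#   channel.basic_qos(prefetch_count=1)
#   channel.basic_consume(queue='task_queue', on_message_callback=callback)
#   channel.start_consuming()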
|
from islelib import __version__
def test_example(example_fixture: int) -> None:
assert __version__ is not False
|
from paz.core import Processor
from paz.core import SequentialProcessor
import numpy as np
class ProcessorA(Processor):
def __init__(self):
super(ProcessorA, self).__init__()
def call(self, image, boxes):
boxes = boxes - 1.0
return image, boxes
class ProcessorB(Processor):
def __init__(self):
super(ProcessorB, self).__init__()
def call(self, image, boxes):
boxes = boxes - 2.0
return image, boxes
class ProcessorC(Processor):
def __init__(self, probability=0.5):
super(ProcessorC, self).__init__()
def call(self, image):
return image / 255.0
class TransformA(SequentialProcessor):
def __init__(self):
super(TransformA, self).__init__()
self.add(ProcessorC())
class TransformB(SequentialProcessor):
def __init__(self):
super(TransformB, self).__init__()
self.add(ProcessorA())
self.add(ProcessorB())
self.add(ProcessorB())
class TransformC(SequentialProcessor):
def __init__(self):
        super(TransformC, self).__init__()  # was TransformB, a copy-paste bug
self.add(ProcessorA())
def test_arg_in_sequential_processor_input():
transformA = TransformA()
values = transformA(255.0)
    assert np.isclose(values, 1.0)
def test_kwargs_in_sequential_processor_input():
transformB = TransformB()
values = transformB(image=1.0, boxes=2.0)
assert np.allclose([1.0, -3.0], values)
def test_kwargs_invariance_in_sequential_processor_input():
transformB = TransformB()
values = transformB(boxes=2.0, image=1.0)
assert np.allclose([1.0, -3.0], values)
def test_flipped_kwargs_in_sequential_processor_input():
transformB = TransformB()
values = transformB(boxes=1.0, image=2.0)
assert np.allclose([2.0, -4.0], values)
|
# Model architecture analyzer
import os
import logging
import logging.handlers as handlers
import json
import numpy as np
import tensorflow as tf
physical_devices = tf.config.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], enable=True)
tf.keras.backend.set_floatx('float32')
from tensorflow.keras import layers
from tensorflow.keras.experimental import PeepholeLSTMCell
from tensorflow.keras.layers import TimeDistributed
from tensorflow.keras.layers import RepeatVector
from tensorflow.keras import regularizers
from tensorflow.keras import optimizers
from tensorflow.keras import losses, models
from tensorflow.keras import metrics
from tensorflow.keras import callbacks as cb
from tensorflow.keras import backend as kb
from sklearn.metrics import mean_squared_error
from tensorflow.keras.utils import plot_model, model_to_dot
# open local settings
with open('./settings.json') as local_json_file:
    local_submodule_settings = json.load(local_json_file)
# log setup
current_script_name = os.path.basename(__file__).split('.')[0]
log_path_filename = ''.join([local_submodule_settings['log_path'], current_script_name, '.log'])
logging.basicConfig(filename=log_path_filename, level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(name)s %(message)s')
logger = logging.getLogger(__name__)
logHandler = handlers.RotatingFileHandler(log_path_filename, maxBytes=10485760, backupCount=5)
logger.addHandler(logHandler)
class model_structure:
def analize(self, local_model_name, local_settings):
try:
# loading model (h5 format)
print('trying to open model file (assuming h5 format)')
local_model = models.load_model(''.join([local_settings['models_path'], local_model_name]))
# saving architecture in JSON format
local_model_json = local_model.to_json()
with open(''.join([local_settings['models_path'], local_model_name,
'_analyzed_.json']), 'w') as json_file:
json_file.write(local_model_json)
            # convert the subclassed model into an equivalent functional model
local_model_json = json.loads(local_model_json)
print(type(local_model_json))
local_batch_size = None
local_time_step_days = local_model_json['config']['build_input_shape'][1]
local_features = local_model_json['config']['build_input_shape'][2]
input_layer = layers.Input(batch_shape=(local_batch_size, local_time_step_days, local_features))
prev_layer = input_layer
for layer in local_model.layers:
prev_layer = layer(prev_layer)
functional_model = models.Model([input_layer], [prev_layer])
# plotting (exporting to png) the model
plot_path = ''.join([local_settings['models_path'], local_model_name, '_model.png'])
# model_to_dot(functional_model, show_shapes=True, show_layer_names=True, rankdir='TB',
# expand_nested=True, dpi=96, subgraph=True)
plot_model(functional_model, to_file=plot_path, show_shapes=True, show_layer_names=True,
rankdir='TB', expand_nested=True, dpi=216)
plot_model(functional_model, to_file=''.join([plot_path, '.pdf']), show_shapes=True, show_layer_names=True,
rankdir='TB', expand_nested=True)
except Exception as e1:
print('Error reading or saving model')
print(e1)
return False
return True
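# A minimal usage sketch (the model file name is hypothetical; the settings
# dict is the one loaded from settings.json above and must contain 'models_path'):
# analyzer = model_structure()
# analyzer.analize('forecaster_model.h5', local_submodule_settings)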
|
# system imports
import os, json
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
"""
Class used to hold configurations that are used in both website and admin applications
"""
MONGO_HOST = "localhost"
MONGO_DBNAME = "panoptes"
THREADS_PER_PAGE = 8
mailcfg = json.load(open('mailcfg.json'))
MAIL_SERVER = mailcfg['MAIL_SERVER']
MAIL_PORT = mailcfg['MAIL_PORT']
MAIL_USE_TLS = mailcfg['MAIL_USE_TLS']
MAIL_USERNAME = mailcfg['MAIL_USERNAME']
MAIL_PASSWORD = mailcfg['MAIL_PASSWORD']
MAIL_ADDRESS = mailcfg['MAIL_ADDRESS']
PERMISSIONS = {'admin':3, 'manager':2, 'user':1}
SERVICE_TYPES = ["mongo","redis"]
SERVICES = [('/services/db/'+x,'services',x.title()) for x in SERVICE_TYPES]
SERVICES += [('/services/setari/lista','services','Settings')]
MENU = [('services.home','hardware','Hardware',''),('services.home','services','Software services',SERVICES),('services.home','hardware','App','')]
@staticmethod
def init_app(app):
pass
class DevConfig(Config):
"""
    Class used to hold development-specific configurations
"""
SECRET_KEY = 'hardtoguesstring'
DEBUG = True
ADMINS = json.load(open('admins.json'))
CSRF_ENABLED = False
CSRF_SESSION_KEY = "somethingimpossibletoguess"
class AdminConfig(Config):
"""
Class used to hold admin-specific configurations
"""
SECRET_KEY = 'hardtoguesstring'
DEBUG = False
ADMINS = json.load(open('admins.json'))
CSRF_ENABLED = True
CSRF_SESSION_KEY = "somethingimpossibletoguess"
config = {
'dev': DevConfig,
'admin':AdminConfig
}
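# Typical factory-style usage (hypothetical Flask app factory):
# app.config.from_object(config['dev'])
# config['dev'].init_app(app)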
|
'''
Define functions for the detuning of an atomic system
'''
from LASED.state import *
from LASED.constants import *
def delta(e, g):
"""Detunings between substates.
Parameters:
e (State): State object
g (State): State object
Returns:
float: Difference in angular frequency of states (Grad/s).
"""
return e.w - g.w
def angularFreq(wavelength):
"""Calculates the angular frequency in Grad/s from a given wavelength.
Parameters:
wavelength (float): A wavelength in nm
Returns:
float: The angular frequency in Grad/s
"""
return 2*PI*C/wavelength*1e-9
def dopplerDelta(e, g, w_q, lambda_q, v_z):
""" The detuning between excited and ground states.
Accounts for a fixed motion of the atoms. Used between excited and ground states.
Parameters:
e (State): State object for excited state.
g (State): State object for ground state.
w_q (float): Angular frequency of exciting laser in rad/s.
lambda_q (float): Wavelength of exciting laser in m.
v_z (float): Velocity component of atoms in direction of laser in m/s.
Returns:
float: The detuning between ground and excited states including the doppler detuning due to a given atomic velocity.
"""
return w_q - v_z/lambda_q - e.w + g.w
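# Worked example (hypothetical numbers): an atom moving at v_z = 300 m/s towards
# a 780 nm laser adds a Doppler term v_z/lambda_q = 300/780e-9 ≈ 3.8e8 s^-1,
# which dopplerDelta() subtracts from the laser angular frequency w_q.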
|
import sys, copy
from itertools import *
from StringIO import StringIO
import benchbase
from benchbase import with_attributes, with_text, onlylib, serialized
############################################################
# Benchmarks
############################################################
class XSLTBenchMark(benchbase.TreeBenchMark):
@onlylib('lxe')
def bench_xslt_extensions_old(self, root):
tree = self.etree.XML("""\
<xsl:stylesheet version="1.0"
xmlns:l="test"
xmlns:testns="testns"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<l:data>TEST</l:data>
<xsl:template match="/">
<l:result>
<xsl:for-each select="*/*">
<xsl:copy-of select="testns:child(.)"/>
</xsl:for-each>
</l:result>
</xsl:template>
</xsl:stylesheet>
""")
def return_child(_, elements):
return elements[0][0]
extensions = {('testns', 'child') : return_child}
transform = self.etree.XSLT(tree, extensions)
for i in range(10):
transform(root)
@onlylib('lxe')
def bench_xslt_document(self, root):
transform = self.etree.XSLT(self.etree.XML("""\
<xsl:stylesheet version="1.0"
xmlns:l="test"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<l:data>TEST</l:data>
<xsl:template match="/">
<l:result>
<xsl:for-each select="*/*">
<l:test><xsl:copy-of select="document('')//l:data/text()"/></l:test>
</xsl:for-each>
</l:result>
</xsl:template>
</xsl:stylesheet>
"""))
transform(root)
if __name__ == '__main__':
benchbase.main(XSLTBenchMark)
|
'''
Created on 08-10-2012
@author: Jacek Przemieniecki
'''
from . import errors
class Database(object):
def get_atom_valency(self, symbol):
return valency[symbol]
def get_q_r(self, symbol):
grp_id = str_to_id[symbol][0]
return q_r_data[grp_id]
def get_parameter(self, symbol1, symbol2):
if symbol1 == symbol2:
return 0.0
grp1 = str_to_id[symbol1][1] - 1 # Adjust for list indexing starting at 0
grp2 = str_to_id[symbol2][1] - 1
param = params[grp1][grp2]
if param is None:
raise errors.ValueNotFound()
else:
return param
def iterate_strings(self):
for key in str_to_id:
yield key
def __init__(self):
pass
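# Example usage (the group symbols are hypothetical entries of str_to_id below):
# db = Database()
# db.get_atom_valency("C")        # -> 4
# db.get_parameter("CH3", "OH")   # UNIFAC interaction parameter a_mn
# db.get_parameter("CH3", "CH3")  # -> 0.0 for identical groups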
valency = {"C" : 4,
"N" : 3,
"O" : 2,
"S" : 2,
"Si" : 4,
"Cl" : 1,
"Br" : 1,
"I" : 1,
"F" : 1}
### Data from http://www.aim.env.uea.ac.uk/aim/info/UNIFACgroups.html
params = [[0.0, 86.02, 61.13, 76.5, 986.5, 697.2, 1318.0, 1333.0, 476.4, 677.0, 232.1, 507.0, 251.5, 391.5, 255.7, 206.6, 920.7, 287.77, 597.0, 663.5, 35.93, 53.76, 24.9, 104.3, 11.44, 661.5, 543.0, 153.6, 184.4, 354.55, 3025.0, 335.8, 479.5, 298.9, 526.5, 689.0, -4.189, 125.8, 485.3, -2.859, 387.1, -450.4, 252.7, 220.3, -5.869, 390.9, 553.3, 187.0, 216.1, 92.99, None, 808.59, 408.3, 718.01, None, 153.72, ], #1
[-35.36, 0.0, 38.81, 74.15, 524.1, 787.6, 270.6, 526.1, 182.6, 448.8, 37.85, 333.5, 214.5, 240.9, 163.9, 61.11, 749.3, 280.5, 336.9, 318.9, -36.87, 58.55, -13.99, -109.7, 100.1, 357.5, None, 76.302, None, 262.9, None, None, 183.8, 31.14, 179.0, -52.87, -66.46, 359.3, -70.45, 449.4, 48.33, None, None, 86.46, None, 200.2, 268.1, -617.0, 62.56, None, None, 200.94, 219.9, -677.25, None, None, ], #2
[-11.12, 3.446, 0.0, 167.0, 636.1, 637.35, 903.8, 1329.0, 25.77, 347.3, 5.994, 287.1, 32.14, 161.7, 122.8, 90.49, 648.2, -4.449, 212.5, 537.4, -18.81, -144.4, -231.9, 3.0, 187.0, 168.0, 194.9, 52.07, -10.43, -64.69, 210.4, 113.3, 261.3, 154.26, 169.9, 383.9, -259.1, 389.3, 245.6, 22.67, 103.5, -432.3, 238.9, 30.04, -88.11, None, 333.3, None, -59.58, -39.16, None, 360.82, 171.49, 272.33, 22.06, 174.35, ], #3
[-69.7, -113.6, -146.8, 0.0, 803.2, 603.25, 5695.0, 884.9, -52.1, 586.6, 5688.0, 197.8, 213.1, 19.02, -49.29, 23.5, 664.2, 52.8, 6096.0, 872.3, -114.1, -111.0, -80.25, -141.3, -211.0, 3629.0, 4448.0, -9.451, 393.6, 48.49, 4975.0, 259.0, 210.0, -152.55, 4284.0, -119.2, -282.5, 101.4, 5629.0, -245.39, 69.26, 683.3, 355.5, 46.38, None, None, 421.9, None, -203.6, 184.9, None, 233.51, -184.68, 9.63, 795.38, -280.9, ], #4
[156.4, 457.0, 89.6, 25.82, 0.0, -137.1, 353.5, -259.7, 84.0, -203.6, 101.1, 267.8, 28.06, 83.02, 42.7, -323.0, -52.39, 170.0, 6.712, 199.0, 75.62, 65.28, -98.12, 143.1, 123.5, 256.5, 157.1, 488.9, 147.5, -120.5, -318.9, 313.5, 202.1, 727.8, -202.1, 74.27, 225.8, 44.78, -143.9, None, 190.3, -817.7, 202.7, -504.2, 72.96, -382.7, -248.3, None, 104.7, 57.65, None, 215.81, 6.39, None, None, 147.97, ], #5
[16.51, -12.52, -50.0, -44.5, 249.1, 0.0, -181.0, -101.7, 23.39, 306.4, -10.72, 179.7, -128.6, 359.3, -20.98, 53.9, 489.7, 580.5, 53.28, -202.0, -38.32, -102.5, -139.4, -44.76, -28.25, 75.14, 457.88, -31.09, 17.5, -61.76, -119.2, 212.1, 106.3, -119.1, -399.3, -5.224, 33.47, -48.25, -172.4, None, 165.7, None, None, None, -52.1, None, None, 37.63, -59.4, -46.01, None, 150.02, 98.2, None, None, None, ], #6
[300.0, 496.1, 362.3, 377.6, -229.1, 289.6, 0.0, 324.5, -195.4, -116.0, 72.87, 233.87, 540.5, 48.89, 168.0, 304.0, 459.0, 459.0, 112.6, -14.09, 325.4, 370.4, 353.7, 497.5, 133.9, 220.6, 399.5, 887.1, None, 188.0, 12.72, None, 777.1, None, -139.0, 160.8, None, None, 319.0, None, -197.5, -363.8, None, -452.2, None, 835.6, 139.6, None, 407.9, None, None, -255.63, -144.77, None, None, 580.28, ], #7
[275.8, 217.5, 25.34, 244.2, -451.6, -265.2, -601.8, 0.0, -356.1, -271.1, -449.4, -32.52, -162.9, -832.97, None, None, -305.5, -305.5, None, 408.9, None, 517.27, None, 1827.0, 6915.0, None, -413.48, 8484.0, None, None, -687.1, None, None, None, None, None, None, None, None, None, -494.2, None, None, -659.0, None, None, None, None, None, 1005.0, None, None, None, None, None, None, ], #8
[26.76, 42.92, 140.1, 365.8, 164.5, 108.7, 472.5, -133.1, 0.0, -37.36, -213.7, -190.4, -103.6, None, -174.2, -169.0, 6201.0, 7.341, 481.7, 669.4, -191.7, -130.3, -354.6, -39.2, -119.8, 137.5, 548.5, 216.1, -46.28, -163.7, 71.46, 53.59, 245.2, -246.6, -44.58, -63.5, -34.57, None, -61.7, None, -18.8, -588.9, None, None, None, None, 37.54, None, None, -162.6, None, None, -288.94, 91.01, None, 179.74, ], #9
[505.7, 56.3, 23.39, 106.0, 529.0, -340.2, 480.8, -155.6, 128.0, 0.0, -110.3, 766.0, 304.1, None, None, None, None, None, -106.4, 497.5, 751.9, 67.52, -483.7, None, None, None, None, None, None, None, None, 117.0, None, 2.21, None, -339.2, 172.4, None, -268.8, None, -275.5, None, None, None, None, None, None, None, None, None, None, None, 79.71, None, None, None, ], #10
[114.8, 132.1, 85.84, -170.0, 245.4, 249.63, 200.8, -36.72, 372.2, 185.1, 0.0, -241.8, -235.7, None, -73.5, -196.7, 475.5, -0.13, 494.6, 660.2, -34.74, 108.9, -209.7, 54.57, 442.4, -81.13, None, 183.0, None, 202.3, -101.7, 148.3, 18.88, 71.48, 52.08, -28.61, -275.2, None, 85.33, None, 560.2, None, None, None, None, None, 151.8, None, None, None, None, None, 36.34, 446.9, None, None, ], #11
[329.3, 110.4, 18.12, 428.0, 139.4, 227.8, 124.63, -234.25, 385.4, -236.5, 1167.0, 0.0, -234.0, None, None, None, None, -233.4, -47.25, -268.1, None, 31.0, -126.2, 179.7, 24.28, None, None, None, 103.9, None, None, None, 298.13, None, None, None, -11.4, None, 308.9, None, -70.24, None, None, None, None, None, None, None, None, None, None, None, -77.96, None, None, None, ], #12
[83.36, 26.51, 52.13, 65.69, 237.7, 238.4, -314.7, -178.5, 191.1, -7.838, 461.3, 457.3, 0.0, -78.36, 251.5, 5422.3, -46.39, 213.2, -18.51, 664.6, 301.1, 137.8, -154.3, 47.67, 134.8, 95.18, 155.11, 140.9, -8.538, 170.1, -20.11, -149.5, -202.3, -156.57, 128.8, None, 240.2, -273.9, 254.8, -172.51, 417.0, 1338.0, None, None, None, None, None, None, None, None, None, None, 567.0, 102.21, None, None, ], #13
[-30.48, 1.163, -44.85, 296.4, -242.8, -481.7, -330.48, -870.8, None, None, None, None, 222.1, 0.0, -107.2, -41.11, -200.7, None, 358.9, None, -82.92, None, None, -99.81, 30.05, None, None, None, -70.14, None, None, None, None, None, 874.19, None, None, None, -164.0, None, None, -664.4, 275.9, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #14
[65.33, -28.7, -22.31, 223.0, -150.0, -370.3, -448.2, None, 394.6, None, 136.0, None, -56.08, 127.4, 0.0, -189.2, 138.54, 431.49, 147.1, None, None, None, None, 71.23, -18.93, None, None, None, None, None, 939.07, None, None, None, None, None, None, 570.9, -255.22, None, -38.77, 448.1, -1327.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #15
[-83.98, -25.38, -223.9, 109.9, 28.6, -406.8, -598.8, None, 225.3, None, 2889.0, None, -194.1, 38.89, 865.9, 0.0, 287.43, None, 1255.1, None, -182.91, -73.85, -352.9, -262.0, -181.9, None, None, None, None, None, None, None, None, None, 243.1, None, None, -196.3, 22.05, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #16
[1139.0, 2000.0, 247.5, 762.8, -17.4, -118.1, -341.6, -253.1, -450.3, None, -294.8, None, 285.36, -15.07, 64.3, -24.46, 0.0, 89.7, -281.6, -396.0, 287.0, -111.0, None, 882.0, 617.5, None, -139.3, None, None, None, 0.1004, None, None, None, None, None, None, None, -334.4, None, -89.42, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #17
[-101.6, -47.63, 31.87, 49.8, -132.3, -378.2, -332.9, -341.6, 29.1, None, 8.87, 554.4, -156.1, None, -207.66, None, 117.4, 0.0, -169.7, -153.7, None, -351.6, -114.7, -205.3, -2.17, None, 2845.0, None, None, None, None, None, -60.78, None, None, None, 160.7, -158.8, None, None, None, None, None, None, None, None, None, None, None, -136.6, None, None, None, 98.82, None, None, ], #18
[24.82, -40.62, -22.97, -138.4, 185.4, 162.6, 242.8, None, -287.5, 224.66, -266.6, 99.37, 38.81, -157.3, -108.5, -446.86, 777.4, 134.3, 0.0, 205.27, 4.933, -152.7, -15.62, -54.86, -4.624, -0.515, None, 230.9, 0.4604, None, 177.5, None, -62.17, -203.0, None, 81.57, -55.77, None, -151.5, None, 120.3, None, None, None, None, None, 16.23, None, None, None, None, None, None, None, None, None, ], #19
[315.3, 1264.0, 62.32, 89.86, -151.0, 339.8, -66.17, -11.0, -297.8, -165.5, -256.3, 193.9, -338.5, None, None, None, 493.8, -313.5, 92.07, 0.0, 13.41, -44.7, 39.63, 183.4, -79.08, None, None, None, None, -208.9, None, 228.4, -95.0, None, -463.6, None, -11.16, None, -228.0, None, -337.0, 169.3, 127.2, None, None, -322.3, None, None, None, None, None, None, 12.55, -60.07, 88.09, None, ], #20
[91.46, 40.25, 4.68, 122.9, 562.2, 529.0, 698.2, None, 286.3, -47.51, 35.38, None, 225.4, 131.2, None, 151.38, 429.7, None, 54.32, 519.1, 0.0, 108.3, 249.2, 62.42, 153.0, 32.73, 86.2, 450.1, 59.02, 65.56, None, 2.22, 344.4, None, None, None, -168.2, None, 6.57, None, 63.67, None, None, None, None, None, None, None, None, None, None, None, -127.9, None, None, None, ], #21
[34.01, -23.5, 121.3, 140.8, 527.6, 669.9, 708.7, 1633.5, 82.86, 190.6, -132.9, 80.99, -197.7, None, None, -141.4, 140.8, 587.3, 258.6, 543.3, -84.53, 0.0, 0.0, 56.33, 223.1, 108.9, None, None, None, 149.56, None, 177.6, 315.9, None, 215.0, None, -91.8, None, -160.28, None, -96.87, None, None, None, None, None, 361.1, None, None, None, None, None, None, None, None, None, ], #22
[36.7, 51.06, 288.5, 69.9, 742.1, 649.1, 826.76, None, 552.1, 242.8, 176.5, 235.6, -20.93, None, None, -293.7, None, 18.98, 74.04, 504.2, -157.1, 0.0, 0.0, -30.1, 192.1, None, None, 116.6, None, -64.38, None, 86.4, 168.8, None, 363.7, None, 111.2, None, None, None, 255.8, None, None, -35.68, None, None, None, 565.9, None, None, None, None, 165.67, None, None, None, ], #23
[-78.45, 160.9, -4.7, 134.7, 856.3, 709.6, 1201.0, 10000.0, 372.0, None, 129.5, 351.9, 113.9, 261.1, 91.13, 316.9, 898.2, 368.5, 492.0, 631.0, 11.8, 17.97, 51.9, 0.0, -75.97, 490.9, 534.7, 132.2, None, 546.7, None, 247.8, 146.6, None, 337.7, 369.5, 187.1, 215.2, 498.6, None, 256.5, None, 233.1, None, None, None, 423.1, 63.95, None, 108.5, None, 585.19, 291.87, 532.73, None, 127.16, ], #24
[106.8, 70.32, -97.27, 402.5, 325.7, 612.8, -274.5, 622.3, 518.4, None, -171.1, 383.3, -25.15, 108.5, 102.2, 2951.0, 334.9, 20.18, 363.5, 993.4, -129.7, -8.309, -0.2266, -248.4, 0.0, 132.7, 2213.0, None, None, None, None, None, 593.4, None, 1337.37, None, None, None, 5143.14, 309.58, -145.1, None, None, -209.7, None, None, 434.1, None, None, None, None, None, None, None, None, 8.48, ], #25
[-32.69, -1.996, 10.38, -97.05, 261.6, 252.6, 417.9, None, -142.6, None, 129.3, None, -94.49, None, None, None, None, None, 0.2827, None, 113.0, -9.639, None, -34.68, 132.9, 0.0, 533.2, 320.2, None, None, 139.8, 304.3, 10.17, -27.7, None, None, 10.76, None, -223.1, None, 248.4, None, None, None, -218.9, None, None, None, None, -4.565, None, None, None, None, None, None, ], #26
[5541.0, None, 1824.0, -127.8, 561.6, 511.29, 360.7, 815.12, -101.5, None, None, None, 220.66, None, None, None, 134.9, 2475.0, None, None, 1971.0, None, None, 514.6, -123.1, -85.12, 0.0, None, None, None, None, 2990.0, -124.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 1742.53, ], #27
[-52.65, 16.62, 21.5, 40.68, 609.8, 914.2, 1081.0, 1421.0, 303.7, None, 243.8, None, 112.4, None, None, None, None, None, 335.7, None, -73.09, None, -26.06, -60.71, None, 277.8, None, 0.0, None, None, None, 292.7, None, None, None, None, -47.37, None, None, None, 469.8, None, None, None, None, None, None, None, None, None, None, None, None, 684.78, None, None, ], #28
[-7.481, None, 28.41, 19.56, 461.6, 448.6, None, None, 160.6, None, None, 201.5, 63.71, 106.7, None, None, None, None, 161.0, None, -27.94, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, 31.66, None, None, None, 78.92, None, None, None, None, 1004.0, None, None, None, -18.27, None, None, None, None, None, None, None, None, ], #29
[-25.31, 82.64, 157.3, 128.8, 521.6, 287.0, 23.48, None, 317.5, None, -146.3, None, -87.31, None, None, None, None, None, None, 570.6, -39.46, -116.21, 48.48, -133.16, None, None, None, None, None, 0.0, None, None, None, None, None, None, 262.9, None, None, None, 43.37, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #30
[140.0, None, 221.4, 150.6, 267.6, 240.8, -137.4, 838.4, 135.4, None, 152.0, None, 9.207, None, -213.74, None, 192.3, None, 169.6, None, None, None, None, None, None, 481.3, None, None, None, None, 0.0, None, None, None, -417.2, None, None, None, 302.2, None, 347.8, None, None, -262.0, None, None, -353.5, None, None, None, None, None, None, None, None, None, ], #31
[128.0, None, 58.68, 26.41, 501.3, 431.3, None, None, 138.0, 245.9, 21.92, None, 476.6, None, None, None, None, None, None, 616.6, 179.25, -40.82, 21.76, 48.49, None, 64.28, 2448.0, -27.45, None, None, None, 0.0, 6.37, None, None, None, None, None, None, None, 68.55, None, None, None, None, None, None, None, None, None, None, None, None, 190.81, None, None, ], #32
[-31.52, 174.6, -154.2, 1112.0, 524.9, 494.7, 79.18, None, -142.6, None, 24.37, -92.26, 736.4, None, None, None, None, -42.71, 136.9, 5256.0, -262.3, -174.5, -46.8, 77.55, -185.3, 125.3, 4288.0, None, None, None, None, 37.1, 0.0, None, 32.9, None, -48.33, None, 336.25, None, -195.1, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #33
[-72.88, 41.38, -101.12, 614.52, 68.95, 967.71, None, None, 443.6, -55.87, -111.45, None, 173.77, None, None, None, None, None, 329.1, None, None, None, None, None, None, 174.4, None, None, None, None, None, None, None, 0.0, None, None, 2073.0, None, -119.8, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #34
[50.49, 64.07, -2.504, -143.2, -25.87, 695.0, -240.0, None, 110.4, None, 41.57, None, -93.51, -366.51, None, -257.2, None, None, None, -180.2, None, -215.0, -343.6, -58.43, -334.12, None, None, None, 85.7, None, 535.8, None, -111.2, None, 0.0, None, None, None, -97.71, None, 153.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #35
[-165.9, 573.0, -123.6, 397.4, 389.3, 218.8, 386.6, None, 114.55, 354.0, 175.5, None, None, None, None, None, None, None, -42.31, None, None, None, None, -85.15, None, None, None, None, None, None, None, None, None, None, None, 0.0, -208.8, None, -8.804, None, 423.4, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #36
[47.41, 124.2, 395.8, 419.1, 738.9, 528.0, None, None, -40.9, 183.8, 611.3, 134.5, -217.9, None, None, None, None, 281.6, 335.2, 898.2, 383.2, 301.9, -149.8, -134.2, None, 379.4, None, 167.9, None, 82.64, None, None, 322.42, 631.5, None, 837.2, 0.0, None, 255.0, None, 730.8, None, None, None, None, None, None, 2429.0, None, None, None, None, -127.06, None, None, None, ], #37
[-5.132, -131.7, -237.2, -157.3, 649.7, 645.9, None, None, None, None, None, None, 167.1, None, -198.8, 116.5, None, 159.8, None, None, None, None, None, -124.6, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, -110.65, -117.2, None, None, None, 26.35, None, None, None, None, None, None, None, None, None, None, None, 117.59, ], #38
[-31.95, 249.0, -133.9, -240.2, 64.16, 172.2, -287.1, None, 97.04, 13.89, -82.12, -116.7, -158.2, 49.7, 10.03, -185.2, 343.7, None, 150.6, -97.77, -55.21, 397.24, None, -186.7, -374.16, 223.6, None, None, -71.0, None, -191.7, None, -176.26, 6.699, 136.6, 5.15, -137.7, 50.06, 0.0, -5.579, 72.31, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 39.84, ], #39
[147.3, 62.4, 140.6, 839.83, None, None, None, None, None, None, None, None, 278.15, None, None, None, None, None, None, None, None, None, None, None, 33.95, None, None, None, None, None, None, None, None, None, None, None, None, 185.6, 55.8, 0.0, None, None, None, None, 111.8, None, None, None, None, None, None, None, None, None, None, None, ], #40
[529.0, 1397.0, 317.6, 615.8, 88.63, 171.0, 284.4, -167.3, 123.4, 577.5, -234.9, 65.37, -247.8, None, 284.5, None, -22.1, None, -61.6, 1179.0, 182.2, 305.4, -193.0, 335.7, 1107.0, -124.7, None, 885.5, None, -64.28, -264.3, 288.1, 627.7, None, -29.34, -53.91, -198.0, None, -28.65, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, -100.53, None, None, ], #41
[-34.36, None, 787.9, 191.6, 1913.0, None, 180.2, None, 992.4, None, None, None, 448.5, 961.8, 1464.0, None, None, None, None, 2450.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, -2166.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #42
[110.2, None, 234.4, 221.8, 84.85, None, None, None, None, None, None, None, None, -125.2, 1604.0, None, None, None, None, 2496.0, None, None, None, 70.81, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 745.3, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #43
[13.89, -16.11, -23.88, 6.214, 796.9, None, 832.2, -234.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -196.2, None, 161.5, None, None, None, -274.1, None, 262.0, None, None, None, None, None, -66.31, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, None, ], #44
[30.74, None, 167.9, None, 794.4, 762.7, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 844.0, None, None, None, None, None, None, None, None, None, None, None, None, None, -32.17, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, None, ], #45
[27.97, 9.755, None, None, 394.8, None, -509.3, None, None, None, None, None, None, None, None, None, None, None, None, -70.25, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, None, ], #46
[-11.92, 132.4, -86.88, -19.45, 517.5, None, -205.7, None, 156.4, None, -3.444, None, None, None, None, None, None, None, 119.2, None, None, -194.7, None, 3.163, 7.082, None, None, None, None, None, 515.8, None, None, None, None, None, None, None, None, None, 101.2, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, None, ], #47
[39.93, 543.6, None, None, None, 420.0, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -363.1, -11.3, None, None, None, None, 6.971, None, None, None, None, None, None, None, 148.9, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, None, ], #48
[-23.61, 161.1, 142.9, 274.1, -61.2, -89.24, -384.3, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 0.0, None, None, None, None, None, None, None, ], #49
[-8.479, None, 23.93, 2.845, 682.5, 597.8, None, 810.5, 278.8, None, None, None, None, None, None, None, None, 221.4, None, None, None, None, None, -79.34, None, 176.3, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #50
[None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #51
[245.21, 384.45, 47.05, 347.13, 72.19, 265.75, 627.39, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 75.04, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #52
[21.49, -2.8, 344.42, 510.32, 244.67, 163.76, 833.21, None, 569.18, -1.25, -38.4, 69.7, -375.6, None, None, None, None, None, None, 600.78, 291.1, None, -286.26, -52.93, None, None, None, None, None, None, None, None, None, None, None, None, 177.12, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #53
[272.82, 569.71, 165.18, 369.89, None, None, None, None, -62.02, None, -229.01, None, -196.59, None, None, None, None, 100.25, None, 472.04, None, None, None, 196.73, None, None, None, 434.32, None, None, None, 313.14, None, None, None, None, None, None, None, None, -244.59, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #54
[None, None, 920.49, 305.77, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 171.94, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], #55
[-20.31, None, -106.7, 568.47, 284.28, None, 401.2, None, 106.21, None, None, None, None, None, None, None, None, None, None, None, None, None, None, -108.37, 5.76, None, -272.01, None, None, None, None, None, None, None, None, None, None, 107.84, -33.93, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ]] #56
# {symbol : (group_unique_id, main_group_id), ...}
# main_group_id stands for ids as listed at:
# http://www.aim.env.uea.ac.uk/aim/info/UNIFACgroups.html
# in "Group" column
str_to_id = { 'AC': (11, 3),
'ACBr': (119, 56),
'ACC#N': (118, 55),
'ACCH': (14, 4),
'ACCH2': (13, 4),
'ACCH3': (12, 4),
'ACCl': (54, 25),
'ACF': (71, 38),
'ACH': (10, 3),
'ACN(=O)=O': (58, 27),
'ACNH2': (37, 17),
'ACOH': (18, 8),
'Br': (65, 33),
'C#C': (67, 34),
'C(=O)N(CH2)CH2': (99, 46),
'C(=O)N(CH3)CH2': (98, 46),
'C(=O)N(CH3)CH3': (97, 46),
'C(=O)NH2': (94, 46),
'C(=O)NHCH2': (96, 46),
'C(=O)NHCH3': (95, 46),
'C(=O)OH': (43, 20),
# 'C2H4O2': (101, 47),
# 'C2H5O2': (100, 47),
# 'C4H2S': (108, 50),
# 'C4H3S': (107, 50),
# 'C4H4S': (106, 50),
# 'C5H3N': (40, 18),
# 'C5H4N': (39, 18),
# 'C5H5N': (38, 18),
'C=CCl': (70, 37),
'CCl': (47, 21),
'CCl2': (50, 22),
'CCl2F': (87, 45),
'CCl2F2': (93, 45),
'CCl3': (52, 23),
'CCl3F': (86, 45),
'CCl4': (53, 24),
'CClF2': (90, 45),
'CClF3': (92, 45),
'CF': (76, 40),
'CF2': (75, 40),
'CF3': (74, 40),
'CH': (3, 1),
'CH#C': (66, 34),
'CH(=O)O': (24, 12),
'CH(=O)OH': (44, 20),
'CH0': (4, 1),
'CH0=CH0': (9, 2),
'CH0OCH0': (116, 53),
'CH2': (2, 1),
'CH2=CH': (5, 2),
'CH2=CH0': (7, 2),
'CH2=CHC#N': (69, 36),
'CH2OH0': (26, 13),
'CH2C#N': (42, 19),
'CH2C(=O)O': (23, 11),
'CH2C=O': (20, 9),
'CH2Cl': (45, 21),
'CH2Cl2': (48, 22),
'CH2N(=O)=O': (56, 26),
'CH2NH': (33, 15),
'CH2NH0': (36, 16),
'CH2NH2': (30, 14),
# 'CH2OCH': (112, 53), # these are oxides, not ethers
# 'CH2OCH0': (113, 53),
# 'CH2OCH2': (111, 53),
'CH2S': (103, 48),
'CH2SH': (61, 29),
# 'CH2SuCH': (110, 52),
# 'CH2SuCH2': (109, 52),
'CH3': (1, 1),
'CH3OH0': (25, 13),
'CH3C#N': (41, 19),
'CH3C(=O)O': (22, 11),
'CH3C=O': (19, 9),
'CH3N(=O)=O': (55, 26),
'CH3NH': (32, 15),
'CH3NH0': (35, 16),
'CH3NH2': (29, 14),
'CH3OH': (16, 6),
'CH3S': (102, 48),
'CH3SH': (60, 29),
'CH=CH': (6, 2),
'CH=CH0': (8, 2),
'CHOH0': (27, 13),
'CHCl': (46, 21),
'CHCl2': (49, 22),
'CHCl2F': (88, 45),
'CHCl3': (51, 23),
'CHClF': (89, 45),
'CHClF2': (91, 45),
'CHN(=O)=O': (57, 26),
'CHNH': (34, 15),
'CHNH2': (31, 14),
# 'CHOCH': (114, 53), #these are oxides, not ethers
# 'CHOCH0': (115, 53),
'CHS': (104, 48),
# 'COO': (77, 41),
# 'DMF': (72, 39),
# 'DMSO': (68, 35),
# 'DOH': (63, 31),
# 'HCON(CH2)2': (73, 39),
'I': (64, 32),
# 'MORPH': (105, 49),
# 'NMP': (85, 44),
'O=COC=O': (117, 54),
'OH': (15, 5),
'OH2': (17, 7),
'SCS': (59, 28),
'Si': (81, 42),
'SiH': (80, 42),
'SiH2': (79, 42),
'SiH2O': (82, 43),
'SiH3': (78, 42),
'SiHO': (83, 43),
'SiO': (84, 43),
# 'THF': (28, 13),
# 'furfural': (62, 30)
}
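# {group_unique_id : (Q, R), ...}
# Q and R are the UNIFAC group surface-area and volume parameters for each
# unique group id listed in str_to_id above.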
q_r_data = {
1: (0.848, 0.9011),
2: (0.54, 0.6744),
3: (0.228, 0.4469),
4: (0.0, 0.2195),
5: (1.176, 1.3454),
6: (0.867, 1.1167),
7: (0.988, 1.1173),
8: (0.676, 0.8886),
9: (0.485, 0.6605),
10: (0.4, 0.5313),
11: (0.12, 0.3652),
12: (0.968, 1.2663),
13: (0.66, 1.0396),
14: (0.348, 0.8121),
15: (1.2, 1.0),
16: (1.432, 1.4311),
17: (1.4, 0.92),
18: (0.68, 0.8952),
19: (1.448, 1.6724),
20: (1.18, 1.4457),
21: (0.948, 0.998),
22: (1.728, 1.9031),
23: (1.42, 1.6764),
24: (1.188, 1.242),
25: (1.088, 1.145),
26: (0.78, 0.9183),
27: (0.468, 0.6908),
28: (1.1, 0.9183),
29: (1.544, 1.5959),
30: (1.236, 1.3692),
31: (0.924, 1.1417),
32: (1.244, 1.4337),
33: (0.936, 1.207),
34: (0.624, 0.9795),
35: (0.94, 1.1865),
36: (0.632, 0.9597),
37: (0.816, 1.06),
38: (2.113, 2.9993),
39: (1.833, 2.8332),
40: (1.553, 2.667),
41: (1.724, 1.8701),
42: (1.416, 1.6434),
43: (1.224, 1.3013),
44: (1.532, 1.528),
45: (1.264, 1.4654),
46: (0.952, 1.238),
47: (0.724, 1.0106),
48: (1.998, 2.2564),
49: (1.684, 2.0606),
50: (1.448, 1.8016),
51: (2.41, 2.87),
52: (2.184, 2.6401),
53: (2.91, 3.39),
54: (0.844, 1.1562),
55: (1.868, 2.0086),
56: (1.56, 1.7818),
57: (1.248, 1.5544),
58: (1.104, 1.4199),
59: (1.65, 2.057),
60: (1.676, 1.877),
61: (1.368, 1.651),
62: (2.484, 3.168),
63: (2.248, 2.4088),
64: (0.992, 1.264),
65: (0.832, 0.9492),
66: (1.088, 1.292),
67: (0.784, 1.0613),
68: (2.472, 2.8266),
69: (2.052, 2.3144),
70: (0.724, 0.791),
71: (0.524, 0.6948),
72: (2.736, 3.0856),
73: (2.12, 2.6322),
74: (1.38, 1.406),
75: (0.92, 1.0105),
76: (0.46, 0.615),
77: (1.2, 1.38),
78: (1.263, 1.6035),
79: (1.006, 1.4443),
80: (0.749, 1.2853),
81: (0.41, 1.047),
82: (1.062, 1.4838),
83: (0.764, 1.303),
84: (0.466, 1.1044),
85: (3.2, 3.981),
86: (2.644, 3.0356),
87: (1.916, 2.2287),
88: (2.116, 2.406),
89: (1.416, 1.6493),
90: (1.648, 1.8174),
91: (1.828, 1.967),
92: (2.1, 2.1721),
93: (2.376, 2.6243),
94: (1.248, 1.4515),
95: (1.796, 2.1905),
96: (1.488, 1.9637),
97: (2.428, 2.8589),
98: (2.12, 2.6322),
99: (1.812, 2.4054),
100: (1.904, 2.1226),
101: (1.592, 1.8952),
102: (1.368, 1.613),
103: (1.06, 1.3863),
104: (0.748, 1.1589),
105: (2.796, 3.474),
106: (2.14, 2.8569),
107: (1.86, 2.6908),
108: (1.58, 2.5247),
109: (2.12, 2.6869),
110: (1.808, 2.4595),
111: (1.32, 1.5926),
112: (1.008, 1.3652),
113: (0.78, 1.1378),
114: (0.696, 1.1378),
115: (0.468, 0.9103),
116: (0.24, 0.6829),
117: (1.52, 1.7732),
118: (0.996, 1.3342),
119: (0.972, 1.3629)}
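# Minimal usage sketch (assumes the tables above are fully loaded):
#   db = Database()
#   db.get_atom_valency("C")        # -> 4
#   db.get_q_r("CH3")               # -> (0.848, 0.9011), i.e. (Q, R)
#   db.get_parameter("CH3", "OH")   # -> 986.5, a_ij between main groups 1 and 5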
|
from symbol.builder import FasterRcnn as Detector
from symbol.builder import add_anchor_to_arg
from models.efficientnet.builder import EfficientNetB5FPN as Backbone
from models.FPN.builder import FPNNeck as Neck
from models.FPN.builder import FPNRpnHead as RpnHead
from models.FPN.builder import FPNRoiAlign as RoiExtractor
from models.FPN.builder import FPNBbox2fcHead as BboxHead
from mxnext.complicate import normalizer_factory
def get_config(is_train):
class General:
log_frequency = 10
name = __name__.rsplit("/")[-1].rsplit(".")[-1]
batch_image = 8 if is_train else 1
fp16 = True
loader_worker = 8
class KvstoreParam:
kvstore = "nccl"
batch_image = General.batch_image
gpus = [0, 1, 2, 3, 4, 5, 6, 7]
fp16 = General.fp16
class NormalizeParam:
normalizer = normalizer_factory(type="localbn", ndev=len(KvstoreParam.gpus))
# normalizer = normalizer_factory(type="gn")
class BackboneParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
class NeckParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
class RpnParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
batch_image = General.batch_image
nnvm_proposal = True
nnvm_rpn_target = False
class anchor_generate:
scale = (4,)
ratio = (0.5, 1.0, 2.0)
stride = (4, 8, 16, 32, 64)
image_anchor = 256
max_side = 700
class anchor_assign:
allowed_border = 0
pos_thr = 0.7
neg_thr = 0.3
min_pos_thr = 0.0
image_anchor = 256
pos_fraction = 0.5
class head:
conv_channel = 256
mean = (0, 0, 0, 0)
std = (1, 1, 1, 1)
class proposal:
pre_nms_top_n = 2000 if is_train else 1000
post_nms_top_n = 2000 if is_train else 1000
nms_thr = 0.7
min_bbox_side = 0
class subsample_proposal:
proposal_wo_gt = False
image_roi = 512
fg_fraction = 0.25
fg_thr = 0.5
bg_thr_hi = 0.5
bg_thr_lo = 0.0
class bbox_target:
num_reg_class = 81
class_agnostic = False
weight = (1.0, 1.0, 1.0, 1.0)
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class BboxParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
num_class = 1 + 80
image_roi = 512
batch_image = General.batch_image
class regress_target:
class_agnostic = False
mean = (0.0, 0.0, 0.0, 0.0)
std = (0.1, 0.1, 0.2, 0.2)
class RoiParam:
fp16 = General.fp16
normalizer = NormalizeParam.normalizer
out_size = 7
stride = (4, 8, 16, 32)
roi_canonical_scale = 224
roi_canonical_level = 4
class DatasetParam:
if is_train:
image_set = ("coco_train2014", "coco_valminusminival2014")
total_image = 82783 + 35504
else:
image_set = ("coco_minival2014", )
total_image = 5000
backbone = Backbone(BackboneParam)
neck = Neck(NeckParam)
rpn_head = RpnHead(RpnParam)
roi_extractor = RoiExtractor(RoiParam)
bbox_head = BboxHead(BboxParam)
detector = Detector()
if is_train:
train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
rpn_test_sym = None
test_sym = None
else:
train_sym = None
rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head)
test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, bbox_head)
class ModelParam:
train_symbol = train_sym
test_symbol = test_sym
rpn_test_symbol = rpn_test_sym
from_scratch = True
random = True
memonger = False
memonger_until = "stage3_unit21_plus"
class pretrain:
prefix = None
epoch = 0
fixed_param = []
def process_weight(sym, arg, aux):
for stride in RpnParam.anchor_generate.stride:
add_anchor_to_arg(
sym, arg, aux, RpnParam.anchor_generate.max_side,
stride, RpnParam.anchor_generate.scale,
RpnParam.anchor_generate.ratio)
class OptimizeParam:
class optimizer:
type = "sgd"
lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
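            # linear scaling rule: base lr 0.01 per 8 images;
            # 8 GPUs x 8 images per GPU here gives lr = 0.08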
momentum = 0.9
wd = 1e-4
clip_gradient = None
class schedule:
mult = 6
begin_epoch = 0
end_epoch = 6 * mult
if mult <= 2:
lr_iter = [60000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
80000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
else:
# follow the setting in Rethinking ImageNet Pre-training
# reduce the lr in the last 60k and 20k iterations
lr_iter = [(DatasetParam.total_image * 2 // 16 * end_epoch - 60000) * 16 //
(len(KvstoreParam.gpus) * KvstoreParam.batch_image),
(DatasetParam.total_image * 2 // 16 * end_epoch - 20000) * 16 //
(len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
class warmup:
type = "gradual"
lr = 0
iter = 500
class TestParam:
min_det_score = 0.05
max_det_per_image = 100
process_roidb = lambda x: x
process_output = lambda x, y: x
class model:
prefix = "experiments/{}/checkpoint".format(General.name)
epoch = OptimizeParam.schedule.end_epoch
class nms:
type = "nms"
thr = 0.5
class coco:
annotation = "data/coco/annotations/instances_minival2014.json"
# data processing
class NormParam:
mean = tuple(i * 255 for i in (0.485, 0.456, 0.406)) # RGB order
std = tuple(i * 255 for i in (0.229, 0.224, 0.225))
# data processing
class ResizeParam:
short = 400
long = 600
class PadParam:
short = 400
long = 600
max_num_gt = 100
class AnchorTarget2DParam:
def __init__(self):
self.generate = self._generate()
class _generate:
def __init__(self):
self.stride = (4, 8, 16, 32, 64)
self.short = (100, 50, 25, 13, 7)
self.long = (150, 75, 38, 19, 10)
        scales = (4,)  # trailing comma keeps this a tuple; (4) is a plain int
aspects = (0.5, 1.0, 2.0)
class assign:
allowed_border = 0
pos_thr = 0.7
neg_thr = 0.3
min_pos_thr = 0.0
class sample:
image_anchor = 256
pos_fraction = 0.5
class RenameParam:
mapping = dict(image="data")
from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
RenameRecord, Norm2DImage
from models.FPN.input import PyramidAnchorTarget2D
if is_train:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
Flip2DImageBbox(),
Pad2DImageBbox(PadParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data"]
label_name = ["gt_bbox", "im_info"]
if not RpnParam.nnvm_rpn_target:
transform.append(PyramidAnchorTarget2D(AnchorTarget2DParam()))
label_name += ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
else:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
Pad2DImageBbox(PadParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "im_id", "rec_id"]
label_name = []
import core.detection_metric as metric
rpn_acc_metric = metric.AccWithIgnore(
"RpnAcc",
["rpn_cls_loss_output", "rpn_cls_label_blockgrad_output"],
[]
)
rpn_l1_metric = metric.L1(
"RpnL1",
["rpn_reg_loss_output", "rpn_cls_label_blockgrad_output"],
[]
)
# for bbox, the label is generated in network so it is an output
box_acc_metric = metric.AccWithIgnore(
"RcnnAcc",
["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
[]
)
box_l1_metric = metric.L1(
"RcnnL1",
["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
[]
)
metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]
return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
ModelParam, OptimizeParam, TestParam, \
transform, data_name, label_name, metric_list
|
#!/usr/bin/env python3
from flask import Flask, render_template, request
from werkzeug.utils import secure_filename
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/upload', methods=['POST'])
def upload():
    if request.method == 'POST':
        f = request.files.get('file')
        fname = secure_filename(f.filename)
        f.save(fname)
        # A Flask view must return a response; returning None raises an error.
        return 'file uploaded'
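# Example request (assumes the server runs on the default port 5000):
#   curl -F "file=@example.txt" http://localhost:5000/upload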
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=False)
|
import copy
import json
from django_admin_json_editor import JSONEditorWidget
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
class JSONEditorWidget(JSONEditorWidget):
template_name = 'lanthanum/_json_editor_widget.html'
def render(self, name, value, attrs=None, renderer=None):
"""
Fix the JSON Editor widget by doing a standard json dump for dict data
This will not convert booleans to ints like the standard JSON Editor.
"""
if callable(self._schema):
schema = self._schema(self)
else:
schema = copy.copy(self._schema)
schema['title'] = ' '
schema['options'] = {'collapsed': int(self._collapsed)}
context = {
'name': name,
'schema': json.dumps(schema),
            # Dump dict data with the standard json module so booleans stay
            # booleans instead of being coerced to ints.
            'data': json.dumps(value) if isinstance(value, dict) else value,
'sceditor': int(self._sceditor),
}
return mark_safe(render_to_string(self.template_name, context))
|
# -*- coding: utf-8 -*-
# Copyright 2014 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
from lettuce import step, world
from lettuce_tools.dataset_utils.dataset_utils import DatasetUtils
from tools import http
from tools import environment_request
from tools.tier import Tier
from tools.constants import NAME, DESCRIPTION, PRODUCTS, NETWORKS, PAAS,\
TIER_IMAGE
dataset_utils = DatasetUtils()
@step(u'the paas manager is up and properly configured')
def the_paas_manager_is_up_and_properly_configured(step):
pass # Nothing to do here, the set up should be done by external means
@step(u'a list of tiers has been defined with data:')
def a_list_of_tiers_has_been_defined_with_data(step):
world.tiers = []
for row in step.hashes:
data = dataset_utils.prepare_data(row)
tier = Tier(data.get(NAME), world.config[PAAS][TIER_IMAGE])
tier.parse_and_add_products(data.get(PRODUCTS))
tier.parse_and_add_networks(data.get(NETWORKS))
world.tiers.append(tier)
@step(u'an environment has already been created with data:')
def an_environment_has_already_been_created_with_data(step):
data = dataset_utils.prepare_data(step.hashes[0])
world.env_requests.add_environment(data.get(NAME), data.get(DESCRIPTION))
@step(u'an environment has already been created with the previous tiers and data:')
def an_environment_has_already_been_created_with_the_previous_tiers_and_data(step):
data = dataset_utils.prepare_data(step.hashes[0])
world.env_requests.add_environment(data.get(NAME), data.get(DESCRIPTION), world.tiers)
@step(u'there is no environment with name "([^"]*)" already created')
def there_is_no_environment_with_name_already_created(step, name):
world.env_requests.delete_environment(name) # Just in case it exists
@step(u'I request the details of the environment with name "([^"]*)"')
def i_request_the_details_of_the_environment_with_name(step, name):
name = dataset_utils.generate_fixed_length_param(name)
world.env_requests.get_environment(name)
@step(u'I receive an? "([^"]*)" response with data:')
def i_receive_a_response_of_type_with_data(step, response_type):
status_code = http.status_codes[response_type]
data = dataset_utils.prepare_data(step.hashes[0])
environment_request.check_get_environment_response(world.response, status_code,
data.get(NAME), data.get(DESCRIPTION))
@step(u'I receive an? "([^"]*)" response with the previous tiers and data:')
def i_receive_a_response_of_type_with_the_previous_tiers_and_data(step, response_type):
status_code = http.status_codes[response_type]
data = dataset_utils.prepare_data(step.hashes[0])
environment_request.check_get_environment_response(world.response, status_code,
data.get(NAME), data.get(DESCRIPTION),
world.tiers)
@step(u'I receive an? "([^"]*)" response$')
def i_receive_a_response_of_type(step, response_type):
status_code = http.status_codes[response_type]
environment_request.check_get_environment_response(world.response, status_code)
|
from setuptools import setup, find_packages
setup(
name="home-assistant-frontend",
version="20211215.0",
description="The Home Assistant frontend",
url="https://github.com/home-assistant/frontend",
author="The Home Assistant Authors",
author_email="hello@home-assistant.io",
license="Apache-2.0",
packages=find_packages(include=["hass_frontend", "hass_frontend.*"]),
include_package_data=True,
zip_safe=False,
)
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
from typing import Optional, Union
import os
import shutil
import tempfile
from synthtool import _tracked_paths, metadata, shell
from synthtool.log import logger
from synthtool.sources import git
GOOGLEAPIS_URL: str = git.make_repo_clone_url("googleapis/googleapis")
GOOGLEAPIS_PRIVATE_URL: str = git.make_repo_clone_url("googleapis/googleapis-private")
DISCOVERY_ARTIFACT_MANAGER_URL: str = git.make_repo_clone_url(
"googleapis/discovery-artifact-manager"
)
LOCAL_GOOGLEAPIS: Optional[str] = os.environ.get("SYNTHTOOL_GOOGLEAPIS")
LOCAL_DISCOVERY_ARTIFACT_MANAGER: Optional[str] = os.environ.get(
"SYNTHTOOL_DISCOVERY_ARTIFACT_MANAGER"
)
class GAPICBazel:
"""A synthtool component that can produce libraries using bazel build.
"""
def __init__(self):
self._ensure_dependencies_installed()
self._googleapis = None
self._googleapis_private = None
self._discovery_artifact_manager = None
def py_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "python", **kwargs)
def go_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "go", **kwargs)
def node_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "nodejs", **kwargs)
def csharp_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "csharp", **kwargs)
def php_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "php", **kwargs)
def java_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "java", **kwargs)
def ruby_library(self, service: str, version: str, **kwargs) -> Path:
return self._generate_code(service, version, "ruby", **kwargs)
def _generate_code(
self,
service: str,
version: str,
language: str,
*,
private: bool = False,
discogapic: bool = False,
proto_path: Union[str, Path] = None,
output_dir: Union[str, Path] = None,
bazel_target: str = None,
include_protos: bool = False,
proto_output_path: Union[str, Path] = None,
):
# Determine which googleapis repo to use
if discogapic:
api_definitions_repo = self._clone_discovery_artifact_manager()
api_definitions_repo_name = "discovery-artifact-manager"
elif private:
api_definitions_repo = self._clone_googleapis_private()
api_definitions_repo_name = "googleapis_private"
else:
api_definitions_repo = self._clone_googleapis()
api_definitions_repo_name = "googleapis"
# Sanity check: We should have a googleapis repo; if we do not,
# something went wrong, and we should abort.
if not api_definitions_repo:
raise RuntimeError(
f"Unable to generate {service}, the sources repository repository"
"is unavailable."
)
# Calculate proto_path if necessary.
if not bazel_target or include_protos:
# If bazel_target is not specified explicitly, we will need
# proto_path to calculate it. If include_protos is True,
# we will need the proto_path to copy the protos.
if not proto_path:
if bazel_target:
# Calculate proto_path from the full bazel target, which is
                    # in the format "//proto_path:target_name".
proto_path = bazel_target.split(":")[0][2:]
else:
# If bazel_target is not specified, assume the protos are
# simply under google/cloud, where the most of the protos
# usually are.
proto_path = f"google/cloud/{service}/{version}"
protos = Path(proto_path)
if protos.is_absolute():
protos = protos.relative_to("/")
# Determine bazel target based on per-language patterns
# Java: google-cloud-{{assembly_name}}-{{version}}-java
# Go: gapi-cloud-{{assembly_name}}-{{version}}-go
# Python: {{assembly_name}}-{{version}}-py
# PHP: google-cloud-{{assembly_name}}-{{version}}-php
# Node.js: {{assembly_name}}-{{version}}-nodejs
# Ruby: google-cloud-{{assembly_name}}-{{version}}-ruby
# C#: google-cloud-{{assembly_name}}-{{version}}-csharp
if not bazel_target:
# Determine where the protos we are generating actually live.
# We can sometimes (but not always) determine this from the service
# and version; in other cases, the user must provide it outright.
parts = list(protos.parts)
while len(parts) > 0 and parts[0] != "google":
parts.pop(0)
if len(parts) == 0:
raise RuntimeError(
f"Cannot determine bazel_target from proto_path {protos}."
"Please set bazel_target explicitly."
)
if language == "python":
suffix = f"{service}-{version}-py"
elif language == "nodejs":
suffix = f"{service}-{version}-nodejs"
elif language == "go":
suffix = f"gapi-{'-'.join(parts[1:])}-go"
else:
suffix = f"{'-'.join(parts)}-{language}"
bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"
# Sanity check: Do we have protos where we think we should?
if not (api_definitions_repo / protos).exists():
raise FileNotFoundError(
f"Unable to find directory for protos: {(api_definitions_repo / protos)}."
)
if not tuple((api_definitions_repo / protos).glob("*.proto")):
raise FileNotFoundError(
f"Directory {(api_definitions_repo / protos)} exists, but no protos found."
)
if not (api_definitions_repo / protos / "BUILD.bazel"):
raise FileNotFoundError(
f"File {(api_definitions_repo / protos / 'BUILD.bazel')} does not exist."
)
# Ensure the desired output directory exists.
# If none was provided, create a temporary directory.
if not output_dir:
output_dir = tempfile.mkdtemp()
output_dir = Path(output_dir).resolve()
# Let's build some stuff now.
cwd = os.getcwd()
os.chdir(str(api_definitions_repo))
bazel_run_args = [
"bazel",
"--max_idle_secs=240",
"build",
bazel_target,
]
logger.debug(f"Generating code for: {bazel_target}.")
shell.run(bazel_run_args)
        # We've got a tar file!
# its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
# bazel_target: //google/cloud/language/v1:language-v1-nodejs
tar_file = (
f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
)
tar_run_args = [
"tar",
"-C",
str(output_dir),
"--strip-components=1",
"-xzf",
tar_file,
]
shell.run(tar_run_args)
# Get the *.protos files and put them in a protos dir in the output
if include_protos:
proto_files = protos.glob("**/*.proto")
# By default, put the protos at the root in a folder named 'protos'.
# Specific languages can be cased here to put them in a more language
# appropriate place.
if not proto_output_path:
proto_output_path = output_dir / "protos"
if language == "python":
                # place protos alongside the *_pb2.py files
proto_output_path = (
output_dir / f"google/cloud/{service}_{version}/proto"
)
else:
proto_output_path = Path(output_dir / proto_output_path)
os.makedirs(proto_output_path, exist_ok=True)
for i in proto_files:
logger.debug(f"Copy: {i} to {proto_output_path / i.name}")
shutil.copyfile(i, proto_output_path / i.name)
logger.success(f"Placed proto files into {proto_output_path}.")
os.chdir(cwd)
# Sanity check: Does the output location have code in it?
# If not, complain.
if not tuple(output_dir.iterdir()):
raise RuntimeError(
f"Code generation seemed to succeed, but {output_dir} is empty."
)
# Huzzah, it worked.
logger.success(f"Generated code into {output_dir}.")
# Record this in the synthtool metadata.
metadata.add_client_destination(
source=api_definitions_repo_name,
api_name=service,
api_version=version,
language=language,
generator="bazel",
)
_tracked_paths.add(output_dir)
return output_dir
def _clone_googleapis(self):
if self._googleapis:
return self._googleapis
if LOCAL_GOOGLEAPIS:
self._googleapis = Path(LOCAL_GOOGLEAPIS).expanduser()
logger.debug(f"Using local googleapis at {self._googleapis}")
else:
logger.debug("Cloning googleapis.")
self._googleapis = git.clone(GOOGLEAPIS_URL)
return self._googleapis
def _clone_googleapis_private(self):
if self._googleapis_private:
return self._googleapis_private
if LOCAL_GOOGLEAPIS:
self._googleapis_private = Path(LOCAL_GOOGLEAPIS).expanduser()
logger.debug(
f"Using local googleapis at {self._googleapis_private} for googleapis-private"
)
else:
logger.debug("Cloning googleapis-private.")
self._googleapis_private = git.clone(GOOGLEAPIS_PRIVATE_URL)
return self._googleapis_private
def _clone_discovery_artifact_manager(self):
if self._discovery_artifact_manager:
return self._discovery_artifact_manager
if LOCAL_DISCOVERY_ARTIFACT_MANAGER:
self._discovery_artifact_manager = Path(
LOCAL_DISCOVERY_ARTIFACT_MANAGER
).expanduser()
logger.debug(
f"Using local discovery_artifact_manager at {self._discovery_artifact_manager} for googleapis-private"
)
else:
logger.debug("Cloning discovery-artifact-manager.")
self._discovery_artifact_manager = git.clone(DISCOVERY_ARTIFACT_MANAGER_URL)
return self._discovery_artifact_manager
def _ensure_dependencies_installed(self):
logger.debug("Ensuring dependencies.")
dependencies = ["bazel", "zip", "unzip", "tar"]
failed_dependencies = []
for dependency in dependencies:
return_code = shell.run(["which", dependency], check=False).returncode
if return_code:
failed_dependencies.append(dependency)
if failed_dependencies:
raise EnvironmentError(
f"Dependencies missing: {', '.join(failed_dependencies)}"
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @brief This module has unit tests for the classes of EasyIPC.
# @author Luis C. Garcia-Peraza Herrera (luiscarlos.gph@gmail.com).
# @date 25 June 2020.
import unittest
import os
import sys
import numpy as np
# My imports
import easyipc
class TestEasyIPC(unittest.TestCase):
def test_pipe(self):
data = [np.random.rand(1000, 1000) for i in range(100)]
newpid = os.fork()
if newpid == 0:
client = easyipc.Pipe('hoho')
client.connect()
client.send_whatever({'Hello': 'from the client'})
for i in range(len(data)):
                client.send_array(data[i])
            # The child process must exit here; otherwise it would fall through
            # and keep running the rest of the test suite alongside the parent.
            os._exit(0)
else:
server = easyipc.Pipe('hoho')
server.listen()
whatever = None
while whatever is None:
whatever = server.recv_whatever(blocking=False)
self.assertTrue(whatever['Hello'] == 'from the client')
for i in range(len(data)):
data_back = server.recv_array()
self.assertTrue(np.sum(data[i] - data_back) == 0)
if __name__ == '__main__':
unittest.main()
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Public APIs for algorithm developer using weight compression API."""
import abc
from typing import List, Any
import dataclasses
import tensorflow as tf
from tensorflow_model_optimization.python.core.common.keras.compression.internal import optimize
@dataclasses.dataclass
class WeightRepr:
args: Any = None
kwargs: Any = None
class WeightCompressor(metaclass=abc.ABCMeta):
"""Interface for weight compression algorithm that acts on a per-layer basis.
This allows both options of either decompressing during inference or
decompressing prior to inference (where compression occurs by applying a
tool such as zip to the model file).
This interface is a purely functional one.
"""
update_ops = [] # type: List
# TODO(tfmot): Consider separate from algorithm API for custom layer supports.
def get_compressible_weights(
self, original_layer: tf.keras.layers.Layer) -> List[tf.Variable]:
"""Define compressible weights for each layer.
Args:
original_layer: tf.keras.layers.Layer representing a layer from the
original model.
Returns:
List of compressible weights for the given layer.
"""
del original_layer
return []
@abc.abstractmethod
def init_training_weights(
self, pretrained_weight: tf.Tensor):
"""Initialize training weights for the compressible weight.
It calls the `add_training_weight` to add a training weight for a given
`pretrained_weight`. A `pretrained_weight` can have multiple training
weights. We initialize the training weights for each compressible
weight by just calling this function for each.
Args:
pretrained_weight: tf.Tensor of a pretrained weight of a layer that will
be compressed eventually.
"""
def add_training_weight(
self, *args, **kwargs):
"""Add a training weight for the compressible weight.
When this method is called from the `init_training_weights`, this adds
training weights for the pretrained_weight that is the input of the
`init_training_weights`.
Args:
*args: Passed through to training_model.add_weight.
**kwargs: Passed through to training_model.add_weight.
"""
weight_repr = WeightRepr(args=args, kwargs=kwargs)
if hasattr(self, 'weight_reprs'):
self.weight_reprs.append(weight_repr)
else:
self.weight_reprs = [weight_repr]
@abc.abstractmethod
def project_training_weights(
self, *training_weights: tf.Tensor) -> tf.Tensor:
"""Define a piece of the forward pass during training.
It operates on a single compressible weight.
The default throws an error when training occurs.
Args:
*training_weights: tf.Tensors representing any variables used during
training, for a single compressible weight, in the order returned in
`init_training_weights`.
Returns:
tf.Tensor to set the compressible weight to.
"""
def init_update_ops(self, tensor_weight_pairs):
self.update_ops = []
self.tensor_weight_pairs = tensor_weight_pairs
def update_training_weight(
self, training_weight: tf.Tensor, value: tf.Tensor):
"""Add training weight assign op to the model update list.
This method is for the case that training weight should update to a
specific value not from the model optimizer. It will throw an error if it
can't find the training weight.
    This method should be called in project_training_weights. During training,
    we collect all update_training_weight calls and make an update op for each
    call. Finally, we pass all these update ops to model.add_update.
Args:
training_weight: tf.Tensor representing a training weight.
value: tf.Tensor representing a value to be assigned to the training
weight.
Raises:
ValueError if it can't find the training weight.
"""
for tensor, weight in self.tensor_weight_pairs:
if training_weight is tensor:
self.update_ops.append(weight.assign(value))
return
raise ValueError('Training weight not found. Please call '
'the update_training_weight with given training '
'weight tensor.')
def get_update_ops(self):
return self.update_ops
def compress_training_weights(
self, *training_weights: tf.Tensor) -> List[tf.Tensor]:
"""Define the operations to compress a single weight’s training form.
'compress_training_weights' can refer to making the weight more amenable to
compression or actually compress the weight.
The default is an identity.
Args:
*training_weights: tf.Tensors representing all variables used during
training, for a single compressible weight, in the order returned in
`init_training_weights`.
Returns:
List of tf.Tensors to set to compressed or more compressible form.
"""
return list(training_weights)
@abc.abstractmethod
def decompress_weights(
self, *compressed_weights: tf.Tensor) -> tf.Tensor:
"""Define the operations to decompress a single weight’s compressed form.
The default is an identity.
Args:
*compressed_weights: tf.Tensors representing a single weight’s compressed
form, coming from what’s returned in `compress`.
Returns:
A tf.Tensor representing the decompressed `compressed_weights`.
"""
def create_layer_for_training(
layer: tf.keras.layers.Layer,
algorithm: WeightCompressor) -> tf.keras.layers.Layer:
return optimize.create_layer_for_training(layer, algorithm)
def create_layer_for_inference(
layer_for_training: tf.keras.layers.Layer,
algorithm: WeightCompressor) -> tf.keras.layers.Layer:
return optimize.create_layer_for_inference(layer_for_training, algorithm)
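# A minimal sketch of a concrete algorithm (illustrative only, not part of the
# API above): an identity "compression" that stores each weight as-is and
# restores it untouched. Assumes eager tensors (for .numpy()).
class _IdentityCompressor(WeightCompressor):

  def init_training_weights(self, pretrained_weight: tf.Tensor):
    # One training weight per compressible weight, seeded from the original.
    self.add_training_weight(
        name='w',
        shape=pretrained_weight.shape,
        dtype=pretrained_weight.dtype,
        initializer=tf.constant_initializer(pretrained_weight.numpy()))

  def project_training_weights(self, weight: tf.Tensor) -> tf.Tensor:
    return weight

  def decompress_weights(self, weight: tf.Tensor) -> tf.Tensor:
    return weight

# Usage would then mirror the factory functions above, e.g.:
#   training_layer = create_layer_for_training(layer, _IdentityCompressor())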
|
# Copyright 2021 Juan L. Gamella
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ---------------------------------------------------------------------
# Unit tests
import unittest
import numpy as np
import copy
# Tested functions
from causalicp.data import _Data
class DataTests(unittest.TestCase):
def setUp(self):
self.p = 20
self.n_obs = [2, 3, 4]
self.N = np.sum(self.n_obs)
self.e = len(self.n_obs)
self.target = 3
XX = []
for i, ne in enumerate(self.n_obs):
X = np.tile(np.ones(self.p), (ne, 1))
X *= (i + 1)
X[:, self.target] *= -1
XX.append(X)
self.XX = XX
def test_basic(self):
data = _Data(self.XX)
self.assertEqual(data.N, self.N)
self.assertTrue((data.n_obs == self.n_obs).all())
self.assertEqual(data.p, self.p)
self.assertEqual(data.e, self.e)
self.assertEqual(data.e, len(self.XX))
def test_memory(self):
# Test that the data is copied into the class
XX = copy.deepcopy(self.XX)
data = _Data(XX)
XX[0][0, 0] = -100
data_pooled = data._pooled_data
self.assertFalse(data_pooled[0, 0] == XX[0][0, 0])
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import time
from nova import vendor
from twisted.internet import defer
from nova import exception
from nova import flags
from nova import test
from nova import utils
from nova.compute import model
from nova.compute import node
FLAGS = flags.FLAGS
class ModelTestCase(test.TrialTestCase):
def setUp(self):
super(ModelTestCase, self).setUp()
self.flags(fake_libvirt=True,
fake_storage=True,
fake_users=True)
def tearDown(self):
model.Instance('i-test').destroy()
model.Host('testhost').destroy()
model.Daemon('testhost', 'nova-testdaemon').destroy()
def create_instance(self):
inst = model.Instance('i-test')
inst['reservation_id'] = 'r-test'
inst['launch_time'] = '10'
inst['user_id'] = 'fake'
inst['project_id'] = 'fake'
inst['instance_type'] = 'm1.tiny'
inst['node_name'] = FLAGS.node_name
inst['mac_address'] = utils.generate_mac()
inst['ami_launch_index'] = 0
inst.save()
return inst
def create_host(self):
host = model.Host('testhost')
host.save()
return host
def create_daemon(self):
daemon = model.Daemon('testhost', 'nova-testdaemon')
daemon.save()
return daemon
@defer.inlineCallbacks
def test_create_instance(self):
"""store with create_instace, then test that a load finds it"""
instance = yield self.create_instance()
old = yield model.Instance(instance.identifier)
self.assertFalse(old.is_new_record())
@defer.inlineCallbacks
def test_delete_instance(self):
"""create, then destroy, then make sure loads a new record"""
instance = yield self.create_instance()
yield instance.destroy()
newinst = yield model.Instance('i-test')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_instance_added_to_set(self):
"""create, then check that it is listed for the project"""
instance = yield self.create_instance()
found = False
for x in model.InstanceDirectory().all:
if x.identifier == 'i-test':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_instance_associates_project(self):
"""create, then check that it is listed for the project"""
instance = yield self.create_instance()
found = False
for x in model.InstanceDirectory().by_project(instance.project):
if x.identifier == 'i-test':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_host_class_finds_hosts(self):
host = yield self.create_host()
self.assertEqual('testhost', model.Host.lookup('testhost').identifier)
@defer.inlineCallbacks
def test_host_class_doesnt_find_missing_hosts(self):
rv = yield model.Host.lookup('woahnelly')
self.assertEqual(None, rv)
@defer.inlineCallbacks
def test_create_host(self):
"""store with create_host, then test that a load finds it"""
host = yield self.create_host()
old = yield model.Host(host.identifier)
self.assertFalse(old.is_new_record())
@defer.inlineCallbacks
def test_delete_host(self):
"""create, then destroy, then make sure loads a new record"""
instance = yield self.create_host()
yield instance.destroy()
newinst = yield model.Host('testhost')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_host_added_to_set(self):
"""create, then check that it is included in list"""
instance = yield self.create_host()
found = False
for x in model.Host.all():
if x.identifier == 'testhost':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_create_daemon_two_args(self):
"""create a daemon with two arguments"""
d = yield self.create_daemon()
d = model.Daemon('testhost', 'nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_create_daemon_single_arg(self):
"""Create a daemon using the combined host:bin format"""
d = yield model.Daemon("testhost:nova-testdaemon")
d.save()
d = model.Daemon('testhost:nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_equality_of_daemon_single_and_double_args(self):
"""Create a daemon using the combined host:bin arg, find with 2"""
d = yield model.Daemon("testhost:nova-testdaemon")
d.save()
d = model.Daemon('testhost', 'nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_equality_daemon_of_double_and_single_args(self):
"""Create a daemon using the combined host:bin arg, find with 2"""
d = yield self.create_daemon()
d = model.Daemon('testhost:nova-testdaemon')
self.assertFalse(d.is_new_record())
@defer.inlineCallbacks
def test_delete_daemon(self):
"""create, then destroy, then make sure loads a new record"""
instance = yield self.create_daemon()
yield instance.destroy()
newinst = yield model.Daemon('testhost', 'nova-testdaemon')
self.assertTrue(newinst.is_new_record())
@defer.inlineCallbacks
def test_daemon_heartbeat(self):
"""Create a daemon, sleep, heartbeat, check for update"""
d = yield self.create_daemon()
ts = d['updated_at']
time.sleep(2)
d.heartbeat()
d2 = model.Daemon('testhost', 'nova-testdaemon')
ts2 = d2['updated_at']
self.assert_(ts2 > ts)
@defer.inlineCallbacks
def test_daemon_added_to_set(self):
"""create, then check that it is included in list"""
instance = yield self.create_daemon()
found = False
for x in model.Daemon.all():
if x.identifier == 'testhost:nova-testdaemon':
found = True
self.assert_(found)
@defer.inlineCallbacks
def test_daemon_associates_host(self):
"""create, then check that it is listed for the host"""
instance = yield self.create_daemon()
found = False
for x in model.Daemon.by_host('testhost'):
if x.identifier == 'testhost:nova-testdaemon':
found = True
self.assertTrue(found)
|
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class RepositorySummary(object):
"""
Summary of the repository.
"""
def __init__(self, **kwargs):
"""
Initializes a new RepositorySummary object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param id:
The value to assign to the id property of this RepositorySummary.
:type id: str
:param name:
The value to assign to the name property of this RepositorySummary.
:type name: str
:param compartment_id:
The value to assign to the compartment_id property of this RepositorySummary.
:type compartment_id: str
:param project_id:
The value to assign to the project_id property of this RepositorySummary.
:type project_id: str
:param namespace:
The value to assign to the namespace property of this RepositorySummary.
:type namespace: str
:param project_name:
The value to assign to the project_name property of this RepositorySummary.
:type project_name: str
:param description:
The value to assign to the description property of this RepositorySummary.
:type description: str
:param default_branch:
The value to assign to the default_branch property of this RepositorySummary.
:type default_branch: str
:param repository_type:
The value to assign to the repository_type property of this RepositorySummary.
:type repository_type: str
:param ssh_url:
The value to assign to the ssh_url property of this RepositorySummary.
:type ssh_url: str
:param http_url:
The value to assign to the http_url property of this RepositorySummary.
:type http_url: str
:param mirror_repository_config:
The value to assign to the mirror_repository_config property of this RepositorySummary.
:type mirror_repository_config: oci.devops.models.MirrorRepositoryConfig
:param time_created:
The value to assign to the time_created property of this RepositorySummary.
:type time_created: datetime
:param time_updated:
The value to assign to the time_updated property of this RepositorySummary.
:type time_updated: datetime
:param lifecycle_state:
The value to assign to the lifecycle_state property of this RepositorySummary.
:type lifecycle_state: str
:param lifecycle_details:
The value to assign to the lifecycle_details property of this RepositorySummary.
:type lifecycle_details: str
:param freeform_tags:
The value to assign to the freeform_tags property of this RepositorySummary.
:type freeform_tags: dict(str, str)
:param defined_tags:
The value to assign to the defined_tags property of this RepositorySummary.
:type defined_tags: dict(str, dict(str, object))
:param system_tags:
The value to assign to the system_tags property of this RepositorySummary.
:type system_tags: dict(str, dict(str, object))
"""
self.swagger_types = {
'id': 'str',
'name': 'str',
'compartment_id': 'str',
'project_id': 'str',
'namespace': 'str',
'project_name': 'str',
'description': 'str',
'default_branch': 'str',
'repository_type': 'str',
'ssh_url': 'str',
'http_url': 'str',
'mirror_repository_config': 'MirrorRepositoryConfig',
'time_created': 'datetime',
'time_updated': 'datetime',
'lifecycle_state': 'str',
'lifecycle_details': 'str',
'freeform_tags': 'dict(str, str)',
'defined_tags': 'dict(str, dict(str, object))',
'system_tags': 'dict(str, dict(str, object))'
}
self.attribute_map = {
'id': 'id',
'name': 'name',
'compartment_id': 'compartmentId',
'project_id': 'projectId',
'namespace': 'namespace',
'project_name': 'projectName',
'description': 'description',
'default_branch': 'defaultBranch',
'repository_type': 'repositoryType',
'ssh_url': 'sshUrl',
'http_url': 'httpUrl',
'mirror_repository_config': 'mirrorRepositoryConfig',
'time_created': 'timeCreated',
'time_updated': 'timeUpdated',
'lifecycle_state': 'lifecycleState',
'lifecycle_details': 'lifecycleDetails',
'freeform_tags': 'freeformTags',
'defined_tags': 'definedTags',
'system_tags': 'systemTags'
}
self._id = None
self._name = None
self._compartment_id = None
self._project_id = None
self._namespace = None
self._project_name = None
self._description = None
self._default_branch = None
self._repository_type = None
self._ssh_url = None
self._http_url = None
self._mirror_repository_config = None
self._time_created = None
self._time_updated = None
self._lifecycle_state = None
self._lifecycle_details = None
self._freeform_tags = None
self._defined_tags = None
self._system_tags = None
@property
def id(self):
"""
**[Required]** Gets the id of this RepositorySummary.
The OCID of the repository. This value is unique and immutable.
:return: The id of this RepositorySummary.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this RepositorySummary.
The OCID of the repository. This value is unique and immutable.
:param id: The id of this RepositorySummary.
:type: str
"""
self._id = id
@property
def name(self):
"""
Gets the name of this RepositorySummary.
Unique name of a repository. This value is mutable.
:return: The name of this RepositorySummary.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this RepositorySummary.
Unique name of a repository. This value is mutable.
:param name: The name of this RepositorySummary.
:type: str
"""
self._name = name
@property
def compartment_id(self):
"""
**[Required]** Gets the compartment_id of this RepositorySummary.
The OCID of the repository's compartment.
:return: The compartment_id of this RepositorySummary.
:rtype: str
"""
return self._compartment_id
@compartment_id.setter
def compartment_id(self, compartment_id):
"""
Sets the compartment_id of this RepositorySummary.
The OCID of the repository's compartment.
:param compartment_id: The compartment_id of this RepositorySummary.
:type: str
"""
self._compartment_id = compartment_id
@property
def project_id(self):
"""
**[Required]** Gets the project_id of this RepositorySummary.
The OCID of the DevOps project containing the repository.
:return: The project_id of this RepositorySummary.
:rtype: str
"""
return self._project_id
@project_id.setter
def project_id(self, project_id):
"""
Sets the project_id of this RepositorySummary.
The OCID of the DevOps project containing the repository.
:param project_id: The project_id of this RepositorySummary.
:type: str
"""
self._project_id = project_id
@property
def namespace(self):
"""
Gets the namespace of this RepositorySummary.
Tenancy unique namespace.
:return: The namespace of this RepositorySummary.
:rtype: str
"""
return self._namespace
@namespace.setter
def namespace(self, namespace):
"""
Sets the namespace of this RepositorySummary.
Tenancy unique namespace.
:param namespace: The namespace of this RepositorySummary.
:type: str
"""
self._namespace = namespace
@property
def project_name(self):
"""
Gets the project_name of this RepositorySummary.
Unique project name in a namespace.
:return: The project_name of this RepositorySummary.
:rtype: str
"""
return self._project_name
@project_name.setter
def project_name(self, project_name):
"""
Sets the project_name of this RepositorySummary.
Unique project name in a namespace.
:param project_name: The project_name of this RepositorySummary.
:type: str
"""
self._project_name = project_name
@property
def description(self):
"""
Gets the description of this RepositorySummary.
Details of the repository. Avoid entering confidential information.
:return: The description of this RepositorySummary.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this RepositorySummary.
Details of the repository. Avoid entering confidential information.
:param description: The description of this RepositorySummary.
:type: str
"""
self._description = description
@property
def default_branch(self):
"""
Gets the default_branch of this RepositorySummary.
The default branch of the repository.
:return: The default_branch of this RepositorySummary.
:rtype: str
"""
return self._default_branch
@default_branch.setter
def default_branch(self, default_branch):
"""
Sets the default_branch of this RepositorySummary.
The default branch of the repository.
:param default_branch: The default_branch of this RepositorySummary.
:type: str
"""
self._default_branch = default_branch
@property
def repository_type(self):
"""
Gets the repository_type of this RepositorySummary.
Type of repository.
:return: The repository_type of this RepositorySummary.
:rtype: str
"""
return self._repository_type
@repository_type.setter
def repository_type(self, repository_type):
"""
Sets the repository_type of this RepositorySummary.
Type of repository.
:param repository_type: The repository_type of this RepositorySummary.
:type: str
"""
self._repository_type = repository_type
@property
def ssh_url(self):
"""
Gets the ssh_url of this RepositorySummary.
SSH URL that you use to git clone, pull and push.
:return: The ssh_url of this RepositorySummary.
:rtype: str
"""
return self._ssh_url
@ssh_url.setter
def ssh_url(self, ssh_url):
"""
Sets the ssh_url of this RepositorySummary.
SSH URL that you use to git clone, pull and push.
:param ssh_url: The ssh_url of this RepositorySummary.
:type: str
"""
self._ssh_url = ssh_url
@property
def http_url(self):
"""
Gets the http_url of this RepositorySummary.
HTTP URL that you use to git clone, pull and push.
:return: The http_url of this RepositorySummary.
:rtype: str
"""
return self._http_url
@http_url.setter
def http_url(self, http_url):
"""
Sets the http_url of this RepositorySummary.
HTTP URL that you use to git clone, pull and push.
:param http_url: The http_url of this RepositorySummary.
:type: str
"""
self._http_url = http_url
@property
def mirror_repository_config(self):
"""
Gets the mirror_repository_config of this RepositorySummary.
:return: The mirror_repository_config of this RepositorySummary.
:rtype: oci.devops.models.MirrorRepositoryConfig
"""
return self._mirror_repository_config
@mirror_repository_config.setter
def mirror_repository_config(self, mirror_repository_config):
"""
Sets the mirror_repository_config of this RepositorySummary.
:param mirror_repository_config: The mirror_repository_config of this RepositorySummary.
:type: oci.devops.models.MirrorRepositoryConfig
"""
self._mirror_repository_config = mirror_repository_config
@property
def time_created(self):
"""
Gets the time_created of this RepositorySummary.
The time the repository was created. Format defined by `RFC3339`__.
__ https://datatracker.ietf.org/doc/html/rfc3339
:return: The time_created of this RepositorySummary.
:rtype: datetime
"""
return self._time_created
@time_created.setter
def time_created(self, time_created):
"""
Sets the time_created of this RepositorySummary.
The time the repository was created. Format defined by `RFC3339`__.
__ https://datatracker.ietf.org/doc/html/rfc3339
:param time_created: The time_created of this RepositorySummary.
:type: datetime
"""
self._time_created = time_created
@property
def time_updated(self):
"""
Gets the time_updated of this RepositorySummary.
The time the repository was updated. Format defined by `RFC3339`__.
__ https://datatracker.ietf.org/doc/html/rfc3339
:return: The time_updated of this RepositorySummary.
:rtype: datetime
"""
return self._time_updated
@time_updated.setter
def time_updated(self, time_updated):
"""
Sets the time_updated of this RepositorySummary.
The time the repository was updated. Format defined by `RFC3339`__.
__ https://datatracker.ietf.org/doc/html/rfc3339
:param time_updated: The time_updated of this RepositorySummary.
:type: datetime
"""
self._time_updated = time_updated
@property
def lifecycle_state(self):
"""
Gets the lifecycle_state of this RepositorySummary.
The current state of the repository.
:return: The lifecycle_state of this RepositorySummary.
:rtype: str
"""
return self._lifecycle_state
@lifecycle_state.setter
def lifecycle_state(self, lifecycle_state):
"""
Sets the lifecycle_state of this RepositorySummary.
The current state of the repository.
:param lifecycle_state: The lifecycle_state of this RepositorySummary.
:type: str
"""
self._lifecycle_state = lifecycle_state
@property
def lifecycle_details(self):
"""
Gets the lifecycle_details of this RepositorySummary.
A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
:return: The lifecycle_details of this RepositorySummary.
:rtype: str
"""
return self._lifecycle_details
@lifecycle_details.setter
def lifecycle_details(self, lifecycle_details):
"""
Sets the lifecycle_details of this RepositorySummary.
A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
:param lifecycle_details: The lifecycle_details of this RepositorySummary.
:type: str
"""
self._lifecycle_details = lifecycle_details
@property
def freeform_tags(self):
"""
Gets the freeform_tags of this RepositorySummary.
Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See `Resource Tags`__. Example: `{\"bar-key\": \"value\"}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:return: The freeform_tags of this RepositorySummary.
:rtype: dict(str, str)
"""
return self._freeform_tags
@freeform_tags.setter
def freeform_tags(self, freeform_tags):
"""
Sets the freeform_tags of this RepositorySummary.
Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See `Resource Tags`__. Example: `{\"bar-key\": \"value\"}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:param freeform_tags: The freeform_tags of this RepositorySummary.
:type: dict(str, str)
"""
self._freeform_tags = freeform_tags
@property
def defined_tags(self):
"""
Gets the defined_tags of this RepositorySummary.
Defined tags for this resource. Each key is predefined and scoped to a namespace. See `Resource Tags`__. Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:return: The defined_tags of this RepositorySummary.
:rtype: dict(str, dict(str, object))
"""
return self._defined_tags
@defined_tags.setter
def defined_tags(self, defined_tags):
"""
Sets the defined_tags of this RepositorySummary.
Defined tags for this resource. Each key is predefined and scoped to a namespace. See `Resource Tags`__. Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:param defined_tags: The defined_tags of this RepositorySummary.
:type: dict(str, dict(str, object))
"""
self._defined_tags = defined_tags
@property
def system_tags(self):
"""
Gets the system_tags of this RepositorySummary.
Usage of system tag keys. These predefined keys are scoped to namespaces. See `Resource Tags`__. Example: `{\"orcl-cloud\": {\"free-tier-retained\": \"true\"}}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:return: The system_tags of this RepositorySummary.
:rtype: dict(str, dict(str, object))
"""
return self._system_tags
@system_tags.setter
def system_tags(self, system_tags):
"""
Sets the system_tags of this RepositorySummary.
Usage of system tag keys. These predefined keys are scoped to namespaces. See `Resource Tags`__. Example: `{\"orcl-cloud\": {\"free-tier-retained\": \"true\"}}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:param system_tags: The system_tags of this RepositorySummary.
:type: dict(str, dict(str, object))
"""
self._system_tags = system_tags
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
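# Illustrative usage sketch (placeholder values, not real OCIDs): the keyword
# arguments accepted by __init__ correspond one-to-one to the properties above,
# and `attribute_map` supplies their camelCase wire-format names.
if __name__ == '__main__':
    example = RepositorySummary(
        name='example-repo',
        default_branch='main',
        repository_type='HOSTED',
    )
    print(example)  # __repr__ renders the model as a formatted flat dict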
|
import json
import math
import operator
import os
import random
from io import open
from queue import PriorityQueue
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
import functools
import tatk.policy.mdrg.multiwoz.default_policy as policy
SOS_token = 0
EOS_token = 1
UNK_token = 2
PAD_token = 3
# Shawn beam search decoding
class BeamSearchNode(object):
def __init__(self, h, prevNode, wordid, logp, leng):
self.h = h
self.prevNode = prevNode
self.wordid = wordid
self.logp = logp
self.leng = leng
def eval(self, repeatPenalty, tokenReward, scoreTable, alpha=1.0):
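        # Length-normalized log-probability. repeatPenalty, tokenReward and
        # scoreTable are accepted but unused, and alpha is reset to 1.0 below
        # because callers pass None for it.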
reward = 0
alpha = 1.0
return self.logp / float(self.leng - 1 + 1e-6) + alpha * reward
def init_lstm(cell, gain=1):
init_gru(cell, gain)
# positive forget gate bias (Jozefowicz et al., 2015)
for _, _, ih_b, hh_b in cell.all_weights:
l = len(ih_b)
ih_b[l // 4:l // 2].data.fill_(1.0)
hh_b[l // 4:l // 2].data.fill_(1.0)
def init_gru(gru, gain=1):
gru.reset_parameters()
for _, hh, _, _ in gru.all_weights:
for i in range(0, hh.size(0), gru.hidden_size):
torch.nn.init.orthogonal_(hh[i:i+gru.hidden_size],gain=gain)
def whatCellType(input_size, hidden_size, cell_type, dropout_rate):
if cell_type == 'rnn':
cell = nn.RNN(input_size, hidden_size, dropout=dropout_rate, batch_first=False)
init_gru(cell)
return cell
elif cell_type == 'gru':
cell = nn.GRU(input_size, hidden_size, dropout=dropout_rate, batch_first=False)
init_gru(cell)
return cell
elif cell_type == 'lstm':
cell = nn.LSTM(input_size, hidden_size, dropout=dropout_rate, batch_first=False)
init_lstm(cell)
return cell
elif cell_type == 'bigru':
cell = nn.GRU(input_size, hidden_size, bidirectional=True, dropout=dropout_rate, batch_first=False)
init_gru(cell)
return cell
elif cell_type == 'bilstm':
cell = nn.LSTM(input_size, hidden_size, bidirectional=True, dropout=dropout_rate, batch_first=False)
init_lstm(cell)
return cell
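# Example (sketch): whatCellType(300, 150, 'bilstm', dropout_rate=0.0) returns
# a bidirectional LSTM whose recurrent weights are orthogonally initialized and
# whose forget-gate biases are set positive via init_lstm above.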
class EncoderRNN(nn.Module):
def __init__(self, input_size, embedding_size, hidden_size, cell_type, depth, dropout):
super(EncoderRNN, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.embed_size = embedding_size
self.n_layers = depth
self.dropout = dropout
self.bidirectional = False
if 'bi' in cell_type:
self.bidirectional = True
padding_idx = 3
self.embedding = nn.Embedding(input_size, embedding_size, padding_idx=padding_idx)
self.rnn = whatCellType(embedding_size, hidden_size,
cell_type, dropout_rate=self.dropout)
def forward(self, input_seqs, input_lens, hidden=None):
"""
forward procedure. **No need for inputs to be sorted**
:param input_seqs: Variable of [T,B]
:param hidden:
:param input_lens: *numpy array* of len for each input sequence
:return:
"""
input_lens = np.asarray(input_lens)
input_seqs = input_seqs.transpose(0,1)
#batch_size = input_seqs.size(1)
embedded = self.embedding(input_seqs)
embedded = embedded.transpose(0, 1) # [B,T,E]
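        # pack_padded_sequence requires sequences sorted by decreasing length,
        # so sort the batch here and undo the permutation with unsort_idx below.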
sort_idx = np.argsort(-input_lens)
unsort_idx = torch.LongTensor(np.argsort(sort_idx))
input_lens = input_lens[sort_idx]
sort_idx = torch.LongTensor(sort_idx)
embedded = embedded[sort_idx].transpose(0, 1) # [T,B,E]
packed = torch.nn.utils.rnn.pack_padded_sequence(embedded, input_lens)
outputs, hidden = self.rnn(packed, hidden)
outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(outputs)
if self.bidirectional:
outputs = outputs[:, :, :self.hidden_size] + outputs[:, :, self.hidden_size:]
outputs = outputs.transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
if isinstance(hidden, tuple):
hidden = list(hidden)
hidden[0] = hidden[0].transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
hidden[1] = hidden[1].transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
hidden = tuple(hidden)
else:
hidden = hidden.transpose(0, 1)[unsort_idx].transpose(0, 1).contiguous()
return outputs, hidden
class Attn(nn.Module):
def __init__(self, method, hidden_size):
super(Attn, self).__init__()
self.method = method
self.hidden_size = hidden_size
self.attn = nn.Linear(self.hidden_size * 2, hidden_size)
self.v = nn.Parameter(torch.rand(hidden_size))
stdv = 1. / math.sqrt(self.v.size(0))
self.v.data.normal_(mean=0, std=stdv)
def forward(self, hidden, encoder_outputs):
'''
:param hidden:
previous hidden state of the decoder, in shape (layers*directions,B,H)
:param encoder_outputs:
encoder outputs from Encoder, in shape (T,B,H)
:return
attention energies in shape (B,T)
'''
max_len = encoder_outputs.size(0)
H = hidden.repeat(max_len,1,1).transpose(0,1)
encoder_outputs = encoder_outputs.transpose(0,1) # [T,B,H] -> [B,T,H]
attn_energies = self.score(H,encoder_outputs) # compute attention score
return F.softmax(attn_energies, dim=1).unsqueeze(1) # normalize with softmax
def score(self, hidden, encoder_outputs):
cat = torch.cat([hidden, encoder_outputs], 2)
energy = torch.tanh(self.attn(cat)) # [B*T*2H]->[B*T*H]
energy = energy.transpose(2,1) # [B*H*T]
v = self.v.repeat(encoder_outputs.data.shape[0],1).unsqueeze(1) #[B*1*H]
energy = torch.bmm(v,energy) # [B*1*T]
return energy.squeeze(1) # [B*T]
class SeqAttnDecoderRNN(nn.Module):
def __init__(self, embedding_size, hidden_size, output_size, cell_type, dropout_p=0.1, max_length=30):
super(SeqAttnDecoderRNN, self).__init__()
# Define parameters
self.hidden_size = hidden_size
self.embed_size = embedding_size
self.output_size = output_size
self.n_layers = 1
self.dropout_p = dropout_p
# Define layers
self.embedding = nn.Embedding(output_size, embedding_size)
self.dropout = nn.Dropout(dropout_p)
        if 'bi' in cell_type:  # we don't need bidirectionality in decoding
cell_type = cell_type.strip('bi')
self.rnn = whatCellType(embedding_size + hidden_size, hidden_size, cell_type, dropout_rate=self.dropout_p)
self.out = nn.Linear(hidden_size, output_size)
self.score = nn.Linear(self.hidden_size + self.hidden_size, self.hidden_size)
self.attn_combine = nn.Linear(embedding_size + hidden_size, embedding_size)
# attention
self.method = 'concat'
self.attn = nn.Linear(self.hidden_size * 2, hidden_size)
self.v = nn.Parameter(torch.rand(hidden_size))
stdv = 1. / math.sqrt(self.v.size(0))
self.v.data.normal_(mean=0, std=stdv)
def forward(self, input, hidden, encoder_outputs):
if isinstance(hidden, tuple):
h_t = hidden[0]
else:
h_t = hidden
encoder_outputs = encoder_outputs.transpose(0, 1)
embedded = self.embedding(input) # .view(1, 1, -1)
# embedded = F.dropout(embedded, self.dropout_p)
# SCORE 3
max_len = encoder_outputs.size(1)
h_t = h_t.transpose(0, 1) # [1,B,D] -> [B,1,D]
h_t = h_t.repeat(1, max_len, 1) # [B,1,D] -> [B,T,D]
energy = self.attn(torch.cat((h_t, encoder_outputs), 2)) # [B,T,2D] -> [B,T,D]
energy = torch.tanh(energy)
energy = energy.transpose(2, 1) # [B,H,T]
v = self.v.repeat(encoder_outputs.size(0), 1).unsqueeze(1) # [B,1,H]
energy = torch.bmm(v, energy) # [B,1,T]
attn_weights = F.softmax(energy, dim=2) # [B,1,T]
# getting context
context = torch.bmm(attn_weights, encoder_outputs) # [B,1,H]
# context = torch.bmm(attn_weights.unsqueeze(0), encoder_outputs.unsqueeze(0)) #[B,1,H]
# Combine embedded input word and attended context, run through RNN
rnn_input = torch.cat((embedded, context), 2)
rnn_input = rnn_input.transpose(0, 1)
output, hidden = self.rnn(rnn_input, hidden)
output = output.squeeze(0) # (1,B,V)->(B,V)
output = F.log_softmax(self.out(output), dim=1)
return output, hidden # , attn_weights
class DecoderRNN(nn.Module):
def __init__(self, embedding_size, hidden_size, output_size, cell_type, dropout=0.1):
super(DecoderRNN, self).__init__()
self.hidden_size = hidden_size
self.cell_type = cell_type
padding_idx = 3
self.embedding = nn.Embedding(num_embeddings=output_size,
embedding_dim=embedding_size,
padding_idx=padding_idx
)
        if 'bi' in cell_type:  # we don't need bidirectionality in decoding
cell_type = cell_type.strip('bi')
self.rnn = whatCellType(embedding_size, hidden_size, cell_type, dropout_rate=dropout)
self.dropout_rate = dropout
self.out = nn.Linear(hidden_size, output_size)
def forward(self, input, hidden, not_used):
embedded = self.embedding(input).transpose(0, 1) # [B,1] -> [ 1,B, D]
embedded = F.dropout(embedded, self.dropout_rate)
output = embedded
#output = F.relu(embedded)
output, hidden = self.rnn(output, hidden)
out = self.out(output.squeeze(0))
output = F.log_softmax(out, dim=1)
return output, hidden
class Model(nn.Module):
def __init__(self, args, input_lang_index2word, output_lang_index2word, input_lang_word2index, output_lang_word2index):
super(Model, self).__init__()
self.args = args
self.max_len = args.max_len
self.output_lang_index2word = output_lang_index2word
self.input_lang_index2word = input_lang_index2word
self.output_lang_word2index = output_lang_word2index
self.input_lang_word2index = input_lang_word2index
self.hid_size_enc = args.hid_size_enc
self.hid_size_dec = args.hid_size_dec
self.hid_size_pol = args.hid_size_pol
self.emb_size = args.emb_size
self.db_size = args.db_size
self.bs_size = args.bs_size
self.cell_type = args.cell_type
if 'bi' in self.cell_type:
self.num_directions = 2
else:
self.num_directions = 1
self.depth = args.depth
self.use_attn = args.use_attn
self.attn_type = args.attention_type
self.dropout = args.dropout
self.device = torch.device("cuda" if args.cuda else "cpu")
self.model_dir = args.model_dir
self.model_name = args.model_name
self.teacher_forcing_ratio = args.teacher_ratio
self.vocab_size = args.vocab_size
self.epsln = 10E-5
torch.manual_seed(args.seed)
self.build_model()
self.getCount()
        # Enable beam search only when a positive beam_width is configured.
        beam_width = getattr(self.args, 'beam_width', None)
        self.beam_search = bool(beam_width) and beam_width > 0
self.global_step = 0
def cuda_(self, var):
return var.cuda() if self.args.cuda else var
def build_model(self):
self.encoder = EncoderRNN(len(self.input_lang_index2word), self.emb_size, self.hid_size_enc,
self.cell_type, self.depth, self.dropout).to(self.device)
self.policy = policy.DefaultPolicy(self.hid_size_pol, self.hid_size_enc, self.db_size, self.bs_size).to(self.device)
if self.use_attn:
if self.attn_type == 'bahdanau':
self.decoder = SeqAttnDecoderRNN(self.emb_size, self.hid_size_dec, len(self.output_lang_index2word), self.cell_type, self.dropout, self.max_len).to(self.device)
else:
self.decoder = DecoderRNN(self.emb_size, self.hid_size_dec, len(self.output_lang_index2word), self.cell_type, self.dropout).to(self.device)
if self.args.mode == 'train':
            self.gen_criterion = nn.NLLLoss(ignore_index=3, reduction='mean')  # log-softmax is applied in the decoder
self.setOptimizers()
def train(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor, dial_name=None):
proba, _, decoded_sent = self.forward(input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor)
proba = proba.view(-1, self.vocab_size)
self.gen_loss = self.gen_criterion(proba, target_tensor.view(-1))
self.loss = self.gen_loss
self.loss.backward()
grad = self.clipGradients()
self.optimizer.step()
self.optimizer.zero_grad()
#self.printGrad()
return self.loss.item(), 0, grad
def setOptimizers(self):
self.optimizer_policy = None
if self.args.optim == 'sgd':
self.optimizer = optim.SGD(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)
elif self.args.optim == 'adadelta':
self.optimizer = optim.Adadelta(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)
elif self.args.optim == 'adam':
self.optimizer = optim.Adam(lr=self.args.lr_rate, params=filter(lambda x: x.requires_grad, self.parameters()), weight_decay=self.args.l2_norm)
def forward(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor):
"""Given the user sentence, user belief state and database pointer,
encode the sentence, decide what policy vector construct and
feed it as the first hiddent state to the decoder."""
target_length = target_tensor.size(1)
# for fixed encoding this is zero so it does not contribute
batch_size, seq_len = input_tensor.size()
# ENCODER
encoder_outputs, encoder_hidden = self.encoder(input_tensor, input_lengths)
# POLICY
decoder_hidden = self.policy(encoder_hidden, db_tensor, bs_tensor)
# GENERATOR
# Teacher forcing: Feed the target as the next input
_, target_len = target_tensor.size()
        decoder_input = torch.tensor([[SOS_token] for _ in range(batch_size)], dtype=torch.long, device=self.device)
        proba = torch.zeros(batch_size, target_length, self.vocab_size, device=self.device)  # [B,T,V]
for t in range(target_len):
decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
use_teacher_forcing = True if random.random() < self.args.teacher_ratio else False
if use_teacher_forcing:
decoder_input = target_tensor[:, t].view(-1, 1) # [B,1] Teacher forcing
else:
# Without teacher forcing: use its own predictions as the next input
topv, topi = decoder_output.topk(1)
                decoder_input = topi.detach().view(-1, 1)  # [B,1]; detach from history as input
proba[:, t, :] = decoder_output
decoded_sent = None
return proba, None, decoded_sent
def predict(self, input_tensor, input_lengths, target_tensor, target_lengths, db_tensor, bs_tensor):
with torch.no_grad():
# ENCODER
encoder_outputs, encoder_hidden = self.encoder(input_tensor, input_lengths)
# POLICY
decoder_hidden = self.policy(encoder_hidden, db_tensor, bs_tensor)
# GENERATION
decoded_words = self.decode(target_tensor, decoder_hidden, encoder_outputs)
return decoded_words, 0
def decode(self, target_tensor, decoder_hidden, encoder_outputs):
decoder_hiddens = decoder_hidden
if self.beam_search: # wenqiang style - sequicity
decoded_sentences = []
for idx in range(target_tensor.size(0)):
if isinstance(decoder_hiddens, tuple): # LSTM case
decoder_hidden = (decoder_hiddens[0][:,idx, :].unsqueeze(0),decoder_hiddens[1][:,idx, :].unsqueeze(0))
else:
decoder_hidden = decoder_hiddens[:, idx, :].unsqueeze(0)
encoder_output = encoder_outputs[:,idx, :].unsqueeze(1)
# Beam start
self.topk = 1
endnodes = [] # stored end nodes
number_required = min((self.topk + 1), self.topk - len(endnodes))
                decoder_input = torch.tensor([[SOS_token]], dtype=torch.long, device=self.device)
# starting node hidden vector, prevNode, wordid, logp, leng,
node = BeamSearchNode(decoder_hidden, None, decoder_input, 0, 1)
nodes = PriorityQueue() # start the queue
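                # PriorityQueue is a min-heap, so scores are stored negated:
                # the node with the best length-normalized log-prob pops first.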
nodes.put((-node.eval(None, None, None, None),
node))
# start beam search
qsize = 1
while True:
# give up when decoding takes too long
if qsize > 2000: break
# fetch the best node
score, n = nodes.get()
decoder_input = n.wordid
decoder_hidden = n.h
if n.wordid.item() == EOS_token and n.prevNode != None: # its not empty
endnodes.append((score, n))
# if reach maximum # of sentences required
if len(endnodes) >= number_required:
break
else:
continue
# decode for one step using decoder
decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_output)
log_prob, indexes = torch.topk(decoder_output, self.args.beam_width)
nextnodes = []
for new_k in range(self.args.beam_width):
decoded_t = indexes[0][new_k].view(1, -1)
log_p = log_prob[0][new_k].item()
node = BeamSearchNode(decoder_hidden, n, decoded_t, n.logp + log_p, n.leng + 1)
score = -node.eval(None, None, None, None)
nextnodes.append((score, node))
# put them into queue
for i in range(len(nextnodes)):
score, nn = nextnodes[i]
nodes.put((score, nn))
# increase qsize
qsize += len(nextnodes)
# choose nbest paths, back trace them
if len(endnodes) == 0:
endnodes = [nodes.get() for n in range(self.topk)]
utterances = []
for score, n in sorted(endnodes, key=operator.itemgetter(0)):
utterance = []
utterance.append(n.wordid)
# back trace
while n.prevNode != None:
n = n.prevNode
utterance.append(n.wordid)
utterance = utterance[::-1]
utterances.append(utterance)
decoded_words = utterances[0]
decoded_sentence = [self.output_index2word(str(ind.item())) for ind in decoded_words]
#print(decoded_sentence)
decoded_sentences.append(' '.join(decoded_sentence[1:-1]))
return decoded_sentences
else: # GREEDY DECODING
decoded_sentences = self.greedy_decode(decoder_hidden, encoder_outputs, target_tensor)
return decoded_sentences
def greedy_decode(self, decoder_hidden, encoder_outputs, target_tensor):
decoded_sentences = []
batch_size, seq_len = target_tensor.size()
        decoder_input = torch.tensor([[SOS_token] for _ in range(batch_size)], dtype=torch.long, device=self.device)
        decoded_words = torch.zeros((batch_size, self.max_len), device=self.device)
for t in range(self.max_len):
decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
topv, topi = decoder_output.data.topk(1) # get candidates
topi = topi.view(-1)
decoded_words[:, t] = topi
decoder_input = topi.detach().view(-1, 1)
for sentence in decoded_words:
sent = []
for ind in sentence:
if self.output_index2word(str(int(ind.item()))) == self.output_index2word(str(EOS_token)):
break
sent.append(self.output_index2word(str(int(ind.item()))))
decoded_sentences.append(' '.join(sent))
return decoded_sentences
def clipGradients(self):
grad = torch.nn.utils.clip_grad_norm_(self.parameters(), self.args.clip)
return grad
def saveModel(self, iter):
print('Saving parameters..')
if not os.path.exists(os.path.join(os.path.dirname(__file__), self.model_dir)):
os.makedirs(os.path.join(os.path.dirname(__file__), self.model_dir))
# print(self.model_dir)
torch.save(self.encoder.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.enc'))
torch.save(self.policy.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.pol'))
torch.save(self.decoder.state_dict(), os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.dec'))
with open(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '.config'), 'w') as f:
f.write(json.dumps(vars(self.args), ensure_ascii=False, indent=4))
def loadModel(self, iter=0):
print('Loading parameters of iter %s ' % iter)
self.encoder.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.enc')))
self.policy.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.pol')))
self.decoder.load_state_dict(torch.load(os.path.join(os.path.dirname(__file__), self.model_dir + self.model_name + '-' + str(iter) + '.dec')))
def input_index2word(self, index):
        if index in self.input_lang_index2word:
return self.input_lang_index2word[index]
else:
raise UserWarning('We are using UNK')
def output_index2word(self, index):
if index in self.output_lang_index2word:
return self.output_lang_index2word[index]
else:
raise UserWarning('We are using UNK')
def input_word2index(self, index):
if index in self.input_lang_word2index:
return self.input_lang_word2index[index]
else:
return 2
def output_word2index(self, index):
if index in self.output_lang_word2index:
return self.output_lang_word2index[index]
else:
return 2
def getCount(self):
learnable_parameters = filter(lambda p: p.requires_grad, self.parameters())
param_cnt = sum([functools.reduce((lambda x, y: x * y), param.shape) for param in learnable_parameters])
        print('Model has', param_cnt, 'parameters.')
def printGrad(self):
learnable_parameters = filter(lambda p: p.requires_grad, self.parameters())
for idx, param in enumerate(learnable_parameters):
print(param.grad, param.shape)
|
from .pybitrix24 import Bitrix24
__version__ = '0.5.0'
|
#!/usr/bin/env python
######################################################################
## File: create_public_pileup_plots.py
######################################################################
# NOTE: Typical way to create the pileup ROOT file from the cached txt
# files (maintained by Mike Hildreth):
# pileupCalc.py -i /afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions12/8TeV/DCSOnly/json_DCSONLY.txt \
# --inputLumiJSON=/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions12/8TeV/PileUp/pileup_latest.txt \
# --calcMode true --maxPileupBin=40 pu2012DCSONLY.root
import sys
import os
import commands
import math
import optparse
import ConfigParser
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
# FIX FIX FIX
# This fixes a well-known bug with stepfilled logarithmic histograms in
# Matplotlib.
from RecoLuminosity.LumiDB.mpl_axes_hist_fix import hist
if matplotlib.__version__ != '1.0.1':
print >> sys.stderr, \
"ERROR The %s script contains a hard-coded bug-fix " \
"for Matplotlib 1.0.1. The Matplotlib version loaded " \
"is %s" % (__file__, matplotlib.__version__)
sys.exit(1)
matplotlib.axes.Axes.hist = hist
# FIX FIX FIX end
from ROOT import gROOT
gROOT.SetBatch(True)
from ROOT import PyConfig
PyConfig.IgnoreCommandLineOptions = True
from ROOT import TFile
from RecoLuminosity.LumiDB.public_plots_tools import ColorScheme
from RecoLuminosity.LumiDB.public_plots_tools import LatexifyUnits
from RecoLuminosity.LumiDB.public_plots_tools import AddLogo
from RecoLuminosity.LumiDB.public_plots_tools import InitMatplotlib
from RecoLuminosity.LumiDB.public_plots_tools import RoundAwayFromZero
from RecoLuminosity.LumiDB.public_plots_tools import SavePlot
from RecoLuminosity.LumiDB.public_plots_tools import FONT_PROPS_SUPTITLE
from RecoLuminosity.LumiDB.public_plots_tools import FONT_PROPS_TITLE
from RecoLuminosity.LumiDB.public_plots_tools import FONT_PROPS_AX_TITLE
from RecoLuminosity.LumiDB.public_plots_tools import FONT_PROPS_TICK_LABEL
try:
import debug_hook
import pdb
except ImportError:
pass
######################################################################
def TweakPlot(fig, ax, add_extra_head_room=False):
# Fiddle with axes ranges etc.
ax.relim()
ax.autoscale_view(False, True, True)
for label in ax.get_xticklabels():
label.set_ha("right")
label.set_rotation(30.)
# Bit of magic here: increase vertical scale by one tick to make
# room for the legend.
    # Determine the y-scale up front; it is needed again for the final
    # subplots_adjust call, even when no extra head room is requested.
    is_log = (ax.get_yscale() == "log")
    if add_extra_head_room:
        y_ticks = ax.get_yticks()
        (y_min, y_max) = ax.get_ylim()
y_max_new = y_max
if is_log:
tmp = y_ticks[-1] / y_ticks[-2]
y_max_new = y_max * math.pow(tmp, add_extra_head_room)
else:
tmp = y_ticks[-1] - y_ticks[-2]
y_max_new = y_max + add_extra_head_room * tmp
ax.set_ylim(y_min, y_max_new)
# Add a second vertical axis on the right-hand side.
ax_sec = ax.twinx()
ax_sec.set_ylim(ax.get_ylim())
ax_sec.set_yscale(ax.get_yscale())
for ax_tmp in fig.axes:
for sub_ax in [ax_tmp.xaxis, ax_tmp.yaxis]:
for label in sub_ax.get_ticklabels():
label.set_font_properties(FONT_PROPS_TICK_LABEL)
if is_log:
fig.subplots_adjust(top=.89, bottom=.125, left=.11, right=.925)
else:
fig.subplots_adjust(top=.89, bottom=.125, left=.1, right=.925)
# End of TweakPlot().
######################################################################
if __name__ == "__main__":
desc_str = "This script creates the official CMS pileup plots " \
"based on the output from the pileupCalc.py script."
arg_parser = optparse.OptionParser(description=desc_str)
arg_parser.add_option("--ignore-cache", action="store_true",
help="Ignore all cached PU results " \
"and run pileupCalc. " \
"(Rebuilds the cache as well.)")
(options, args) = arg_parser.parse_args()
if len(args) != 1:
print >> sys.stderr, \
"ERROR Need exactly one argument: a config file name"
sys.exit(1)
config_file_name = args[0]
ignore_cache = options.ignore_cache
cfg_defaults = {
"pileupcalc_flags" : "",
"color_schemes" : "Joe, Greg",
"verbose" : False
}
cfg_parser = ConfigParser.SafeConfigParser(cfg_defaults)
if not os.path.exists(config_file_name):
print >> sys.stderr, \
"ERROR Config file '%s' does not exist" % config_file_name
sys.exit(1)
cfg_parser.read(config_file_name)
# Location of the cached ROOT file.
cache_file_dir = cfg_parser.get("general", "cache_dir")
# Which color scheme to use for drawing the plots.
color_scheme_names_tmp = cfg_parser.get("general", "color_schemes")
color_scheme_names = [i.strip() for i in color_scheme_names_tmp.split(",")]
# Flag to turn on verbose output.
verbose = cfg_parser.getboolean("general", "verbose")
# Some details on how to invoke pileupCalc.
pileupcalc_flags_from_cfg = cfg_parser.get("general", "pileupcalc_flags")
input_json = cfg_parser.get("general", "input_json")
input_lumi_json = cfg_parser.get("general", "input_lumi_json")
# Some things needed for titles etc.
particle_type_str = cfg_parser.get("general", "particle_type_str")
year = int(cfg_parser.get("general", "year"))
cms_energy_str = cfg_parser.get("general", "cms_energy_str")
##########
# Tell the user what's going to happen.
print "Using configuration from file '%s'" % config_file_name
print "Using color schemes '%s'" % ", ".join(color_scheme_names)
print "Using additional pileupCalc flags from configuration: '%s'" % \
pileupcalc_flags_from_cfg
print "Using input JSON filter: %s" % input_json
print "Using input lumi JSON filter: %s" % input_lumi_json
##########
InitMatplotlib()
##########
# First run pileupCalc.
tmp_file_name = os.path.join(cache_file_dir,"pileup_calc_tmp.root")
if ignore_cache:
cmd = "pileupCalc.py -i %s --inputLumiJSON=%s %s %s" % \
(input_json, input_lumi_json,
pileupcalc_flags_from_cfg, tmp_file_name)
print "Running pileupCalc (this may take a while)"
if verbose:
print " pileupCalc cmd: '%s'" % cmd
(status, output) = commands.getstatusoutput(cmd)
if status != 0:
print >> sys.stderr, \
"ERROR Problem running pileupCalc: %s" % output
sys.exit(1)
##########
in_file = TFile.Open(tmp_file_name, "READ")
if not in_file or in_file.IsZombie():
print >> sys.stderr, \
"ERROR Could not read back pileupCalc results"
sys.exit(1)
pileup_hist = in_file.Get("pileup")
pileup_hist.SetDirectory(0)
in_file.Close()
##########
# And this is where the plotting starts.
print "Drawing things..."
ColorScheme.InitColors()
# Turn the ROOT histogram into a Matplotlib one.
bin_edges = [pileup_hist.GetBinLowEdge(i) \
for i in xrange(1, pileup_hist.GetNbinsX() + 1)]
vals = [pileup_hist.GetBinCenter(i) \
for i in xrange(1, pileup_hist.GetNbinsX() + 1)]
weights = [pileup_hist.GetBinContent(i) \
for i in xrange(1, pileup_hist.GetNbinsX() + 1)]
# NOTE: Convert units to /pb!
weights = [1.e-6 * i for i in weights]
# Loop over all color schemes.
for color_scheme_name in color_scheme_names:
print " color scheme '%s'" % color_scheme_name
color_scheme = ColorScheme(color_scheme_name)
color_line_pileup = color_scheme.color_line_pileup
color_fill_pileup = color_scheme.color_fill_pileup
logo_name = color_scheme.logo_name
file_suffix = color_scheme.file_suffix
fig = plt.figure()
for type in ["lin", "log"]:
is_log = (type == "log")
log_setting = False
if is_log:
min_val = min(weights)
exp = RoundAwayFromZero(math.log10(min_val))
log_setting = math.pow(10., exp)
fig.clear()
ax = fig.add_subplot(111)
ax.hist(vals, bins=bin_edges, weights=weights, log=log_setting,
histtype="stepfilled",
edgecolor=color_line_pileup,
facecolor=color_fill_pileup)
# Set titles and labels.
fig.suptitle(r"CMS Average Pileup, " \
"%s, %d, $\mathbf{\sqrt{s} =}$ %s" % \
(particle_type_str, year, cms_energy_str),
fontproperties=FONT_PROPS_SUPTITLE)
ax.set_xlabel(r"Mean number of interactions per crossing",
fontproperties=FONT_PROPS_AX_TITLE)
ax.set_ylabel(r"Recorded Luminosity (%s/%.2f)" % \
(LatexifyUnits("pb^{-1}"),
pileup_hist.GetBinWidth(1)),
fontproperties=FONT_PROPS_AX_TITLE)
# Add the average pileup number to the top right.
ax.text(.95, .925, r"<$\mathbf{\mu}$> = %.0f" % \
round(pileup_hist.GetMean()),
transform = ax.transAxes,
horizontalalignment="right",
fontproperties=FONT_PROPS_AX_TITLE)
# Add the logo.
AddLogo(logo_name, ax)
TweakPlot(fig, ax, True)
log_suffix = ""
if is_log:
log_suffix = "_log"
SavePlot(fig, "pileup_%s_%d%s%s" % \
(particle_type_str, year,
log_suffix, file_suffix))
plt.close()
##########
print "Done"
######################################################################
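# Illustrative config layout for this script, inferred from the cfg_parser
# calls above (paths and values are placeholders):
#
# [general]
# cache_dir = /path/to/cache
# input_json = json_DCSONLY.txt
# input_lumi_json = pileup_latest.txt
# particle_type_str = pp
# year = 2012
# cms_energy_str = 8 TeV
# pileupcalc_flags = --calcMode true --maxPileupBin=40
# color_schemes = Joe, Greg
# verbose = False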
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###################################################################
# Author: Mu yanru
# Date : 2019.3
# Email : muyanru345@163.com
###################################################################
from dayu_widgets3.collapse import MCollapse
from dayu_widgets3.label import MLabel
from dayu_widgets3.qt import *
class CollapseExample(QWidget):
def __init__(self, parent=None):
super(CollapseExample, self).__init__(parent)
self._init_ui()
def _init_ui(self):
        label_1 = MLabel(u'Steve Jobs, born February 24, 1955 in San Francisco, California, was an American inventor, entrepreneur, and co-founder of Apple Inc.')
        label_2 = MLabel(
            u'Stephen Gary Wozniak is an American computer engineer who co-founded Apple Computer (now Apple Inc.) with Steve Jobs. He studied at the University of Colorado before transferring to UC Berkeley, where he earned a bachelor\'s degree in electrical engineering and computer science (EECS) in 1987.')
        label_3 = MLabel(
            u'Jonathan Ive is an industrial designer, a senior vice president of design at Apple, and a British knight. He helped design the iPod, iMac, iPhone, iPad, and many other Apple products. Apart from Jobs, he has had the greatest influence on those famous Apple products.')
        label_1.setWordWrap(True)
        label_2.setWordWrap(True)
        label_3.setWordWrap(True)
        section_list = [
            {
                'title': u'Steve Jobs',
                'expand': True,
                'widget': label_1
            }, {
                'title': u'Stephen Gary Wozniak',
                'expand': True,
                'widget': label_2
            }, {
                'title': u'Jonathan Ive',
                'expand': True,
                'widget': label_3
            }
        ]
section_group = MCollapse()
section_group.add_section_list(section_list)
main_lay = QVBoxLayout()
main_lay.addWidget(section_group)
main_lay.addStretch()
self.setLayout(main_lay)
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
test = CollapseExample()
from dayu_widgets3 import dayu_theme
dayu_theme.apply(test)
test.show()
sys.exit(app.exec_())
|
"""
Here is probably the place to write the docs, since the test-cases
show how the types behave.
Later...
"""
from ctypes import *
import sys, unittest
try:
WINFUNCTYPE
except NameError:
# fake to enable this test on Linux
WINFUNCTYPE = CFUNCTYPE
import _ctypes_test
dll = CDLL(_ctypes_test.__file__)
if sys.platform == "win32":
windll = WinDLL(_ctypes_test.__file__)
class POINT(Structure):
_fields_ = [("x", c_int), ("y", c_int)]
class RECT(Structure):
_fields_ = [("left", c_int), ("top", c_int),
("right", c_int), ("bottom", c_int)]
class FunctionTestCase(unittest.TestCase):
def test_mro(self):
        # in Python 2.3, this raises TypeError: MRO conflict among base classes,
# in Python 2.2 it works.
#
# But in early versions of _ctypes.c, the result of tp_new
# wasn't checked, and it even crashed Python.
# Found by Greg Chapman.
try:
class X(object, Array):
_length_ = 5
_type_ = "i"
except TypeError:
pass
from _ctypes import _Pointer
try:
class X(object, _Pointer):
pass
except TypeError:
pass
from _ctypes import _SimpleCData
try:
class X(object, _SimpleCData):
_type_ = "i"
except TypeError:
pass
try:
class X(object, Structure):
_fields_ = []
except TypeError:
pass
def test_wchar_parm(self):
try:
c_wchar
except NameError:
return
f = dll._testfunc_i_bhilfd
f.argtypes = [c_byte, c_wchar, c_int, c_long, c_float, c_double]
result = f(1, "x", 3, 4, 5.0, 6.0)
self.assertEqual(result, 139)
self.assertEqual(type(result), int)
def test_wchar_result(self):
try:
c_wchar
except NameError:
return
f = dll._testfunc_i_bhilfd
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
f.restype = c_wchar
result = f(0, 0, 0, 0, 0, 0)
self.assertEqual(result, '\x00')
def test_voidresult(self):
f = dll._testfunc_v
f.restype = None
f.argtypes = [c_int, c_int, POINTER(c_int)]
result = c_int()
self.assertEqual(None, f(1, 2, byref(result)))
self.assertEqual(result.value, 3)
def test_intresult(self):
f = dll._testfunc_i_bhilfd
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
f.restype = c_int
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), int)
result = f(-1, -2, -3, -4, -5.0, -6.0)
self.assertEqual(result, -21)
self.assertEqual(type(result), int)
# If we declare the function to return a short,
# is the high part split off?
f.restype = c_short
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), int)
result = f(1, 2, 3, 0x10004, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), int)
# You cannot assign character format codes as restype any longer
self.assertRaises(TypeError, setattr, f, "restype", "i")
def test_floatresult(self):
f = dll._testfunc_f_bhilfd
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
f.restype = c_float
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), float)
result = f(-1, -2, -3, -4, -5.0, -6.0)
self.assertEqual(result, -21)
self.assertEqual(type(result), float)
def test_doubleresult(self):
f = dll._testfunc_d_bhilfd
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
f.restype = c_double
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), float)
result = f(-1, -2, -3, -4, -5.0, -6.0)
self.assertEqual(result, -21)
self.assertEqual(type(result), float)
def test_longdoubleresult(self):
f = dll._testfunc_D_bhilfD
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_longdouble]
f.restype = c_longdouble
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
self.assertEqual(type(result), float)
result = f(-1, -2, -3, -4, -5.0, -6.0)
self.assertEqual(result, -21)
self.assertEqual(type(result), float)
def test_longlongresult(self):
try:
c_longlong
except NameError:
return
f = dll._testfunc_q_bhilfd
f.restype = c_longlong
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
result = f(1, 2, 3, 4, 5.0, 6.0)
self.assertEqual(result, 21)
f = dll._testfunc_q_bhilfdq
f.restype = c_longlong
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double, c_longlong]
result = f(1, 2, 3, 4, 5.0, 6.0, 21)
self.assertEqual(result, 42)
def test_stringresult(self):
f = dll._testfunc_p_p
f.argtypes = None
f.restype = c_char_p
result = f(b"123")
self.assertEqual(result, b"123")
result = f(None)
self.assertEqual(result, None)
def test_pointers(self):
f = dll._testfunc_p_p
f.restype = POINTER(c_int)
f.argtypes = [POINTER(c_int)]
# This only works if the value c_int(42) passed to the
# function is still alive while the pointer (the result) is
# used.
v = c_int(42)
self.assertEqual(pointer(v).contents.value, 42)
result = f(pointer(v))
self.assertEqual(type(result), POINTER(c_int))
self.assertEqual(result.contents.value, 42)
        # This one works...
result = f(pointer(v))
self.assertEqual(result.contents.value, v.value)
p = pointer(c_int(99))
result = f(p)
self.assertEqual(result.contents.value, 99)
arg = byref(v)
result = f(arg)
self.assertNotEqual(result.contents, v.value)
self.assertRaises(ArgumentError, f, byref(c_short(22)))
# It is dangerous, however, because you don't control the lifetime
# of the pointer:
result = f(byref(c_int(99)))
self.assertNotEqual(result.contents, 99)
def test_errors(self):
f = dll._testfunc_p_p
f.restype = c_int
class X(Structure):
_fields_ = [("y", c_int)]
self.assertRaises(TypeError, f, X()) #cannot convert parameter
################################################################
def test_shorts(self):
f = dll._testfunc_callback_i_if
args = []
expected = [262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048,
1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1]
def callback(v):
args.append(v)
return v
CallBack = CFUNCTYPE(c_int, c_int)
cb = CallBack(callback)
f(2**18, cb)
self.assertEqual(args, expected)
################################################################
def test_callbacks(self):
f = dll._testfunc_callback_i_if
f.restype = c_int
MyCallback = CFUNCTYPE(c_int, c_int)
def callback(value):
# print("called back with", value)
return value
cb = MyCallback(callback)
result = f(-10, cb)
self.assertEqual(result, -18)
# test with prototype
f.argtypes = [c_int, MyCallback]
cb = MyCallback(callback)
result = f(-10, cb)
self.assertEqual(result, -18)
AnotherCallback = WINFUNCTYPE(c_int, c_int, c_int, c_int, c_int)
# check that the prototype works: we call f with wrong
# argument types
cb = AnotherCallback(callback)
self.assertRaises(ArgumentError, f, -10, cb)
def test_callbacks_2(self):
# Can also use simple datatypes as argument type specifiers
# for the callback function.
# In this case the callback receives an instance of that type.
f = dll._testfunc_callback_i_if
f.restype = c_int
MyCallback = CFUNCTYPE(c_int, c_int)
f.argtypes = [c_int, MyCallback]
def callback(value):
# print("called back with", value)
self.assertEqual(type(value), int)
return value
cb = MyCallback(callback)
result = f(-10, cb)
self.assertEqual(result, -18)
def test_longlong_callbacks(self):
f = dll._testfunc_callback_q_qf
f.restype = c_longlong
MyCallback = CFUNCTYPE(c_longlong, c_longlong)
f.argtypes = [c_longlong, MyCallback]
def callback(value):
self.assertTrue(isinstance(value, int))
return value & 0x7FFFFFFF
cb = MyCallback(callback)
self.assertEqual(13577625587, f(1000000000000, cb))
def test_errors_2(self):
self.assertRaises(AttributeError, getattr, dll, "_xxx_yyy")
self.assertRaises(ValueError, c_int.in_dll, dll, "_xxx_yyy")
def test_byval(self):
# without prototype
ptin = POINT(1, 2)
ptout = POINT()
# EXPORT int _testfunc_byval(point in, point *pout)
result = dll._testfunc_byval(ptin, byref(ptout))
got = result, ptout.x, ptout.y
expected = 3, 1, 2
self.assertEqual(got, expected)
# with prototype
ptin = POINT(101, 102)
ptout = POINT()
dll._testfunc_byval.argtypes = (POINT, POINTER(POINT))
dll._testfunc_byval.restype = c_int
result = dll._testfunc_byval(ptin, byref(ptout))
got = result, ptout.x, ptout.y
expected = 203, 101, 102
self.assertEqual(got, expected)
def test_struct_return_2H(self):
class S2H(Structure):
_fields_ = [("x", c_short),
("y", c_short)]
dll.ret_2h_func.restype = S2H
dll.ret_2h_func.argtypes = [S2H]
inp = S2H(99, 88)
s2h = dll.ret_2h_func(inp)
self.assertEqual((s2h.x, s2h.y), (99*2, 88*3))
if sys.platform == "win32":
def test_struct_return_2H_stdcall(self):
class S2H(Structure):
_fields_ = [("x", c_short),
("y", c_short)]
windll.s_ret_2h_func.restype = S2H
windll.s_ret_2h_func.argtypes = [S2H]
s2h = windll.s_ret_2h_func(S2H(99, 88))
self.assertEqual((s2h.x, s2h.y), (99*2, 88*3))
def test_struct_return_8H(self):
class S8I(Structure):
_fields_ = [("a", c_int),
("b", c_int),
("c", c_int),
("d", c_int),
("e", c_int),
("f", c_int),
("g", c_int),
("h", c_int)]
dll.ret_8i_func.restype = S8I
dll.ret_8i_func.argtypes = [S8I]
inp = S8I(9, 8, 7, 6, 5, 4, 3, 2)
s8i = dll.ret_8i_func(inp)
self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h),
(9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))
if sys.platform == "win32":
def test_struct_return_8H_stdcall(self):
class S8I(Structure):
_fields_ = [("a", c_int),
("b", c_int),
("c", c_int),
("d", c_int),
("e", c_int),
("f", c_int),
("g", c_int),
("h", c_int)]
windll.s_ret_8i_func.restype = S8I
windll.s_ret_8i_func.argtypes = [S8I]
inp = S8I(9, 8, 7, 6, 5, 4, 3, 2)
s8i = windll.s_ret_8i_func(inp)
self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h),
(9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))
def test_sf1651235(self):
# see http://www.python.org/sf/1651235
proto = CFUNCTYPE(c_int, RECT, POINT)
def callback(*args):
return 0
callback = proto(callback)
self.assertRaises(ArgumentError, lambda: callback((1, 2, 3, 4), POINT()))
if __name__ == '__main__':
unittest.main()
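# A minimal standalone sketch of the argtypes/restype pattern exercised by the
# tests above (assumes the same _ctypes_test helper extension that ships with
# CPython's test suite; the library filename is platform-specific):
#
#   from ctypes import (CDLL, c_byte, c_short, c_int, c_long, c_float,
#                       c_double)
#   dll = CDLL("_ctypes_test.so")
#   f = dll._testfunc_i_bhilfd
#   f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double]
#   f.restype = c_int
#   f(1, 2, 3, 4, 5.0, 6.0)  # -> 1+2+3+4+5+6 == 21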
|
# coding: utf-8
# - run the command:
# > pyccel-quickstart poisson
# this will compile pyccel extensions and install them in $PWD/poisson/usr
# - export the following variables
# > export INCLUDE_DIR=$PWD/poisson/usr/include/poisson
# > export LIB_DIR=$PWD/poisson/usr/lib
# Usage:
# > pyccel poisson_v2.py --include='$INCLUDE_DIR' --libdir='$LIB_DIR' --libs=poisson --no-modules --execute
# Cleaning:
# > rm -f *.mod *.pyccel *.f90 *.o
from pyccelext.math.quadratures import legendre
from pyccelext.math.external.bsp import spl_make_open_knots
from pyccelext.math.external.bsp import spl_compute_spans
from pyccelext.math.external.bsp import spl_construct_grid_from_knots
from pyccelext.math.external.bsp import spl_construct_quadrature_grid
from pyccelext.math.external.bsp import spl_eval_on_grid_splines_ders
# ...
p1 = 2
p2 = 2
n_elements_1 = 8
n_elements_2 = 8
n_elements_1 = n_elements_1 - p1
n_elements_2 = n_elements_2 - p2
# number of derivatives
d1 = 1
d2 = 1
n1 = p1 + n_elements_1
n2 = p2 + n_elements_2
k1 = p1 + 1
k2 = p2 + 1
verbose = False
#verbose = True
# ...
# ...
[u1,w1] = legendre(p1)
# ...
# ...
[u2,w2] = legendre(p2)
# ...
# ...
m1 = n1 + p1 + 1
m2 = n2 + p2 + 1
knots1 = zeros(m1, double)
knots2 = zeros(m2, double)
# call to spl
knots1 = spl_make_open_knots (n1, p1)
# call to spl
knots2 = spl_make_open_knots (n2, p2)
# ...
# ... TODO fix args of zeros
m1 = n_elements_1+1
m2 = n_elements_2+1
grid_1 = zeros(m1, double)
grid_2 = zeros(m2, double)
# call to spl
grid_1 = spl_construct_grid_from_knots(p1, n1, n_elements_1, knots1)
# call to spl
grid_2 = spl_construct_grid_from_knots(p2, n2, n_elements_2, knots2)
# ...
# ... construct the quadrature points grid
points_1 = zeros((k1, n_elements_1), double)
points_2 = zeros((k2, n_elements_2), double)
weights_1 = zeros((k1, n_elements_1), double)
weights_2 = zeros((k2, n_elements_2), double)
# call to spl
[points_1, weights_1] = spl_construct_quadrature_grid(u1, w1, grid_1)
# call to spl
[points_2, weights_2] = spl_construct_quadrature_grid(u2, w2, grid_2)
# ...
# ...
basis_1 = zeros((p1+1, d1+1, k1, n_elements_1), double)
basis_2 = zeros((p2+1, d2+1, k2, n_elements_2), double)
# call to spl
basis_1 = spl_eval_on_grid_splines_ders(n1, p1, d1, knots1, points_1)
# call to spl
basis_2 = spl_eval_on_grid_splines_ders(n2, p2, d2, knots2, points_2)
# ...
# ...
spans_1 = zeros(n_elements_1, int)
spans_2 = zeros(n_elements_2, int)
spans_1 = spl_compute_spans(p1, n1, knots1)
spans_2 = spl_compute_spans(p2, n2, knots2)
# ...
# ...
start_1 = 0
end_1 = n1-1
pad_1 = p1
start_2 = 0
end_2 = n2-1
pad_2 = p2
# ...
# ...
mass = stencil((start_1, start_2), (end_1, end_2), (pad_1, pad_2))
stiffness = stencil((start_1, start_2), (end_1, end_2), (pad_1, pad_2))
rhs = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
# ...
# ... build matrix
for ie1 in range(0, n_elements_1):
for ie2 in range(0, n_elements_2):
i_span_1 = spans_1[ie1]
i_span_2 = spans_2[ie2]
for il_1 in range(0, p1+1):
for jl_1 in range(0, p1+1):
for il_2 in range(0, p2+1):
for jl_2 in range(0, p2+1):
i1 = i_span_1 - p1 - 1 + il_1
j1 = i_span_1 - p1 - 1 + jl_1
i2 = i_span_2 - p2 - 1 + il_2
j2 = i_span_2 - p2 - 1 + jl_2
v_m = 0.0
v_s = 0.0
for g1 in range(0, k1):
for g2 in range(0, k2):
bi_0 = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_x = basis_1[il_1, 1, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_y = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 1, g2, ie2]
bj_0 = basis_1[jl_1, 0, g1, ie1] * basis_2[jl_2, 0, g2, ie2]
bj_x = basis_1[jl_1, 1, g1, ie1] * basis_2[jl_2, 0, g2, ie2]
bj_y = basis_1[jl_1, 0, g1, ie1] * basis_2[jl_2, 1, g2, ie2]
wvol = weights_1[g1, ie1] * weights_2[g2, ie2]
v_m += bi_0 * bj_0 * wvol
v_s += (bi_x * bj_x + bi_y * bj_y) * wvol
mass[j1 - i1, j2 - i2, i1, i2] += v_m
stiffness[j1 - i1, j2 - i2, i1, i2] += v_s
# ...
for i1 in range(0, n1):
for i2 in range(0, n2):
# fresh loop names so the quadrature sizes k1 and k2 defined above
# are not overwritten before the rhs assembly below uses them
for kk1 in range(-p1, p1+1):
for kk2 in range(-p2, p2+1):
print (i1, i2, kk1, kk2, mass[kk1,kk2,i1,i2])
print ('done')
# ... build rhs
for ie1 in range(0, n_elements_1):
for ie2 in range(0, n_elements_2):
i_span_1 = spans_1[ie1]
i_span_2 = spans_2[ie2]
for il_1 in range(0, p1+1):
for il_2 in range(0, p2+1):
i1 = i_span_1 - p1 - 1 + il_1
i2 = i_span_2 - p2 - 1 + il_2
v = 0.0
for g1 in range(0, k1):
for g2 in range(0, k2):
bi_0 = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_x = basis_1[il_1, 1, g1, ie1] * basis_2[il_2, 0, g2, ie2]
bi_y = basis_1[il_1, 0, g1, ie1] * basis_2[il_2, 1, g2, ie2]
x1 = points_1[g1, ie1]
x2 = points_2[g2, ie2]
wvol = weights_1[g1, ie1] * weights_2[g2, ie2]
v += bi_0 * x1 * (1.0 - x1) * x2 * (1.0 - x2) * wvol
rhs[i1, i2] += v
# ...
# ... define matrix-vector product
#$ header procedure mv(double [:,:,:,:], double [:,:], double [:,:])
def mv(mat, x, y):
y = 0.0
for i1 in range(start_1, end_1+1):
for i2 in range(start_2, end_2+1):
for k1 in range(-p1, p1+1):
for k2 in range(-p2, p2+1):
j1 = k1+i1
j2 = k2+i2
y[i1,i2] = y[i1,i2] + mat[k1,k2,i1,i2] * x[j1,j2]
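# In stencil form this computes, for each interior point,
#   y[i1, i2] = sum_{k1=-p1..p1} sum_{k2=-p2..p2} mat[k1, k2, i1, i2] * x[i1+k1, i2+k2]
# i.e. a banded matrix-vector product.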
# ...
# ... define dot for 2d arrays
#$ header function vdot(double[:,:], double[:,:]) results(double)
def vdot(xl, xr):
r = 0.0
# plain euclidean dot product over the interior coefficients
for i1 in range(start_1, end_1+1):
for i2 in range(start_2, end_2+1):
r += xl[i1,i2] * xr[i1,i2]
return r
# ...
# ... CGL performs maxit CG iterations on the linear system Ax = b
# starting from x = x0
#$ header procedure cgl(double [:,:,:,:], double [:,:], double [:,:], int, double)
def cgl(mat, b, x0, maxit, tol):
xk = zeros_like(x0)
mx = zeros_like(x0)
p = zeros_like(x0)
q = zeros_like(x0)
r = zeros_like(x0)
xk = x0
mv(mat, x0, mx)
r = b - mx
p = r
rdr = vdot(r,r)
for i_iter in range(1, maxit+1):
mv(mat, p, q)
alpha = rdr / vdot (p, q)
xk = xk + alpha * p
r = r - alpha * q
norm_err = sqrt(vdot(r, r))
print (i_iter, norm_err)
if norm_err < tol:
x0 = xk
break
rdrold = rdr
rdr = vdot(r, r)
beta = rdr / rdrold
p = r + beta * p
x0 = xk
# ...
# ... CRL performs maxit Conjugate Residual iterations on the linear
# system Ax = b, where A is a symmetric positive definite matrix,
# starting from x = x0
#$ header procedure crl(double [:,:,:,:], double [:,:], double [:,:], int, double)
def crl(mat, b, x0, maxit, tol):
xk = zeros_like(x0)
mx = zeros_like(x0)
p = zeros_like(x0)
q = zeros_like(x0)
r = zeros_like(x0)
s = zeros_like(x0)
xk = x0
mv(mat, x0, mx)
r = b - mx
p = r
mv(mat, p, q)
s = q
sdr = vdot(s,r)
for i_iter in range(1, maxit+1):
alpha = sdr / vdot (q, q)
xk = xk + alpha * p
r = r - alpha * q
norm_err = sqrt(vdot(r, r))
print (i_iter, norm_err)
if norm_err < tol:
x0 = xk
break
mv(mat, r, s)
sdrold = sdr
sdr = vdot(s, r)
beta = sdr / sdrold
p = r + beta * p
q = s + beta * q
x0 = xk
# ...
# ...
x0 = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
xn = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
y = vector((start_1-pad_1, start_2-pad_2), (end_1+pad_1, end_2+pad_2))
# ...
# ...
n_maxiter = 100
tol = 1.0e-7
xn = 0.0
cgl(mass, rhs, xn, n_maxiter, tol)
# TODO crl is converging slowly. must be investigated
#xn = 0.0
#crl(stiffness, rhs, xn, n_maxiter, tol)
mv(mass, xn, x0)
print ('> residual error = ', max(abs(x0-rhs)))
# ...
del knots1
del grid_1
del points_1
del weights_1
del basis_1
del spans_1
del knots2
del grid_2
del points_2
del weights_2
del basis_2
del spans_2
del mass
del stiffness
del rhs
|
#!/usr/bin/env python
import rospy
import tf
import geometry_msgs.msg
if __name__ == '__main__':
rospy.init_node('world_aruco_ID_1_tf_listener')
listener = tf.TransformListener()
# Publishes the pose of the ArUco marker expressed in the world frame.
# This is not a target pose: only the position component is used downstream.
pub_aruco_world_pose = rospy.Publisher("world_to_aruco_ID_1/target_pose", geometry_msgs.msg.Pose, queue_size=1)
#rate = rospy.Rate(10.0) # buffers them for up to 10 seconds
while not rospy.is_shutdown():
msg = geometry_msgs.msg.Pose()
try:
# Look up the transform of interest: world -> camera_aruco_ID_1
(position,orientation) = listener.lookupTransform("world", "camera_aruco_ID_1", rospy.Time(0))
msg.position.x = position[0]
msg.position.y = position[1]
msg.position.z = position[2]
msg.orientation.x = orientation[0]
msg.orientation.y = orientation[1]
msg.orientation.z = orientation[2]
msg.orientation.w = orientation[3]
pub_aruco_world_pose.publish(msg)
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
continue
rospy.sleep(1)
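# Note: lookupTransform returns (translation, rotation) as ([x, y, z],
# [x, y, z, w]); the quaternion component order above matches that convention.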
|
import asyncio
import logging
from typing import Any # noqa: F401
from typing import Awaitable, Dict, List, Optional, Tuple
from libp2p.exceptions import ParseError
from libp2p.io.exceptions import IncompleteReadError
from libp2p.network.connection.exceptions import RawConnError
from libp2p.peer.id import ID
from libp2p.security.secure_conn_interface import ISecureConn
from libp2p.stream_muxer.abc import IMuxedConn, IMuxedStream
from libp2p.typing import TProtocol
from libp2p.utils import (
decode_uvarint_from_stream,
encode_uvarint,
encode_varint_prefixed,
read_varint_prefixed_bytes,
)
from .constants import HeaderTags
from .datastructures import StreamID
from .exceptions import MplexUnavailable
from .mplex_stream import MplexStream
MPLEX_PROTOCOL_ID = TProtocol("/mplex/6.7.0")
logger = logging.getLogger("libp2p.stream_muxer.mplex.mplex")
class Mplex(IMuxedConn):
"""
reference: https://github.com/libp2p/go-mplex/blob/master/multiplex.go
"""
secured_conn: ISecureConn
peer_id: ID
next_channel_id: int
streams: Dict[StreamID, MplexStream]
streams_lock: asyncio.Lock
new_stream_queue: "asyncio.Queue[IMuxedStream]"
event_shutting_down: asyncio.Event
event_closed: asyncio.Event
_tasks: List["asyncio.Future[Any]"]
def __init__(self, secured_conn: ISecureConn, peer_id: ID) -> None:
"""
create a new muxed connection.
:param secured_conn: an instance of ``ISecureConn``
:param generic_protocol_handler: generic protocol handler
for new muxed streams
:param peer_id: peer_id of peer the connection is to
"""
self.secured_conn = secured_conn
self.next_channel_id = 0
# Set peer_id
self.peer_id = peer_id
# Mapping from stream ID -> buffer of messages for that stream
self.streams = {}
self.streams_lock = asyncio.Lock()
self.new_stream_queue = asyncio.Queue()
self.event_shutting_down = asyncio.Event()
self.event_closed = asyncio.Event()
self._tasks = []
# Kick off reading
self._tasks.append(asyncio.ensure_future(self.handle_incoming()))
@property
def is_initiator(self) -> bool:
return self.secured_conn.is_initiator
async def close(self) -> None:
"""close the stream muxer and underlying secured connection."""
if self.event_shutting_down.is_set():
return
# Set the `event_shutting_down`, to allow graceful shutdown.
self.event_shutting_down.set()
await self.secured_conn.close()
# Block until `event_closed` is set by the cleanup routine.
await self.event_closed.wait()
def is_closed(self) -> bool:
"""
check connection is fully closed.
:return: true if successful
"""
return self.event_closed.is_set()
def _get_next_channel_id(self) -> int:
"""
Get next available stream id.
:return: next available stream id for the connection
"""
next_id = self.next_channel_id
self.next_channel_id += 1
return next_id
async def _initialize_stream(self, stream_id: StreamID, name: str) -> MplexStream:
stream = MplexStream(name, stream_id, self)
async with self.streams_lock:
self.streams[stream_id] = stream
return stream
async def open_stream(self) -> IMuxedStream:
"""
creates a new muxed_stream.
:return: a new ``MplexStream``
"""
channel_id = self._get_next_channel_id()
stream_id = StreamID(channel_id=channel_id, is_initiator=True)
# Default stream name is the `channel_id`
name = str(channel_id)
stream = await self._initialize_stream(stream_id, name)
await self.send_message(HeaderTags.NewStream, name.encode(), stream_id)
return stream
async def _wait_until_shutting_down_or_closed(self, coro: Awaitable[Any]) -> Any:
task_coro = asyncio.ensure_future(coro)
task_wait_closed = asyncio.ensure_future(self.event_closed.wait())
task_wait_shutting_down = asyncio.ensure_future(self.event_shutting_down.wait())
done, pending = await asyncio.wait(
[task_coro, task_wait_closed, task_wait_shutting_down],
return_when=asyncio.FIRST_COMPLETED,
)
for fut in pending:
fut.cancel()
if task_wait_closed in done:
raise MplexUnavailable("Mplex is closed")
if task_wait_shutting_down in done:
raise MplexUnavailable("Mplex is shutting down")
return task_coro.result()
async def accept_stream(self) -> IMuxedStream:
"""accepts a muxed stream opened by the other end."""
return await self._wait_until_shutting_down_or_closed(
self.new_stream_queue.get()
)
async def send_message(
self, flag: HeaderTags, data: Optional[bytes], stream_id: StreamID
) -> int:
"""
sends a message over the connection.
:param header: header to use
:param data: data to send in the message
:param stream_id: stream the message is in
"""
# << by 3, then or with flag
header = encode_uvarint((stream_id.channel_id << 3) | flag.value)
if data is None:
data = b""
_bytes = header + encode_varint_prefixed(data)
return await self._wait_until_shutting_down_or_closed(
self.write_to_stream(_bytes)
)
async def write_to_stream(self, _bytes: bytes) -> int:
"""
writes a byte array to a secured connection.
:param _bytes: byte array to write
:return: length written
"""
await self.secured_conn.write(_bytes)
return len(_bytes)
async def handle_incoming(self) -> None:
"""Read a message off of the secured connection and add it to the
corresponding message buffer."""
while True:
try:
await self._handle_incoming_message()
except MplexUnavailable as e:
logger.debug("mplex unavailable while waiting for incoming: %s", e)
break
# Force context switch
await asyncio.sleep(0)
# If we enter here, it means this connection is shutting down.
# We should clean things up.
await self._cleanup()
async def read_message(self) -> Tuple[int, int, bytes]:
"""
Read a single message off of the secured connection.
:return: stream_id, flag, message contents
"""
# FIXME: No timeout is used in Go implementation.
try:
header = await decode_uvarint_from_stream(self.secured_conn)
message = await asyncio.wait_for(
read_varint_prefixed_bytes(self.secured_conn), timeout=5
)
except (ParseError, RawConnError, IncompleteReadError) as error:
raise MplexUnavailable(
"failed to read messages correctly from the underlying connection"
) from error
except asyncio.TimeoutError as error:
raise MplexUnavailable(
"failed to read more message body within the timeout"
) from error
flag = header & 0x07
channel_id = header >> 3
return channel_id, flag, message
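# Worked example of the frame header layout shared by send_message() and
# read_message() above (illustrative values, per the mplex header tags):
#   header = (channel_id << 3) | flag
#   channel_id=5, flag=HeaderTags.MessageInitiator.value == 2
#   -> header = (5 << 3) | 2 == 42
#   decoding: 42 & 0x07 == 2 (flag), 42 >> 3 == 5 (channel_id)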
async def _handle_incoming_message(self) -> None:
"""
Read and handle a new incoming message.
:raise MplexUnavailable: raised when `Mplex` encounters a fatal error or is shutting down.
"""
channel_id, flag, message = await self._wait_until_shutting_down_or_closed(
self.read_message()
)
stream_id = StreamID(channel_id=channel_id, is_initiator=bool(flag & 1))
if flag == HeaderTags.NewStream.value:
await self._handle_new_stream(stream_id, message)
elif flag in (
HeaderTags.MessageInitiator.value,
HeaderTags.MessageReceiver.value,
):
await self._handle_message(stream_id, message)
elif flag in (HeaderTags.CloseInitiator.value, HeaderTags.CloseReceiver.value):
await self._handle_close(stream_id)
elif flag in (HeaderTags.ResetInitiator.value, HeaderTags.ResetReceiver.value):
await self._handle_reset(stream_id)
else:
# Received a message with an unknown flag: reset the stream if it exists.
# TODO: logging
async with self.streams_lock:
if stream_id in self.streams:
stream = self.streams[stream_id]
await stream.reset()
async def _handle_new_stream(self, stream_id: StreamID, message: bytes) -> None:
async with self.streams_lock:
if stream_id in self.streams:
# `NewStream` for the same id is received twice...
raise MplexUnavailable(
f"received NewStream message for existing stream: {stream_id}"
)
mplex_stream = await self._initialize_stream(stream_id, message.decode())
await self._wait_until_shutting_down_or_closed(
self.new_stream_queue.put(mplex_stream)
)
async def _handle_message(self, stream_id: StreamID, message: bytes) -> None:
async with self.streams_lock:
if stream_id not in self.streams:
# We received a message for a stream `stream_id` that was never
# accepted. This is abnormal; possibly disconnect?
# TODO: Warn and emit logs about this.
return
stream = self.streams[stream_id]
async with stream.close_lock:
if stream.event_remote_closed.is_set():
# TODO: Warn "Received data from remote after stream was closed by them. (len = %d)" # noqa: E501
return
await self._wait_until_shutting_down_or_closed(
stream.incoming_data.put(message)
)
async def _handle_close(self, stream_id: StreamID) -> None:
async with self.streams_lock:
if stream_id not in self.streams:
# Ignore unmatched messages for now.
return
stream = self.streams[stream_id]
# NOTE: If remote is already closed, then return: Technically a bug
# on the other side. We should consider killing the connection.
async with stream.close_lock:
if stream.event_remote_closed.is_set():
return
is_local_closed: bool
async with stream.close_lock:
stream.event_remote_closed.set()
is_local_closed = stream.event_local_closed.is_set()
# If local is also closed, both sides are closed. Then, we should clean up
# the entry of this stream, to avoid others from accessing it.
if is_local_closed:
async with self.streams_lock:
self.streams.pop(stream_id, None)
async def _handle_reset(self, stream_id: StreamID) -> None:
async with self.streams_lock:
if stream_id not in self.streams:
# This is *ok*. We forget the stream on reset.
return
stream = self.streams[stream_id]
async with stream.close_lock:
if not stream.event_remote_closed.is_set():
stream.event_reset.set()
stream.event_remote_closed.set()
# If local is not closed, we should close it.
if not stream.event_local_closed.is_set():
stream.event_local_closed.set()
async with self.streams_lock:
self.streams.pop(stream_id, None)
async def _cleanup(self) -> None:
if not self.event_shutting_down.is_set():
self.event_shutting_down.set()
async with self.streams_lock:
for stream in self.streams.values():
async with stream.close_lock:
if not stream.event_remote_closed.is_set():
stream.event_remote_closed.set()
stream.event_reset.set()
stream.event_local_closed.set()
self.streams = None
self.event_closed.set()
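# A minimal usage sketch (names are placeholders; `secured_conn` and `peer_id`
# come from an earlier libp2p security handshake):
#
#   muxed = Mplex(secured_conn, peer_id)
#   outbound = await muxed.open_stream()    # initiator side
#   inbound = await muxed.accept_stream()   # listener side
#   await muxed.close()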
|
# Copyright 2020 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Distills a trained style prediction network using a MobileNetV2.
"""
import ast
import os
from magenta.models.arbitrary_image_stylization import arbitrary_image_stylization_build_mobilenet_model as build_mobilenet_model
from magenta.models.arbitrary_image_stylization import arbitrary_image_stylization_build_model as build_model
from magenta.models.image_stylization import image_utils
import tensorflow.compat.v1 as tf
import tf_slim as slim
DEFAULT_CONTENT_WEIGHTS = '{"vgg_16/conv3": 1}'
DEFAULT_STYLE_WEIGHTS = ('{"vgg_16/conv1": 0.5e-3, "vgg_16/conv2": 0.5e-3,'
' "vgg_16/conv3": 0.5e-3, "vgg_16/conv4": 0.5e-3}')
flags = tf.app.flags
flags.DEFINE_float('clip_gradient_norm', 0, 'Clip gradients to this norm')
flags.DEFINE_float('learning_rate', 1e-5, 'Learning rate')
flags.DEFINE_float('total_variation_weight', 1e4, 'Total variation weight')
flags.DEFINE_string('content_weights', DEFAULT_CONTENT_WEIGHTS,
'Content weights')
flags.DEFINE_string('style_weights', DEFAULT_STYLE_WEIGHTS, 'Style weights')
flags.DEFINE_integer('batch_size', 8, 'Batch size.')
flags.DEFINE_integer('image_size', 256, 'Image size.')
flags.DEFINE_boolean('random_style_image_size', True,
'Whether to resize the style images '
'to a random size or not.')
flags.DEFINE_boolean(
'augment_style_images', True,
'Whether to augment style images or not.')
flags.DEFINE_boolean('center_crop', False,
'Whether to center crop the style images.')
flags.DEFINE_integer('ps_tasks', 0,
'Number of parameter servers. If 0, parameters '
'are handled locally by the worker.')
flags.DEFINE_integer('save_summaries_secs', 15,
'Frequency at which summaries are saved, in seconds.')
flags.DEFINE_integer('save_interval_secs', 15,
'Frequency at which the model is saved, in seconds.')
flags.DEFINE_integer('task', 0, 'Task ID. Used when training with multiple '
'workers to identify each worker.')
flags.DEFINE_integer('train_steps', 8000000, 'Number of training steps.')
flags.DEFINE_string('master', '', 'BNS name of the TensorFlow master to use.')
flags.DEFINE_string('style_dataset_file', None, 'Style dataset file.')
flags.DEFINE_string('train_dir', None,
'Directory for checkpoints and summaries.')
flags.DEFINE_string('initial_checkpoint', None,
'Path to the pre-trained arbitrary_image_stylization '
'checkpoint')
flags.DEFINE_string('mobilenet_checkpoint', 'mobilenet_v2_1.0_224.ckpt',
'Path to the pre-trained mobilenet checkpoint')
flags.DEFINE_boolean('use_true_loss', False,
'Add true style loss term based on VGG.')
flags.DEFINE_float('true_loss_weight', 1e-9,
'Scale factor for the true style loss')
FLAGS = flags.FLAGS
def main(unused_argv=None):
tf.logging.set_verbosity(tf.logging.INFO)
with tf.Graph().as_default():
# Forces all input processing onto CPU in order to reserve the GPU for the
# forward inference and back-propagation.
device = '/cpu:0' if not FLAGS.ps_tasks else '/job:worker/cpu:0'
with tf.device(
tf.train.replica_device_setter(FLAGS.ps_tasks, worker_device=device)):
# Load content images
content_inputs_, _ = image_utils.imagenet_inputs(FLAGS.batch_size,
FLAGS.image_size)
# Loads style images.
[style_inputs_, _,
style_inputs_orig_] = image_utils.arbitrary_style_image_inputs(
FLAGS.style_dataset_file,
batch_size=FLAGS.batch_size,
image_size=FLAGS.image_size,
shuffle=True,
center_crop=FLAGS.center_crop,
augment_style_images=FLAGS.augment_style_images,
random_style_image_size=FLAGS.random_style_image_size)
with tf.device(tf.train.replica_device_setter(FLAGS.ps_tasks)):
# Process style and content weight flags.
content_weights = ast.literal_eval(FLAGS.content_weights)
style_weights = ast.literal_eval(FLAGS.style_weights)
# Define the model
stylized_images, \
true_loss, \
_, \
bottleneck_feat = build_mobilenet_model.build_mobilenet_model(
content_inputs_,
style_inputs_,
mobilenet_trainable=True,
style_params_trainable=False,
style_prediction_bottleneck=100,
adds_losses=True,
content_weights=content_weights,
style_weights=style_weights,
total_variation_weight=FLAGS.total_variation_weight,
)
_, inception_bottleneck_feat = build_model.style_prediction(
style_inputs_,
[],
[],
is_training=False,
trainable=False,
inception_end_point='Mixed_6e',
style_prediction_bottleneck=100,
reuse=None,
)
print('PRINTING TRAINABLE VARIABLES')
for x in tf.trainable_variables():
print(x)
mse_loss = tf.losses.mean_squared_error(
inception_bottleneck_feat, bottleneck_feat)
total_loss = mse_loss
if FLAGS.use_true_loss:
true_loss = FLAGS.true_loss_weight*true_loss
total_loss += true_loss
if FLAGS.use_true_loss:
tf.summary.scalar('mse', mse_loss)
tf.summary.scalar('true_loss', true_loss)
tf.summary.scalar('total_loss', total_loss)
tf.summary.image('image/0_content_inputs', content_inputs_, 3)
tf.summary.image('image/1_style_inputs_orig', style_inputs_orig_, 3)
tf.summary.image('image/2_style_inputs_aug', style_inputs_, 3)
tf.summary.image('image/3_stylized_images', stylized_images, 3)
mobilenet_variables_to_restore = slim.get_variables_to_restore(
include=['MobilenetV2'],
exclude=['global_step'])
optimizer = tf.train.AdamOptimizer(FLAGS.learning_rate)
train_op = slim.learning.create_train_op(
total_loss,
optimizer,
clip_gradient_norm=FLAGS.clip_gradient_norm,
summarize_gradients=False
)
init_fn = slim.assign_from_checkpoint_fn(
FLAGS.initial_checkpoint,
slim.get_variables_to_restore(
exclude=['MobilenetV2', 'mobilenet_conv', 'global_step']))
init_pretrained_mobilenet = slim.assign_from_checkpoint_fn(
FLAGS.mobilenet_checkpoint, mobilenet_variables_to_restore)
def init_sub_networks(session):
init_fn(session)
init_pretrained_mobilenet(session)
slim.learning.train(
train_op=train_op,
logdir=os.path.expanduser(FLAGS.train_dir),
master=FLAGS.master,
is_chief=FLAGS.task == 0,
number_of_steps=FLAGS.train_steps,
init_fn=init_sub_networks,
save_summaries_secs=FLAGS.save_summaries_secs,
save_interval_secs=FLAGS.save_interval_secs)
def console_entry_point():
tf.disable_v2_behavior()
tf.app.run(main)
if __name__ == '__main__':
console_entry_point()
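# Example invocation (a sketch; the script name and all paths are
# placeholders, the flag names are defined above):
#   python arbitrary_image_stylization_distill_mobilenet.py \
#     --style_dataset_file=/path/to/styles.tfrecord \
#     --initial_checkpoint=/path/to/arbitrary_style/model.ckpt \
#     --mobilenet_checkpoint=/path/to/mobilenet_v2_1.0_224.ckpt \
#     --train_dir=/tmp/distill_train \
#     --use_true_loss=false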
|
julia_llvmpasses_factory = util.BuildFactory()
julia_llvmpasses_factory.useProgress = True
julia_llvmpasses_factory.addSteps([
# Fetch first (allowing failure if no existing clone is present)
steps.ShellCommand(
name="git fetch",
command=["git", "fetch", "--tags", "--all", "--force"],
flunkOnFailure=False
),
# Clone julia
steps.Git(
name="Julia checkout",
repourl=util.Property('repository', default='git://github.com/JuliaLang/julia.git'),
mode='full',
method='fresh',
submodules=True,
clobberOnFailure=True,
progress=True,
retryFetch=True,
getDescription={'--tags': True},
),
# Make release build
steps.ShellCommand(
name="make release",
command=["/bin/sh", "-c", util.Interpolate("%(prop:make_cmd)s -j%(prop:nthreads)s %(prop:flags)s %(prop:extra_make_flags)s release")],
haltOnFailure=True,
# Fail out if 60 minutes have gone by with nothing printed to stdout
timeout=60*60,
# Kill everything if the overall job has taken more than 2 hours
maxTime=60*60*2,
# Give the process 10 seconds to print out the current backtraces when being killed
sigtermTime=10,
),
steps.ShellCommand(
name="make test/llvmpasses",
command=["/bin/sh", "-c", util.Interpolate("%(prop:make_cmd)s -C test/llvmpasses -j%(prop:nthreads)s %(prop:flags)s %(prop:extra_make_flags)s")],
haltOnFailure=True,
# Fail out if 60 minutes have gone by with nothing printed to stdout
timeout=60*60,
# Kill everything if the overall job has taken more than 2 hours
maxTime=60*60*2,
# Give the process 10 seconds to print out the current backtraces when being killed
sigtermTime=10,
),
])
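# How util.Interpolate expands at runtime (illustrative property values):
# with make_cmd="make", nthreads=8, flags="" and extra_make_flags="",
# the "make release" step above runs:
#   /bin/sh -c "make -j8   release"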
c['schedulers'].append(schedulers.AnyBranchScheduler(
name="Julia test llvmpasses",
change_filter=util.ChangeFilter(filter_fn=julia_ci_filter),
builderNames=["llvmpasses_linux64"],
treeStableTimer=1,
))
# Add a builder for these jobs
c['builders'].append(util.BuilderConfig(
name="llvmpasses_linux64",
workernames=builder_mapping["linux64"],
collapseRequests=False,
tags=["Packaging"],
factory=julia_llvmpasses_factory,
))
# Add a scheduler for building release candidates/triggering builds manually
c['schedulers'].append(schedulers.ForceScheduler(
name="llvmpasses",
label="Force llvmpasses",
builderNames=["llvmpasses_linux64"],
reason=util.FixedParameter(name="reason", default=""),
codebases=[
util.CodebaseParameter(
"",
name="",
branch=util.FixedParameter(name="branch", default=""),
repository=util.FixedParameter(name="repository", default=""),
project=util.FixedParameter(name="project", default="Packaging"),
)
],
properties=[
util.StringParameter(
name="extra_make_flags",
label="Extra Make Flags",
size=30,
default="",
),
],
))
|
from typing import List, Tuple
import pandas as pd
@pd.api.extensions.register_dataframe_accessor("tag")
class CaTaggingAccessor:
def __init__(self, df: pd.DataFrame):
self._df = df
def group_by_sentences(self):
yield from (x[1] for x in self._df.groupby("sentence_id"))
def group_by_documents(self):
yield from (x[1] for x in self._df.groupby("document_id"))
def number_of_sentences(self):
return len(self._df.groupby("sentence_id"))
def number_of_documents(self):
return len(self._df.groupby("document_id"))
def split_x_y_sentencewise(self) -> Tuple[List[List[str]], List[List[str]]]:
X = []
y = []
for sent in self._df.tag.group_by_sentences():
words = list(sent["word"])
labels = list(sent["label"])
X.append(words)
y.append(labels)
return X, y
def get_times_per_document(self) -> List[int]:
t = []
# Right now, we assume that the time per token is the same for every
# token in a sentence. This might be an invalid assumption.
for df in self._df.tag.group_by_sentences():
t.append(df["t"].values[0])
return t
def group_by_documents_x_y(self) -> Tuple[List[List[List[str]]], List[List[List[str]]]]:
"""Returns a list of documents that each contain a list of sentences and
their respective labels grouped the same way.
"""
X = []
y = []
for doc in self._df.tag.group_by_documents():
X_doc = []
y_doc = []
for sent in doc.tag.group_by_sentences():
words = list(sent["word"])
labels = list(sent["label"])
X_doc.append(words)
y_doc.append(labels)
X.append(X_doc)
y.append(y_doc)
return X, y
def group_by_sentences_x_y(self) -> Tuple[List[List[str]], List[List[str]]]:
"""Returns a list of sentences and their respective labels grouped the same way."""
X = []
y = []
for sent in self._df.tag.group_by_sentences():
words = list(sent["word"])
labels = list(sent["label"])
assert len(words) == len(labels)
X.append(words)
y.append(labels)
return X, y
@pd.api.extensions.register_dataframe_accessor("dclass")
class CaDocumentClassificationAccessor:
def __init__(self, df: pd.DataFrame):
self._df = df
def split_x_y(self) -> Tuple[List[str], List[str]]:
X = self._df["sentence"]
y = self._df["label"]
return X.values.tolist(), y.values.tolist()
def get_time_per_sentence(self) -> List[int]:
return self._df["t"].values.tolist()
@pd.api.extensions.register_dataframe_accessor("pair")
class CaPairAccessor:
def __init__(self, df: pd.DataFrame):
self._df = df
def split_args_y(self) -> Tuple[List[str], List[str], List[str]]:
args1 = self._df["arg1"].values.tolist()
args2 = self._df["arg2"].values.tolist()
label = self._df["label"].values.tolist()
return args1, args2, label
def get_time_per_sentence(self) -> List[int]:
return self._df["t"].values.tolist()
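# A minimal usage sketch of the "tag" accessor registered above (the column
# names word/label/sentence_id/document_id/t follow the layout the accessor
# assumes; the data here is made up):
#
#   import pandas as pd
#   df = pd.DataFrame({
#       "document_id": [0, 0], "sentence_id": [0, 0],
#       "word": ["Hello", "world"], "label": ["O", "O"], "t": [3, 3],
#   })
#   X, y = df.tag.split_x_y_sentencewise()
#   # X == [["Hello", "world"]], y == [["O", "O"]]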
|
# Copyright 2019 The Forte Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from collections import OrderedDict
from typing import Optional, Any, List
class Config:
indent: int = 4
line_break: str = os.linesep
def indent(level: int) -> str:
return ' ' * Config.indent * level
def indent_line(line: str, level: int) -> str:
return f"{indent(level)}{line}" if line else ''
def indent_code(code_lines: List[str], level: int = 0) -> str:
lines = []
for code in code_lines:
lines.extend(code.split(Config.line_break) if code is not None else [])
return Config.line_break.join([indent_line(line, level) for line in lines])
def empty_lines(num: int):
return ''.join([Config.line_break] * num)
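# Worked example of the helpers above (assuming the default Config.indent == 4):
#   indent_code(["def f():", "    pass"], level=1)
#   -> "    def f():" + os.linesep + "        pass"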
class Item:
def __init__(self, name: str, description: Optional[str]):
self.name: str = name
self.description: Optional[str] = description
def to_description(self, level: int) -> Optional[str]:
if self.description is not None:
return indent_code([self.description], level)
return None
def to_code(self, level: int) -> str:
raise NotImplementedError
class Property(Item):
def __init__(self,
name: str,
type_str: str,
description: Optional[str] = None,
default: Any = None):
super().__init__(name, description)
self.type_str = type_str
self.default = default
def to_getter_setter_code(self, level) -> str:
"""
Returns: getter and setter functions generated by a property.
"""
name = self.name
lines = [("@property", 0),
(f"def {name}(self):", 0),
(f"return self._{name}", 1),
(empty_lines(0), 0),
(f"def set_{name}(self, {name}: {self.to_code(0)}):", 0),
(f"self.set_fields(_{name}={self.to_field_value()})", 1),
(empty_lines(0), 0)]
return indent_code([indent_line(*line) for line in lines], level)
def to_init_code(self, level: int) -> str:
return indent_line(f"self._{self.name}: {self.to_code(0)} = "
f"{repr(self.default)}", level)
def to_description(self, level: int) -> Optional[str]:
if self.description is not None and self.description.strip() != '':
type_str = f'{self.to_code(0)}'
type_str = f' ({type_str})' if type_str.strip() != '' else type_str
return indent_line(f"{self.name}{type_str}: "
f"{self.description}", level)
return None
def to_field_value(self):
raise NotImplementedError
class ClassAttributeItem(Property):
def to_code(self, level: int = 0) -> str:
return self.type_str
def to_init_code(self, level: int) -> str:
type_code = f'{self.to_code(0)}'
type_ = f': {type_code}' if type_code.strip() != '' else ''
return indent_line(f"{self.name}{type_} = {self.default}", level)
def to_field_value(self):
pass
class BasicItem(Property):
TYPES = {'int', 'float', 'str', 'bool'}
def to_code(self, level: int = 0) -> str:
return f"typing.Optional[{self.type_str}]"
def to_field_value(self):
if self.type_str in self.TYPES:
return self.name
return f"{self.name}.tid"
class CompositeItem(Property):
TYPES = {'List'}
def __init__(self,
name: str,
type_str: str,
item_type: str,
description: Optional[str] = None,
default: Any = None):
super().__init__(name, type_str, description, default)
self.item_type = item_type
def to_code(self, level: int = 0) -> str:
# TODO: Assumes only one type of elements are allowed in the list,
# allow multiple types
# items = list(OrderedDict([(item, None)
# for item in self.items]).keys())
# item_type_str = f"{', '.join(self.item_type)}"
# if len(self.items) > 1:
# item_type_str = f"typing.Union[{item_type_str}]"
return f"typing.Optional[{self.type_str}[{self.item_type}]]"
def to_field_value(self):
item_value_str = BasicItem('item', self.item_type).to_field_value()
return f"[{item_value_str} for item in {self.name}]"
class DefinitionItem(Item):
def __init__(self, name: str,
class_type: str,
init_args: Optional[str] = None,
properties: Optional[List[Property]] = None,
class_attributes: Optional[List[Property]] = None,
description: Optional[str] = None):
super().__init__(name, description)
self.class_type = class_type
self.properties: List[Property] = \
[] if properties is None else properties
self.class_attributes = [] if class_attributes is None \
else class_attributes
self.description = description if description else None
self.init_args = init_args if init_args is not None else ''
self.init_args = self.init_args.replace('=', ' = ')
def to_init_code(self, level: int) -> str:
return indent_line(f"def __init__(self, {self.init_args}):", level)
def to_code(self, level: int) -> str:
super_args = ', '.join([item.split(':')[0].strip()
for item in self.init_args.split(',')])
raw_desc = self.to_description(1)
desc: str = '' if raw_desc is None else raw_desc
lines = [
empty_lines(1),
f"__all__.extend('{self.name}')",
empty_lines(1),
f"class {self.name}({self.class_type}):",
]
lines += [desc] if desc.strip() else []
lines += [item.to_init_code(1) for item in self.class_attributes]
lines += [empty_lines(0)]
lines += [self.to_init_code(1),
indent_line(f"super().__init__({super_args})", 2)]
lines += [item.to_init_code(2) for item in self.properties]
lines += [empty_lines(0)]
lines += [item.to_getter_setter_code(1) for item in self.properties]
return indent_code(lines, level)
@staticmethod
def to_item_descs(items, title):
item_descs = [item.to_description(0) for item in items]
item_descs = [item for item in item_descs if item is not None]
if len(item_descs) > 0:
item_descs = [indent_line(title, 1)] + \
[indent_line(desc, 2) for desc in item_descs]
return item_descs
def to_description(self, level: int) -> Optional[str]:
class_desc = [] if self.description is None else [self.description]
item_descs = self.to_item_descs(self.properties, 'Args:')
att_descs = self.to_item_descs(self.class_attributes, 'Attr:')
descs = class_desc + item_descs + att_descs
if len(descs) == 0:
return ""
quotes = indent_line('"""', 0)
return indent_code([quotes] + descs + [quotes], level)
class FileItem:
def __init__(self,
entry_item: DefinitionItem,
entry_file: str,
ignore_errors: Optional[List[str]],
description: Optional[str],
imports: Optional[List[str]]):
self.description = description
self.ignore_errors = [] if not ignore_errors else ignore_errors
self.imports = [] if not imports else list(set(imports))
self.entry_item = entry_item
self.entry_file_exists = os.path.exists(entry_file)
def to_code(self, level: int) -> str:
lines: List[str] = []
if not self.entry_file_exists:
lines = [self.to_description(0),
self.to_import_code(0),
empty_lines(1), '__all__ = []']
lines.append(self.entry_item.to_code(0))
return indent_code(lines, level)
def to_description(self, level):
quotes = '"""'
lines = self.ignore_errors + [quotes, self.description, quotes]
return indent_code(lines, level)
def to_import_code(self, level):
imports_set: "OrderedDict[str, None]" = OrderedDict()
for import_ in sorted(self.imports):
imports_set[f"import {import_}"] = None
return indent_code(list(imports_set), level)
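# A rough sketch of the code DefinitionItem.to_code() generates (illustrative
# input; output abbreviated):
#   item = DefinitionItem("Token", "Annotation", init_args="begin: int = 0",
#                         properties=[BasicItem("pos", "str")])
#   item.to_code(0) produces roughly:
#       __all__.append('Token')
#       class Token(Annotation):
#           def __init__(self, begin: int  =  0):
#               super().__init__(begin)
#               self._pos: typing.Optional[str] = None
#           @property
#           def pos(self):
#               return self._pos
#           ...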
|
# Based on scikit-learn/sklearn/utils/estimator_checks.py
import itertools
from functools import partial
def get_entry_params(entry):
user_parameters = entry.describe()["user_parameters"]
if not user_parameters:
return []
keys = [p["name"] for p in user_parameters]
try:
values = [p["allowed"] for p in user_parameters]
except KeyError:
return []
params = [None]
params.extend([dict(zip(keys, p)) for p in itertools.product(*values)])
return params
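# For example (a sketch): an entry whose describe() returns
#   {"user_parameters": [{"name": "year", "allowed": [2019, 2020]}]}
# yields [None, {"year": 2019}, {"year": 2020}].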
def _set_check_ids(obj):
"""Create pytest ids for checks.
When `obj` is an intake entry, this returns the pprint version of the
intake entry. When `obj` is a function, the name of the function is
returned with its keyword arguments.
Parameters
----------
obj : intake entry or function
Items generated by `check_entry`
Returns
-------
id : string or None
See also
--------
check_entry
"""
if hasattr(obj, "container"):
c = getattr(obj, "_catalog", None)
if c:
name = f"{c.name}.{obj.name}"
else:
name = f"{obj.name}"
return name
if callable(obj):
if not isinstance(obj, partial):
return obj.__name__
if not obj.keywords:
return obj.func.__name__
kwstring = ",".join(["{}={}".format(k, v) for k, v in obj.keywords.items()])
return "{}({})".format(obj.func.__name__, kwstring)
def parametrize_with_checks(catalog):
"""Pytest specific decorator for parametrizing catalog checks.
The `id` of each check is set to be a pprint version of the catalog
and the name of the check with its keyword arguments.
This allows using `pytest -k` to specify which tests to run::
pytest test_check_catalogs.py -k check_catalog_metadata
Parameters
----------
catalog : Intake Catalog
Catalog to generate checks for.
Returns
-------
decorator : `pytest.mark.parametrize`
Examples
--------
>>> from carbonplan.data.tests import parametrize_with_checks
>>> from carbonplan.data import cat
>>> @parametrize_with_checks(cat)
... def test_catalog(entry, check):
... check(entry)
"""
import pytest
checks_generator = itertools.chain.from_iterable(
check_entry(name, entry) for name, entry in dict(catalog.walk(depth=10)).items()
)
checks_with_marks = list(
_mark_xfail_checks(entry, check, pytest) for entry, check in checks_generator
)
return pytest.mark.parametrize("entry, check", checks_with_marks, ids=_set_check_ids)
def _mark_xfail_checks(entry, check, pytest):
# TODO
return entry, check
def _yield_all_checks(name, entry):
yield check_entry_metadata
for params in get_entry_params(entry):
yield partial(check_get_entry_data, params=params)
def check_entry(name, entry):
yield from ((entry, partial(check, name)) for check in _yield_all_checks(name, entry))
def check_get_entry_data(name, entry, params=None):
import pytest
if params is not None:
entry = entry(**params)
else:
entry = entry()
if entry.container == "catalog":
entry.reload()
elif entry.container in ["xarray", "dataframe"]:
if entry.metadata.get("ci", None) == "skip":
pytest.skip("dataset marked as ci: skip") # TODO: move to _mark_xfail_checks
elif entry.metadata.get("ci", None) == "xfail":
pytest.xfail("dataset marked as ci: xfail") # TODO: move to _mark_xfail_checks
try:
_ = entry.to_dask()
except NotImplementedError:
_ = entry.read()
def check_entry_metadata(name, entry):
import pytest
expected_keys = ["title", "summary", "description", "tags", "license", "providers"]
if entry.container == "catalog":
pytest.skip(
"not checking metadata in top level catalog objects."
) # TODO: move to _mark_xfail_checks
for key in expected_keys:
assert key in entry().metadata
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Pytype is too slow to check this file.
# pytype: skip-file
import builtins
import functools
import itertools
import operator
from typing import (Any, Callable, List, NamedTuple, Optional, Sequence, Union, Tuple)
import warnings
import numpy as np
import jax
from jax import core
from jax import ad_util
from jax import api
from jax import api_util
from jax import linear_util as lu
from jax import dtypes
from jax import lazy
from jax import tree_util
from jax.config import flags, config
from jax.core import (Primitive, _canonicalize_dimension, UnshapedArray,
ShapedArray, ConcreteArray, raise_to_shaped,
abstract_token, canonicalize_shape)
from jax.abstract_arrays import array_types
from jax.interpreters import partial_eval as pe
from jax.interpreters import xla
from jax.interpreters import pxla
from jax.interpreters import ad
from jax.interpreters import invertible_ad as iad
from jax.interpreters import batching
from jax.interpreters import masking
from jax.util import (cache, safe_zip, partial, prod, safe_map, canonicalize_axis,
split_list)
from jax.tree_util import tree_map
from jax.lib import pytree
from jax.lib import xla_bridge
from jax.lib import xla_client
xb = xla_bridge
xc = xla_client
xops = xla_client.ops
FLAGS = flags.FLAGS
_max = builtins.max
_min = builtins.min
_reduce = functools.reduce
Array = Any
DType = Any
Shape = Sequence[int]
def _try_broadcast_shapes(shapes):
assert shapes
if len(shapes) == 1: return shapes[0]
rank, *others = {len(shape) for shape in shapes}
if others: return None # must have consistent rank
if not rank: return () # scalar case
result_shape = [None] * rank
for i, sizes in enumerate(zip(*shapes)):
if sizes[:-1] == sizes[1:]:
result_shape[i] = sizes[0] # all equal sizes for this dimension
else:
sizes = [d for d in sizes if d != 1]
if sizes[:-1] != sizes[1:]:
return None # must have equal sizes other than 1-sized axes
result_shape[i] = sizes[0] if sizes else 1
return tuple(result_shape)
@cache()
def broadcast_shapes(*shapes):
"""Returns the shape that results from NumPy broadcasting of `shapes`."""
if len(shapes) == 1:
return shapes[0]
ndim = _max(len(shape) for shape in shapes)
shapes = [(1,) * (ndim - len(shape)) + shape for shape in shapes]
result_shape = _try_broadcast_shapes(shapes)
if result_shape is None:
raise ValueError("Incompatible shapes for broadcasting: {}"
.format(tuple(map(tuple, shapes))))
return result_shape
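# For example: broadcast_shapes((8, 1, 3), (1, 5, 3)) == (8, 5, 3), while
# broadcast_shapes((2, 3), (4, 3)) raises ValueError since 2 and 4 differ and
# neither is 1.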
def _identity(x): return x
### traceables
def neg(x: Array) -> Array:
r"""Elementwise negation: :math:`-x`."""
return neg_p.bind(x)
def sign(x: Array) -> Array:
r"""Elementwise sign.
For floating-point inputs, returns
:math:`\mathrm{sign}(x) = \begin{cases}
-1 & x < 0\\
-0 & x = -0\\
\mathit{NaN} & x = \mathit{NaN}\\
+0 & x = +0\\
1 & x > 0
\end{cases}`
For signed integer inputs, returns
:math:`\mathrm{sign}(x) = \begin{cases}
-1 & x < 0\\
0 & x = 0\\
1 & x > 0
\end{cases}`
For complex inputs, returns the complex phase, i.e.
:math:`\mathrm{sign}(x) = \frac{x}{|x|}`.
"""
return sign_p.bind(x)
def nextafter(x1: Array, x2: Array) -> Array:
r"""Returns the next representable value after `x1` in the direction of `x2`.
Note that in some environments flush-denormal-to-zero semantics is used.
This means that, around zero, this function returns strictly non-zero
values which appear as zero in any operations. Consider this example::
>>> jnp.nextafter(0, 1) # denormal numbers are representable
DeviceArray(1.e-45, dtype=float32)
>>> jnp.nextafter(0, 1) * 1 # but are flushed to zero
DeviceArray(0., dtype=float32)
For the smallest usable (i.e. normal) float, use ``tiny`` of ``jnp.finfo``.
"""
return nextafter_p.bind(_brcast(x1, x2), _brcast(x2, x1))
def floor(x: Array) -> Array:
r"""Elementwise floor: :math:`\left\lfloor x \right\rfloor`."""
return floor_p.bind(x)
def ceil(x: Array) -> Array:
r"""Elementwise ceiling: :math:`\left\lceil x \right\rceil`."""
return ceil_p.bind(x)
def round(x: Array) -> Array:
r"""Elementwise round.
Rounds values to the nearest integer. Halfway values (e.g., `0.5`) are rounded
away from zero."""
return round_p.bind(x)
def is_finite(x: Array) -> Array:
r"""Elementwise :math:`\mathrm{isfinite}`.
For each element x returns `True` if and only if x is not :math:`\pm\infty` or
:math:`\mathit{NaN}`.
"""
return is_finite_p.bind(x)
def exp(x: Array) -> Array:
r"""Elementwise exponential: :math:`e^x`."""
return exp_p.bind(x)
def expm1(x: Array) -> Array:
r"""Elementwise :math:`e^{x} - 1`."""
return expm1_p.bind(x)
def log(x: Array) -> Array:
r"""Elementwise natural logarithm: :math:`\mathrm{log}(x)`."""
return log_p.bind(x)
def log1p(x: Array) -> Array:
r"""Elementwise :math:`\mathrm{log}(1 + x)`."""
return log1p_p.bind(x)
def tanh(x: Array) -> Array:
r"""Elementwise hyperbolic tangent: :math:`\mathrm{tanh}(x)`."""
return tanh_p.bind(x)
def sin(x: Array) -> Array:
r"""Elementwise sine: :math:`\mathrm{sin}(x)`."""
return sin_p.bind(x)
def cos(x: Array) -> Array:
r"""Elementwise cosine: :math:`\mathrm{cos}(x)`."""
return cos_p.bind(x)
def atan2(x: Array, y: Array) -> Array:
r"""Elementwise arc tangent of two variables:
:math:`\mathrm{atan}({x \over y})`."""
return atan2_p.bind(x, y)
def betainc(a: Array, b: Array, x: Array) -> Array:
r"""Elementwise regularized incomplete beta integral."""
return regularized_incomplete_beta_p.bind(a, b, x)
def lgamma(x: Array) -> Array:
r"""Elementwise log gamma: :math:`\mathrm{log}(\Gamma(x))`."""
return lgamma_p.bind(x)
def digamma(x: Array) -> Array:
r"""Elementwise digamma: :math:`\psi(x)`."""
return digamma_p.bind(x)
def igamma(a: Array, x: Array) -> Array:
r"""Elementwise regularized incomplete gamma function."""
return igamma_p.bind(a, x)
def igammac(a: Array, x: Array) -> Array:
r"""Elementwise complementary regularized incomplete gamma function."""
return igammac_p.bind(a, x)
def igamma_grad_a(a: Array, x: Array) -> Array:
r"""Elementwise derivative of the regularized incomplete gamma function."""
return igamma_grad_a_p.bind(a, x)
def random_gamma_grad(a: Array, x: Array) -> Array:
r"""Elementwise derivative of samples from `Gamma(a, 1)`."""
return random_gamma_grad_p.bind(a, x)
def bessel_i0e(x: Array) -> Array:
r"""Exponentially scaled modified Bessel function of order 0:
:math:`\mathrm{i0e}(x) = e^{-|x|} \mathrm{i0}(x)`
"""
return bessel_i0e_p.bind(x)
def bessel_i1e(x: Array) -> Array:
r"""Exponentially scaled modified Bessel function of order 1:
:math:`\mathrm{i1e}(x) = e^{-|x|} \mathrm{i1}(x)`
"""
return bessel_i1e_p.bind(x)
def erf(x: Array) -> Array:
r"""Elementwise error function: :math:`\mathrm{erf}(x)`."""
return erf_p.bind(x)
def erfc(x: Array) -> Array:
r"""Elementwise complementary error function:
:math:`\mathrm{erfc}(x) = 1 - \mathrm{erf}(x)`."""
return erfc_p.bind(x)
def erf_inv(x: Array) -> Array:
r"""Elementwise inverse error function: :math:`\mathrm{erf}^{-1}(x)`."""
return erf_inv_p.bind(x)
def real(x: Array) -> Array:
r"""Elementwise extract real part: :math:`\mathrm{Re}(x)`.
Returns the real part of a complex number.
"""
return real_p.bind(x)
def imag(x: Array) -> Array:
r"""Elementwise extract imaginary part: :math:`\mathrm{Im}(x)`.
Returns the imaginary part of a complex number.
"""
return imag_p.bind(x)
def complex(x: Array, y: Array) -> Array:
r"""Elementwise make complex number: :math:`x + jy`.
Builds a complex number from real and imaginary parts.
"""
return complex_p.bind(_brcast(x, y), _brcast(y, x))
def conj(x: Array) -> Array:
r"""Elementwise complex conjugate function: :math:`\overline{x}`."""
return conj_p.bind(x, input_dtype=_dtype(x))
def abs(x: Array) -> Array:
r"""Elementwise absolute value: :math:`|x|`."""
return abs_p.bind(x)
def pow(x: Array, y: Array) -> Array:
r"""Elementwise power: :math:`x^y`."""
return pow_p.bind(x, y)
def integer_pow(x: Array, y: int) -> Array:
r"""Elementwise power: :math:`x^y`, where :math:`y` is a fixed integer."""
if y == 0:
return _ones(x)
elif y == 1:
return x
else:
return integer_pow_p.bind(x, y=y)
def sqrt(x: Array) -> Array:
r"""Elementwise square root: :math:`\sqrt{x}`."""
return sqrt_p.bind(x)
def rsqrt(x: Array) -> Array:
r"""Elementwise reciprocal square root: :math:`1 \over \sqrt{x}."""
return rsqrt_p.bind(x)
def bitwise_not(x: Array) -> Array:
r"""Elementwise NOT: :math:`\neg x`."""
return not_p.bind(x)
def bitwise_and(x: Array, y: Array) -> Array:
r"""Elementwise AND: :math:`x \wedge y`."""
return and_p.bind(x, y)
def bitwise_or(x: Array, y: Array) -> Array:
r"""Elementwise OR: :math:`x \vee y`."""
return or_p.bind(x, y)
def bitwise_xor(x: Array, y: Array) -> Array:
r"""Elementwise exclusive OR: :math:`x \oplus y`."""
return xor_p.bind(x, y)
def population_count(x: Array) -> Array:
r"""Elementwise popcount, count the number of set bits in each element."""
return population_count_p.bind(x)
def add(x: Array, y: Array) -> Array:
r"""Elementwise addition: :math:`x + y`."""
return add_p.bind(x, y)
def sub(x: Array, y: Array) -> Array:
r"""Elementwise subtraction: :math:`x - y`."""
return sub_p.bind(x, y)
def mul(x: Array, y: Array) -> Array:
r"""Elementwise multiplication: :math:`x \times y`."""
return mul_p.bind(x, y)
def div(x: Array, y: Array) -> Array:
r"""Elementwise division: :math:`x \over y`."""
return div_p.bind(x, y)
def rem(x: Array, y: Array) -> Array:
r"""Elementwise remainder: :math:`x \bmod y`."""
return rem_p.bind(x, y)
def max(x: Array, y: Array) -> Array:
r"""Elementwise maximum: :math:`\mathrm{max}(x, y)`
For complex numbers, uses a lexicographic comparison on the
`(real, imaginary)` pairs."""
return max_p.bind(x, y)
def min(x: Array, y: Array) -> Array:
r"""Elementwise minimum: :math:`\mathrm{min}(x, y)`
For complex numbers, uses a lexicographic comparison on the
`(real, imaginary)` pairs."""
return min_p.bind(x, y)
def shift_left(x: Array, y: Array) -> Array:
r"""Elementwise left shift: :math:`x \ll y`."""
return shift_left_p.bind(x, y)
def shift_right_arithmetic(x: Array, y: Array) -> Array:
r"""Elementwise arithmetic right shift: :math:`x \gg y`."""
return shift_right_arithmetic_p.bind(x, y)
def shift_right_logical(x: Array, y: Array) -> Array:
r"""Elementwise logical right shift: :math:`x \gg y`."""
return shift_right_logical_p.bind(x, y)
def eq(x: Array, y: Array) -> Array:
r"""Elementwise equals: :math:`x = y`."""
return eq_p.bind(x, y)
def ne(x: Array, y: Array) -> Array:
r"""Elementwise not-equals: :math:`x \neq y`."""
return ne_p.bind(x, y)
def ge(x: Array, y: Array) -> Array:
r"""Elementwise greater-than-or-equals: :math:`x \geq y`."""
return ge_p.bind(x, y)
def gt(x: Array, y: Array) -> Array:
r"""Elementwise greater-than: :math:`x > y`."""
return gt_p.bind(x, y)
def le(x: Array, y: Array) -> Array:
r"""Elementwise less-than-or-equals: :math:`x \leq y`."""
return le_p.bind(x, y)
def lt(x: Array, y: Array) -> Array:
r"""Elementwise less-than: :math:`x < y`."""
return lt_p.bind(x, y)
def convert_element_type(operand: Array, new_dtype: DType) -> Array:
"""Elementwise cast.
Wraps XLA's `ConvertElementType
<https://www.tensorflow.org/xla/operation_semantics#convertelementtype>`_
operator, which performs an elementwise conversion from one type to another.
Similar to a C++ `static_cast`.
Args:
operand: an array or scalar value to be cast
new_dtype: the new type. Should be a NumPy type.
Returns:
An array with the same shape as `operand`, cast elementwise to `new_dtype`.
"""
new_dtype = dtypes.canonicalize_dtype(new_dtype)
  # Avoids dropping precision by casting Python scalars to the default JAX
  # type. If we passed a Python scalar directly to the bind call below, it
  # would be cast to the default type as part of the calling convention.
if type(operand) in dtypes.python_scalar_dtypes:
operand = np.asarray(operand, new_dtype)
old_dtype = dtypes.canonicalize_dtype(_dtype(operand))
if old_dtype == new_dtype:
return operand
if (dtypes.issubdtype(old_dtype, np.complexfloating) and
not dtypes.issubdtype(new_dtype, np.complexfloating)):
msg = "Casting complex values to real discards the imaginary part"
warnings.warn(msg, np.ComplexWarning, stacklevel=2)
return convert_element_type_p.bind(
operand, new_dtype=new_dtype, old_dtype=old_dtype)
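# Usage sketch (illustrative, not part of this module; assumes this function is
# re-exported as `jax.lax.convert_element_type`). XLA truncates toward zero
# when converting float to int:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.array([1.7, -2.3])
#   lax.convert_element_type(x, jnp.int32)   # -> [1, -2], dtype int32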
def bitcast_convert_type(operand: Array, new_dtype: DType) -> Array:
"""Elementwise bitcast.
Wraps XLA's `BitcastConvertType
<https://www.tensorflow.org/xla/operation_semantics#bitcastconverttype>`_
operator, which performs a bit cast from one type to another. The bitwidth
of the source and destination types must match.
Args:
operand: an array or scalar value to be cast
new_dtype: the new type. Should be a NumPy type.
Returns:
An array with the same shape as `operand`, bitcast elementwise to
`new_dtype`.
"""
new_dtype = dtypes.canonicalize_dtype(new_dtype)
old_dtype = _dtype(operand)
if old_dtype != new_dtype:
return bitcast_convert_type_p.bind(operand, new_dtype=new_dtype)
else:
return operand
def clamp(min: Array, x: Array, max: Array) -> Array:
r"""Elementwise clamp.
Returns :math:`\mathrm{clamp}(x) = \begin{cases}
\mathit{min} & \text{if } x < \mathit{min},\\
\mathit{max} & \text{if } x > \mathit{max},\\
x & \text{otherwise}
\end{cases}`.
"""
return clamp_p.bind(min, x, max)
def concatenate(operands: Sequence[Array], dimension: int) -> Array:
"""Concatenates a sequence of arrays along `dimension`.
Wraps XLA's `Concatenate
<https://www.tensorflow.org/xla/operation_semantics#concatenate>`_
operator.
Args:
operands: a sequence of arrays to concatenate. The arrays must have equal
shapes, except in the `dimension` axis.
dimension: the dimension along which to concatenate the arrays.
Returns:
An array containing the concatenation.
"""
return concatenate_p.bind(*operands, dimension=dimension)
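# Usage sketch (illustrative; assumes the public `jax.lax` re-export):
#   import jax.numpy as jnp
#   from jax import lax
#   a = jnp.ones((2, 3))
#   b = jnp.zeros((2, 5))
#   lax.concatenate([a, b], dimension=1).shape   # -> (2, 8)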
Precision = xla_client.PrecisionConfig.Precision
Precision.__str__ = lambda precision: precision.name
PrecisionType = Any
PrecisionLike = Union[None, PrecisionType, Tuple[PrecisionType, PrecisionType]]
class ConvDimensionNumbers(NamedTuple):
"""Describes batch, spatial, and feature dimensions of a convolution.
Args:
lhs_spec: a tuple of nonnegative integer dimension numbers containing
`(batch dimension, feature dimension, spatial dimensions...)`.
rhs_spec: a tuple of nonnegative integer dimension numbers containing
`(out feature dimension, in feature dimension, spatial dimensions...)`.
out_spec: a tuple of nonnegative integer dimension numbers containing
`(batch dimension, feature dimension, spatial dimensions...)`.
"""
lhs_spec: Sequence[int]
rhs_spec: Sequence[int]
out_spec: Sequence[int]
ConvGeneralDilatedDimensionNumbers = Union[
None, ConvDimensionNumbers, Tuple[str, str, str]]
def conv_general_dilated(
lhs: Array, rhs: Array, window_strides: Sequence[int],
padding: Union[str, Sequence[Tuple[int, int]]],
lhs_dilation: Optional[Sequence[int]] = None,
rhs_dilation: Optional[Sequence[int]] = None,
dimension_numbers: ConvGeneralDilatedDimensionNumbers = None,
feature_group_count: int = 1, batch_group_count: int = 1,
precision: PrecisionLike = None) -> Array:
"""General n-dimensional convolution operator, with optional dilation.
Wraps XLA's `Conv
<https://www.tensorflow.org/xla/operation_semantics#conv_convolution>`_
operator.
Args:
lhs: a rank `n+2` dimensional input array.
rhs: a rank `n+2` dimensional array of kernel weights.
window_strides: a sequence of `n` integers, representing the inter-window
strides.
padding: either the string `'SAME'`, the string `'VALID'`, or a sequence of
`n` `(low, high)` integer pairs that give the padding to apply before and
after each spatial dimension.
lhs_dilation: `None`, or a sequence of `n` integers, giving the
dilation factor to apply in each spatial dimension of `lhs`. LHS dilation
is also known as transposed convolution.
rhs_dilation: `None`, or a sequence of `n` integers, giving the
dilation factor to apply in each spatial dimension of `rhs`. RHS dilation
is also known as atrous convolution.
dimension_numbers: either `None`, a `ConvDimensionNumbers` object, or
a 3-tuple `(lhs_spec, rhs_spec, out_spec)`, where each element is a string
of length `n+2`.
feature_group_count: integer, default 1. See XLA HLO docs.
batch_group_count: integer, default 1. See XLA HLO docs.
precision: Optional. Either ``None``, which means the default precision for
the backend, a ``lax.Precision`` enum value (``Precision.DEFAULT``,
``Precision.HIGH`` or ``Precision.HIGHEST``) or a tuple of two
      ``lax.Precision`` enums indicating precision of ``lhs`` and ``rhs``.
Returns:
An array containing the convolution result.
In the string case of `dimension_numbers`, each character identifies by
position:
- the batch dimensions in `lhs`, `rhs`, and the output with the character
'N',
- the feature dimensions in `lhs` and the output with the character 'C',
- the input and output feature dimensions in rhs with the characters 'I'
and 'O' respectively, and
- spatial dimension correspondences between lhs, rhs, and the output using
any distinct characters.
For example, to indicate dimension numbers consistent with the `conv` function
with two spatial dimensions, one could use `('NCHW', 'OIHW', 'NCHW')`. As
another example, to indicate dimension numbers consistent with the TensorFlow
Conv2D operation, one could use `('NHWC', 'HWIO', 'NHWC')`. When using the
latter form of convolution dimension specification, window strides are
associated with spatial dimension character labels according to the order in
which the labels appear in the `rhs_spec` string, so that `window_strides[0]`
is matched with the dimension corresponding to the first character
appearing in rhs_spec that is not `'I'` or `'O'`.
If `dimension_numbers` is `None`, the default is `('NCHW', 'OIHW', 'NCHW')`
(for a 2D convolution).
"""
dnums = conv_dimension_numbers(lhs.shape, rhs.shape, dimension_numbers)
if lhs_dilation is None:
lhs_dilation = (1,) * (lhs.ndim - 2)
  elif isinstance(padding, str) and any(d != 1 for d in lhs_dilation):
raise ValueError(
"String padding is not implemented for transposed convolution "
"using this op. Please either exactly specify the required padding or "
"use conv_transpose.")
if rhs_dilation is None:
rhs_dilation = (1,) * (rhs.ndim - 2)
if isinstance(padding, str):
lhs_perm, rhs_perm, _ = dnums
rhs_shape = np.take(rhs.shape, rhs_perm)[2:]
effective_rhs_shape = [(k-1) * r + 1 for k, r in zip(rhs_shape, rhs_dilation)]
padding = padtype_to_pads(
np.take(lhs.shape, lhs_perm)[2:], effective_rhs_shape,
window_strides, padding)
return conv_general_dilated_p.bind(
lhs, rhs, window_strides=tuple(window_strides), padding=tuple(padding),
lhs_dilation=tuple(lhs_dilation), rhs_dilation=tuple(rhs_dilation),
dimension_numbers=dnums,
feature_group_count=feature_group_count,
batch_group_count=batch_group_count,
lhs_shape=lhs.shape, rhs_shape=rhs.shape,
precision=_canonicalize_precision(precision))
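# Usage sketch (illustrative; shapes follow the NCHW/OIHW convention described
# above, and `jax.lax` is assumed to re-export this function):
#   import jax.numpy as jnp
#   from jax import lax
#   imgs = jnp.ones((1, 1, 28, 28))     # N=1, C=1, H=W=28
#   kernel = jnp.ones((32, 1, 5, 5))    # O=32, I=1, 5x5 window
#   out = lax.conv_general_dilated(
#       imgs, kernel, window_strides=(2, 2), padding='SAME',
#       dimension_numbers=('NCHW', 'OIHW', 'NCHW'))
#   # out.shape == (1, 32, 14, 14)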
def dot(lhs: Array, rhs: Array, precision: PrecisionLike = None) -> Array:
"""Vector/vector, matrix/vector, and matrix/matrix multiplication.
Wraps XLA's `Dot
<https://www.tensorflow.org/xla/operation_semantics#dot>`_
operator.
For more general contraction, see the `dot_general` operator.
Args:
lhs: an array of rank 1 or 2.
rhs: an array of rank 1 or 2.
precision: Optional. Either ``None``, which means the default precision for
the backend, a ``lax.Precision`` enum value (``Precision.DEFAULT``,
``Precision.HIGH`` or ``Precision.HIGHEST``) or a tuple of two
      ``lax.Precision`` enums indicating precision of ``lhs`` and ``rhs``.
Returns:
An array containing the product.
"""
if 1 <= lhs.ndim <= 2 and 1 <= rhs.ndim <= 2 and lhs.shape[-1] == rhs.shape[0]:
return dot_general(lhs, rhs, (((lhs.ndim - 1,), (0,)), ((), ())),
precision=precision)
else:
raise TypeError("Incompatible shapes for dot: got {} and {}.".format(
lhs.shape, rhs.shape))
DotDimensionNumbers = Tuple[Tuple[Sequence[int], Sequence[int]],
Tuple[Sequence[int], Sequence[int]]]
def dot_general(lhs: Array, rhs: Array, dimension_numbers: DotDimensionNumbers,
precision: PrecisionLike = None) -> Array:
"""More general contraction operator.
Wraps XLA's `DotGeneral
<https://www.tensorflow.org/xla/operation_semantics#dotgeneral>`_
operator.
Args:
lhs: an array
rhs: an array
dimension_numbers: a tuple of tuples of the form
`((lhs_contracting_dims, rhs_contracting_dims),
(lhs_batch_dims, rhs_batch_dims))`
precision: Optional. Either ``None``, which means the default precision for
the backend, a ``lax.Precision`` enum value (``Precision.DEFAULT``,
``Precision.HIGH`` or ``Precision.HIGHEST``) or a tuple of two
      ``lax.Precision`` enums indicating precision of ``lhs`` and ``rhs``.
Returns:
An array containing the result.
"""
contract_dims_seq, batch_dims_seq = dimension_numbers
contract_dims = tuple(map(lambda x: tuple(x), contract_dims_seq))
batch_dims = tuple(map(lambda x: tuple(x), batch_dims_seq))
return dot_general_p.bind(lhs, rhs,
dimension_numbers=(contract_dims, batch_dims),
precision=_canonicalize_precision(precision))
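# Usage sketch (illustrative): a batched matmul expressed via dimension_numbers,
# contracting the last dim of `lhs` with the middle dim of `rhs` and batching
# over the leading dim:
#   import jax.numpy as jnp
#   from jax import lax
#   lhs = jnp.ones((8, 3, 4))
#   rhs = jnp.ones((8, 4, 5))
#   out = lax.dot_general(lhs, rhs, (((2,), (1,)), ((0,), (0,))))
#   # out.shape == (8, 3, 5)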
def broadcast(operand: Array, sizes: Sequence[int]) -> Array:
"""Broadcasts an array, adding new major dimensions.
Wraps XLA's `Broadcast
<https://www.tensorflow.org/xla/operation_semantics#broadcast>`_
operator.
Args:
operand: an array
sizes: a sequence of integers, giving the sizes of new major dimensions
to add.
Returns:
An array containing the result.
"""
dims = tuple(range(len(sizes), len(sizes) + np.ndim(operand)))
return broadcast_in_dim(operand, tuple(sizes) + np.shape(operand), dims)
def broadcast_in_dim(operand: Array, shape: Shape,
broadcast_dimensions: Sequence[int]) -> Array:
"""Wraps XLA's `BroadcastInDim
<https://www.tensorflow.org/xla/operation_semantics#broadcastindim>`_
operator.
"""
shape = _broadcast_in_dim_shape_rule(
operand, shape=shape, broadcast_dimensions=broadcast_dimensions)
if (np.ndim(operand) == len(shape) and not len(broadcast_dimensions)
and isinstance(operand, (xla.DeviceArray, core.Tracer))):
return operand
return broadcast_in_dim_p.bind(
operand, shape=tuple(shape),
broadcast_dimensions=tuple(broadcast_dimensions))
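# Usage sketch (illustrative): map the single axis of `x` onto axis 1 of the
# target shape, replicating along axis 0:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.arange(3.)
#   lax.broadcast_in_dim(x, shape=(2, 3), broadcast_dimensions=(1,))
#   # -> shape (2, 3), each row equal to x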
def broadcast_to_rank(x: Array, rank: int) -> Array:
"""Adds leading dimensions of ``1`` to give ``x`` rank ``rank``."""
return broadcast(x, (1,) * (rank - x.ndim))
def reshape(operand: Array, new_sizes: Shape,
dimensions: Optional[Sequence[int]] = None) -> Array:
"""Wraps XLA's `Reshape
<https://www.tensorflow.org/xla/operation_semantics#reshape>`_
operator.
For inserting/removing dimensions of size 1, prefer using ``lax.squeeze`` /
``lax.expand_dims``. These preserve information about axis identity that may
be useful for advanced transformation rules.
"""
new_sizes = canonicalize_shape(new_sizes) # TODO
new_sizes = tuple(new_sizes)
same_shape = np.shape(operand) == new_sizes
same_dims = dimensions is None or tuple(dimensions) == tuple(range(np.ndim(operand)))
if np.shape(operand) and same_shape and same_dims:
return operand
else:
return reshape_p.bind(
operand, new_sizes=new_sizes,
dimensions=None if dimensions is None or same_dims else tuple(dimensions))
def pad(operand: Array, padding_value: Array,
padding_config: Sequence[Tuple[int, int, int]]) -> Array:
"""Applies low, high, and/or interior padding to an array.
Wraps XLA's `Pad
<https://www.tensorflow.org/xla/operation_semantics#pad>`_
operator.
Args:
operand: an array to be padded.
padding_value: the value to be inserted as padding. Must have the same dtype
as ``operand``.
padding_config: a sequence of ``(low, high, interior)`` tuples of integers,
giving the amount of low, high, and interior (dilation) padding to insert
in each dimension.
Returns:
The ``operand`` array with padding value ``padding_value`` inserted in each
dimension according to the ``padding_config``.
"""
return pad_p.bind(operand, padding_value, padding_config=tuple(padding_config))
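# Usage sketch (illustrative): pad one row above and below, and dilate the
# columns with one interior zero between neighbors:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.ones((2, 2))
#   lax.pad(x, 0., ((1, 1, 0), (0, 0, 1))).shape   # -> (4, 3)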
def rev(operand: Array, dimensions: Sequence[int]) -> Array:
"""Wraps XLA's `Rev
<https://www.tensorflow.org/xla/operation_semantics#rev_reverse>`_
operator.
"""
return rev_p.bind(operand, dimensions=tuple(dimensions))
def select(pred: Array, on_true: Array, on_false: Array) -> Array:
"""Wraps XLA's `Select
<https://www.tensorflow.org/xla/operation_semantics#select>`_
operator.
"""
return select_p.bind(pred, on_true, on_false)
def slice(operand: Array, start_indices: Sequence[int],
limit_indices: Sequence[int],
strides: Optional[Sequence[int]] = None) -> Array:
"""Wraps XLA's `Slice
<https://www.tensorflow.org/xla/operation_semantics#slice>`_
operator.
"""
return slice_p.bind(operand, start_indices=tuple(start_indices),
limit_indices=tuple(limit_indices),
strides=None if strides is None else tuple(strides))
def dynamic_slice(operand: Array, start_indices: Sequence[Array],
slice_sizes: Shape) -> Array:
"""Wraps XLA's `DynamicSlice
<https://www.tensorflow.org/xla/operation_semantics#dynamicslice>`_
operator.
Args:
operand: an array to slice.
start_indices: a list of scalar indices, one per dimension. These values
may be dynamic.
slice_sizes: the size of the slice. Must be a sequence of non-negative
integers with length equal to `ndim(operand)`. Inside a JIT compiled
function, only static values are supported (all JAX arrays inside JIT
must have statically known size).
Returns:
An array containing the slice.
"""
start_indices = _dynamic_slice_indices(operand, start_indices)
return dynamic_slice_p.bind(operand, *start_indices,
slice_sizes=tuple(slice_sizes))
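# Usage sketch (illustrative): start indices may be traced values, but the
# slice sizes must be static:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.arange(12.).reshape(3, 4)
#   lax.dynamic_slice(x, (1, 0), (2, 4))   # rows 1 and 2, all columns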
def dynamic_update_slice(operand: Array, update: Array,
start_indices: Array) -> Array:
"""Wraps XLA's `DynamicUpdateSlice
<https://www.tensorflow.org/xla/operation_semantics#dynamicupdateslice>`_
operator.
Args:
operand: an array to slice.
update: an array containing the new values to write onto `operand`.
start_indices: a list of scalar indices, one per dimension.
Returns:
    An updated copy of `operand` with `update` written at `start_indices`.
"""
start_indices = _dynamic_slice_indices(operand, start_indices)
return dynamic_update_slice_p.bind(operand, update, *start_indices)
class GatherDimensionNumbers(NamedTuple):
"""
  Describes the dimension number arguments to `XLA's Gather operator
<https://www.tensorflow.org/xla/operation_semantics#gather>`_. See the XLA
documentation for more details of what the dimension numbers mean.
Args:
offset_dims: the set of dimensions in the `gather` output that offset into
an array sliced from `operand`. Must be a tuple of integers in ascending
order, each representing a dimension number of the output.
collapsed_slice_dims: the set of dimensions `i` in `operand` that have
`slice_sizes[i] == 1` and that should not have a corresponding dimension
in the output of the gather. Must be a tuple of integers in ascending
order.
start_index_map: for each dimension in `start_indices`, gives the
corresponding dimension in `operand` that is to be sliced. Must be a
tuple of integers with size equal to `start_indices.shape[-1]`.
Unlike XLA's `GatherDimensionNumbers` structure, `index_vector_dim` is
implicit; there is always an index vector dimension and it must always be the
last dimension. To gather scalar indices, add a trailing dimension of size 1.
"""
offset_dims: Sequence[int]
collapsed_slice_dims: Sequence[int]
start_index_map: Sequence[int]
def gather(operand: Array, start_indices: Array,
dimension_numbers: GatherDimensionNumbers,
slice_sizes: Shape) -> Array:
"""Gather operator.
Wraps `XLA's Gather operator
<https://www.tensorflow.org/xla/operation_semantics#gather>`_.
The semantics of gather are complicated, and its API might change in the
future. For most use cases, you should prefer `Numpy-style indexing
<https://docs.scipy.org/doc/numpy-1.16.0/reference/arrays.indexing.html>`_
(e.g., `x[:, (1,4,7), ...]`), rather than using `gather` directly.
Args:
operand: an array from which slices should be taken
start_indices: the indices at which slices should be taken
dimension_numbers: a `lax.GatherDimensionNumbers` object that describes
how dimensions of `operand`, `start_indices` and the output relate.
slice_sizes: the size of each slice. Must be a sequence of non-negative
integers with length equal to `ndim(operand)`.
Returns:
An array containing the gather output.
"""
return gather_p.bind(
operand, start_indices, dimension_numbers=dimension_numbers,
slice_sizes=canonicalize_shape(slice_sizes))
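# Usage sketch (illustrative): gather rows 0 and 2 of a (3, 4) array. Note the
# trailing size-1 index-vector dimension on `idx`, as required above:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.arange(12.).reshape(3, 4)
#   dnums = lax.GatherDimensionNumbers(offset_dims=(1,),
#                                      collapsed_slice_dims=(0,),
#                                      start_index_map=(0,))
#   idx = jnp.array([[0], [2]])
#   lax.gather(x, idx, dnums, slice_sizes=(1, 4))   # -> shape (2, 4)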
class ScatterDimensionNumbers(NamedTuple):
"""
  Describes the dimension number arguments to `XLA's Scatter operator
<https://www.tensorflow.org/xla/operation_semantics#scatter>`_. See the XLA
documentation for more details of what the dimension numbers mean.
Args:
update_window_dims: the set of dimensions in the `updates` that are window
dimensions. Must be a tuple of integers in ascending
order, each representing a dimension number.
inserted_window_dims: the set of size 1 window dimensions that must be inserted
into the shape of `updates`. Must be a tuple of integers in ascending
order, each representing a dimension number of the output. These are the
mirror image of `collapsed_slice_dims` in the case of `gather`.
scatter_dims_to_operand_dims: for each dimension in `scatter_indices`, gives
      the corresponding dimension in `operand`. Must be a sequence of integers
      with size equal to `scatter_indices.shape[-1]`.
Unlike XLA's `ScatterDimensionNumbers` structure, `index_vector_dim` is
implicit; there is always an index vector dimension and it must always be the
last dimension. To scatter scalar indices, add a trailing dimension of size 1.
"""
update_window_dims: Sequence[int]
inserted_window_dims: Sequence[int]
scatter_dims_to_operand_dims: Sequence[int]
def scatter_add(operand: Array, scatter_indices: Array, updates: Array,
dimension_numbers: ScatterDimensionNumbers, *,
indices_are_sorted: bool = False,
unique_indices: bool = False) -> Array:
"""Scatter-add operator.
Wraps `XLA's Scatter operator
<https://www.tensorflow.org/xla/operation_semantics#scatter>`_, where
addition is used to combine updates and values from `operand`.
The semantics of scatter are complicated and its API is subject to change.
Args:
operand: an array to which the scatter should be applied
scatter_indices: an array that gives the indices in `operand` to which each
update in `updates` should be applied.
updates: the updates that should be scattered onto `operand`.
dimension_numbers: a `lax.ScatterDimensionNumbers` object that describes
how dimensions of `operand`, `start_indices`, `updates` and the output
relate.
indices_are_sorted: whether `scatter_indices` is known to be sorted. If
true, may improve performance on some backends.
unique_indices: whether the indices to be updated in ``operand`` are
guaranteed to not overlap with each other. If true, may improve performance on
some backends.
Returns:
An array containing the sum of `operand` and the scattered updates.
"""
jaxpr, consts = _reduction_jaxpr(add, _abstractify(_const(operand, 0)))
return scatter_add_p.bind(
operand, scatter_indices, updates, update_jaxpr=jaxpr,
update_consts=consts, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
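# Usage sketch (illustrative): add a row of ones into rows 0 and 2 of a
# (3, 4) array of zeros:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.zeros((3, 4))
#   dnums = lax.ScatterDimensionNumbers(update_window_dims=(1,),
#                                       inserted_window_dims=(0,),
#                                       scatter_dims_to_operand_dims=(0,))
#   idx = jnp.array([[0], [2]])
#   lax.scatter_add(x, idx, jnp.ones((2, 4)), dnums)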
def scatter_mul(operand: Array, scatter_indices: Array, updates: Array,
dimension_numbers: ScatterDimensionNumbers, *,
indices_are_sorted: bool = False,
unique_indices: bool = False) -> Array:
"""Scatter-multiply operator.
Wraps `XLA's Scatter operator
<https://www.tensorflow.org/xla/operation_semantics#scatter>`_, where
multiplication is used to combine updates and values from `operand`.
The semantics of scatter are complicated and its API is subject to change.
Args:
operand: an array to which the scatter should be applied
scatter_indices: an array that gives the indices in `operand` to which each
update in `updates` should be applied.
updates: the updates that should be scattered onto `operand`.
dimension_numbers: a `lax.ScatterDimensionNumbers` object that describes
how dimensions of `operand`, `start_indices`, `updates` and the output
relate.
indices_are_sorted: whether `scatter_indices` is known to be sorted. If
true, may improve performance on some backends.
unique_indices: whether the indices to be updated in ``operand`` are
guaranteed to not overlap with each other. If true, may improve performance on
some backends.
Returns:
    An array containing the result of combining `operand` and the scattered
    updates with multiplication.
"""
jaxpr, consts = _reduction_jaxpr(mul, _abstractify(_const(operand, 1)))
return scatter_mul_p.bind(
operand, scatter_indices, updates, update_jaxpr=jaxpr,
update_consts=consts, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
def scatter_min(operand: Array, scatter_indices: Array, updates: Array,
dimension_numbers: ScatterDimensionNumbers, *,
indices_are_sorted: bool = False,
unique_indices: bool = False) -> Array:
"""Scatter-min operator.
Wraps `XLA's Scatter operator
<https://www.tensorflow.org/xla/operation_semantics#scatter>`_, where
the `min` function is used to combine updates and values from `operand`.
The semantics of scatter are complicated and its API is subject to change.
Args:
operand: an array to which the scatter should be applied
scatter_indices: an array that gives the indices in `operand` to which each
update in `updates` should be applied.
updates: the updates that should be scattered onto `operand`.
dimension_numbers: a `lax.ScatterDimensionNumbers` object that describes
how dimensions of `operand`, `start_indices`, `updates` and the output
relate.
indices_are_sorted: whether `scatter_indices` is known to be sorted. If
true, may improve performance on some backends.
unique_indices: whether the indices to be updated in ``operand`` are
guaranteed to not overlap with each other. If true, may improve performance on
some backends.
Returns:
    An array containing the result of combining `operand` and the scattered
    updates with `min`.
"""
jaxpr, consts = _reduction_jaxpr(min, _abstractify(_const(operand, 0)))
return scatter_min_p.bind(
operand, scatter_indices, updates, update_jaxpr=jaxpr,
update_consts=consts, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
def scatter_max(operand: Array, scatter_indices: Array, updates: Array,
dimension_numbers: ScatterDimensionNumbers, *,
indices_are_sorted: bool = False,
unique_indices: bool = False) -> Array:
"""Scatter-max operator.
Wraps `XLA's Scatter operator
<https://www.tensorflow.org/xla/operation_semantics#scatter>`_, where
the `max` function is used to combine updates and values from `operand`.
The semantics of scatter are complicated and its API is subject to change.
Args:
operand: an array to which the scatter should be applied
scatter_indices: an array that gives the indices in `operand` to which each
update in `updates` should be applied.
updates: the updates that should be scattered onto `operand`.
dimension_numbers: a `lax.ScatterDimensionNumbers` object that describes
how dimensions of `operand`, `start_indices`, `updates` and the output
relate.
indices_are_sorted: whether `scatter_indices` is known to be sorted. If
true, may improve performance on some backends.
unique_indices: whether the indices to be updated in ``operand`` are
guaranteed to not overlap with each other. If true, may improve performance on
some backends.
Returns:
    An array containing the result of combining `operand` and the scattered
    updates with `max`.
"""
jaxpr, consts = _reduction_jaxpr(max, _abstractify(_const(operand, 0)))
return scatter_max_p.bind(
operand, scatter_indices, updates, update_jaxpr=jaxpr,
update_consts=consts, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
# Define this outside of scatter to ensure cache hits.
_scatter_reduction_computation = lambda x, y: y
def scatter(operand: Array, scatter_indices: Array, updates: Array,
dimension_numbers: ScatterDimensionNumbers, *,
indices_are_sorted: bool = False,
unique_indices: bool = False) -> Array:
"""Scatter-update operator.
Wraps `XLA's Scatter operator
<https://www.tensorflow.org/xla/operation_semantics#scatter>`_, where updates
replace values from `operand`.
If multiple updates are performed to the same index of operand, they may be
applied in any order.
The semantics of scatter are complicated and its API is subject to change.
Args:
operand: an array to which the scatter should be applied
scatter_indices: an array that gives the indices in `operand` to which each
update in `updates` should be applied.
updates: the updates that should be scattered onto `operand`.
dimension_numbers: a `lax.ScatterDimensionNumbers` object that describes
how dimensions of `operand`, `start_indices`, `updates` and the output
relate.
indices_are_sorted: whether `scatter_indices` is known to be sorted. If
true, may improve performance on some backends.
unique_indices: whether the indices to be updated in ``operand`` are
guaranteed to not overlap with each other. If true, may improve performance on
some backends.
Returns:
    An array containing `operand` with the scattered updates applied.
"""
jaxpr, consts = _reduction_jaxpr(_scatter_reduction_computation,
_abstractify(_const(operand, 0)))
return scatter_p.bind(
operand, scatter_indices, updates, update_jaxpr=jaxpr,
update_consts=consts, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
def index_take(src: Array, idxs: Array, axes: Sequence[int]) -> Array:
  """Gathers values of `src` at positions `idxs` along `axes`, with indices
  taken modulo the corresponding axis sizes."""
indices = concatenate([expand_dims(i, (1,)) for i in idxs], 1)
indices = indices % np.array([src.shape[ax] for ax in axes])
slice_sizes = list(src.shape)
for ax in axes:
slice_sizes[ax] = 1
offset_dims = tuple(range(1, src.ndim - indices.shape[1] + 1))
dnums = GatherDimensionNumbers(
offset_dims=offset_dims,
collapsed_slice_dims=axes,
start_index_map=axes)
return gather(src, indices, dimension_numbers=dnums,
slice_sizes=tuple(slice_sizes))
def transpose(operand: Array, permutation: Sequence[int]) -> Array:
"""Wraps XLA's `Transpose
<https://www.tensorflow.org/xla/operation_semantics#transpose>`_
operator.
"""
permutation = tuple(permutation)
if permutation == tuple(range(len(permutation))):
return operand
else:
return transpose_p.bind(operand, permutation=permutation)
def argmin(operand: Array, axis: int,
           index_dtype: DType) -> Array:
"""Computes the index of the minimum element along ``axis``."""
return argmin_p.bind(operand, axes=(axis,),
index_dtype=dtypes.canonicalize_dtype(index_dtype))
def argmax(operand: Array, axis: int,
           index_dtype: DType) -> Array:
"""Computes the index of the maximum element along ``axis``."""
return argmax_p.bind(operand, axes=(axis,),
index_dtype=dtypes.canonicalize_dtype(index_dtype))
def reduce(operands: Array, init_values: Array, computation: Callable,
dimensions: Sequence[int]) -> Array:
"""Wraps XLA's `Reduce
<https://www.tensorflow.org/xla/operation_semantics#reduce>`_
operator.
"""
flat_operands, operand_tree = tree_util.tree_flatten(operands)
flat_init_values, init_value_tree = tree_util.tree_flatten(init_values)
if operand_tree != init_value_tree:
raise ValueError('Operands must have the same tree structure as init_values:'
f' {operand_tree} vs. {init_value_tree}')
if len(flat_operands) != len(flat_init_values):
raise ValueError('Must have same total number of operands as init_values: '
f' {len(flat_operands)} vs. {len(flat_init_values)}')
monoid_reducer = _get_monoid_reducer(computation, flat_init_values)
if monoid_reducer:
return monoid_reducer(*flat_operands, dimensions)
else:
flat_init_avals = safe_map(_abstractify, flat_init_values)
jaxpr, consts, out_tree = _variadic_reduction_jaxpr(
computation, tuple(flat_init_avals), init_value_tree)
out = reduce_p.bind(*(flat_operands + flat_init_values), computation=computation,
jaxpr=jaxpr, consts=consts, dimensions=tuple(dimensions))
return tree_util.tree_unflatten(out_tree, out)
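# Usage sketch (illustrative): a full sum reduction; `lax.add` is recognized as
# a monoid, so this dispatches to the specialized reduce_sum primitive:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.arange(6.).reshape(2, 3)
#   lax.reduce(x, 0., lax.add, dimensions=(0, 1))   # -> 15.0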
@cache()
def _reduction_jaxpr(computation, aval):
pval = pe.PartialVal.unknown(aval)
comp = lu.wrap_init(lambda x, y: (computation(x, y),))
jaxpr, _, consts = pe.trace_to_jaxpr(comp, (pval, pval), instantiate=False)
return jaxpr, consts
@cache()
def _variadic_reduction_jaxpr(computation, flat_avals, aval_tree):
avals = tree_util.tree_unflatten(aval_tree, flat_avals)
flat_in_avals, in_tree = tree_util.tree_flatten((avals, avals))
pvals = safe_map(pe.PartialVal.unknown, flat_in_avals)
comp = lu.wrap_init(computation)
flat_comp, out_tree = api_util.flatten_fun_nokwargs(comp, in_tree)
jaxpr, _, consts = pe.trace_to_jaxpr(flat_comp, tuple(pvals),
instantiate=False)
return jaxpr, consts, out_tree()
def _get_monoid_reducer(monoid_op: Callable, xs: Array) -> Optional[Callable]:
if len(xs) != 1:
return None
x, = xs
aval = core.get_aval(x)
dtype = _dtype(x)
if (type(aval) is ConcreteArray) and aval.shape == ():
    if monoid_op is add:
      return np.equal(aval.val, 0) and _reduce_sum
    elif monoid_op is mul:
      return np.equal(aval.val, 1) and _reduce_prod
    elif monoid_op is bitwise_or and dtype == np.bool_:
      return np.equal(aval.val, _get_max_identity(dtype)) and _reduce_or
    elif monoid_op is bitwise_and and dtype == np.bool_:
      return np.equal(aval.val, _get_min_identity(dtype)) and _reduce_and
    elif monoid_op is max:
      return np.equal(aval.val, _get_max_identity(dtype)) and _reduce_max
    elif monoid_op is min:
      return np.equal(aval.val, _get_min_identity(dtype)) and _reduce_min
return None
def _get_max_identity(dtype: DType) -> Array:
if dtypes.issubdtype(dtype, np.inexact):
return np.array(-np.inf, dtype)
elif dtypes.issubdtype(dtype, np.integer):
return np.array(dtypes.iinfo(dtype).min, dtype)
elif dtypes.issubdtype(dtype, np.bool_):
return np.array(False, np.bool_)
def _get_min_identity(dtype: DType) -> Array:
if dtypes.issubdtype(dtype, np.inexact):
return np.array(np.inf, dtype)
elif dtypes.issubdtype(dtype, np.integer):
return np.array(dtypes.iinfo(dtype).max, dtype)
elif dtypes.issubdtype(dtype, np.bool_):
return np.array(True, np.bool_)
def _reduce_sum(operand: Array, axes: Sequence[int]) -> Array:
return reduce_sum_p.bind(operand, axes=tuple(axes))
def _reduce_prod(operand: Array, axes: Sequence[int]) -> Array:
return reduce_prod_p.bind(operand, axes=tuple(axes))
def _reduce_max(operand: Array, axes: Sequence[int]) -> Array:
return reduce_max_p.bind(operand, axes=tuple(axes))
def _reduce_min(operand: Array, axes: Sequence[int]) -> Array:
return reduce_min_p.bind(operand, axes=tuple(axes))
def _reduce_or(operand: Array, axes: Sequence[int]) -> Array:
return reduce_or_p.bind(operand, axes=tuple(axes))
def _reduce_and(operand: Array, axes: Sequence[int]) -> Array:
return reduce_and_p.bind(operand, axes=tuple(axes))
def reduce_window(operand: Array, init_value: Array, computation: Callable,
window_dimensions: Shape, window_strides: Sequence[int],
padding: Union[str, Sequence[Tuple[int, int]]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
"""Wraps XLA's `ReduceWindowWithGeneralPadding
<https://www.tensorflow.org/xla/operation_semantics#reducewindow>`_
operator.
"""
if isinstance(padding, str):
dilated_window_dims = (window_dimensions if window_dilation is None else
_dilate_shape(window_dimensions, window_dilation))
padding = tuple(padtype_to_pads(operand.shape, dilated_window_dims,
window_strides, padding))
else:
padding = tuple(padding)
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
monoid_reducer = _get_monoid_window_reducer(computation, init_value)
if monoid_reducer:
return monoid_reducer(operand, window_dimensions, window_strides, padding,
base_dilation, window_dilation)
else:
jaxpr, consts = _reduction_jaxpr(computation, _abstractify(init_value))
return reduce_window_p.bind(
operand, init_value, jaxpr=jaxpr, consts=consts,
window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=padding,
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
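# Usage sketch (illustrative): 2x2 max pooling with stride 2; `lax.max` with a
# -inf init value dispatches to the specialized reduce_window_max primitive:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.arange(16.).reshape(4, 4)
#   lax.reduce_window(x, -jnp.inf, lax.max, window_dimensions=(2, 2),
#                     window_strides=(2, 2), padding='VALID')   # -> shape (2, 2)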
def _get_monoid_window_reducer(monoid_op: Callable, x: Array) -> Optional[Callable]:
aval = core.get_aval(x)
if (type(aval) is ConcreteArray) and aval.shape == ():
if monoid_op is add:
return aval.val == 0 and _reduce_window_sum
elif monoid_op is max:
return aval.val == _get_max_identity(aval.dtype) and _reduce_window_max
elif monoid_op is min:
return aval.val == _get_min_identity(aval.dtype) and _reduce_window_min
return None
def _reduce_window_sum(operand: Array, window_dimensions: Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
return reduce_window_sum_p.bind(
operand, window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _reduce_window_prod(operand: Array, window_dimensions: Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
init_value = _const(operand, 1)
jaxpr, consts = _reduction_jaxpr(mul, _abstractify(init_value))
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
return reduce_window_p.bind(
operand, init_value, jaxpr=jaxpr, consts=consts,
window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _reduce_window_max(operand: Array, window_dimensions: Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
return reduce_window_max_p.bind(
operand, window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _reduce_window_min(operand: Array, window_dimensions: Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Optional[Sequence[int]] = None,
window_dilation: Optional[Sequence[int]] = None) -> Array:
if base_dilation is None:
base_dilation = (1,) * len(window_dimensions)
if window_dilation is None:
window_dilation = (1,) * len(window_dimensions)
return reduce_window_min_p.bind(
operand, window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _select_and_scatter(operand: Array, select: Callable,
window_dimensions: Shape, window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]], source: Array,
init_value: Array, scatter: Callable,
base_dilation: Sequence[int],
window_dilation: Sequence[int]) -> Array:
select_jaxpr, select_consts = _reduction_jaxpr(select, _abstractify(init_value))
scatter_jaxpr, scatter_consts = _reduction_jaxpr(scatter, _abstractify(init_value))
return select_and_scatter_p.bind(
operand, source, init_value, select_jaxpr=select_jaxpr,
select_consts=select_consts, scatter_jaxpr=scatter_jaxpr,
scatter_consts=scatter_consts, window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def _select_and_scatter_add(source: Array, operand: Array,
select_prim: core.Primitive,
window_dimensions: Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]]) -> Array:
return select_and_scatter_add_p.bind(
source, operand, select_prim=select_prim,
window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding))
def _select_and_gather_add(tangents: Array, operand: Array,
select_prim: core.Primitive,
window_dimensions: Shape,
window_strides: Sequence[int],
padding: Sequence[Tuple[int, int]],
base_dilation: Sequence[int],
window_dilation: Sequence[int]) -> Array:
"""Extracts the tangent corresponding to the minimum or maximum element in each
window of the `operand` array.
Wraps XLA's `ReduceWindow
<https://www.tensorflow.org/xla/operation_semantics#reducewindow>`_
operator, which applies a reduction function to all elements in each window of the
input multi-dimensional array. In this case, the input multi-dimensional array is
built by packing each element in the `operand` array with its corresponding
element in the `tangents` array.
Args:
tangents: an array
operand: an array with the same shape as `tangents`
select_prim: a reduction function (restricted to `ge_p` and `le_p`)
window_dimensions: an array of integers for window dimension values
    window_strides: an array of integers for window stride values
    padding: an array of `(low, high)` integer pairs for window padding values
    base_dilation: an array of integers for base dilation values
    window_dilation: an array of integers for window dilation values
  Returns:
    An array containing the elements in `tangents` corresponding to the output
    of the reduction of `operand` in each window.
"""
return select_and_gather_add_p.bind(
tangents, operand, select_prim=select_prim,
window_dimensions=tuple(window_dimensions),
window_strides=tuple(window_strides), padding=tuple(padding),
base_dilation=tuple(base_dilation),
window_dilation=tuple(window_dilation))
def sort(operand: Union[Array, Sequence[Array]], dimension: int = -1,
is_stable: bool = True, num_keys: int = 1) -> Union[Array, Tuple[Array, ...]]:
"""Wraps XLA's `Sort
<https://www.tensorflow.org/xla/operation_semantics#sort>`_
operator.
Args:
    operand: Array or sequence of arrays.
    dimension: integer dimension along which to sort. Default: -1.
    is_stable: boolean specifying whether to use a stable sort. Default: True.
    num_keys: number of operands to treat as sort keys. Default: 1.
      For num_keys > 1, the sort order will be determined lexicographically
      using the first `num_keys` arrays, with the first key being primary.
      The remaining operands will be returned with the same permutation.
  Returns:
    Sorted version of the input or inputs.
"""
if isinstance(operand, Sequence):
if len(operand) == 0:
raise TypeError("Sort requires at least one operand")
if not (1 <= num_keys <= len(operand)):
raise ValueError(f"num_keys={num_keys} must be between 1 and len(operand)={len(operand)}")
dimension = canonicalize_axis(dimension, len(operand[0].shape))
return tuple(sort_p.bind(*operand, dimension=dimension,
is_stable=is_stable,
num_keys=num_keys))
else:
if num_keys != 1:
raise ValueError(f"num_keys={num_keys} must equal 1 for a single operand.")
dimension = canonicalize_axis(dimension, len(operand.shape))
return sort_p.bind(operand, dimension=dimension, is_stable=is_stable, num_keys=1)[0]
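# Usage sketch (illustrative): lexicographic sort of two operands by the first:
#   import jax.numpy as jnp
#   from jax import lax
#   keys = jnp.array([3, 1, 2])
#   vals = jnp.array([30., 10., 20.])
#   k, v = lax.sort((keys, vals), num_keys=1)
#   # k == [1, 2, 3]; v is permuted the same way: [10., 20., 30.]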
def sort_key_val(keys: Array, values: Array, dimension: int = -1,
is_stable: bool = True) -> Tuple[Array, Array]:
"""Sorts ``keys`` along ``dimension`` and applies same permutation to ``values``."""
dimension = canonicalize_axis(dimension, len(keys.shape))
k, v = sort_p.bind(keys, values, dimension=dimension, is_stable=is_stable, num_keys=1)
return k, v
def top_k(operand: Array, k: int) -> Tuple[Array, Array]:
"""Returns top ``k`` values and their indices along the last axis of ``operand``."""
k = int(k)
if k < 0:
raise ValueError("k argument to top_k must be nonnegative, got {}".format(k))
return top_k_p.bind(operand, k=k)
def tie_in(x: Array, y: Array) -> Array:
"""Deprecated. Ignores ``x`` and returns ``y``."""
return y
def full(shape: Shape, fill_value: Array, dtype: Optional[DType] = None) -> Array:
"""Returns an array of `shape` filled with `fill_value`.
  Args:
shape: sequence of integers, describing the shape of the output array.
fill_value: the value to fill the new array with.
dtype: the type of the output array, or `None`. If not `None`, `fill_value`
will be cast to `dtype`.
"""
shape = canonicalize_shape(shape)
if np.shape(fill_value):
msg = "full must be called with scalar fill_value, got fill_value.shape {}."
raise TypeError(msg.format(np.shape(fill_value)))
dtype = dtypes.canonicalize_dtype(dtype or _dtype(fill_value))
fill_value = convert_element_type(fill_value, dtype)
return broadcast(fill_value, shape)
def _device_put_raw(x):
if isinstance(x, xla.DeviceArray):
return x
else:
aval = raise_to_shaped(core.get_aval(x))
return xla.array_result_handler(None, aval)(*xla.device_put(x))
def iota(dtype: DType, size: int) -> Array:
"""Wraps XLA's `Iota
<https://www.tensorflow.org/xla/operation_semantics#iota>`_
operator.
"""
if config.omnistaging_enabled:
dtype = dtypes.canonicalize_dtype(dtype)
size = core.concrete_or_error(int, size, "size argument of lax.iota")
return iota_p.bind(dtype=dtype, shape=(size,), dimension=0)
else:
size = size if type(size) is masking.Poly else int(size)
shape = canonicalize_shape((size,))
dtype = dtypes.canonicalize_dtype(dtype)
lazy_expr = lazy.iota(dtype, shape[0])
aval = ShapedArray(shape, dtype)
return xla.make_device_array(aval, None, lazy_expr, xla.DeviceConstant())
def broadcasted_iota(dtype: DType, shape: Shape, dimension: int) -> Array:
"""Convenience wrapper around ``iota``."""
dtype = dtypes.canonicalize_dtype(dtype)
shape = canonicalize_shape(shape)
dimension = core.concrete_or_error(
int, dimension, "dimension argument of lax.broadcasted_iota")
return iota_p.bind(dtype=dtype, shape=shape, dimension=dimension)
def _eye(dtype: DType, shape: Shape, offset: int) -> Array:
"""Like numpy.eye, create a 2D array with ones on a diagonal."""
N, M = tuple(map(int, shape))
offset = int(offset)
dtype = dtypes.canonicalize_dtype(dtype)
if config.omnistaging_enabled:
bool_eye = eq(add(broadcasted_iota(np.int32, (N, M), 0), np.int32(offset)),
broadcasted_iota(np.int32, (N, M), 1))
return convert_element_type_p.bind(bool_eye, new_dtype=dtype,
old_dtype=np.bool_)
else:
lazy_expr = lazy.eye(dtype, (N, M), offset)
aval = ShapedArray((N, M), dtype)
return xla.make_device_array(aval, None, lazy_expr, xla.DeviceConstant())
def _delta(dtype: DType, shape: Shape, axes: Sequence[int]) -> Array:
"""This utility function exists for creating Kronecker delta arrays."""
shape = tuple(map(int, shape))
axes = tuple(map(int, axes))
dtype = dtypes.canonicalize_dtype(dtype)
base_shape = tuple(np.take(shape, axes))
if config.omnistaging_enabled:
iotas = [broadcasted_iota(np.uint32, base_shape, i)
for i in range(len(base_shape))]
eyes = [eq(i1, i2) for i1, i2 in zip(iotas[:-1], iotas[1:])]
result = convert_element_type_p.bind(_reduce(operator.and_, eyes),
new_dtype=dtype, old_dtype=np.bool_)
return broadcast_in_dim(result, shape, axes)
else:
lazy_expr = lazy.broadcast(lazy.delta(dtype, base_shape), shape, axes)
aval = ShapedArray(shape, dtype)
return xla.make_device_array(aval, None, lazy_expr, xla.DeviceConstant())
def _tri(dtype: DType, shape: Shape, offset: int) -> Array:
"""Like numpy.tri, create a 2D array with ones below a diagonal."""
N, M = tuple(map(int, shape))
offset = int(offset)
dtype = dtypes.canonicalize_dtype(dtype)
if config.omnistaging_enabled:
bool_tri = ge(add(broadcasted_iota(np.int32, (N, M), 0), np.int32(offset)),
broadcasted_iota(np.int32, (N, M), 1))
return convert_element_type_p.bind(bool_tri, old_dtype=np.int32,
new_dtype=dtype)
else:
lazy_expr = lazy.tri(dtype, (N, M), offset)
aval = ShapedArray((N, M), dtype)
return xla.make_device_array(aval, None, lazy_expr, xla.DeviceConstant())
def stop_gradient(x):
"""Stops gradient computation.
Operationally ``stop_gradient`` is the identity function, that is, it returns
argument `x` unchanged. However, ``stop_gradient`` prevents the flow of
gradients during forward or reverse-mode automatic differentiation. If there
are multiple nested gradient computations, ``stop_gradient`` stops gradients
for all of them.
For example:
>>> jax.grad(lambda x: x**2)(3.)
array(6., dtype=float32)
>>> jax.grad(lambda x: jax.lax.stop_gradient(x)**2)(3.)
array(0., dtype=float32)
>>> jax.grad(jax.grad(lambda x: x**2))(3.)
array(2., dtype=float32)
>>> jax.grad(jax.grad(lambda x: jax.lax.stop_gradient(x)**2))(3.)
array(0., dtype=float32)
"""
def stop(x):
if (dtypes.issubdtype(_dtype(x), np.floating) or
dtypes.issubdtype(_dtype(x), np.complexfloating)):
return ad_util.stop_gradient_p.bind(x)
else:
return x # only bind primitive on inexact dtypes, to avoid some staging
return tree_map(stop, x)
### convenience wrappers around traceables
def conv(lhs: Array, rhs: Array, window_strides: Sequence[int],
padding: str, precision: PrecisionLike = None) -> Array:
"""Convenience wrapper around `conv_general_dilated`.
Args:
lhs: a rank `n+2` dimensional input array.
rhs: a rank `n+2` dimensional array of kernel weights.
window_strides: a sequence of `n` integers, representing the inter-window
strides.
    padding: either the string `'SAME'` or the string `'VALID'`.
precision: Optional. Either ``None``, which means the default precision for
the backend, a ``lax.Precision`` enum value (``Precision.DEFAULT``,
``Precision.HIGH`` or ``Precision.HIGHEST``) or a tuple of two
      ``lax.Precision`` enums indicating precision of ``lhs`` and ``rhs``.
Returns:
An array containing the convolution result.
"""
return conv_general_dilated(lhs, rhs, window_strides, padding,
precision=precision)
def conv_with_general_padding(lhs: Array, rhs: Array,
window_strides: Sequence[int],
padding: Union[str, Sequence[Tuple[int, int]]],
lhs_dilation: Optional[Sequence[int]],
rhs_dilation: Optional[Sequence[int]],
precision: PrecisionLike = None) -> Array:
"""Convenience wrapper around `conv_general_dilated`.
Args:
lhs: a rank `n+2` dimensional input array.
rhs: a rank `n+2` dimensional array of kernel weights.
window_strides: a sequence of `n` integers, representing the inter-window
strides.
padding: either the string `'SAME'`, the string `'VALID'`, or a sequence of
`n` `(low, high)` integer pairs that give the padding to apply before and
after each spatial dimension.
lhs_dilation: `None`, or a sequence of `n` integers, giving the
dilation factor to apply in each spatial dimension of `lhs`. LHS dilation
is also known as transposed convolution.
rhs_dilation: `None`, or a sequence of `n` integers, giving the
dilation factor to apply in each spatial dimension of `rhs`. RHS dilation
is also known as atrous convolution.
precision: Optional. Either ``None``, which means the default precision for
the backend, a ``lax.Precision`` enum value (``Precision.DEFAULT``,
``Precision.HIGH`` or ``Precision.HIGHEST``) or a tuple of two
      ``lax.Precision`` enums indicating precision of ``lhs`` and ``rhs``.
Returns:
An array containing the convolution result.
"""
return conv_general_dilated(
lhs, rhs, window_strides, padding, lhs_dilation=lhs_dilation,
rhs_dilation=rhs_dilation, precision=precision)
def _conv_transpose_padding(k, s, padding):
"""Calculate before and after padding for a dim of transposed convolution.
Args:
k: int: kernel dimension.
s: int: dimension stride value.
padding: 'same' or 'valid' padding mode for original forward conv.
Returns:
2-tuple: ints: before and after padding for transposed convolution.
"""
if padding == 'SAME':
pad_len = k + s - 2
if s > k - 1:
pad_a = k - 1
else:
pad_a = int(np.ceil(pad_len / 2))
elif padding == 'VALID':
pad_len = k + s - 2 + _max(k - s, 0)
pad_a = k - 1
else:
raise ValueError('Padding mode must be `SAME` or `VALID`.')
pad_b = pad_len - pad_a
return pad_a, pad_b
def _flip_axes(x, axes):
"""Flip ndarray 'x' along each axis specified in axes tuple."""
for axis in axes:
x = np.flip(x, axis)
return x
def conv_transpose(lhs: Array, rhs: Array, strides: Sequence[int],
padding: Union[str, Sequence[Tuple[int, int]]],
rhs_dilation: Optional[Sequence[int]] = None,
dimension_numbers: ConvGeneralDilatedDimensionNumbers = None,
transpose_kernel: bool = False,
precision: PrecisionLike = None) -> Array:
"""Convenience wrapper for calculating the N-d convolution "transpose".
This function directly calculates a fractionally strided conv rather than
indirectly calculating the gradient (transpose) of a forward convolution.
Args:
lhs: a rank `n+2` dimensional input array.
rhs: a rank `n+2` dimensional array of kernel weights.
strides: sequence of `n` integers, sets fractional stride.
padding: 'SAME', 'VALID' will set as transpose of corresponding forward
conv, or a sequence of `n` integer 2-tuples describing before-and-after
padding for each `n` spatial dimension.
rhs_dilation: `None`, or a sequence of `n` integers, giving the
dilation factor to apply in each spatial dimension of `rhs`. RHS dilation
is also known as atrous convolution.
dimension_numbers: tuple of dimension descriptors as in
lax.conv_general_dilated. Defaults to tensorflow convention.
transpose_kernel: if True flips spatial axes and swaps the input/output
channel axes of the kernel. This makes the output of this function identical
to the gradient-derived functions like keras.layers.Conv2DTranspose
applied to the same kernel. For typical use in neural nets this is completely
pointless and just makes input/output channel specification confusing.
precision: Optional. Either ``None``, which means the default precision for
the backend, a ``lax.Precision`` enum value (``Precision.DEFAULT``,
``Precision.HIGH`` or ``Precision.HIGHEST``) or a tuple of two
      ``lax.Precision`` enums indicating precision of ``lhs`` and ``rhs``.
Returns:
Transposed N-d convolution, with output padding following the conventions of
keras.layers.Conv2DTranspose.
"""
assert len(lhs.shape) == len(rhs.shape) and len(lhs.shape) >= 2
ndims = len(lhs.shape)
one = (1,) * (ndims - 2)
# Set dimensional layout defaults if not specified.
if dimension_numbers is None:
if ndims == 2:
dimension_numbers = ('NC', 'IO', 'NC')
elif ndims == 3:
dimension_numbers = ('NHC', 'HIO', 'NHC')
elif ndims == 4:
dimension_numbers = ('NHWC', 'HWIO', 'NHWC')
elif ndims == 5:
dimension_numbers = ('NHWDC', 'HWDIO', 'NHWDC')
else:
      raise ValueError('No dimension_numbers defaults for rank > 5 inputs.')
dn = conv_dimension_numbers(lhs.shape, rhs.shape, dimension_numbers)
k_shape = np.take(rhs.shape, dn.rhs_spec)
k_sdims = k_shape[2:]
# Calculate correct output shape given padding and strides.
pads: Union[str, Sequence[Tuple[int, int]]]
if padding in {'SAME', 'VALID'}:
if rhs_dilation is None:
rhs_dilation = (1,) * (rhs.ndim - 2)
effective_k_size = map(lambda k, r: (k-1) * r + 1, k_sdims, rhs_dilation)
pads = [_conv_transpose_padding(k, s, padding)
for k,s in zip(effective_k_size, strides)]
else:
pads = padding
if transpose_kernel:
# flip spatial dims and swap input / output channel axes
rhs = _flip_axes(rhs, np.array(dn.rhs_spec)[2:])
rhs = np.swapaxes(rhs, dn.rhs_spec[0], dn.rhs_spec[1])
return conv_general_dilated(lhs, rhs, one, pads, strides, rhs_dilation, dn,
precision=precision)
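# Usage sketch (illustrative): a 1-D fractionally strided convolution that
# doubles the spatial length, using the default NHC/HIO layout:
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.ones((1, 8, 4))     # N=1, H=8, C=4
#   w = jnp.ones((3, 4, 16))    # H=3, I=4, O=16
#   lax.conv_transpose(x, w, strides=(2,), padding='SAME').shape
#   # -> (1, 16, 16)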
def full_like(x: Array, fill_value: Array, dtype: Optional[DType] = None,
shape: Optional[Shape] = None) -> Array:
"""Create a full array like np.full based on the example array `x`.
Args:
x: example array-like, used for shape and dtype information.
fill_value: a scalar value to fill the entries of the output array.
dtype: optional, a dtype parameter for the output ndarray.
shape: optional, a shape parameter for the output ndarray.
Returns:
An ndarray with the same shape as `x` with its entries set equal to
`fill_value`, similar to the output of np.full.
"""
fill_shape = np.shape(x) if shape is None else canonicalize_shape(shape)
if not config.omnistaging_enabled:
fill_value = tie_in(x, fill_value)
return full(fill_shape, fill_value, dtype or _dtype(x))
def collapse(operand: Array, start_dimension: int,
stop_dimension: int) -> Array:
"""Collapses dimensions of an array into a single dimension.
For example, if ``operand`` is an array with shape ``[2, 3, 4]``,
``collapse(operand, 0, 2).shape == [6, 4]``. The elements of the collapsed
dimension are laid out major-to-minor, i.e., with the lowest-numbered
dimension as the slowest varying dimension.
Args:
operand: an input array.
start_dimension: the start of the dimensions to collapse (inclusive).
stop_dimension: the end of the dimensions to collapse (exclusive).
Returns:
An array where dimensions ``[start_dimension, stop_dimension)`` have been
collapsed (raveled) into a single dimension.
"""
lo, hi = start_dimension, stop_dimension
size = prod(operand.shape[lo:hi])
new_shape = operand.shape[:lo] + (size,) + operand.shape[hi:]
return reshape(operand, new_shape)
def slice_in_dim(operand: Array, start_index: Optional[int],
limit_index: Optional[int],
stride: int = 1, axis: int = 0)-> Array:
"""Convenience wrapper around slice applying to only one dimension."""
start_indices = [0] * operand.ndim
limit_indices = list(operand.shape)
strides = [1] * operand.ndim
# translate `None`
len_axis = operand.shape[axis]
start_index_int = _canonicalize_dimension(start_index) if start_index is not None else 0
limit_index_int = _canonicalize_dimension(limit_index) if limit_index is not None else len_axis
# translate negative indices
if start_index_int < 0:
start_index_int = start_index_int + len_axis
if limit_index_int < 0:
limit_index_int = limit_index_int + len_axis
axis = int(axis)
start_indices[axis] = start_index_int
limit_indices[axis] = limit_index_int
strides[axis] = int(stride)
return slice(operand, start_indices, limit_indices, strides)
def index_in_dim(operand: Array, index: int, axis: int = 0,
keepdims: bool = True) -> Array:
"""Convenience wrapper around slice to perform int indexing."""
index, axis = int(index), int(axis)
axis_size = operand.shape[axis]
wrapped_index = index + axis_size if index < 0 else index
if not 0 <= wrapped_index < axis_size:
msg = 'index {} is out of bounds for axis {} with size {}'
raise IndexError(msg.format(index, axis, axis_size))
result = slice_in_dim(operand, wrapped_index, wrapped_index + 1, 1, axis)
if keepdims:
return result
else:
return squeeze(result, (axis,))
def dynamic_slice_in_dim(operand: Array, start_index: Array,
slice_size: int, axis: int = 0) -> Array:
"""Convenience wrapper around dynamic_slice applying to one dimension."""
start_indices = [_zero(start_index)] * operand.ndim
slice_sizes = list(operand.shape)
axis = int(axis)
start_indices[axis] = start_index
slice_sizes[axis] = int(slice_size)
return dynamic_slice(operand, start_indices, slice_sizes)
def dynamic_index_in_dim(operand: Array, index: Array, axis: int = 0,
keepdims: bool = True) -> Array:
"""Convenience wrapper around dynamic_slice to perform int indexing."""
result = dynamic_slice_in_dim(operand, index, 1, axis)
if keepdims:
return result
else:
return squeeze(result, (axis,))
def dynamic_update_slice_in_dim(operand: Array, update: Array,
start_index: Array, axis: int) -> Array:
"""Convenience wrapper around :func:`dynamic_update_slice` to update a slice
in a single ``axis``.
"""
axis = int(axis)
start_indices = [_zero(start_index)] * _ndim(operand)
start_indices[axis] = start_index
return dynamic_update_slice(operand, update, start_indices)
def dynamic_update_index_in_dim(operand: Array, update: Array, index: Array,
axis: int) -> Array:
"""Convenience wrapper around :func:`dynamic_update_slice` to update a slice
of size 1 in a single ``axis``.
"""
axis = int(axis)
if _ndim(update) != _ndim(operand):
assert _ndim(update) + 1 == _ndim(operand)
update = expand_dims(update, (axis,))
return dynamic_update_slice_in_dim(operand, update, index, axis)
def batch_matmul(lhs: Array, rhs: Array,
precision: PrecisionLike = None) -> Array:
"""Batch matrix multiplication."""
if _min(lhs.ndim, rhs.ndim) < 2:
raise ValueError('Arguments to batch_matmul must be at least 2D, got {}, {}'
.format(lhs.ndim, rhs.ndim))
if lhs.ndim != rhs.ndim:
raise ValueError('Arguments to batch_matmul must have same ndim, got {}, {}'
.format(lhs.ndim, rhs.ndim))
lhs_contract = (lhs.ndim - 1,)
rhs_contract = (rhs.ndim - 2,)
batch = tuple(range(lhs.ndim - 2))
return dot_general(lhs, rhs, ((lhs_contract, rhs_contract), (batch, batch)),
precision=precision)
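# Usage sketch (illustrative):
#   import jax.numpy as jnp
#   from jax import lax
#   a = jnp.ones((8, 3, 4))
#   b = jnp.ones((8, 4, 5))
#   lax.batch_matmul(a, b).shape   # -> (8, 3, 5)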
# These functions also exist in the XLA client library, but we treat them
# as non-primitive to maintain a smaller set of autodiff primitives.
def square(x: Array) -> Array:
r"""Elementwise square: :math:`x^2`."""
return integer_pow(x, 2)
def reciprocal(x: Array) -> Array:
r"""Elementwise reciprocal: :math:`1 \over x`."""
return integer_pow(x, -1)
def _upcast_fp16_for_computation(f):
@functools.wraps(f)
def f_wrapped(x):
dtype = _dtype(x)
if dtype == np.float16 or dtype == dtypes.bfloat16:
return convert_element_type(
f(convert_element_type(x, np.float32)), dtype)
return f(x)
return f_wrapped
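# Note: the decorator above runs f in float32 when given float16 or bfloat16
# inputs and casts the result back, trading some speed for accuracy; e.g. the
# tan below effectively computes
#   convert_element_type(tan(convert_element_type(x, float32)), float16)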
@api.jit
@_upcast_fp16_for_computation
def tan(x: Array) -> Array:
r"""Elementwise tangent: :math:`\mathrm{tan}(x)`."""
return div(sin(x), cos(x))
@api.jit
def asin(x: Array) -> Array:
r"""Elementwise arc sine: :math:`\mathrm{asin}(x)`."""
if dtypes.issubdtype(_dtype(x), np.complexfloating):
return mul(_const(x, -1j), asinh(mul(_const(x, 1j), x)))
else:
return mul(_const(x, 2),
atan2(x, add(_const(x, 1), sqrt(sub(_const(x, 1), square(x))))))
@api.jit
def acos(x: Array) -> Array:
r"""Elementwise arc cosine: :math:`\mathrm{acos}(x)`."""
if dtypes.issubdtype(_dtype(x), np.complexfloating):
result = mul(_const(x, 1j), acosh(x))
# By convention, numpy chooses the branch with positive real part.
rpart = real(result)
return select(
gt(rpart, _const(rpart, 0)),
result,
neg(result)
)
else:
return select(
ne(x, _const(x, -1.0)),
mul(_const(x, 2),
atan2(sqrt(sub(_const(x, 1), square(x))), add(_const(x, 1), x))),
full_like(x, np.pi))
def atan(x: Array) -> Array:
r"""Elementwise arc tangent: :math:`\mathrm{atan}(x)`."""
if dtypes.issubdtype(_dtype(x), np.complexfloating):
return mul(_const(x, -1j), atanh(mul(_const(x, 1j), x)))
else:
return atan2(x, _const(x, 1))
def sinh(x: Array) -> Array:
r"""Elementwise hyperbolic sine: :math:`\mathrm{sinh}(x)`."""
return sinh_p.bind(x)
def cosh(x: Array) -> Array:
r"""Elementwise hyperbolic cosine: :math:`\mathrm{cosh}(x)`."""
return cosh_p.bind(x)
def asinh(x: Array) -> Array:
r"""Elementwise inverse hyperbolic sine: :math:`\mathrm{asinh}(x)`."""
return asinh_p.bind(x)
def acosh(x: Array) -> Array:
r"""Elementwise inverse hyperbolic cosine: :math:`\mathrm{acosh}(x)`."""
return acosh_p.bind(x)
def atanh(x: Array) -> Array:
r"""Elementwise inverse hyperbolic tangent: :math:`\mathrm{atanh}(x)`."""
return atanh_p.bind(x)
# Add some methods to ShapedArray that rely on lax primitives
ShapedArray.broadcast = core.aval_method(broadcast)
ShapedArray.transpose = core.aval_method(transpose) # clobbered by lax_numpy
ShapedArray.reshape = core.aval_method(reshape) # clobbered by lax_numpy
def _iter(tracer):
if tracer.ndim == 0:
raise TypeError("iteration over a 0-d array") # same as numpy error
else:
n = int(tracer.shape[0])
# return (index_in_dim(tracer, i, keepdims=False) for i in range(n))
return iter([index_in_dim(tracer, i, keepdims=False) for i in range(n)])
ShapedArray._iter = staticmethod(_iter)
# Add some ad handlers that use (or could use) lax primitives
def zeros_like_array(x):
return full_like(x, 0)
for t in itertools.chain(dtypes.python_scalar_dtypes.keys(), array_types,
[xla.DeviceArray, pxla.ShardedDeviceArray]):
ad_util.jaxval_adders[t] = add
ad_util.jaxval_zeros_likers[xla.DeviceArray] = zeros_like_array
ad_util.jaxval_zeros_likers[pxla.ShardedDeviceArray] = zeros_like_array
### primitives
_input_dtype = lambda *args, **_: dtypes.canonicalize_dtype(args[0].dtype)
_fixed_dtype = lambda dtype: lambda *args, **kwargs: dtypes.canonicalize_dtype(dtype)
_complex_basetype = lambda dtype: np.abs(np.zeros((), dtype)).dtype
def standard_primitive(shape_rule, dtype_rule, name, translation_rule=None,
multiple_results=False):
prim = Primitive(name)
prim.multiple_results = multiple_results
prim.def_impl(partial(xla.apply_primitive, prim))
prim.def_abstract_eval(partial(standard_abstract_eval, prim, shape_rule, dtype_rule))
xla.translations[prim] = translation_rule or partial(standard_translate, name)
return prim
def standard_abstract_eval(prim, shape_rule, dtype_rule, *args, **kwargs):
assert all(isinstance(arg, UnshapedArray) for arg in args), args
least_specialized = _max(
map(type, args), key=operator.attrgetter('array_abstraction_level'))
if least_specialized is ConcreteArray:
out_vals = prim.impl(*[x.val for x in args], **kwargs)
if not prim.multiple_results:
out_vals = [out_vals]
out_avals = safe_map(ConcreteArray, out_vals)
elif least_specialized is ShapedArray:
shapes, dtypes = shape_rule(*args, **kwargs), dtype_rule(*args, **kwargs)
if not prim.multiple_results:
shapes, dtypes = [shapes], [dtypes]
out_avals = safe_map(ShapedArray, shapes, dtypes)
elif least_specialized is UnshapedArray:
dtypes = dtype_rule(*args, **kwargs)
if not prim.multiple_results:
dtypes = [dtypes]
out_avals = safe_map(UnshapedArray, dtypes)
else:
raise TypeError(args, least_specialized)
if not prim.multiple_results:
return out_avals[0]
return out_avals
def standard_translate(name, c, *args, **kwargs):
xla_opname = ''.join(term.capitalize() for term in name.split('_'))
return getattr(xops, xla_opname)(*args, **kwargs)
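# Worked example: standard_translate derives the XLA op name by CamelCasing
# the primitive name, e.g.
#   'shift_right_arithmetic' -> xops.ShiftRightArithmetic
#   'is_finite'              -> xops.IsFinite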
def unop_dtype_rule(result_dtype, accepted_dtypes, name, aval, **kwargs):
if not any(dtypes.issubdtype(aval.dtype, t) for t in accepted_dtypes):
msg = '{} does not accept dtype {}. Accepted dtypes are subtypes of {}.'
typename = str(np.dtype(aval.dtype).name)
accepted_typenames = (t.__name__ for t in accepted_dtypes)
raise TypeError(msg.format(name, typename, ', '.join(accepted_typenames)))
return result_dtype(aval.dtype)
def unop(result_dtype, accepted_dtypes, name, translation_rule=None):
dtype_rule = partial(unop_dtype_rule, result_dtype, accepted_dtypes, name)
prim = standard_primitive(_attrgetter('shape'), dtype_rule, name,
translation_rule=translation_rule)
batching.defvectorized(prim)
masking.defvectorized(prim)
return prim
standard_unop = partial(unop, _identity)
_attrgetter = lambda name: lambda x, **kwargs: getattr(x, name)
def naryop_dtype_rule(result_dtype, accepted_dtypes, name, *avals, **kwargs):
aval_dtypes = [aval.dtype for aval in avals]
for i, (aval_dtype, types) in enumerate(zip(aval_dtypes, accepted_dtypes)):
if not any(dtypes.issubdtype(aval_dtype, t) for t in types):
if aval_dtype is dtypes.float0:
raise TypeError(
f"Called {name} with a float0 at position {i}. "
"float0s do not support any operations by design, because they "
"are not compatible with non-trivial vector spaces. No implicit dtype "
"conversion is done. You can use np.zeros_like(arr, dtype=np.float) "
"to cast a float0 array to a regular zeros array. \n"
"If you didn't expect to get a float0 you might have accidentally "
"taken a gradient with respect to an integer argument.")
else:
msg = ('{} does not accept dtype {} at position {}. '
'Accepted dtypes at position {} are subtypes of {}.')
typename = str(np.dtype(aval_dtype).name)
typenames = ', '.join(t.__name__ for t in types)
raise TypeError(msg.format(name, typename, i, i, typenames))
_check_same_dtypes(name, False, *aval_dtypes)
return result_dtype(*avals)
def _broadcasting_shape_rule(name, *avals):
shapes = [aval.shape for aval in avals if aval.shape]
if not shapes:
return ()
if len({len(shape) for shape in shapes}) != 1:
msg = '{} got arrays of different rank: {}.'
raise TypeError(msg.format(name, ', '.join(map(str, map(tuple, shapes)))))
result_shape = _try_broadcast_shapes(shapes)
if result_shape is None:
msg = '{} got incompatible shapes for broadcasting: {}.'
raise TypeError(msg.format(name, ', '.join(map(str, map(tuple, shapes)))))
return result_shape
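# Worked example: scalar avals are filtered out before the rank check, so only
# the non-scalar operands must agree in rank:
#   shapes (3, 1) and (3, 4)  -> result shape (3, 4)
#   shapes ()     and (3, 4)  -> result shape (3, 4)
#   shapes (2, 3) and (3,)    -> TypeError (different ranks)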
def naryop(result_dtype, accepted_dtypes, name, translation_rule=None):
dtype_rule = partial(naryop_dtype_rule, result_dtype, accepted_dtypes, name)
shape_rule = partial(_broadcasting_shape_rule, name)
prim = standard_primitive(shape_rule, dtype_rule, name,
translation_rule=translation_rule)
batching.defbroadcasting(prim)
masking.defnaryop(prim)
return prim
standard_naryop = partial(naryop, _input_dtype)
def _broadcast_translate(translate: Callable):
# Decorator for translation rules which adds explicit broadcasting of
# positional arguments. This is necessary only for a handful of primitives
# whose XLA implementations do not support broadcasting.
def _broadcast_array(array, array_shape, result_shape):
if array_shape == result_shape:
return array
bcast_dims = tuple(range(len(result_shape) - len(array_shape),
len(result_shape)))
result = xops.BroadcastInDim(array, result_shape, bcast_dims)
return result
def _broadcasted_translation_rule(c, *args, **kwargs):
shapes = [c.get_shape(arg).dimensions() for arg in args]
result_shape = broadcast_shapes(*shapes)
args = [_broadcast_array(arg, arg_shape, result_shape)
for arg, arg_shape in zip(args, shapes)]
return translate(c, *args, **kwargs)
return _broadcasted_translation_rule
# NOTE(mattjj): this isn't great for op-by-op fwd mode because it means JVPs
# get two extra ops in them: a reshape and a broadcast_in_dim (or sometimes just
# a broadcast). but saving the shape info with the primitives isn't great either
# because then we can't trace these ops without shape data.
def _brcast(x, *others):
# Used in jvprules to make naryop broadcasting explicit for transposability.
# Requires shape info during jvp tracing, which isn't strictly necessary.
# We don't need full numpy broadcasting, but otherwise the logic is the same
# so we reuse the broadcast_shapes function after filtering out scalars.
shapes = tuple(filter(None, map(np.shape, (x,) + others)))
shape = shapes and broadcast_shapes(*shapes)
if np.shape(x) != shape:
return _brcast_to(x, shape)
else:
return x
def _brcast_to(x, shape):
x_shape = np.shape(x)
assert x_shape != shape
if x_shape:
assert len(x_shape) == len(shape)
broadcast_dimensions, = np.where(np.equal(x_shape, shape))
squeezed_dimensions, = np.where(np.not_equal(x_shape, shape))
squeezed = squeeze(x, squeezed_dimensions)
return broadcast_in_dim(squeezed, shape, broadcast_dimensions)
else:
return broadcast(x, shape)
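# Worked example: _brcast_to(x, (3, 4)) for x of shape (3, 1) computes
# broadcast_dimensions = (0,) (axes whose sizes already match) and
# squeezed_dimensions = (1,), squeezes x to shape (3,), and then expands it
# with broadcast_in_dim to (3, 4).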
_float = {np.floating}
_complex = {np.complexfloating}
_complex_elem_types = {np.float32, np.float64}
_int = {np.integer}
_bool = {np.bool_}
_num = _int | _float | _complex
_any = _int | _float | _complex | _bool
_bool_or_int = _int | _bool
neg_p = standard_unop(_num, 'neg')
ad.deflinear(neg_p, lambda t: [neg(t)])
def _sign_translation_rule(c, x):
shape = c.get_shape(x)
dtype = shape.numpy_dtype()
if dtypes.issubdtype(dtype, np.unsignedinteger):
zero = xb.constant(c, np.array(0, dtype=dtype))
dims = c.get_shape(x).dimensions()
return xops.Select(xops.Eq(x, zero), xops.Broadcast(zero, dims),
xops.Broadcast(xb.constant(c, np.array(1, dtype=dtype)),
dims))
return xops.Sign(x)
sign_p = standard_unop(_num, 'sign', translation_rule=_sign_translation_rule)
ad.defjvp_zero(sign_p)
nextafter_p = standard_naryop(
[_float, _float], 'nextafter',
translation_rule=lambda c, x1, x2: xops.NextAfter(x1, x2))
floor_p = standard_unop(_float, 'floor')
ad.defjvp_zero(floor_p)
ceil_p = standard_unop(_float, 'ceil')
ad.defjvp_zero(ceil_p)
round_p = standard_unop(_float, 'round')
ad.defjvp_zero(round_p)
is_finite_p = unop(_fixed_dtype(np.bool_), _float, 'is_finite')
ad.defjvp_zero(is_finite_p)
exp_p = standard_unop(_float | _complex, 'exp')
ad.defjvp2(exp_p, lambda g, ans, x: mul(g, ans))
iad.definverse(exp_p, lambda r, x: log(r))
# For exp_p it is more efficient to use the reconstructed output for the vjp
# rule instead of computing it again from the input.
iad.primitive_ivjps[exp_p] = lambda x, y, ct: [[log(y[0])], [ct[0] * y[0]]]
log_p = standard_unop(_float | _complex, 'log')
ad.defjvp(log_p, lambda g, x: div(g, x))
iad.definverse(log_p, lambda r, x: exp(r))
expm1_p = standard_unop(_float | _complex, 'expm1')
ad.defjvp2(expm1_p, lambda g, ans, x: mul(g, add(ans, _one(ans))))
log1p_p = standard_unop(_float | _complex, 'log1p')
ad.defjvp(log1p_p, lambda g, x: div(g, add(x, _one(x))))
tanh_p = standard_unop(_float | _complex, 'tanh')
ad.defjvp2(tanh_p, lambda g, ans, x: mul(g, sub(_one(x), mul(ans, ans))))
sin_p = standard_unop(_float | _complex, 'sin')
ad.defjvp(sin_p, lambda g, x: mul(g, cos(x)))
cos_p = standard_unop(_float | _complex, 'cos')
ad.defjvp(cos_p, lambda g, x: neg(mul(g, sin(x))))
atan2_p = standard_naryop([_float, _float], 'atan2')
ad.defjvp(atan2_p,
lambda g, x, y: _brcast(g, y) * (y / (square(x) + square(y))),
lambda g, x, y: _brcast(g, x) * -x / (square(x) + square(y)))
sinh_p = standard_unop(_float | _complex, 'sinh')
ad.defjvp(sinh_p, lambda g, x: mul(g, cosh(x)))
cosh_p = standard_unop(_float | _complex, 'cosh')
ad.defjvp(cosh_p, lambda g, x: mul(g, sinh(x)))
asinh_p = standard_unop(_float | _complex, 'asinh')
ad.defjvp(asinh_p, lambda g, x: mul(g, rsqrt(square(x) + _one(x))))
acosh_p = standard_unop(_float | _complex, 'acosh')
ad.defjvp(acosh_p,
lambda g, x: mul(g, rsqrt((x - _one(x)) * (x + _one(x)))))
atanh_p = standard_unop(_float | _complex, 'atanh')
ad.defjvp(atanh_p,
lambda g, x: mul(g, reciprocal((_one(x) - x) * (_one(x) + x))))
regularized_incomplete_beta_p = standard_naryop(
[_float, _float, _float], 'regularized_incomplete_beta',
translation_rule=_broadcast_translate(
partial(standard_translate, 'regularized_incomplete_beta')))
def betainc_gradx(g, a, b, x):
lbeta = lgamma(a) + lgamma(b) - lgamma(a + b)
partial_x = exp((b - 1) * log1p(-x) +
(a - 1) * log(x) - lbeta)
return partial_x * g
def betainc_grad_not_implemented(g, a, b, x):
raise ValueError("Betainc gradient with respect to a and b not supported.")
ad.defjvp(regularized_incomplete_beta_p,
betainc_grad_not_implemented,
betainc_grad_not_implemented,
betainc_gradx)
lgamma_p = standard_unop(_float, 'lgamma')
ad.defjvp(lgamma_p, lambda g, x: mul(g, digamma(x)))
digamma_p = standard_unop(_float, 'digamma')
igamma_p = standard_naryop(
[_float, _float], 'igamma',
translation_rule=_broadcast_translate(partial(standard_translate, 'igamma')))
igamma_grad_a_p = standard_naryop([_float, _float], 'igamma_grad_a',
translation_rule=_broadcast_translate(partial(standard_translate,
'igamma_grad_a')))
def igamma_gradx(g, a, x):
return _brcast(g, a, x) * exp(-x + (a - _ones(a)) * log(x) - lgamma(a))
def igamma_grada(g, a, x):
return _brcast(g, a, x) * igamma_grad_a(a, x)
ad.defjvp(igamma_p, igamma_grada, igamma_gradx)
igammac_p = standard_naryop(
[_float, _float], 'igammac',
translation_rule=_broadcast_translate(partial(standard_translate, 'igammac')))
def igammac_gradx(g, a, x):
return -igamma_gradx(g, a, x)
def igammac_grada(g, a, x):
return -igamma_grada(g, a, x)
ad.defjvp(igammac_p, igammac_grada, igammac_gradx)
random_gamma_grad_p = standard_naryop([_float, _float], 'random_gamma_grad',
translation_rule=_broadcast_translate(partial(standard_translate,
'random_gamma_grad')))
bessel_i0e_p = standard_unop(_float, 'bessel_i0e')
ad.defjvp2(bessel_i0e_p, lambda g, y, x: g * (bessel_i1e(x) - sign(x) * y))
bessel_i1e_p = standard_unop(_float, 'bessel_i1e')
def _bessel_i1e_jvp(g, y, x):
eps = dtypes.finfo(_dtype(x)).eps
x_is_not_tiny = abs(x) > eps
safe_x = select(x_is_not_tiny, x, full_like(x, eps))
dy_dx = bessel_i0e(safe_x) - y * (sign(safe_x) + reciprocal(safe_x))
dy_dx = select(x_is_not_tiny, dy_dx, full_like(x, 0.5))
return g * dy_dx
ad.defjvp2(bessel_i1e_p, _bessel_i1e_jvp)
erf_p = standard_unop(_float, 'erf')
ad.defjvp(erf_p, lambda g, x: mul(_const(x, 2. / np.sqrt(np.pi)),
mul(g, exp(neg(square(x))))))
erfc_p = standard_unop(_float, 'erfc')
ad.defjvp(erfc_p, lambda g, x: mul(_const(x, 2. / np.sqrt(np.pi)),
mul(neg(g), exp(neg(square(x))))))
erf_inv_p = standard_unop(_float, 'erf_inv')
ad.defjvp2(erf_inv_p, lambda g, ans, x: mul(_const(x, np.sqrt(np.pi) / 2.),
mul(g, exp(square(ans)))))
real_p = unop(_complex_basetype, _complex, 'real')
ad.deflinear(real_p, lambda t: [complex(t, np.zeros((), _dtype(t)))])
imag_p = unop(_complex_basetype, _complex, 'imag')
ad.defjvp(imag_p, lambda g, _: real(mul(_const(g, -1j), g)))
_complex_dtype = lambda dtype, *args: (np.zeros((), dtype) + np.zeros((), np.complex64)).dtype
complex_p = naryop(_complex_dtype, [_complex_elem_types, _complex_elem_types],
'complex')
ad.deflinear(complex_p, lambda t: [real(t), imag(neg(t))])
conj_p = unop(_complex_dtype, _complex_elem_types | _complex, 'conj')
def _conj_transpose_rule(t, x, *, input_dtype):
assert ad.is_undefined_primal(x)
if dtypes.issubdtype(input_dtype, np.complexfloating):
return [conj(t)]
else:
return [real(t)]
xla.translations[conj_p] = lambda c, x, **kwargs: xops.Conj(x)
ad.primitive_jvps[conj_p] = partial(ad.linear_jvp, conj_p)
ad.primitive_transposes[conj_p] = _conj_transpose_rule
abs_p = unop(_complex_basetype, _num, 'abs')
def _abs_jvp_rule(g, ans, x):
if _iscomplex(x):
return _maybe_real(mul(g, div(_maybe_conj(x),
_replace_zero(convert_element_type(ans, _dtype(x))))))
else:
return select(ge(x, _zero(x)), g, neg(g))
ad.defjvp2(abs_p, _abs_jvp_rule)
_maybe_conj = lambda x: conj(x) if _iscomplex(x) else x
_maybe_real = lambda x: real(x) if _iscomplex(x) else x
sqrt_p = standard_unop(_float | _complex, 'sqrt')
ad.defjvp2(sqrt_p, lambda g, ans, x: mul(g, div(_const(x, 0.5), ans)))
rsqrt_p = standard_unop(_float | _complex, 'rsqrt')
ad.defjvp2(rsqrt_p,
lambda g, ans, x:
mul(g, mul(_const(x, -0.5), pow(x, _const(x, -1.5)))))
pow_p = standard_naryop([_float | _complex, _float | _complex], 'pow')
def _pow_jvp_lhs(g, ans, x, y):
jac = mul(y, pow(x, select(eq(y, _zeros(y)), _ones(y), sub(y, _ones(y)))))
return mul(_brcast(g, y), jac)
def _pow_jvp_rhs(g, ans, x, y):
return mul(_brcast(g, x), mul(log(_replace_zero(x)), ans))
ad.defjvp2(pow_p, _pow_jvp_lhs, _pow_jvp_rhs)
def _integer_pow_dtype_rule(x, *, y):
dtype = unop_dtype_rule(_identity, _int | _float | _complex, 'integer_pow', x)
if y < 0 and dtypes.issubdtype(dtype, np.integer):
raise TypeError("Integers cannot be raised to negative powers, got "
f"integer_pow({x}, {y})")
return dtype
def _integer_pow_translation_rule(c, x, *, y):
if y == 0:
shape = c.get_shape(x)
return xb.constant(c, np.array(1, dtype=shape.numpy_dtype()))
is_reciprocal = y < 0
if is_reciprocal:
y = -y
acc = None
while y > 0:
if y & 1:
acc = x if acc is None else xops.Mul(acc, x)
y >>= 1
if y > 0:
x = xops.Mul(x, x)
return xops.Reciprocal(acc) if is_reciprocal else acc
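# Worked example: the loop above is exponentiation by squaring. For y = 5
# (binary 101), it proceeds as
#   y=5: bit set   -> acc = x;            x -> x**2
#   y=2: bit unset                        x -> x**4
#   y=1: bit set   -> acc = x * x**4 = x**5
# A negative exponent computes x**abs(y) and takes one reciprocal at the end.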
def _integer_pow_jvp(g, x, *, y):
return g if y == 0 else mul(g, mul(_const(x, y), integer_pow(x, y - 1)))
integer_pow_p = standard_primitive(
_attrgetter('shape'), _integer_pow_dtype_rule, 'integer_pow',
translation_rule=_integer_pow_translation_rule)
batching.defvectorized(integer_pow_p)
masking.defvectorized(integer_pow_p)
ad.defjvp(integer_pow_p, _integer_pow_jvp)
_replace_zero = lambda x: select(eq(x, _const(x, 0)), _ones(x), x)
not_p = standard_unop(_bool_or_int, 'not')
ad.defjvp_zero(not_p)
and_p = standard_naryop([_bool_or_int, _bool_or_int], 'and')
ad.defjvp_zero(and_p)
or_p = standard_naryop([_bool_or_int, _bool_or_int], 'or')
ad.defjvp_zero(or_p)
xor_p = standard_naryop([_bool_or_int, _bool_or_int], 'xor')
ad.defjvp_zero(xor_p)
population_count_p = standard_unop(_int, 'population_count')
def _add_transpose(t, x, y):
# The following linearity assertion is morally true, but because in some cases we
# instantiate zeros for convenience, it doesn't always hold.
# assert ad.is_undefined_primal(x) and ad.is_undefined_primal(y)
return [t, t]
add_p = standard_naryop([_num, _num], 'add')
ad.defjvp(add_p, lambda g, x, y: _brcast(g, y), lambda g, x, y: _brcast(g, x))
ad.primitive_transposes[add_p] = _add_transpose
def _add_inverse(r, x, y):
xr = r - y
yr = r - x
return xr, yr
iad.definverse(add_p, _add_inverse)
def _sub_transpose(t, x, y):
# The following linearity assertion is morally true, but because in some cases
# we instantiate zeros for convenience, it doesn't always hold.
# assert ad.is_undefined_primal(x) and ad.is_undefined_primal(y)
return [t, neg(t) if type(t) is not ad_util.Zero else ad_util.Zero]
sub_p = standard_naryop([_num, _num], 'sub')
ad.defjvp(sub_p,
lambda g, x, y: _brcast(g, y),
lambda g, x, y: _brcast(neg(g), x))
ad.primitive_transposes[sub_p] = _sub_transpose
mul_p = standard_naryop([_num, _num], 'mul')
ad.defbilinear_broadcasting(_brcast, mul_p, mul, mul)
def _mul_inverse(r, x, y):
xr = r / y
yr = r / x
return xr, yr
iad.definverse(mul_p, _mul_inverse)
def _div_transpose_rule(cotangent, x, y):
assert ad.is_undefined_primal(x) and not ad.is_undefined_primal(y)
res = ad_util.Zero if type(cotangent) is ad_util.Zero else div(cotangent, y)
return res, None
div_p = standard_naryop([_num, _num], 'div')
ad.defjvp(div_p,
lambda g, x, y: div(_brcast(g, y), y),
lambda g, x, y: mul(mul(neg(_brcast(g, x)), x), integer_pow(y, -2)))
ad.primitive_transposes[div_p] = _div_transpose_rule
rem_p = standard_naryop([_num, _num], 'rem')
ad.defjvp(rem_p,
lambda g, x, y: _brcast(g, y),
lambda g, x, y: mul(_brcast(neg(g), x), floor(div(x, y))))
def _broadcasting_select(c, which, x, y):
"""Wrapper around XLA `Select` that broadcasts its arguments."""
which_shape, x_shape, y_shape = (
c.get_shape(t).dimensions() for t in (which, x, y))
out_shape = broadcast_shapes(which_shape, x_shape, y_shape)
bcast_dims = lambda shape: tuple(range(len(out_shape) - len(shape),
len(out_shape)))
which = xops.BroadcastInDim(which, out_shape, bcast_dims(which_shape))
x = xops.BroadcastInDim(x, out_shape, bcast_dims(x_shape))
y = xops.BroadcastInDim(y, out_shape, bcast_dims(y_shape))
return xops.Select(which, x, y)
def _minmax_translation_rule(c, x, y, *, minmax=None, cmp=None):
dtype = c.get_shape(x).numpy_dtype()
if dtypes.issubdtype(dtype, np.complexfloating):
rx = xops.Real(x)
ry = xops.Real(y)
return _broadcasting_select(
c, xops.Select(xops.Eq(rx, ry), cmp(xops.Imag(x), xops.Imag(y)),
cmp(rx, ry)),
x, y)
return minmax(x, y)
max_p: core.Primitive = standard_naryop(
[_any, _any], 'max', translation_rule=partial(
_minmax_translation_rule, minmax=xops.Max, cmp=xops.Gt))
ad.defjvp2(max_p,
lambda g, ans, x, y: mul(_brcast(g, y), _balanced_eq(x, ans, y)),
lambda g, ans, x, y: mul(_brcast(g, x), _balanced_eq(y, ans, x)))
min_p: core.Primitive = standard_naryop(
[_any, _any], 'min', translation_rule=partial(
_minmax_translation_rule, minmax=xops.Min, cmp=xops.Lt))
ad.defjvp2(min_p,
lambda g, ans, x, y: mul(_brcast(g, y), _balanced_eq(x, ans, y)),
lambda g, ans, x, y: mul(_brcast(g, x), _balanced_eq(y, ans, x)))
shift_left_p = standard_naryop([_int, _int], 'shift_left')
ad.defjvp_zero(shift_left_p)
shift_right_arithmetic_p = standard_naryop([_int, _int], 'shift_right_arithmetic')
ad.defjvp_zero(shift_right_arithmetic_p)
shift_right_logical_p = standard_naryop([_int, _int], 'shift_right_logical')
ad.defjvp_zero(shift_right_logical_p)
eq_p = naryop(_fixed_dtype(np.bool_), [_any, _any], 'eq')
ad.defjvp_zero(eq_p)
ne_p = naryop(_fixed_dtype(np.bool_), [_any, _any], 'ne')
ad.defjvp_zero(ne_p)
ge_p = naryop(_fixed_dtype(np.bool_), [_any, _any], 'ge')
ad.defjvp_zero(ge_p)
gt_p = naryop(_fixed_dtype(np.bool_), [_any, _any], 'gt')
ad.defjvp_zero(gt_p)
le_p = naryop(_fixed_dtype(np.bool_), [_any, _any], 'le')
ad.defjvp_zero(le_p)
lt_p = naryop(_fixed_dtype(np.bool_), [_any, _any], 'lt')
ad.defjvp_zero(lt_p)
def _convert_element_type_shape_rule(operand, *, new_dtype, old_dtype):
return operand.shape
def _convert_element_type_dtype_rule(operand, *, new_dtype, old_dtype):
return new_dtype
def _convert_element_type_translation_rule(c, operand, *, new_dtype, old_dtype):
if (dtypes.issubdtype(old_dtype, np.complexfloating) and
not dtypes.issubdtype(new_dtype, np.complexfloating)):
operand = xops.Real(operand)
new_etype = xla_client.dtype_to_etype(new_dtype)
return xops.ConvertElementType(operand, new_element_type=new_etype)
def _convert_element_type_transpose_rule(ct, operand, *, new_dtype, old_dtype):
if type(ct) is ad_util.Zero:
return [ad_util.Zero(operand.aval)]
elif core.primal_dtype_to_tangent_dtype(old_dtype) is dtypes.float0:
return [ad_util.Zero(ShapedArray(operand.aval.shape, dtype=dtypes.float0))]
else:
return [convert_element_type_p.bind(ct, new_dtype=old_dtype,
old_dtype=new_dtype)]
def _convert_element_type_jvp_rule(tangent, operand, *, new_dtype, old_dtype):
if core.primal_dtype_to_tangent_dtype(new_dtype) is dtypes.float0:
return ad_util.Zero(ShapedArray(tangent.shape, dtype=dtypes.float0))
else:
return convert_element_type_p.bind(tangent, new_dtype=new_dtype,
old_dtype=old_dtype)
convert_element_type_p = standard_primitive(
_convert_element_type_shape_rule, _convert_element_type_dtype_rule,
'convert_element_type', _convert_element_type_translation_rule)
ad.defjvp(convert_element_type_p, _convert_element_type_jvp_rule)
ad.primitive_transposes[convert_element_type_p] = _convert_element_type_transpose_rule
batching.defvectorized(convert_element_type_p)
masking.defvectorized(convert_element_type_p)
def _bitcast_convert_type_shape_rule(operand, *, new_dtype):
return operand.shape
def _bitcast_convert_type_dtype_rule(operand, *, new_dtype):
return new_dtype
def _bitcast_convert_type_translation_rule(c, operand, *, new_dtype):
  new_etype = xla_client.dtype_to_etype(new_dtype)
return xops.BitcastConvertType(operand, new_element_type=new_etype)
bitcast_convert_type_p = standard_primitive(
_bitcast_convert_type_shape_rule, _bitcast_convert_type_dtype_rule,
'bitcast_convert_type', _bitcast_convert_type_translation_rule)
ad.defjvp_zero(bitcast_convert_type_p)
batching.defvectorized(bitcast_convert_type_p)
masking.defvectorized(bitcast_convert_type_p)
def _conv_general_dilated_shape_rule(
lhs: ShapedArray, rhs: ShapedArray, *, window_strides, padding,
lhs_dilation, rhs_dilation, dimension_numbers, feature_group_count,
batch_group_count, **unused_kwargs) -> Tuple[int, ...]:
assert type(dimension_numbers) is ConvDimensionNumbers
if len(lhs.shape) != len(rhs.shape):
msg = ("conv_general_dilated lhs and rhs must have the same number of "
"dimensions, but got {} and {}.")
raise ValueError(msg.format(lhs.shape, rhs.shape))
if not feature_group_count > 0:
msg = ("conv_general_dilated feature_group_count "
"must be a positive integer, got {}.")
raise ValueError(msg.format(feature_group_count))
lhs_feature_count = lhs.shape[dimension_numbers.lhs_spec[1]]
quot, rem = divmod(lhs_feature_count, feature_group_count)
if rem:
msg = ("conv_general_dilated feature_group_count must divide lhs feature "
"dimension size, but {} does not divide {}.")
raise ValueError(msg.format(feature_group_count, lhs_feature_count))
if quot != rhs.shape[dimension_numbers.rhs_spec[1]]:
msg = ("conv_general_dilated lhs feature dimension size divided by "
"feature_group_count must equal the rhs input feature dimension "
"size, but {} // {} != {}.")
raise ValueError(msg.format(lhs_feature_count, feature_group_count,
rhs.shape[dimension_numbers.rhs_spec[1]]))
if rhs.shape[dimension_numbers.rhs_spec[0]] % feature_group_count:
msg = ("conv_general_dilated rhs output feature dimension size must be a "
"multiple of feature_group_count, but {} is not a multiple of {}.")
raise ValueError(msg.format(rhs.shape[dimension_numbers.rhs_spec[0]],
feature_group_count))
if not batch_group_count > 0:
msg = ("conv_general_dilated batch_group_count "
"must be a positive integer, got {}.")
raise ValueError(msg.format(batch_group_count))
lhs_batch_count = lhs.shape[dimension_numbers.lhs_spec[0]]
if lhs_batch_count % batch_group_count != 0:
msg = ("conv_general_dilated batch_group_count must divide lhs batch "
"dimension size, but {} does not divide {}.")
raise ValueError(msg.format(batch_group_count, lhs_batch_count))
if rhs.shape[dimension_numbers.rhs_spec[0]] % batch_group_count:
msg = ("conv_general_dilated rhs output feature dimension size must be a "
"multiple of batch_group_count, but {} is not a multiple of {}.")
raise ValueError(msg.format(rhs.shape[dimension_numbers.rhs_spec[0]],
batch_group_count))
if batch_group_count > 1 and feature_group_count > 1:
msg = ("At most one of batch_group_count and feature_group_count may be > "
"1, got batch_group_count={} and feature_group_count={}")
raise ValueError(msg.format(batch_group_count, feature_group_count))
lhs_perm, rhs_perm, out_perm = dimension_numbers
lhs_trans = _dilate_shape(np.take(lhs.shape, lhs_perm), lhs_dilation)
rhs_trans = _dilate_shape(np.take(rhs.shape, rhs_perm), rhs_dilation)
out_trans = conv_shape_tuple(lhs_trans, rhs_trans, window_strides, padding,
batch_group_count)
return tuple(np.take(out_trans, np.argsort(out_perm)))
def _conv_general_dilated_dtype_rule(
lhs, rhs, *, window_strides, padding, lhs_dilation, rhs_dilation,
dimension_numbers, **unused_kwargs):
return naryop_dtype_rule(_input_dtype, [_float | _complex, _float | _complex],
'conv_general_dilated', lhs, rhs)
_conv_spec_transpose = lambda spec: (spec[1], spec[0]) + spec[2:]
_conv_sdims = lambda spec: spec[2:]
# Understanding the convolution transpose rules:
# Ignoring the spatial dimensions, let m = batch, j = input feature,
# k = output feature.
#
# Convolution computes the following contraction:
# Forward: [m, j] [j, k] -> [m, k]
#
# The transposes are similar to the rules for transposing a matmul:
# LHS transpose: [m, k] [k, j] -> [m, j]
# RHS transpose: [j, m] [m, k] -> [j, k]
#
# With feature grouping, we have the following signatures:
# Forward: [m, gj] [j, gk] -> [m, gk]
# LHS transpose: [m, gk] [k, gj] -> [m, gj]
# --> implemented as feature grouping after transposing the group from the
# kernel input features to the kernel output features.
# RHS transpose: [gj, m] [m, gk] -> [j, gk]
# --> which is batch grouping.
#
# With batch grouping, we have the following signatures:
# Forward: [gm, j] [j, gk] -> [m, gk]
# LHS transpose: [m, gk] [gk, j] -> [gm, j]
# --> implemented as feature grouping with transposing the group on the kernel
# and the output.
# RHS transpose: [j, gm] [m, gk] -> [j, gk]
# --> which is feature grouping.
def _conv_general_dilated_transpose_lhs(
g, rhs, *, window_strides, padding, lhs_dilation, rhs_dilation,
dimension_numbers, feature_group_count, batch_group_count,
lhs_shape, rhs_shape, precision):
assert type(dimension_numbers) is ConvDimensionNumbers
assert batch_group_count == 1 or feature_group_count == 1
lhs_sdims, rhs_sdims, out_sdims = map(_conv_sdims, dimension_numbers)
lhs_spec, rhs_spec, out_spec = dimension_numbers
t_rhs_spec = _conv_spec_transpose(rhs_spec)
if feature_group_count > 1:
# in addition to switching the dims in the spec, need to move the feature
# group axis into the transposed rhs's output feature dim
rhs = _reshape_axis_out_of(rhs_spec[0], feature_group_count, rhs)
rhs = _reshape_axis_into(rhs_spec[0], rhs_spec[1], rhs)
elif batch_group_count > 1:
rhs = _reshape_axis_out_of(rhs_spec[0], batch_group_count, rhs)
rhs = _reshape_axis_into(rhs_spec[0], rhs_spec[1], rhs)
feature_group_count = batch_group_count
trans_dimension_numbers = ConvDimensionNumbers(out_spec, t_rhs_spec, lhs_spec)
padding = _conv_general_vjp_lhs_padding(
np.take(lhs_shape, lhs_sdims), np.take(rhs_shape, rhs_sdims),
window_strides, np.take(g.shape, out_sdims), padding, lhs_dilation,
rhs_dilation)
revd_weights = rev(rhs, rhs_sdims)
out = conv_general_dilated(
g, revd_weights, window_strides=lhs_dilation, padding=padding,
lhs_dilation=window_strides, rhs_dilation=rhs_dilation,
dimension_numbers=trans_dimension_numbers,
feature_group_count=feature_group_count,
batch_group_count=1, precision=precision)
if batch_group_count > 1:
out = _reshape_axis_out_of(lhs_spec[1], batch_group_count, out)
out = _reshape_axis_into(lhs_spec[1], lhs_spec[0], out)
return out
def _conv_general_dilated_transpose_rhs(
g, lhs, *, window_strides, padding, lhs_dilation, rhs_dilation,
dimension_numbers: ConvDimensionNumbers, feature_group_count: int,
batch_group_count: int, lhs_shape, rhs_shape, precision):
assert type(dimension_numbers) is ConvDimensionNumbers
if np.size(g) == 0:
# Avoids forming degenerate convolutions where the RHS has spatial size 0.
return ad_util.Zero
lhs_sdims, rhs_sdims, out_sdims = map(_conv_sdims, dimension_numbers)
lhs_trans, rhs_trans, out_trans = map(_conv_spec_transpose, dimension_numbers)
assert batch_group_count == 1 or feature_group_count == 1
if batch_group_count > 1:
feature_group_count = batch_group_count
batch_group_count = 1
elif feature_group_count > 1:
batch_group_count = feature_group_count
feature_group_count = 1
trans_dimension_numbers = ConvDimensionNumbers(lhs_trans, out_trans, rhs_trans)
padding = _conv_general_vjp_rhs_padding(
np.take(lhs_shape, lhs_sdims), np.take(rhs_shape, rhs_sdims),
window_strides, np.take(g.shape, out_sdims), padding, lhs_dilation,
rhs_dilation)
return conv_general_dilated(
lhs, g, window_strides=rhs_dilation, padding=padding,
lhs_dilation=lhs_dilation, rhs_dilation=window_strides,
dimension_numbers=trans_dimension_numbers,
feature_group_count=feature_group_count,
batch_group_count=batch_group_count, precision=precision)
def _conv_general_dilated_translation_rule(
c, lhs, rhs, *, window_strides, padding,
lhs_dilation, rhs_dilation, dimension_numbers, feature_group_count,
batch_group_count, precision, expand_complex_convolutions, **unused_kwargs):
assert type(dimension_numbers) is ConvDimensionNumbers
dimension_numbers = _conv_general_proto(dimension_numbers)
precision_config = _precision_config(precision)
dtype = c.get_shape(lhs).numpy_dtype()
conv = lambda x, y: xops.ConvGeneralDilated(
x, y, window_strides, padding, lhs_dilation, rhs_dilation,
dimension_numbers, feature_group_count, batch_group_count,
precision_config=precision_config)
if expand_complex_convolutions and np.issubdtype(dtype, np.complexfloating):
    # We use a trick for complex multiplication due to Gauss, which uses three
    # multiplications and five additions instead of the naive method's four
    # multiplications and two additions.
    # https://en.wikipedia.org/wiki/Multiplication_algorithm#Complex_multiplication_algorithm
    #
    # This performance win comes with a trade-off in accuracy, especially in
    # cases where the real and imaginary parts differ hugely in magnitude. The
    # relative error bound (e.g. 2**-24 in the case of float32) holds for the
    # maximum of the real and imaginary parts of the result instead of being
    # satisfied by the real and imaginary parts independently of each other.
lhs_real, lhs_imag = xops.Real(lhs), xops.Imag(lhs)
rhs_real, rhs_imag = xops.Real(rhs), xops.Imag(rhs)
k1 = conv(xops.Add(lhs_real, lhs_imag), rhs_real)
k2 = conv(lhs_real, xops.Sub(rhs_imag, rhs_real))
k3 = conv(lhs_imag, xops.Add(rhs_real, rhs_imag))
return xops.Complex(xops.Sub(k1, k3), xops.Add(k1, k2))
return conv(lhs, rhs)
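# Checking the Gauss trick above: with lhs = a + bi and rhs = c + di, and conv
# distributing over addition, k1 = (a+b)*c, k2 = a*(d-c), k3 = b*(c+d), so
#   real part: k1 - k3 = ac + bc - bc - bd = ac - bd
#   imag part: k1 + k2 = ac + bc + ad - ac = ad + bc
# which matches (a+bi)(c+di) = (ac-bd) + (ad+bc)i.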
def _conv_general_dilated_batch_rule(
batched_args, batch_dims, *, window_strides, padding,
lhs_dilation, rhs_dilation, dimension_numbers,
feature_group_count, batch_group_count, precision, **unused_kwargs):
assert batch_group_count == 1 or feature_group_count == 1
lhs, rhs = batched_args
lhs_bdim, rhs_bdim = batch_dims
lhs_spec, rhs_spec, out_spec = dimension_numbers
if lhs_bdim is not None and rhs_bdim is not None:
assert lhs.shape[lhs_bdim] == rhs.shape[rhs_bdim]
if batch_group_count > 1:
new_lhs = _reshape_axis_into(lhs_bdim, lhs_spec[0], lhs)
batch_group_count *= lhs.shape[lhs_bdim]
else:
new_lhs = _reshape_axis_into(lhs_bdim, lhs_spec[1], lhs)
feature_group_count *= lhs.shape[lhs_bdim]
new_rhs = _reshape_axis_into(rhs_bdim, rhs_spec[0], rhs)
out = conv_general_dilated(
new_lhs, new_rhs, window_strides, padding, lhs_dilation, rhs_dilation,
dimension_numbers, feature_group_count=feature_group_count,
batch_group_count=batch_group_count,
precision=precision)
out = _reshape_axis_out_of(out_spec[1], lhs.shape[lhs_bdim], out)
return out, out_spec[1]
elif lhs_bdim is not None:
if batch_group_count == 1:
new_lhs = _reshape_axis_into(lhs_bdim, lhs_spec[0], lhs)
out = conv_general_dilated(new_lhs, rhs, window_strides, padding,
lhs_dilation, rhs_dilation, dimension_numbers,
feature_group_count, precision=precision)
out = _reshape_axis_out_of(out_spec[0], lhs.shape[lhs_bdim], out)
return out, out_spec[0]
else:
new_lhs = _reshape_axis_out_of(lhs_spec[0] + int(lhs_bdim <= lhs_spec[0]),
batch_group_count, lhs)
new_lhs = _reshape_axis_into(lhs_bdim + int(lhs_spec[0] < lhs_bdim),
lhs_spec[0] + 1,
new_lhs)
new_lhs = _reshape_axis_into(lhs_spec[0], lhs_spec[0], new_lhs)
out = conv_general_dilated(new_lhs, rhs, window_strides, padding,
lhs_dilation, rhs_dilation, dimension_numbers,
feature_group_count, batch_group_count,
precision=precision)
out = _reshape_axis_out_of(out_spec[0], lhs.shape[lhs_bdim], out)
return out, out_spec[0]
elif rhs_bdim is not None:
if feature_group_count == 1 and batch_group_count == 1:
new_rhs = _reshape_axis_into(rhs_bdim, rhs_spec[0], rhs)
out = conv_general_dilated(lhs, new_rhs, window_strides, padding,
lhs_dilation, rhs_dilation, dimension_numbers,
feature_group_count, batch_group_count,
precision=precision)
out = _reshape_axis_out_of(out_spec[1], rhs.shape[rhs_bdim], out)
return out, out_spec[1]
else:
# groups need to be outermost, so we need to factor them out of the
# rhs output feature dim, then factor the batch dim into the remaining rhs
# output feature dim, then put groups back in. We do something
# similar on the output. An alternative which would require more FLOPs but
# fewer reshapes would be to broadcast lhs.
group_count = (feature_group_count if feature_group_count > 1
else batch_group_count)
new_rhs = _reshape_axis_out_of(rhs_spec[0] + int(rhs_bdim <= rhs_spec[0]),
group_count, rhs)
new_rhs = _reshape_axis_into(rhs_bdim + int(rhs_spec[0] < rhs_bdim),
rhs_spec[0] + 1,
new_rhs)
new_rhs = _reshape_axis_into(rhs_spec[0], rhs_spec[0], new_rhs)
out = conv_general_dilated(lhs, new_rhs, window_strides, padding,
lhs_dilation, rhs_dilation, dimension_numbers,
feature_group_count, batch_group_count,
precision=precision)
out = _reshape_axis_out_of(out_spec[1], group_count, out)
out = _reshape_axis_out_of(out_spec[1] + 1, rhs.shape[rhs_bdim], out)
out = _reshape_axis_into(out_spec[1], out_spec[1] + 1, out)
return out, out_spec[1]
def _masked(padded_value, logical_shape, dimensions, value=0):
"""
Sets all padding to the given value (default is 0) in the given dimensions.
All values outside the logical shape are considered padding.
"""
if len(dimensions) == 0:
return padded_value
masks = [broadcasted_iota(np.int32, padded_value.shape, d) < logical_shape[d]
for d in dimensions]
mask_intersection = masks[0]
for mask in masks[1:]:
mask_intersection &= mask
return select(mask_intersection, padded_value, full_like(padded_value, value))
def _conv_general_dilated_masking_rule(
padded_vals, logical_shapes, window_strides, padding, lhs_dilation,
rhs_dilation, dimension_numbers, feature_group_count, batch_group_count,
lhs_shape, rhs_shape, precision):
lhs, rhs = padded_vals
logical_lhs_shape, logical_rhs_shape = logical_shapes
o, i, *window_dimensions = dimension_numbers.rhs_spec
assert (np.all(np.take(rhs.shape, window_dimensions)
== np.take(logical_rhs_shape, window_dimensions))), \
"Conv filter masking not yet implemented."
n, c, *padded_dimensions = dimension_numbers.lhs_spec
return conv_general_dilated(
_masked(lhs, logical_lhs_shape, padded_dimensions),
_masked(rhs, logical_rhs_shape, (i,)),
window_strides=window_strides, padding=padding,
lhs_dilation=lhs_dilation, rhs_dilation=rhs_dilation,
dimension_numbers=dimension_numbers,
feature_group_count=feature_group_count,
batch_group_count=batch_group_count,
precision=precision)
conv_general_dilated_p = standard_primitive(
_conv_general_dilated_shape_rule, _conv_general_dilated_dtype_rule,
'conv_general_dilated', partial(_conv_general_dilated_translation_rule,
expand_complex_convolutions=False))
# TODO(b/161124619, b/161126248): XLA does not support complex convolution on
# CPU or GPU; on these backends, lower complex convolutions away.
xla.backend_specific_translations['cpu'][conv_general_dilated_p] = partial(
_conv_general_dilated_translation_rule, expand_complex_convolutions=True)
xla.backend_specific_translations['gpu'][conv_general_dilated_p] = partial(
_conv_general_dilated_translation_rule, expand_complex_convolutions=True)
ad.defbilinear(conv_general_dilated_p,
_conv_general_dilated_transpose_lhs,
_conv_general_dilated_transpose_rhs)
batching.primitive_batchers[conv_general_dilated_p] = \
_conv_general_dilated_batch_rule
masking.masking_rules[conv_general_dilated_p] = \
_conv_general_dilated_masking_rule
def _reshape_axis_into(src, dst, x):
perm = [i for i in range(x.ndim) if i != src]
perm.insert(dst, src)
new_shape = list(np.delete(x.shape, src))
new_shape[dst] *= x.shape[src]
return reshape(x, new_shape, perm)
def _reshape_axis_out_of(src, size1, x):
shape = list(x.shape)
size2, ragged = divmod(shape[src], size1)
assert not ragged
shape[src:src+1] = [size1, size2]
return reshape(x, shape)
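# Worked example: these helpers fuse an axis into a neighbor and split it back
# out. Assuming x of shape (2, 3, 4) and y of shape (8, 3),
#   _reshape_axis_into(2, 0, x)    # transpose to (4, 2, 3), reshape to (8, 3)
#   _reshape_axis_out_of(0, 4, y)  # (8, 3) -> (4, 2, 3)
# so, up to the transposition, one undoes the other.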
def _precision_config(precision):
if precision is not None:
config = xla_client.PrecisionConfig()
if isinstance(precision, tuple):
config.operand_precision.extend(precision)
else:
config.operand_precision.extend((precision, precision))
return config
return None
def _dot_general_shape_rule(lhs, rhs, *, dimension_numbers, precision):
(lhs_contracting, rhs_contracting), (lhs_batch, rhs_batch) = dimension_numbers
if not all(np.all(np.greater_equal(d, 0)) and np.all(np.less(d, lhs.ndim))
for d in (lhs_contracting, lhs_batch)):
msg = ("dot_general requires lhs dimension numbers to be nonnegative and "
"less than the number of axes of the lhs value, got "
f"lhs_batch of {lhs_batch} and lhs_contracting of {lhs_contracting} "
f"for lhs of rank {lhs.ndim}")
raise TypeError(msg)
if not all(np.all(np.greater_equal(d, 0)) and np.all(np.less(d, rhs.ndim))
for d in (rhs_contracting, rhs_batch)):
msg = ("dot_general requires rhs dimension numbers to be nonnegative and "
"less than the number of axes of the rhs value, got "
f"rhs_batch of {rhs_batch} and rhs_contracting of {rhs_contracting} "
f"for rhs of rank {rhs.ndim}")
raise TypeError(msg)
if len(lhs_batch) != len(rhs_batch):
msg = ("dot_general requires equal numbers of lhs_batch and rhs_batch "
"dimensions, got lhs_batch {} and rhs_batch {}.")
raise TypeError(msg.format(lhs_batch, rhs_batch))
lhs_contracting_set, lhs_batch_set = set(lhs_contracting), set(lhs_batch)
rhs_contracting_set, rhs_batch_set = set(rhs_contracting), set(rhs_batch)
if len(lhs_batch_set) != len(lhs_batch):
msg = ("dot_general requires lhs batch dimensions to be distinct, got "
f"lhs_batch {lhs_batch}.")
raise TypeError(msg)
if len(rhs_batch_set) != len(rhs_batch):
msg = ("dot_general requires rhs batch dimensions to be distinct, got "
f"rhs_batch {rhs_batch}.")
raise TypeError(msg)
if len(lhs_contracting_set) != len(lhs_contracting):
msg = ("dot_general requires lhs contracting dimensions to be distinct, "
f"got lhs_contracting {lhs_contracting}.")
raise TypeError(msg)
if len(rhs_contracting_set) != len(rhs_contracting):
msg = ("dot_general requires rhs contracting dimensions to be distinct, "
f"got rhs_contracting {rhs_contracting}.")
raise TypeError(msg)
if lhs_contracting_set & lhs_batch_set:
msg = ("dot_general requires lhs batch dimensions to be disjoint from "
"contracting dimensions, got lhs_batch {} and lhs_contracting {}.")
raise TypeError(msg.format(lhs_batch, lhs_contracting))
if rhs_contracting_set & rhs_batch_set:
msg = ("dot_general requires rhs batch dimensions to be disjoint from "
"contracting dimensions, got rhs_batch {} and rhs_contracting {}.")
raise TypeError(msg.format(rhs_batch, rhs_contracting))
lhs_batch_shape = np.take(lhs.shape, lhs_batch)
rhs_batch_shape = np.take(rhs.shape, rhs_batch)
if not np.all(np.equal(lhs_batch_shape, rhs_batch_shape)):
msg = ("dot_general requires lhs batch dimensions and rhs batch dimensions "
"to have the same shape, got {} and {}.")
raise TypeError(msg.format(lhs_batch_shape, rhs_batch_shape))
lhs_contracting_shape = np.take(lhs.shape, lhs_contracting)
rhs_contracting_shape = np.take(rhs.shape, rhs_contracting)
if not np.all(np.equal(lhs_contracting_shape, rhs_contracting_shape)):
msg = ("dot_general requires contracting dimensions to have the same "
"shape, got {} and {}.")
raise TypeError(msg.format(lhs_contracting_shape, rhs_contracting_shape))
batch_shape = tuple(lhs_batch_shape)
lhs_contract_or_batch = tuple(sorted(tuple(lhs_contracting) + tuple(lhs_batch)))
lhs_tensored_shape = tuple(np.delete(lhs.shape, lhs_contract_or_batch))
rhs_contract_or_batch = tuple(sorted(tuple(rhs_contracting) + tuple(rhs_batch)))
rhs_tensored_shape = tuple(np.delete(rhs.shape, rhs_contract_or_batch))
return batch_shape + lhs_tensored_shape + rhs_tensored_shape
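# Worked example: a batched matmul of lhs (B, M, K) with rhs (B, K, N) uses
# dimension_numbers (((2,), (1,)), ((0,), (0,))); the rule above then returns
#   batch_shape + lhs_tensored_shape + rhs_tensored_shape
#     = (B,) + (M,) + (N,) = (B, M, N)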
def _dot_general_dtype_rule(lhs, rhs, *, dimension_numbers, precision):
return naryop_dtype_rule(_input_dtype, [_any, _any], 'dot_general', lhs, rhs)
def _dot_general_transpose_lhs(g, y, *, dimension_numbers, precision,
swap_ans=False):
(x_contract, y_contract), (x_batch, y_batch) = dimension_numbers
x_ndim = g.ndim - y.ndim + len(x_batch) + 2 * len(x_contract)
x_kept = remaining(range(x_ndim), x_contract, x_batch)
y_kept = remaining(range(y.ndim), y_contract, y_batch)
if swap_ans:
ans_batch, ans_y, _ = ranges_like(x_batch, y_kept, x_kept)
else:
ans_batch, _, ans_y = ranges_like(x_batch, x_kept, y_kept)
dims = ((ans_y, y_kept), (ans_batch, y_batch))
x_contract_sorted_by_y = list(np.take(x_contract, np.argsort(y_contract)))
out_axes = np.argsort(list(x_batch) + x_kept + x_contract_sorted_by_y)
return transpose(dot_general(g, y, dims, precision=precision),
tuple(out_axes))
def _dot_general_transpose_rhs(g, x, *, dimension_numbers, precision):
(x_contract, y_contract), (x_batch, y_batch) = dimension_numbers
swapped_dimension_numbers = ((y_contract, x_contract), (y_batch, x_batch))
return _dot_general_transpose_lhs(
g, x, dimension_numbers=swapped_dimension_numbers, precision=precision,
swap_ans=True)
def _dot_general_batch_rule(batched_args, batch_dims, *, dimension_numbers,
precision):
# there are three kinds of dimensions in a dot_general:
# - contraction dimensions appear in lhs and rhs but not the result
# - batch dimensions appear in lhs, rhs, and result
# - tensor product dimensions appear in the result and one of lhs or rhs
(lhs_contract, rhs_contract), (lhs_batch, rhs_batch) = dimension_numbers
lhs, rhs = batched_args
lbd, rbd = batch_dims
assert lbd is not None or rbd is not None
def bump_dims(dims, b):
return tuple(np.add(dims, np.greater_equal(dims, b)))
if lbd is not None and rbd is not None:
# adding a batch dimension
lhs_batch = (lbd,) + bump_dims(lhs_batch, lbd)
rhs_batch = (rbd,) + bump_dims(rhs_batch, rbd)
lhs_contract = bump_dims(lhs_contract, lbd)
rhs_contract = bump_dims(rhs_contract, rbd)
result_batch_dim = 0
else:
# adding a tensor product dimension
if lbd is not None:
other = tuple(d for d in range(lhs.ndim)
if d not in lhs_batch and d not in lhs_contract)
result_batch_dim = (len(lhs_batch) + sum(np.less(other, lbd)))
lhs_batch = bump_dims(lhs_batch, lbd)
lhs_contract = bump_dims(lhs_contract, lbd)
else:
other = tuple(d for d in range(rhs.ndim)
if d not in rhs_batch and d not in rhs_contract)
result_batch_dim = (lhs.ndim - len(lhs_contract) +
sum(np.less(other, rbd)))
rhs_batch = bump_dims(rhs_batch, rbd)
rhs_contract = bump_dims(rhs_contract, rbd)
new_dimension_numbers = ((lhs_contract, rhs_contract), (lhs_batch, rhs_batch))
batched_out = dot_general(lhs, rhs, new_dimension_numbers,
precision=precision)
return batched_out, int(result_batch_dim)
def _dot_using_sum_of_products(lhs, rhs, *, dimension_numbers):
contract_dims, batch_dims = dimension_numbers
lhs_contract_dims, rhs_contract_dims = contract_dims
lhs_batch_dims, rhs_batch_dims = batch_dims
lhs_noncontract_dims = tuple(sorted(
set(range(np.ndim(lhs))) - set(lhs_batch_dims) - set(lhs_contract_dims)))
rhs_noncontract_dims = tuple(sorted(
set(range(np.ndim(rhs))) - set(rhs_batch_dims) - set(rhs_contract_dims)))
lhs = transpose(lhs,
lhs_batch_dims + lhs_noncontract_dims + lhs_contract_dims)
rhs = transpose(rhs,
rhs_batch_dims + rhs_noncontract_dims + rhs_contract_dims)
lhs_start_expand = len(lhs_batch_dims) + len(lhs_noncontract_dims)
lhs_end_expand = lhs_start_expand + len(rhs_noncontract_dims)
lhs = expand_dims(lhs, tuple(range(lhs_start_expand, lhs_end_expand)))
rhs_start_expand = len(lhs_batch_dims)
rhs_end_expand = rhs_start_expand + len(lhs_noncontract_dims)
rhs = expand_dims(rhs, tuple(range(rhs_start_expand, rhs_end_expand)))
out_ndim = (len(lhs_batch_dims) + len(lhs_noncontract_dims) +
len(rhs_noncontract_dims))
op_product = bitwise_and if lhs.dtype == np.bool_ else mul
op_sum = bitwise_or if lhs.dtype == np.bool_ else add
return reduce(op_product(lhs, rhs), _zero(lhs), op_sum,
tuple(range(out_ndim, out_ndim + len(lhs_contract_dims))))
def _dot_general_translation_rule(c, lhs, rhs, *, dimension_numbers, precision):
dtype = c.get_shape(lhs).numpy_dtype()
if dtypes.issubdtype(dtype, np.inexact):
return xops.DotGeneral(lhs, rhs,
xc.make_dot_dimension_numbers(dimension_numbers),
precision_config=_precision_config(precision))
else:
# TODO(b/134526360): XLA doesn't support bool or integer dots, so we emit a
# sum of products instead.
translation = xla.lower_fun(_dot_using_sum_of_products,
multiple_results=False)
return translation(c, lhs, rhs, dimension_numbers=dimension_numbers)
def _dot_general_masking_rule(padded_vals, logical_shapes, *, dimension_numbers,
precision):
lhs, rhs = padded_vals
# Only need to mask off contraction dims of one side - we mask the lhs here
# but this is arbitrary. Could check the sizes of lhs and rhs and mask
# whichever is smallest.
lhs_shape, _ = logical_shapes
(lhs_contract, _), _ = dimension_numbers
return dot_general(_masked(lhs, lhs_shape, lhs_contract),
rhs, dimension_numbers, precision=precision)
dot_general_p = standard_primitive(_dot_general_shape_rule,
_dot_general_dtype_rule, 'dot_general',
_dot_general_translation_rule)
ad.defbilinear(dot_general_p,
_dot_general_transpose_lhs, _dot_general_transpose_rhs)
batching.primitive_batchers[dot_general_p] = _dot_general_batch_rule
masking.masking_rules[dot_general_p] = _dot_general_masking_rule
def _broadcast_shape_rule(operand, sizes):
_check_shapelike('broadcast', 'sizes', sizes)
return tuple(sizes) + operand.shape
def _broadcast_batch_rule(batched_args, batch_dims, *, sizes):
operand, = batched_args
bdim, = batch_dims
new_bdim = None if bdim is None else bdim + len(sizes)
return broadcast(operand, sizes), new_bdim
broadcast_p = standard_primitive(
_broadcast_shape_rule, _input_dtype, 'broadcast')
ad.deflinear(broadcast_p, lambda t, sizes: [_reduce_sum(t, range(len(sizes)))])
batching.primitive_batchers[broadcast_p] = _broadcast_batch_rule
def _broadcast_in_dim_impl(operand, *, shape, broadcast_dimensions):
if type(operand) is np.ndarray:
operand = _device_put_raw(operand)
if xla.type_is_device_array(operand) and np.all(
np.equal(operand.shape, np.take(shape, broadcast_dimensions))):
shape = _broadcast_in_dim_shape_rule(
operand, shape=shape, broadcast_dimensions=broadcast_dimensions)
aval = ShapedArray(shape, _dtype(operand))
if operand._lazy_expr is None:
lazy_expr = lazy.broadcast(lazy.array(operand), shape, broadcast_dimensions)
else:
lazy_expr = lazy.broadcast(operand._lazy_expr, shape, broadcast_dimensions)
return xla.make_device_array(aval, operand._device, lazy_expr, operand.device_buffer)
else:
return xla.apply_primitive(broadcast_in_dim_p, operand, shape=shape,
broadcast_dimensions=broadcast_dimensions)
def _broadcast_in_dim_shape_rule(operand, *, shape, broadcast_dimensions):
_check_shapelike('broadcast_in_dim', 'shape', shape)
_check_shapelike('broadcast_in_dim', 'broadcast_dimensions',
broadcast_dimensions)
operand_ndim = np.ndim(operand)
if operand_ndim != len(broadcast_dimensions):
msg = ('broadcast_in_dim broadcast_dimensions must have length equal to '
'operand ndim; got broadcast_dimensions {} for operand ndim {}.')
raise TypeError(msg.format(broadcast_dimensions, operand_ndim))
if len(shape) < operand_ndim:
    msg = ('broadcast_in_dim target broadcast shape must have rank greater '
           'than or equal to the operand rank; got operand ndim {} and '
           'target broadcast ndim {}.')
raise TypeError(msg.format(operand_ndim, len(shape)))
if not set(broadcast_dimensions).issubset(set(range(len(shape)))):
msg = ('broadcast_in_dim broadcast_dimensions must be a subset of output '
'dimensions, got {} for operand ndim {} and shape {}.')
raise TypeError(msg.format(broadcast_dimensions, operand_ndim, shape))
if any(operand.shape[i] != shape[broadcast_dimensions[i]] and
operand.shape[i] != 1 for i in range(operand_ndim)):
msg = (
"broadcast_in_dim operand dimension sizes must either be 1, or be "
"equal to their corresponding dimensions in the target broadcast "
"shape; got operand of shape {}, target broadcast shape {}, "
"broadcast_dimensions {} ")
raise TypeError(msg.format(operand.shape, shape, broadcast_dimensions))
if (len(broadcast_dimensions) != len(set(broadcast_dimensions)) or
tuple(broadcast_dimensions) != tuple(sorted(broadcast_dimensions))):
msg = ("broadcast_in_dim broadcast_dimensions must be strictly increasing; "
"got broadcast_dimensions {}")
raise TypeError(msg.format(broadcast_dimensions))
return shape
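# Worked example: broadcast_in_dim maps each operand axis to an output axis.
# For an operand of shape (3,), shape=(2, 3) with broadcast_dimensions=(1,)
# places the length-3 axis at output axis 1; a size-1 operand axis may also be
# stretched, e.g. an operand of shape (1,) broadcasts to (2, 3) the same way.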
def _broadcast_in_dim_transpose_rule(t, *, shape, broadcast_dimensions):
axes = tuple(np.delete(range(len(shape)), broadcast_dimensions))
return [_reduce_sum(t, axes)]
def _broadcast_in_dim_batch_rule(batched_args, batch_dims, *, shape,
broadcast_dimensions):
operand, = batched_args
bdim, = batch_dims
new_operand = batching.moveaxis(operand, bdim, 0)
new_shape = (operand.shape[bdim],) + shape
new_broadcast_dimensions = (0,) + tuple(np.add(1, broadcast_dimensions))
return broadcast_in_dim(new_operand, new_shape, new_broadcast_dimensions), 0
broadcast_in_dim_p = standard_primitive(
_broadcast_in_dim_shape_rule, _input_dtype, 'broadcast_in_dim')
broadcast_in_dim_p.def_impl(_broadcast_in_dim_impl)
ad.deflinear(broadcast_in_dim_p, _broadcast_in_dim_transpose_rule)
batching.primitive_batchers[broadcast_in_dim_p] = _broadcast_in_dim_batch_rule
def _clamp_shape_rule(min, operand, max):
if min.shape and min.shape != operand.shape:
m = "clamp requires min.shape == operand.shape or min.shape == (), got {}."
raise TypeError(m.format(min.shape))
if max.shape and max.shape != operand.shape:
m = "clamp requires max.shape == operand.shape or max.shape == (), got {}."
raise TypeError(m.format(max.shape))
return operand.shape
_clamp_dtype_rule = partial(naryop_dtype_rule, _input_dtype, [_any, _any, _any],
'clamp')
clamp_p = standard_primitive(_clamp_shape_rule, _clamp_dtype_rule, 'clamp')
ad.defjvp(clamp_p,
lambda g, min, operand, max:
select(bitwise_and(gt(min, operand), lt(min, max)),
_brcast(g, operand), _zeros(operand)),
lambda g, min, operand, max:
select(bitwise_and(gt(operand, min), lt(operand, max)),
g, _zeros(operand)),
lambda g, min, operand, max:
select(lt(max, operand), _brcast(g, operand), _zeros(operand)))
batching.defbroadcasting(clamp_p)
def _concatenate_shape_rule(*operands, **kwargs):
dimension = kwargs.pop('dimension')
if not operands:
msg = "concatenate expects at least one operand, got 0."
raise TypeError(msg)
if not all(isinstance(operand, UnshapedArray) for operand in operands):
msg = "All objects to concatenate must be arrays, got {}."
op = next(op for op in operands if not isinstance(op, UnshapedArray))
raise TypeError(msg.format(type(op)))
if len({operand.ndim for operand in operands}) != 1:
msg = "Cannot concatenate arrays with different ranks, got {}."
raise TypeError(msg.format(", ".join(str(o.ndim) for o in operands)))
if not 0 <= dimension < operands[0].ndim:
msg = "concatenate dimension out of bounds: dimension {} for shapes {}."
raise TypeError(msg.format(dimension, ", ".join([str(o.shape) for o in operands])))
shapes = [operand.shape[:dimension] + operand.shape[dimension+1:]
for operand in operands]
if not shapes[:-1] == shapes[1:]:
msg = ("Cannot concatenate arrays with shapes that differ in dimensions "
"other than the one being concatenated: concatenating along "
"dimension {} for shapes {}.")
shapes = [operand.shape for operand in operands]
raise TypeError(msg.format(dimension, ", ".join(map(str, shapes))))
concat_size = sum(o.shape[dimension] for o in operands)
ex_shape = operands[0].shape
return ex_shape[:dimension] + (concat_size,) + ex_shape[dimension+1:]
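# Worked example: concatenating operands of shapes (2, 3) and (2, 5) along
# dimension=1 leaves the off-axis shapes (both (2,)) equal, and returns
# (2, 3 + 5) = (2, 8).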
def _concatenate_dtype_rule(*operands, **kwargs):
_check_same_dtypes('concatenate', False, *(o.dtype for o in operands))
return operands[0].dtype
def _concatenate_translation_rule(c, *operands, **kwargs):
dimension = kwargs.pop('dimension')
return xops.ConcatInDim(c, operands, dimension)
def _concatenate_transpose_rule(t, *operands, dimension):
operand_shapes = [o.aval.shape if ad.is_undefined_primal(o) else o.shape
for o in operands]
if type(t) is ad_util.Zero:
return ad_util.Zero
else:
limit_points = np.cumsum([shape[dimension] for shape in operand_shapes])
starts = np.zeros((len(operands), t.ndim), dtype=int)
starts[1:, dimension] = limit_points[:-1]
limits = np.tile(t.shape, (len(operands), 1))
limits[:, dimension] = limit_points
return [slice(t, start, limit) if ad.is_undefined_primal(o) else None
for o, start, limit in zip(operands, starts, limits)]
def _concatenate_batch_rule(batched_args, batch_dims, *, dimension):
size = next(op.shape[bdim] for op, bdim in zip(batched_args, batch_dims)
if bdim is not None)
operands = [batching.moveaxis(op, bdim, 0) if bdim is not None
else broadcast(op, (size,))
for op, bdim in zip(batched_args, batch_dims)]
return concatenate(operands, dimension + 1), 0
# The concatenate_p masking rule requires use of a while-loop construct and so
# is defined in lax_control_flow.py
concatenate_p = standard_primitive(
_concatenate_shape_rule, _concatenate_dtype_rule, 'concatenate',
_concatenate_translation_rule)
ad.deflinear(concatenate_p, _concatenate_transpose_rule)
ad.primitive_transposes[concatenate_p] = _concatenate_transpose_rule
batching.primitive_batchers[concatenate_p] = _concatenate_batch_rule
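# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# Operands must agree on dtype, rank, and every dimension except the
# concatenated one:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.concatenate([jnp.ones((2, 3)), jnp.zeros((2, 1))], 1).shape
#   # -> (2, 4)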
def _pad_dtype_rule(operand, padding_value, *, padding_config):
if operand.dtype != padding_value.dtype:
msg = "pad operand and padding_value must be same dtype: got {} and {}."
raise TypeError(msg.format(operand.dtype, padding_value.dtype))
return _input_dtype(operand, padding_value)
def _pad_shape_rule(operand, padding_value, *, padding_config):
del padding_value
  if len(padding_config) != np.ndim(operand):
raise ValueError("length of padding_config must equal the number of axes "
f"of operand, got padding_config {padding_config} "
f"for operand shape {np.shape(operand)}")
if not all(i >= 0 for _, _, i in padding_config):
raise ValueError("interior padding in padding_config must be nonnegative, "
f"got padding_config {padding_config}")
return tuple(l + h + d + (_max(0, d - 1) * i if i > 0 else 0)
for (l, h, i), d in zip(padding_config, np.shape(operand)))
def _pad_transpose(t, operand, padding_value, *, padding_config):
if type(t) is ad_util.Zero:
return ad_util.Zero
lo, hi, interior = zip(*padding_config)
total = lambda x: _reduce_sum(x, list(range(t.ndim)))
def t_op():
unpad_config = safe_zip(np.negative(lo), np.negative(hi),
np.zeros_like(interior))
unpadded = pad(t, np.array(0., t.dtype), unpad_config)
return slice(unpadded, np.zeros_like(lo), unpadded.shape, np.add(interior, 1))
t_operand = t_op() if ad.is_undefined_primal(operand) else None
t_padv = sub(total(t), total(t_operand)) if ad.is_undefined_primal(padding_value) else None
return [t_operand, t_padv]
def _pad_batch_rule(batched_args, batch_dims, *, padding_config):
operand, padding_value = batched_args
operand_bdim, padding_value_bdim = batch_dims
if padding_value_bdim is None:
assert operand_bdim is not None
padding_config = list(padding_config)
padding_config.insert(operand_bdim, (0, 0, 0))
return pad(operand, padding_value, padding_config), operand_bdim
else:
raise NotImplementedError # loop and stack
def _pad_translation_rule(c, operand, padding_value, *, padding_config):
return xops.Pad(operand, padding_value,
xc.make_padding_config(padding_config))
def _pad_masking_rule(padded_vals, logical_shapes, padding_config):
operand, padding_value = padded_vals
shape, _ = logical_shapes
out = pad(operand, padding_value, padding_config)
out_shape = [lo + shape[i] * (interior + 1)
for i, (lo, hi, interior) in enumerate(padding_config)]
padded_dims = [i for i, config in enumerate(padding_config)
if config != (0, 0, 0)]
return _masked(out, out_shape, padded_dims, padding_value)
pad_p = standard_primitive(_pad_shape_rule, _pad_dtype_rule, 'pad',
translation_rule=_pad_translation_rule)
ad.deflinear2(pad_p, _pad_transpose)
batching.primitive_batchers[pad_p] = _pad_batch_rule
masking.masking_rules[pad_p] = _pad_masking_rule
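# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# Each (lo, hi, interior) entry pads one axis, so by the shape rule above a
# size-d axis becomes lo + hi + d + max(0, d - 1) * interior; negative lo/hi
# crop instead of pad:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.pad(jnp.arange(3.), 0., [(1, 2, 1)])
#   # -> [0., 0., 0., 1., 0., 2., 0., 0.]   (shape 8 = 1 + 2 + 3 + 2 * 1)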
# The squeeze primitive exists for the benefit of masking and other
# transformations that need to keep track of axis identity.
# For example, consider reshaping a 2D array with shape (1, N) into a 1D array
# with shape (N,). This results in the following JAXpr:
#   reshape[ dimensions=None new_sizes=(N,) ]
# For N > 1, we can match up the output array axis with the second axis of the
# input. But for N = 1, it is not clear how axes match up: all we know from the
# JAXpr is that we are reshaping from (1, 1) to (1,).
# In contrast, squeeze[ dimensions=(0,) ] is unambiguous.
def squeeze(array: Array, dimensions: Tuple[int, ...]) -> Array:
"""Squeeze any number of size 1 dimensions from an array."""
ndim = np.ndim(array)
dimensions = tuple(sorted(canonicalize_axis(i, ndim) for i in dimensions))
if not dimensions:
return array
return squeeze_p.bind(array, dimensions=dimensions)
def _squeeze_dtype_rule(operand, *, dimensions):
return operand.dtype
def _squeeze_shape_rule(operand, *, dimensions):
return _compute_squeeze_shape(np.shape(operand), dimensions)
def _compute_squeeze_shape(shape, dimensions):
dims_set = set(dimensions)
if len(dims_set) != len(dimensions):
raise ValueError(f"dimensions are not unique: {dimensions}")
if not all(0 <= d < len(shape) for d in dims_set):
raise ValueError(f"dimensions outside range [0, ndim): {dimensions}")
if any(shape[d] != 1 for d in dimensions):
raise ValueError(
"cannot select an axis to squeeze out which has size not equal to "
f"one, got shape={shape} and dimensions={dimensions}")
return tuple(s for i, s in enumerate(shape) if i not in dims_set)
def _squeeze_translation_rule(c, arg, *, dimensions):
new_shape = _compute_squeeze_shape(c.get_shape(arg).dimensions(), dimensions)
return xops.Reshape(arg, new_shape)
def _squeeze_transpose_rule(t, operand, *, dimensions):
assert ad.is_undefined_primal(operand)
return [expand_dims(t, dimensions)]
def _squeeze_batch_rule(batched_args, batch_dims, *, dimensions):
operand, = batched_args
bdim, = batch_dims
operand = batching.moveaxis(operand, bdim, 0)
dimensions = tuple(np.add(1, dimensions))
return squeeze(operand, dimensions=dimensions), 0
squeeze_p = standard_primitive(_squeeze_shape_rule, _squeeze_dtype_rule,
'squeeze', _squeeze_translation_rule)
ad.deflinear2(squeeze_p, _squeeze_transpose_rule)
batching.primitive_batchers[squeeze_p] = _squeeze_batch_rule
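# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# Unlike reshape, squeeze names the axes being removed, so axis identity is
# never ambiguous:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.squeeze(jnp.ones((1, 3, 1)), (0, 2)).shape
#   # -> (3,)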
def expand_dims(array: Array, dimensions: Tuple[int, ...]) -> Array:
"""Insert any number of size 1 dimensions into an array."""
ndim_out = np.ndim(array) + len(dimensions)
dims_set = frozenset(canonicalize_axis(i, ndim_out) for i in dimensions)
result_shape = list(np.shape(array))
for i in sorted(dims_set):
result_shape.insert(i, 1)
broadcast_dims = [i for i in range(ndim_out) if i not in dims_set]
return broadcast_in_dim(array, result_shape, broadcast_dims)
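# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# `dimensions` index into the *output* shape, making expand_dims the inverse
# of squeeze:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.expand_dims(jnp.ones((3,)), (0, 2)).shape
#   # -> (1, 3, 1)
#   >>> lax.squeeze(lax.expand_dims(jnp.ones((3,)), (0, 2)), (0, 2)).shape
#   # -> (3,)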
# We have a nonstandard reshape impl so that we can be lazy about data movement.
def _reshape_impl(operand, *, new_sizes, dimensions):
old_sizes = np.shape(operand)
if xla.type_is_device_array(operand) and dimensions is None:
bcast_dims = _is_singleton_reshape(old_sizes, new_sizes)
if bcast_dims is not None:
aval = ShapedArray(new_sizes, operand.dtype)
if operand._lazy_expr is None:
lazy_expr = lazy.broadcast(lazy.array(operand), new_sizes, bcast_dims)
else:
lazy_expr = lazy.broadcast(operand._lazy_expr, new_sizes, bcast_dims)
return xla.make_device_array(aval, operand._device, lazy_expr, operand.device_buffer)
return xla.apply_primitive(reshape_p, operand, new_sizes=new_sizes,
dimensions=dimensions)
def _is_singleton_reshape(old, new):
# A singleton reshape is one where only singleton dimensions are added. We
# want to detect them because they can be expressed as (lazy) broadcasts.
old, new = iter(old), iter(new)
d1, d2 = next(old, None), next(new, None)
bcast_dims = []
i = 0
while True:
if d1 is d2 is None:
return bcast_dims
elif d1 == d2:
bcast_dims.append(i)
i += 1
d1, d2 = next(old, None), next(new, None)
elif d2 == 1:
i += 1
d2 = next(new, None)
else:
return None
def _reshape_shape_rule(operand, *, new_sizes, dimensions):
if not np.all(np.greater_equal(new_sizes, 0)):
    msg = 'reshape new_sizes must all be nonnegative, got {}.'
raise TypeError(msg.format(new_sizes))
if prod(np.shape(operand)) != prod(new_sizes):
msg = 'reshape total size must be unchanged, got new_sizes {} for shape {}.'
raise TypeError(msg.format(new_sizes, np.shape(operand)))
if dimensions is not None:
if set(dimensions) != set(range(np.ndim(operand))):
msg = ('reshape dimensions must be a permutation of operand dimensions, '
'got dimensions {} for shape {}.')
raise TypeError(msg.format(dimensions, np.shape(operand)))
return tuple(new_sizes)
def _reshape_dtype_rule(operand, *, new_sizes, dimensions):
return operand.dtype
def _reshape_translation_rule(c, operand, *, new_sizes, dimensions):
if dimensions is None:
return xops.Reshape(operand, new_sizes)
else:
return xops.Reshape(operand, dimensions, new_sizes)
def _reshape_transpose_rule(t, operand, *, new_sizes, dimensions):
assert ad.is_undefined_primal(operand)
if dimensions is None:
return [reshape(t, operand.aval.shape)]
else:
return [transpose(reshape(t, np.take(operand.aval.shape, dimensions)),
np.argsort(dimensions))]
def _reshape_batch_rule(batched_args, batch_dims, *, new_sizes, dimensions):
operand, = batched_args
bdim, = batch_dims
operand = batching.moveaxis(operand, bdim, 0)
if dimensions is not None:
dimensions = (0,) + tuple(np.add(1, dimensions))
return reshape(operand, operand.shape[:1] + new_sizes, dimensions), 0
def _reshape_masking_rule(padded_args, logical_shapes, polymorphic_shapes,
new_sizes, dimensions):
operand, = padded_args
old_shape, = polymorphic_shapes
def is_poly(size): return type(size) is masking.Poly and not size.is_constant
def merge_const_sizes(shape):
"""Merges all nonpolymorphic sizes into the previous polymorphic size."""
poly_dims = [i for i, size in enumerate(shape) if is_poly(size)]
return [prod(shape[start:stop])
for start, stop in zip([0] + poly_dims, poly_dims + [len(shape)])]
if merge_const_sizes(old_shape) != merge_const_sizes(new_sizes):
raise NotImplementedError(
"Reshape on padded dimensions causing fragmentation is not supported.")
return reshape(operand,
new_sizes=masking.padded_shape_as_value(new_sizes),
dimensions=dimensions)
reshape_p = standard_primitive(_reshape_shape_rule, _reshape_dtype_rule,
'reshape', _reshape_translation_rule)
reshape_p.def_impl(_reshape_impl)
ad.deflinear2(reshape_p, _reshape_transpose_rule)
batching.primitive_batchers[reshape_p] = _reshape_batch_rule
masking.masking_rules[reshape_p] = _reshape_masking_rule
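# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# When `dimensions` is given, the operand is permuted by it before the
# row-major reshape, matching the translation and transpose rules above:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> x = jnp.arange(6).reshape(2, 3)
#   >>> lax.reshape(x, (6,), dimensions=(1, 0))
#   # -> [0, 3, 1, 4, 2, 5], i.e. reshape(transpose(x, (1, 0)), (6,))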
def _rev_shape_rule(operand, *, dimensions):
_check_shapelike('rev', 'dimensions', dimensions)
if len(set(dimensions)) != len(dimensions):
msg = 'rev dimensions must be unique, got {}.'
raise TypeError(msg.format(dimensions))
if dimensions and not _max(dimensions) < operand.ndim:
msg = ('rev dimensions must all be less than operand ndim, got dimensions '
'{} for operand ndim {}.')
raise TypeError(msg.format(dimensions, operand.ndim))
return operand.shape
def _rev_batch_rule(batched_args, batch_dims, *, dimensions):
operand, = batched_args
bdim, = batch_dims
new_dimensions = [i + 1 if i >= bdim else i for i in dimensions]
return rev(operand, new_dimensions), bdim
rev_p = standard_primitive(_rev_shape_rule, _input_dtype, 'rev')
ad.deflinear(rev_p, lambda t, dimensions: [rev(t, dimensions)])
batching.primitive_batchers[rev_p] = _rev_batch_rule
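# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# rev reverses the listed axes and is its own inverse, which is why the
# linear rule above transposes rev to itself:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.rev(jnp.arange(4), (0,))
#   # -> [3, 2, 1, 0]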
def _transpose_impl(operand, *, permutation):
if xla.type_is_device_array(operand):
if operand._lazy_expr is None:
lazy_expr = lazy.transpose(lazy.array(operand), permutation)
else:
lazy_expr = lazy.transpose(operand._lazy_expr, permutation)
aval = ShapedArray(lazy_expr.shape, operand.dtype)
return xla.make_device_array(aval, operand._device, lazy_expr, operand.device_buffer)
else:
return xla.apply_primitive(transpose_p, operand, permutation=permutation)
def _transpose_shape_rule(operand, *, permutation):
if not isinstance(permutation, (tuple, list, np.ndarray)):
msg = "transpose permutation must be a tuple/list/ndarray, got {}."
raise TypeError(msg.format(type(permutation)))
if tuple(sorted(permutation)) != tuple(range(operand.ndim)):
msg = ("transpose permutation isn't a permutation of operand dimensions, "
"got permutation {} for operand shape {}.")
raise TypeError(msg.format(permutation, operand.shape))
return tuple(np.take(operand.shape, permutation))
def _transpose_batch_rule(batched_args, batch_dims, *, permutation):
operand, = batched_args
bdim, = batch_dims
perm = (bdim,) + tuple(i if i < bdim else i+1 for i in permutation)
return transpose(operand, perm), 0
def _transpose_masking_rule(padded_vals, logical_shapes, permutation):
return transpose(*padded_vals, permutation=permutation)
transpose_p = standard_primitive(_transpose_shape_rule, _input_dtype,
'transpose')
transpose_p.def_impl(_transpose_impl)
ad.deflinear(transpose_p,
lambda t, permutation: [transpose(t, np.argsort(permutation))])
batching.primitive_batchers[transpose_p] = _transpose_batch_rule
masking.masking_rules[transpose_p] = _transpose_masking_rule
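# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# Output axis i takes input axis permutation[i]; np.argsort(permutation) is
# the inverse permutation, which the linear rule above applies to cotangents:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.transpose(jnp.ones((3, 4, 5)), (2, 0, 1)).shape
#   # -> (5, 3, 4)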
def _select_shape_rule(pred, on_true, on_false):
if on_true.shape != on_false.shape:
msg = "select on_true and on_false must have the same shape, got {} and {}."
raise TypeError(msg.format(on_true.shape, on_false.shape))
if pred.shape and pred.shape != on_true.shape:
msg = ("select pred must be scalar or have the same shape as on_true and "
"on_false, got pred shape {} for on_true and on_false of shape {}.")
raise TypeError(msg.format(pred.shape, on_true.shape))
return on_true.shape
def _select_dtype_rule(pred, on_true, on_false):
_check_same_dtypes("select", False, on_true.dtype, on_false.dtype)
if not dtypes.issubdtype(pred.dtype, np.bool_):
msg = "select pred must be boolean type, got {}."
raise TypeError(msg.format(pred.dtype))
return on_true.dtype
def _select_transpose_rule(t, pred, on_true, on_false):
assert not ad.is_undefined_primal(pred)
if type(t) is ad_util.Zero:
return ad_util.Zero
else:
zeros = full_like(t, 0)
return [None,
select(pred, t, zeros) if ad.is_undefined_primal(on_true) else None,
select(pred, zeros, t) if ad.is_undefined_primal(on_false) else None]
def _select_batch_rule(batched_args, batch_dims, **unused_kwargs):
pred, on_true, on_false, = batched_args
pred_bdim, ot_bdim, of_bdim = batch_dims
size = next(x.shape[i] for x, i in zip(batched_args, batch_dims)
if i is not None)
# avoid transposes and some broadcasts in special cases
if pred_bdim == ot_bdim == of_bdim:
if np.shape(pred) == np.shape(on_true):
return select(pred, on_true, on_false), pred_bdim
else:
# vmapped function had a scalar pred with nonscalar args
assert np.ndim(pred) == 1
pred = broadcast_in_dim(pred, on_true.shape, [pred_bdim])
return select(pred, on_true, on_false), pred_bdim
elif np.ndim(pred) == 0 and ot_bdim is not None and of_bdim is not None:
if ot_bdim == of_bdim:
return select(pred, on_true, on_false), ot_bdim
elif np.shape(on_true) == np.shape(on_false):
on_false = batching.moveaxis(on_false, of_bdim, ot_bdim)
return select(pred, on_true, on_false), ot_bdim
pred = batching.bdim_at_front(pred, pred_bdim, size) if np.shape(pred) else pred
if not np.shape(on_true) == np.shape(on_false) == ():
on_true = batching.bdim_at_front(on_true, ot_bdim, size)
on_false = batching.bdim_at_front(on_false, of_bdim, size)
assert np.shape(on_true) == np.shape(on_false)
if 0 < np.ndim(pred) < np.ndim(on_true):
# vmapped function had a scalar pred with nonscalar args
assert np.ndim(pred) == 1
pred = broadcast_in_dim(pred, on_true.shape, [0])
if np.ndim(pred) > np.ndim(on_true):
assert np.ndim(on_true) == 0
on_true = broadcast(on_true, pred.shape)
on_false = broadcast(on_false, pred.shape)
return select(pred, on_true, on_false), 0
def _select_masking_rule(padded_vals, logical_shapes):
pred_shape, true_shape, false_shape = [
masking.padded_shape_as_value(val.shape) for val in padded_vals]
assert np.array_equal(pred_shape, true_shape)
assert np.array_equal(pred_shape, false_shape)
return select(*padded_vals)
def _select_jvp(primals, tangents):
pred, on_true, on_false = primals
_, on_true_dot, on_false_dot = tangents
out = select(pred, on_true, on_false)
if type(on_true_dot) is ad_util.Zero:
out_dot = select(pred, _zeros(on_false_dot), on_false_dot)
elif type(on_false_dot) is ad_util.Zero:
out_dot = select(pred, on_true_dot, _zeros(on_true_dot))
else:
out_dot = select(pred, on_true_dot, on_false_dot)
return out, out_dot
select_p = standard_primitive(_select_shape_rule, _select_dtype_rule, 'select')
ad.primitive_jvps[select_p] = _select_jvp
ad.primitive_transposes[select_p] = _select_transpose_rule
batching.primitive_batchers[select_p] = _select_batch_rule
masking.masking_rules[select_p] = _select_masking_rule
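# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# pred must be boolean and either scalar or the same shape as both branches:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.select(jnp.array([True, False, True]), jnp.ones(3), jnp.zeros(3))
#   # -> [1., 0., 1.]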
def _slice_shape_rule(operand, *, start_indices, limit_indices, strides):
_check_shapelike("slice", "start_indices", start_indices)
_check_shapelike("slice", "limit_indices", limit_indices)
if operand.ndim != len(start_indices):
msg = ("slice start_indices must have length equal to the number of "
"dimensions of the operand, got indices {} for operand shape {}.")
raise TypeError(msg.format(start_indices, operand.shape))
if len(start_indices) != len(limit_indices):
msg = ("slice limit_indices must have the same length as start_indices, "
"got start_inidices {} and limit_indices {}.")
raise TypeError(msg.format(start_indices, limit_indices))
if (not masking.is_polymorphic(limit_indices) and
not masking.is_polymorphic(operand.shape) and
not np.all(np.less_equal(limit_indices, operand.shape))):
msg = ("slice limit_indices must be less than or equal to operand shape, "
"got limit_indices {} for operand shape {}.")
raise TypeError(msg.format(limit_indices, operand.shape))
if not np.all(np.greater_equal(start_indices, 0)):
msg = ("slice start_indices must be greater than or equal to zero, "
"got start_indices of {}.")
raise TypeError(msg.format(start_indices))
if (not masking.is_polymorphic(limit_indices) and
not np.all(np.greater_equal(limit_indices, start_indices))):
msg = ("slice limit_indices must be greater than or equal to start_indices,"
" got start_indices {} and limit_indices {}.")
raise TypeError(msg.format(start_indices, limit_indices))
if strides is None:
strides = np.ones(operand.ndim, np.int32)
else:
_check_shapelike("slice", "strides", strides)
if len(strides) != operand.ndim:
msg = ("slice strides must have length equal to the number of dimensions "
"of the operand, got strides {} for operand shape {}.")
raise TypeError(msg.format(strides, operand.shape))
if not np.all(np.greater(strides, 0)):
msg = "slice strides must be positive, got {}"
raise TypeError(msg.format(strides))
diff = np.subtract(limit_indices, start_indices)
  # Avoid np.divmod: NumPy ignores Poly.__rdivmod__, which would break
  # polymorphic strides.
return tuple(q + (r > 0) for q, r in map(divmod, diff, strides))
def _slice_translation_rule(c, operand, *, start_indices, limit_indices,
strides):
return xops.Slice(operand, start_indices, limit_indices,
strides or [1] * len(start_indices))
def _slice_transpose_rule(t, operand, *, start_indices, limit_indices, strides):
assert ad.is_undefined_primal(operand)
operand_shape = operand.aval.shape
if strides is None or np.all(np.equal(strides, 1)):
pads = zip(start_indices, np.subtract(operand_shape, limit_indices),
(0,) * len(start_indices))
else:
real_limits = np.add(
start_indices,
np.where(np.array(t.shape) == 0, 0,
np.add(1, np.multiply(np.subtract(t.shape, 1), strides))))
pads = safe_zip(start_indices, np.subtract(operand_shape, real_limits),
np.subtract(strides, 1))
result = pad(t, _const(t, 0), pads)
assert result.shape == operand_shape, (
f"result.shape={result.shape} operand_shape={operand_shape}")
return [result]
def _slice_batching_rule(batched_args, batch_dims, *, start_indices,
limit_indices, strides):
operand, = batched_args
bdim, = batch_dims
new_start_indices = list(start_indices)
new_start_indices.insert(bdim, 0)
new_limit_indices = list(limit_indices)
new_limit_indices.insert(bdim, operand.shape[bdim])
if strides is None:
new_strides = None
else:
new_strides = list(strides)
new_strides.insert(bdim, 1)
out = slice(operand, new_start_indices, new_limit_indices, new_strides)
return out, bdim
def _slice_masking_rule(
padded_vals, logical_shapes, start_indices, limit_indices, strides):
operand, = padded_vals
strides = masking.padded_shape_as_value(strides) if strides else None
return slice(operand,
start_indices=masking.padded_shape_as_value(start_indices),
limit_indices=masking.padded_shape_as_value(limit_indices),
strides=strides)
slice_p = standard_primitive(_slice_shape_rule, _input_dtype, 'slice',
_slice_translation_rule)
ad.deflinear2(slice_p, _slice_transpose_rule)
batching.primitive_batchers[slice_p] = _slice_batching_rule
masking.masking_rules[slice_p] = _slice_masking_rule
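# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# With strides, each output axis has ceil((limit - start) / stride) elements,
# as computed by the shape rule above:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.slice(jnp.arange(10), (1,), (9,), (3,))
#   # -> [1, 4, 7]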
def _dynamic_slice_shape_rule(operand, *start_indices, slice_sizes):
if operand.ndim != len(start_indices):
msg = ("dynamic_slice start_indices must have length equal to the number "
"of dimensions of the operand, got indices {} for operand shape {}.")
raise TypeError(msg.format(start_indices, operand.shape))
if len(start_indices) != len(slice_sizes):
msg = ("dynamic_slice slice_sizes must have the same length as "
"start_indices, got start_inidices length {} and slice_sizes {}.")
raise TypeError(msg.format(len(start_indices), slice_sizes))
  if not np.all(np.less_equal(slice_sizes, operand.shape)):
    msg = ("dynamic_slice slice_sizes must be less than or equal to operand "
           "shape, got slice_sizes {} for operand shape {}.")
    raise TypeError(msg.format(slice_sizes, operand.shape))
  if not np.all(np.greater_equal(slice_sizes, 0)):
    msg = ("dynamic_slice slice_sizes must be greater than or equal to zero, "
           "got slice_sizes of {}.")
    raise TypeError(msg.format(slice_sizes))
return tuple(slice_sizes)
def _dynamic_slice_dtype_rule(operand, *start_indices, slice_sizes):
if any(i.dtype != start_indices[0].dtype or
not dtypes.issubdtype(i.dtype, np.integer) for i in start_indices):
msg = ("index arguments to dynamic_slice must be integers of the same "
"type, got: {}")
raise TypeError(msg.format(", ".join(i.dtype.name for i in start_indices)))
return operand.dtype
def _dynamic_slice_translation_rule(c, operand, *start_indices, slice_sizes):
return xops.DynamicSlice(operand, start_indices, slice_sizes)
def _dynamic_slice_jvp(primals, tangents, *, slice_sizes):
tangent_out = tangents[0]
if type(tangent_out) is not ad_util.Zero:
tangent_out = dynamic_slice(tangent_out, primals[1:], slice_sizes)
return dynamic_slice(primals[0], primals[1:], slice_sizes), tangent_out
def _dynamic_slice_transpose_rule(t, operand, *start_indices, slice_sizes):
assert ad.is_undefined_primal(operand)
assert all(not ad.is_undefined_primal(s) for s in start_indices)
operand_shape, operand_dtype = operand.aval.shape, operand.aval.dtype
if config.omnistaging_enabled:
zeros = full(operand_shape, 0, operand_dtype)
else:
zeros = full(operand_shape, tie_in(t, _zero(t)))
if type(t) is ad_util.Zero:
return [zeros] + [None] * len(start_indices)
else:
return ([dynamic_update_slice(zeros, t, start_indices)] +
[None] * len(start_indices))
def _batch_dynamic_slice_indices(indices, bdims):
if len(indices) == 0:
return np.array([], 'int32'), None
size = next((x.shape[i] for x, i in zip(indices, bdims) if i is not None), -1)
if size < 0:
return concatenate([broadcast(i, (1,)) for i in indices], 0), None
indices = concatenate(
[broadcast_in_dim(x, (size, 1),
broadcast_dimensions=((0,) if i is not None else ()))
for x, i in zip(indices, bdims)],
dimension=1)
return indices, 0
def _dynamic_slice_batching_rule(batched_args, batch_dims, *, slice_sizes):
# A dynamic slice is a special case of gather; we can delegate to the gather
# batching rule.
# TODO(phawkins): consider removing dynamic_slice entirely and using gather
# always.
operand, *start_indices = batched_args
operand_bd, *start_idx_bds = batch_dims
operand_shape = (operand.shape if operand_bd is batching.not_mapped
else tuple(np.delete(operand.shape, operand_bd)))
dims = tuple(range(len(operand_shape)))
dnums = GatherDimensionNumbers(offset_dims=dims, collapsed_slice_dims=(),
start_index_map=dims)
index, index_bdim = _batch_dynamic_slice_indices(start_indices, start_idx_bds)
return _gather_batching_rule(
[operand, index], [operand_bd, index_bdim], dimension_numbers=dnums,
slice_sizes=slice_sizes)
dynamic_slice_p = standard_primitive(
_dynamic_slice_shape_rule, _dynamic_slice_dtype_rule, 'dynamic_slice',
_dynamic_slice_translation_rule)
ad.primitive_jvps[dynamic_slice_p] = _dynamic_slice_jvp
ad.primitive_transposes[dynamic_slice_p] = _dynamic_slice_transpose_rule
batching.primitive_batchers[dynamic_slice_p] = _dynamic_slice_batching_rule
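# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# slice_sizes are static, while start_indices may be traced values; per XLA
# semantics, out-of-bounds start indices are clamped so the slice fits:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.dynamic_slice(jnp.arange(6), (4,), (3,))
#   # -> [3, 4, 5]   (start clamped from 4 to 3)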
def _dynamic_update_slice_shape_rule(operand, update, *start_indices):
if operand.ndim != update.ndim:
msg = ("dynamic_update_slice update must have the same rank as operand, "
"got update shape {} for operand shape {}.")
raise TypeError(msg.format(update.shape, operand.shape))
if operand.ndim != len(start_indices):
msg = ("dynamic_update_slice start_indices must have length equal to the "
"rank of operand, got indices {} for operand shape {}.")
raise TypeError(msg.format(start_indices, operand.shape))
if not np.all(np.less_equal(update.shape, operand.shape)):
msg = ("dynamic_update_slice update shape must be smaller than operand "
"shape, got update shape {} for operand shape {}.")
raise TypeError(msg.format(update.shape, operand.shape))
return operand.shape
def _dynamic_update_slice_dtype_rule(operand, update, *start_indices):
_check_same_dtypes("dynamic_update_slice", False, operand.dtype, update.dtype)
if any(i.dtype != start_indices[0].dtype or
not dtypes.issubdtype(i.dtype, np.integer) for i in start_indices):
msg = ("index arguments to dynamic_update_slice must be integers of the "
"same type, got {}")
raise TypeError(msg.format(", ".join(i.dtype.name for i in start_indices)))
return operand.dtype
def _dynamic_update_slice_jvp(primals, tangents):
operand, update = primals[:2]
start_indices = primals[2:]
g_operand, g_update = tangents[:2]
val_out = dynamic_update_slice(operand, update, start_indices)
if type(g_operand) is ad_util.Zero and type(g_update) is ad_util.Zero:
tangent_out = ad_util.Zero.from_value(val_out)
else:
g_operand = ad.instantiate_zeros(g_operand)
g_update = ad.instantiate_zeros(g_update)
tangent_out = dynamic_update_slice(g_operand, g_update, start_indices)
return val_out, tangent_out
def _dynamic_update_slice_transpose_rule(t, operand, update, *start_indices):
assert all(not ad.is_undefined_primal(x) for x in start_indices)
if ad.is_undefined_primal(update):
update_shape = update.aval.shape
else:
update_shape = update.shape
dus = dynamic_update_slice
ds = dynamic_slice
zeros = _zeros(t, shape=update_shape)
operand_t = dus(t, zeros, start_indices) if ad.is_undefined_primal(operand) else None
update_t = ds(t, start_indices, update_shape) if ad.is_undefined_primal(update) else None
return [operand_t, update_t] + [None] * len(start_indices)
def _dynamic_update_slice_translation_rule(c, operand, update, *start_indices):
return xops.DynamicUpdateSlice(operand, update, start_indices)
def _dynamic_update_slice_batching_rule(batched_args, batch_dims):
# A dynamic update slice is a special case of scatter; we can delegate to the
# scatter batching rule.
# TODO(phawkins): consider removing dynamic_update_slice entirely and using
# scatter always.
operand, update, *start_idx = batched_args
operand_bd, update_bd, *start_idx_bd = batch_dims
update_shape = (np.shape(update) if update_bd is batching.not_mapped
else tuple(np.delete(np.shape(update), update_bd)))
dims = tuple(range(len(update_shape)))
dnums = ScatterDimensionNumbers(update_window_dims=dims,
inserted_window_dims=(),
scatter_dims_to_operand_dims=dims)
index, index_bdim = _batch_dynamic_slice_indices(start_idx, start_idx_bd)
return _scatter_batching_rule(
scatter, (operand, index, update), (operand_bd, index_bdim, update_bd),
update_jaxpr=None, update_consts=None, dimension_numbers=dnums,
indices_are_sorted=True, unique_indices=True)
dynamic_update_slice_p = standard_primitive(
_dynamic_update_slice_shape_rule, _dynamic_update_slice_dtype_rule,
'dynamic_update_slice', _dynamic_update_slice_translation_rule)
ad.primitive_jvps[dynamic_update_slice_p] = _dynamic_update_slice_jvp
ad.primitive_transposes[dynamic_update_slice_p] = \
_dynamic_update_slice_transpose_rule
batching.primitive_batchers[dynamic_update_slice_p] = \
_dynamic_update_slice_batching_rule
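# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# Writes `update` into a same-rank window of `operand`, again clamping
# out-of-bounds start indices per XLA semantics:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> lax.dynamic_update_slice(jnp.zeros(5), jnp.ones(2), (3,))
#   # -> [0., 0., 0., 1., 1.]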
def _gather_dimensions_proto(indices_shape, dimension_numbers):
assert type(dimension_numbers) is GatherDimensionNumbers
proto = xla_client.GatherDimensionNumbers()
proto.offset_dims.extend(dimension_numbers.offset_dims)
proto.collapsed_slice_dims.extend(dimension_numbers.collapsed_slice_dims)
proto.start_index_map.extend(dimension_numbers.start_index_map)
assert indices_shape.rank() > 0
proto.index_vector_dim = indices_shape.rank() - 1
return proto
def _gather_dtype_rule(operand, start_indices, **kwargs):
if not dtypes.issubdtype(start_indices.dtype, np.integer):
raise ValueError("start_indices must have an integer type")
return dtypes.canonicalize_dtype(operand.dtype)
_rank = lambda arr: len(arr.shape)
def _is_sorted(dims, op_name, name):
for i in range(1, len(dims)):
if dims[i] < dims[i - 1]:
raise TypeError(f"{name} in {op_name} op must be sorted; got {dims}")
def _sorted_dims_in_range(dims, rank, op_name, name):
if len(dims) == 0:
return
invalid_dim = None
if dims[0] < 0:
invalid_dim = dims[0]
elif dims[-1] >= rank:
invalid_dim = dims[-1]
  if invalid_dim is not None:
raise TypeError(f"Invalid {name} set in {op_name} op; valid range is "
f"[0, {rank}); got: {invalid_dim}.")
def _no_duplicate_dims(dims, op_name, name):
if len(set(dims)) != len(dims):
raise TypeError(f"{name} in {op_name} op must not repeat; got: {dims}.")
def _gather_shape_rule(operand, start_indices, *, dimension_numbers,
slice_sizes):
"""Validates the well-formedness of the arguments to Gather.
The code implements the checks based on the detailed operation semantics of
XLA's `Gather <https://www.tensorflow.org/xla/operation_semantics#gather>`_
operator and following the outline of the implementation of
ShapeInference::InferGatherShape in TensorFlow.
"""
offset_dims = dimension_numbers.offset_dims
collapsed_slice_dims = dimension_numbers.collapsed_slice_dims
start_index_map = dimension_numbers.start_index_map
# Note: in JAX, index_vector_dim is always computed as below, cf. the
# documentation of the GatherDimensionNumbers class.
index_vector_dim = _rank(start_indices) - 1
# This case should never happen in JAX, due to the implicit construction of
# index_vector_dim, but is included for completeness.
if _rank(start_indices) < index_vector_dim or index_vector_dim < 0:
raise TypeError(f"Gather index leaf dimension must be within [0, rank("
f"start_indices) + 1). rank(start_indices) is "
f"{_rank(start_indices)} and gather index leaf dimension "
f"is {index_vector_dim}.")
expanded_start_indices_shape = list(start_indices.shape)
# This case should never happen in JAX, due to the implicit construction of
# index_vector_dim, but is included for completeness.
if len(expanded_start_indices_shape) == index_vector_dim:
expanded_start_indices_shape.append(1)
# Start ValidateGatherDimensions
  # In the error messages output by XLA, "offset_dims" is called "Output window
  # dimensions". For consistency's sake, our error messages stick to
  # "offset_dims".
_is_sorted(offset_dims, "gather", "offset_dims")
_no_duplicate_dims(offset_dims, "gather", "offset_dims")
output_offset_dim_count = len(offset_dims)
output_shape_rank = len(offset_dims) + _rank(start_indices) - 1
for i in range(output_offset_dim_count):
offset_dim = offset_dims[i]
if offset_dim < 0 or offset_dim >= output_shape_rank:
raise TypeError(f"Offset dimension {i} in gather op is out of bounds; "
f"got {offset_dim}, but should have been in "
f"[0, {output_shape_rank})")
if len(start_index_map) != start_indices.shape[index_vector_dim]:
raise TypeError(f"Gather op has {len(start_index_map)} elements in "
f"start_index_map and the bound of dimension "
f"index_vector_dim={index_vector_dim} of start_indices is "
f"{start_indices.shape[index_vector_dim]}. These two "
f"numbers must be equal.")
for i in range(len(start_index_map)):
operand_dim_for_start_index_i = start_index_map[i]
if (operand_dim_for_start_index_i < 0 or
operand_dim_for_start_index_i >= _rank(operand)):
raise TypeError(f"Invalid start_index_map; domain is "
f"[0, {_rank(operand)}), got: "
f"{i}->{operand_dim_for_start_index_i}.")
_no_duplicate_dims(start_index_map, "gather", "start_index_map")
# _is_sorted and _sorted_dims_in_range are checked in the opposite order
# compared to the XLA implementation. In cases when the input is not sorted
# AND there are problematic collapsed_slice_dims, the error message will thus
# be different.
_is_sorted(collapsed_slice_dims, "gather", "collapsed_slice_dims")
_sorted_dims_in_range(collapsed_slice_dims, _rank(operand), "gather",
"collapsed_slice_dims")
_no_duplicate_dims(collapsed_slice_dims, "gather", "collapsed_slice_dims")
# End ValidateGatherDimensions
if _rank(operand) != len(slice_sizes):
raise TypeError(f"Gather op must have one slice size for every input "
f"dimension; got: len(slice_sizes)={len(slice_sizes)}, "
f"input_shape.rank={_rank(operand)}")
if len(slice_sizes) != len(offset_dims) + len(collapsed_slice_dims):
raise TypeError(f"All components of the offset index in a gather op must "
f"either be a offset dimension or explicitly collapsed; "
f"got len(slice_sizes)={len(slice_sizes)}, "
f"output_slice_sizes={offset_dims}, collapsed_slice_dims="
f"{collapsed_slice_dims}.")
for i in range(len(slice_sizes)):
slice_size = slice_sizes[i]
corresponding_input_size = operand.shape[i]
if slice_size < 0 or slice_size > corresponding_input_size:
raise TypeError(f"Slice size at index {i} in gather op is out of range, "
f"must be within [0, {corresponding_input_size + 1}), "
f"got {slice_size}.")
for i in range(len(collapsed_slice_dims)):
bound = slice_sizes[collapsed_slice_dims[i]]
if bound > 1:
raise TypeError(f"Gather op can only collapse slice dims with bound 1 "
f"or 0, but bound is {bound} for index "
f"{collapsed_slice_dims[i]} at position {i}.")
expanded_start_indices_shape.pop(index_vector_dim)
start_indices_shape = iter(expanded_start_indices_shape)
slice_sizes = iter(np.delete(slice_sizes, collapsed_slice_dims))
return tuple(next(slice_sizes) if i in offset_dims
else next(start_indices_shape) for i in range(output_shape_rank))
def _gather_translation_rule(c, operand, start_indices, *, dimension_numbers,
slice_sizes):
indices_shape = c.get_shape(start_indices)
return xops.Gather(
operand, start_indices,
_gather_dimensions_proto(indices_shape, dimension_numbers), slice_sizes,
indices_are_sorted=False)
def _gather_jvp_rule(g, operand, start_indices, *, dimension_numbers,
slice_sizes):
return gather(g, start_indices, dimension_numbers, slice_sizes)
def _gather_transpose_rule(t, operand, start_indices, *, dimension_numbers,
slice_sizes):
assert ad.is_undefined_primal(operand)
operand_shape = operand.aval.shape
if type(t) is ad_util.Zero:
return ad_util.Zero
if config.omnistaging_enabled:
zeros = full(operand_shape, _zero(t))
else:
zeros = full(operand_shape, tie_in(t, _zero(t)))
scatter_dnums = ScatterDimensionNumbers(
update_window_dims=dimension_numbers.offset_dims,
inserted_window_dims=dimension_numbers.collapsed_slice_dims,
scatter_dims_to_operand_dims=dimension_numbers.start_index_map)
out = scatter_add(zeros, start_indices, t, scatter_dnums,
indices_are_sorted=False,
unique_indices=False)
return [out, ad_util.Zero.from_value(start_indices)]
def _gather_batching_rule(batched_args, batch_dims, *, dimension_numbers,
slice_sizes):
operand, start_indices = batched_args
operand_bdim, start_indices_bdim = batch_dims
if operand_bdim is not None and start_indices_bdim is None:
operand = batching.moveaxis(operand, operand_bdim, 0)
slice_sizes = (operand.shape[0],) + slice_sizes
offset_dims = (0,) + tuple(np.add(1, dimension_numbers.offset_dims))
collapsed_slice_dims = tuple(np.add(1, dimension_numbers.collapsed_slice_dims))
start_index_map = tuple(np.add(1, dimension_numbers.start_index_map))
dnums = GatherDimensionNumbers(
offset_dims=offset_dims,
collapsed_slice_dims=collapsed_slice_dims,
start_index_map=start_index_map)
return gather(operand, start_indices, dimension_numbers=dnums,
slice_sizes=slice_sizes), 0
elif operand_bdim is None and start_indices_bdim is not None:
start_indices = batching.moveaxis(start_indices, start_indices_bdim, 0)
offset_dims = tuple(np.add(1, dimension_numbers.offset_dims))
dnums = GatherDimensionNumbers(
offset_dims=offset_dims,
collapsed_slice_dims=dimension_numbers.collapsed_slice_dims,
start_index_map=dimension_numbers.start_index_map)
return gather(operand, start_indices, dimension_numbers=dnums,
slice_sizes=slice_sizes), 0
else:
# move batch dimensions to the front to simplify logic
operand = batching.moveaxis(operand, operand_bdim, 0)
start_indices = batching.moveaxis(start_indices, start_indices_bdim, 0)
# Example: user code had start_indices shape (3, 4, 5), and we have to deal
# with start_indices shape (7, 3, 4, 5). We transform that to a
# start_indices of shape (7, 3, 4, 6) where we concatenated an iota that
# counts along our batch dimension to the front of the ndindex.
count_shape = list(start_indices.shape)
count_shape[-1] = 1
counts = broadcasted_iota(start_indices.dtype, tuple(count_shape), 0)
start_indices = concatenate([counts, start_indices], len(count_shape) - 1)
slice_sizes = (1,) + slice_sizes
collapsed_slice_dims = (0,) + tuple(np.add(1, dimension_numbers.collapsed_slice_dims))
offset_dims = tuple(np.add(1, dimension_numbers.offset_dims))
start_index_map = (0,) + tuple(np.add(1, dimension_numbers.start_index_map))
dnums = GatherDimensionNumbers(
offset_dims=offset_dims,
collapsed_slice_dims=collapsed_slice_dims,
start_index_map=start_index_map)
return gather(operand, start_indices, dimension_numbers=dnums,
slice_sizes=slice_sizes), 0
gather_p = standard_primitive(
_gather_shape_rule, _gather_dtype_rule, 'gather',
_gather_translation_rule)
ad.defjvp(gather_p, _gather_jvp_rule, None)
ad.primitive_transposes[gather_p] = _gather_transpose_rule
batching.primitive_batchers[gather_p] = _gather_batching_rule
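# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# These dimension numbers reproduce plain integer indexing x[[2, 7, 4]] on a
# 1-D operand: each row of start_indices selects a size-1 slice whose axis is
# then collapsed:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> dnums = lax.GatherDimensionNumbers(offset_dims=(),
#   ...                                    collapsed_slice_dims=(0,),
#   ...                                    start_index_map=(0,))
#   >>> lax.gather(jnp.arange(10.), jnp.array([[2], [7], [4]]), dnums,
#   ...            slice_sizes=(1,))
#   # -> [2., 7., 4.]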
def _scatter_dimensions_proto(indices_shape, dimension_numbers):
assert type(dimension_numbers) is ScatterDimensionNumbers
proto = xla_client.ScatterDimensionNumbers()
proto.update_window_dims.extend(dimension_numbers.update_window_dims)
proto.inserted_window_dims.extend(dimension_numbers.inserted_window_dims)
proto.scatter_dims_to_operand_dims.extend(
dimension_numbers.scatter_dims_to_operand_dims)
assert indices_shape.rank() > 0
proto.index_vector_dim = indices_shape.rank() - 1
return proto
def _scatter_dtype_rule(operand, scatter_indices, updates, **kwargs):
if not dtypes.issubdtype(scatter_indices.dtype, np.integer):
raise ValueError("scatter_indices must have an integer type")
_check_same_dtypes("scatter", False, operand.dtype, updates.dtype)
return dtypes.canonicalize_dtype(operand.dtype)
def _scatter_shape_rule(operand, scatter_indices, updates, *, update_jaxpr,
update_consts, dimension_numbers, indices_are_sorted,
unique_indices):
"""Validates the well-formedness of the ``dimension_numbers`` argument to
Scatter.
The code implements the checks based on the detailed operation semantics of
XLA's `Scatter <https://www.tensorflow.org/xla/operation_semantics#scatter>`_
operator and following the outline of the implementation of
ShapeInference::InferScatterShape in TensorFlow.
"""
update_window_dims = dimension_numbers.update_window_dims
inserted_window_dims = dimension_numbers.inserted_window_dims
scatter_dims_to_operand_dims = dimension_numbers.scatter_dims_to_operand_dims
# Note: in JAX, index_vector_dim is always computed as below, cf. the
# documentation of the ScatterDimensionNumbers class.
index_vector_dim = _rank(scatter_indices) - 1
# This case should never happen in JAX, due to the implicit construction of
# index_vector_dim, but is included for completeness.
if _rank(scatter_indices) < index_vector_dim or index_vector_dim < 0:
raise TypeError(f"Scatter index leaf dimension must be within [0, "
f"rank(scatter_indices) + 1). rank(scatter_indices) is "
f"{_rank(scatter_indices)} and scatter index leaf "
f"dimension is {index_vector_dim}.")
expanded_scatter_indices_shape = list(scatter_indices.shape)
# This case should never happen in JAX, due to the implicit construction of
# index_vector_dim, but is included for completeness.
if len(expanded_scatter_indices_shape) == index_vector_dim:
expanded_scatter_indices_shape.append(1)
expected_updates_rank = (len(expanded_scatter_indices_shape) - 1 +
len(update_window_dims))
if _rank(updates) != expected_updates_rank:
raise TypeError(f"Updates tensor must be of rank {expected_updates_rank}; "
f"got {_rank(updates)}.")
# Validate update_window_dims
_is_sorted(update_window_dims, "scatter", "update_window_dims")
_no_duplicate_dims(update_window_dims, "scatter", "update_window_dims")
_sorted_dims_in_range(update_window_dims, _rank(updates), "scatter",
"update_window_dims")
# Validate inserted_window_dims
_is_sorted(inserted_window_dims, "scatter", "inserted_window_dims")
_no_duplicate_dims(inserted_window_dims, "scatter", "inserted_window_dims")
_sorted_dims_in_range(inserted_window_dims, _rank(operand), "scatter",
"inserted_window_dims")
# Validate window_size
window_size = len(update_window_dims) + len(inserted_window_dims)
if _rank(operand) != window_size:
raise TypeError(f"Scatter op has window of size {window_size}; doesn't "
f"match operand of rank {_rank(operand)}.")
# Validate scatter_dims_to_operand_dims
if (len(scatter_dims_to_operand_dims) !=
scatter_indices.shape[index_vector_dim]):
raise TypeError(f"Scatter op has {len(scatter_dims_to_operand_dims)} "
f"elements in scatter_dims_to_operand_dims and the bound "
f"of dimension index_vector_dim={index_vector_dim} of "
f"scatter_indices is "
f"{scatter_indices.shape[index_vector_dim]}. These two "
f"numbers must be equal")
for i in range(len(scatter_dims_to_operand_dims)):
dim = scatter_dims_to_operand_dims[i]
if dim < 0 or dim >= _rank(operand):
raise TypeError(f"Invalid scatter_dims_to_operand_dims mapping; domain "
f"is [0, {_rank(operand)}), got: {i}->{dim}.")
_no_duplicate_dims(scatter_dims_to_operand_dims, "scatter",
"scatter_dims_to_operand_dims")
max_update_slice_sizes = [operand.shape[i] for i in range(len(operand.shape))
                            if i not in set(inserted_window_dims)]
for i in range(len(update_window_dims)):
update_window_dim = update_window_dims[i]
if updates.shape[update_window_dim] > max_update_slice_sizes[i]:
raise TypeError(f"Bounds of the window dimensions of updates must not "
f"exceed the bounds of the corresponding dimensions of "
f"operand. For dimension {update_window_dim}, updates "
f"bound is {updates.shape[update_window_dim]}, operand "
f"bound is {max_update_slice_sizes[i]}.")
update_scatter_dims = [dim for dim in range(_rank(updates)) if dim not in
set(update_window_dims)]
scatter_dims_seen = 0
for i in update_scatter_dims:
if scatter_dims_seen == index_vector_dim:
scatter_dims_seen += 1
if updates.shape[i] != expanded_scatter_indices_shape[scatter_dims_seen]:
raise TypeError(f"Bounds of the scatter dimensions of updates must be "
f"the same as the bounds of the corresponding dimensions "
f"of scatter indices. For scatter dimension {i}, updates "
f"bound is {updates.shape[i]}, scatter_indices bound is "
f"{expanded_scatter_indices_shape[scatter_dims_seen]}.")
scatter_dims_seen += 1
return operand.shape
def _scatter_translation_rule(c, operand, scatter_indices, updates, *,
update_jaxpr, update_consts, dimension_numbers,
indices_are_sorted, unique_indices):
dtype = c.get_shape(operand).numpy_dtype()
init_value = xb.constant(c, np.array(0, dtype))
update_computation = _reduction_computation(
c, update_jaxpr, update_consts, init_value)
indices_shape = c.get_shape(scatter_indices)
return xops.Scatter(operand, scatter_indices, updates, update_computation,
_scatter_dimensions_proto(indices_shape, dimension_numbers),
indices_are_sorted, unique_indices)
def _scatter_add_translation_rule(
c, operand, scatter_indices, updates, *, update_jaxpr, update_consts,
dimension_numbers, indices_are_sorted, unique_indices,
expand_complex128=False):
dtype = c.get_shape(operand).numpy_dtype()
scatter_dims = _scatter_dimensions_proto(c.get_shape(scatter_indices),
dimension_numbers)
def _make_reducer(dtype):
subc = xla_bridge.make_computation_builder("scatter_add_reducer")
shape = xc.Shape.array_shape(np.dtype(dtype), ())
args = [xb.parameter(subc, 0, shape), xb.parameter(subc, 1, shape)]
out = xops.Add(args[0], args[1])
return subc.build(out)
if expand_complex128 and dtype == np.complex128:
update_computation = _make_reducer(np.float64)
re = xops.Scatter(xops.Real(operand), scatter_indices, xops.Real(updates),
update_computation, scatter_dims, indices_are_sorted,
unique_indices)
im = xops.Scatter(xops.Imag(operand), scatter_indices, xops.Imag(updates),
update_computation, scatter_dims, indices_are_sorted,
unique_indices)
return xops.Complex(re, im)
else:
update_computation = _make_reducer(dtype)
return xops.Scatter(operand, scatter_indices, updates, update_computation,
scatter_dims, indices_are_sorted, unique_indices)
def _scatter_add_jvp(primals, tangents, *, update_jaxpr, update_consts,
dimension_numbers, indices_are_sorted, unique_indices):
operand, scatter_indices, updates = primals
g_operand, g_scatter_indices, g_updates = tangents
val_out = scatter_add_p.bind(
operand, scatter_indices, updates, update_jaxpr=update_jaxpr,
update_consts=update_consts, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
if type(g_operand) is ad_util.Zero and type(g_updates) is ad_util.Zero:
tangent_out = ad_util.Zero.from_value(val_out)
else:
g_operand = ad.instantiate_zeros(g_operand)
g_updates = ad.instantiate_zeros(g_updates)
tangent_out = scatter_add_p.bind(
g_operand, scatter_indices, g_updates, update_jaxpr=update_jaxpr,
update_consts=update_consts, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
return val_out, tangent_out
def _scatter_add_transpose_rule(t, operand, scatter_indices, updates, *,
update_jaxpr, update_consts, dimension_numbers,
indices_are_sorted, unique_indices):
assert not ad.is_undefined_primal(scatter_indices)
if ad.is_undefined_primal(updates):
updates_shape = updates.aval.shape
else:
updates_shape = updates.shape
if type(t) is ad_util.Zero:
return ad_util.Zero
operand_t = update_t = None
if ad.is_undefined_primal(operand):
operand_t = t
if ad.is_undefined_primal(updates):
gather_dnums = GatherDimensionNumbers(
offset_dims=dimension_numbers.update_window_dims,
collapsed_slice_dims=dimension_numbers.inserted_window_dims,
start_index_map=dimension_numbers.scatter_dims_to_operand_dims)
slice_sizes = []
pos = 0
for i in range(len(t.shape)):
if i in dimension_numbers.inserted_window_dims:
slice_sizes.append(1)
else:
slice_sizes.append(updates_shape[dimension_numbers.update_window_dims[pos]])
pos += 1
update_t = gather(t, scatter_indices, dimension_numbers=gather_dnums,
slice_sizes=slice_sizes)
return [operand_t, None, update_t]
def _scatter_mul_transpose_rule(t, operand, scatter_indices, updates, *,
update_jaxpr, update_consts, dimension_numbers,
indices_are_sorted, unique_indices):
assert not ad.is_undefined_primal(scatter_indices)
if ad.is_undefined_primal(updates):
updates_shape = updates.aval.shape
else:
updates_shape = updates.shape
if type(t) is ad_util.Zero:
return ad_util.Zero
operand_t = update_t = None
if ad.is_undefined_primal(operand):
operand_t = scatter_mul(
t, scatter_indices, updates, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
if ad.is_undefined_primal(updates):
gather_dnums = GatherDimensionNumbers(
offset_dims=dimension_numbers.update_window_dims,
collapsed_slice_dims=dimension_numbers.inserted_window_dims,
start_index_map=dimension_numbers.scatter_dims_to_operand_dims)
slice_sizes = []
pos = 0
for i in range(len(t.shape)):
if i in dimension_numbers.inserted_window_dims:
slice_sizes.append(1)
else:
slice_sizes.append(updates_shape[dimension_numbers.update_window_dims[pos]])
pos += 1
update_t = gather(mul(t, operand), scatter_indices,
dimension_numbers=gather_dnums, slice_sizes=slice_sizes)
return [operand_t, None, update_t]
def _scatter_batching_rule(scatter_op, batched_args, batch_dims, *,
update_jaxpr, update_consts, dimension_numbers,
indices_are_sorted, unique_indices):
operand, scatter_indices, updates = batched_args
operand_bdim, scatter_indices_bdim, updates_bdim = batch_dims
del update_jaxpr, update_consts # Unused.
# move the operand batch dim to the front if it is not None, otherwise create
# it at the front (so that we can scatter into it)
size = next(x.shape[ax] for x, ax in zip(batched_args, batch_dims)
if ax is not None)
operand = batching.bdim_at_front(operand, operand_bdim, size)
operand_bdim = 0
updates = batching.bdim_at_front(updates, updates_bdim, size)
if scatter_indices_bdim is None:
inserted_window_dims = tuple(np.add(1, dimension_numbers.inserted_window_dims))
update_window_dims = (0,) + tuple(np.add(1, dimension_numbers.update_window_dims))
scatter_dims_to_operand_dims = tuple(np.add(1, dimension_numbers.scatter_dims_to_operand_dims))
dnums = ScatterDimensionNumbers(
update_window_dims=update_window_dims,
inserted_window_dims=inserted_window_dims,
scatter_dims_to_operand_dims=scatter_dims_to_operand_dims)
return scatter_op(
operand, scatter_indices, updates, dnums,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices), 0
# see the third case in _gather_batching_rule for comparison and comments
scatter_indices = batching.bdim_at_front(
scatter_indices, scatter_indices_bdim, size)
count_shape = list(scatter_indices.shape)
count_shape[-1] = 1
counts = broadcasted_iota(scatter_indices.dtype, tuple(count_shape), 0)
scatter_indices = concatenate([counts, scatter_indices],
len(count_shape) - 1)
update_window_dims = tuple(np.add(1, dimension_numbers.update_window_dims))
inserted_window_dims = (0,) + tuple(np.add(1, dimension_numbers.inserted_window_dims))
scatter_dims_to_operand_dims = (0,) + tuple(np.add(1, dimension_numbers.scatter_dims_to_operand_dims))
dnums = ScatterDimensionNumbers(
update_window_dims=update_window_dims,
inserted_window_dims=inserted_window_dims,
scatter_dims_to_operand_dims=scatter_dims_to_operand_dims)
return scatter_op(
operand, scatter_indices, updates, dnums,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices), 0
scatter_add_p = standard_primitive(
_scatter_shape_rule, _scatter_dtype_rule, 'scatter-add',
_scatter_add_translation_rule)
ad.primitive_jvps[scatter_add_p] = _scatter_add_jvp
ad.primitive_transposes[scatter_add_p] = _scatter_add_transpose_rule
batching.primitive_batchers[scatter_add_p] = (
partial(_scatter_batching_rule, scatter_add))
xla.backend_specific_translations['gpu'][scatter_add_p] = partial(
_scatter_add_translation_rule, expand_complex128=True)
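# A minimal usage sketch (comment only; assumes the public `jax.lax` API).
# This is the dual of the gather example above; duplicate indices accumulate,
# which is what lets scatter-add serve as gather's transpose:
#
#   >>> import jax.numpy as jnp
#   >>> from jax import lax
#   >>> dnums = lax.ScatterDimensionNumbers(update_window_dims=(),
#   ...                                     inserted_window_dims=(0,),
#   ...                                     scatter_dims_to_operand_dims=(0,))
#   >>> lax.scatter_add(jnp.zeros(5), jnp.array([[1], [3], [1]]),
#   ...                 jnp.array([1., 2., 3.]), dnums)
#   # -> [0., 4., 0., 2., 0.]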
scatter_mul_p = standard_primitive(
_scatter_shape_rule, _scatter_dtype_rule, 'scatter-mul',
_scatter_translation_rule)
def _scatter_mul_jvp_rhs(g, x, i, y, *, dimension_numbers,
indices_are_sorted, unique_indices, **kw):
return mul(x, scatter_add(
zeros_like_array(x), i, g, dimension_numbers=dimension_numbers,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices))
ad.defjvp(scatter_mul_p,
lambda g, x, i, y, **kw: scatter_mul_p.bind(g, i, y, **kw),
None,
_scatter_mul_jvp_rhs)
ad.primitive_transposes[scatter_mul_p] = _scatter_mul_transpose_rule
batching.primitive_batchers[scatter_mul_p] = (
partial(_scatter_batching_rule, scatter_mul))
def _scatter_extremal_jvp(scatter_op, primals, tangents, update_jaxpr,
update_consts, dimension_numbers,
indices_are_sorted, unique_indices):
operand, scatter_indices, updates = primals
g_operand, g_scatter_indices, g_updates = tangents
scatter_dnums = dimension_numbers
updates_shape = updates.shape
val_out = scatter_op.bind(
operand, scatter_indices, updates, update_jaxpr=update_jaxpr,
update_consts=update_consts, dimension_numbers=scatter_dnums,
indices_are_sorted=indices_are_sorted,
unique_indices=unique_indices)
if type(g_operand) is ad_util.Zero and type(g_updates) is ad_util.Zero:
tangent_out = ad_util.Zero.from_value(val_out)
else:
g_operand = ad.instantiate_zeros(g_operand)
g_updates = ad.instantiate_zeros(g_updates)
# gather_dnums and slice_sizes define the gather op that is the inverse of
# the scatter op specified by scatter_dnums
gather_dnums = GatherDimensionNumbers(
offset_dims=scatter_dnums.update_window_dims,
collapsed_slice_dims=scatter_dnums.inserted_window_dims,
start_index_map=scatter_dnums.scatter_dims_to_operand_dims)
slice_sizes = []
pos = 0
for i in range(len(operand.shape)):
if i in scatter_dnums.inserted_window_dims:
slice_sizes.append(1)
else:
slice_sizes.append(updates_shape[scatter_dnums.update_window_dims[pos]])
pos += 1
# For consistency with other max operations, if there are two or more values
# in updates that are contending to replace the same index location, the
# resulting tangent at that location will be the average of the associated
# tangents for the values in updates.
initial_vals = gather(
operand, scatter_indices, gather_dnums, np.array(slice_sizes))
target_vals = gather(
val_out, scatter_indices, gather_dnums, np.array(slice_sizes))
successful_updates = (updates == target_vals)
retained_values = (initial_vals == target_vals)
num_updates = gather(
scatter_add(_zeros(operand),
scatter_indices,
select(successful_updates, _ones(updates), _zeros(updates)),
scatter_dnums),
scatter_indices,
gather_dnums,
np.array(slice_sizes))
num_refs = gather(
scatter_add(_zeros(operand),
scatter_indices,
_ones(updates),
scatter_dnums),
scatter_indices,
gather_dnums,
np.array(slice_sizes))
updates_normalizer = select(retained_values,
1.0 / (num_updates + 1),
1.0 / num_updates)
updates_coef = select(successful_updates,
updates_normalizer,
_zeros(updates))
operand_normalizer = select(retained_values,
1.0 / (num_updates + 1),
_zeros(num_updates))
operand_coef = (-1.0 + operand_normalizer) / num_refs
# This can be simplified once scatter has transpose implemented
target_tangents = gather(
g_operand, scatter_indices, gather_dnums, np.array(slice_sizes))
tangent_updates = (target_tangents * operand_coef +
g_updates * updates_coef)
tangent_out = scatter_add(g_operand,
scatter_indices,
tangent_updates,
scatter_dnums,
indices_are_sorted=indices_are_sorted,
unique_indices=unique_indices)
return val_out, tangent_out
scatter_min_p = standard_primitive(
_scatter_shape_rule, _scatter_dtype_rule, 'scatter-min',
_scatter_translation_rule)
batching.primitive_batchers[scatter_min_p] = (
partial(_scatter_batching_rule, scatter_min))
ad.primitive_jvps[scatter_min_p] = partial(_scatter_extremal_jvp, scatter_min_p)
scatter_max_p = standard_primitive(
_scatter_shape_rule, _scatter_dtype_rule, 'scatter-max',
_scatter_translation_rule)
batching.primitive_batchers[scatter_max_p] = (
partial(_scatter_batching_rule, scatter_max))
ad.primitive_jvps[scatter_max_p] = partial(_scatter_extremal_jvp, scatter_max_p)
def _scatter_jvp(primals, tangents, *, update_jaxpr, update_consts,
dimension_numbers, indices_are_sorted, unique_indices):
operand, scatter_indices, updates = primals
g_operand, g_scatter_indices, g_updates = tangents
dnums = dimension_numbers
if type(g_operand) is ad_util.Zero and type(g_updates) is ad_util.Zero:
val_out = scatter_p.bind(
operand, scatter_indices, updates, update_jaxpr=update_jaxpr,
update_consts=update_consts, dimension_numbers=dnums,
indices_are_sorted=indices_are_sorted, unique_indices=unique_indices)
return val_out, ad_util.Zero.from_value(val_out)
g_operand = ad.instantiate_zeros(g_operand)
g_updates = ad.instantiate_zeros(g_updates)
# If there are overlapping indices in the scatter, it is unspecified which
# update "wins". So we use the following perhaps surprising scheme:
# a) attach a positive ID to each update in updates, and perform the scatter
# on the IDs
# b) perform the inverse gather on the scattered IDs (similar to
# _scatter_add_transpose).
# c) use the gathered IDs to mask the primal and tangent values.
# d) perform a scatter-add on the masked primal and tangent values. A benefit
# of using scatter-add here is that we don't need a `scatter` transpose
# rule.
# a) attach a positive ID to each update in `updates`, and perform a scatter
# on the IDs.
ids_shape = np.array(updates.shape, dtype=np.int64)
ids_shape[dnums.update_window_dims,] = 1
num_ids = np.prod(ids_shape)
id_dtype = np.uint32 if (num_ids + 1) < np.iinfo(np.uint32).max else np.uint64
update_ids = add(reshape(iota(id_dtype, num_ids), ids_shape),
_ones(updates, dtype=id_dtype))
scattered_ids = scatter(full(operand.shape, 0, id_dtype),
scatter_indices, update_ids, dnums,
indices_are_sorted=indices_are_sorted,
unique_indices=unique_indices)
# b) compute the inverse gather that "undoes" the scatter on the id values.
gather_dnums = GatherDimensionNumbers(
offset_dims=dnums.update_window_dims,
collapsed_slice_dims=dnums.inserted_window_dims,
start_index_map=dnums.scatter_dims_to_operand_dims)
slice_sizes = []
pos = 0
for i in range(len(scattered_ids.shape)):
if i in dnums.inserted_window_dims:
slice_sizes.append(1)
else:
slice_sizes.append(updates.shape[dnums.update_window_dims[pos]])
pos += 1
gathered_update_ids = gather(scattered_ids, scatter_indices,
dimension_numbers=gather_dnums,
slice_sizes=slice_sizes)
# c) mask off input elements that do not correspond to a primal output.
masked_operand = select(eq(scattered_ids, _zeros(scattered_ids)),
operand, _zeros(operand))
masked_updates = select(eq(update_ids, gathered_update_ids),
updates, _zeros(updates))
masked_g_operand = select(eq(scattered_ids, _zeros(scattered_ids)),
g_operand, _zeros(g_operand))
masked_g_updates = select(eq(update_ids, gathered_update_ids),
g_updates, _zeros(g_updates))
# d) perform scatter-adds to compute the primal and tangent outputs.
val_out = scatter_add(masked_operand, scatter_indices, masked_updates,
dimension_numbers=dnums,
indices_are_sorted=indices_are_sorted,
unique_indices=unique_indices)
tangent_out = scatter_add(masked_g_operand, scatter_indices, masked_g_updates,
dimension_numbers=dnums,
indices_are_sorted=indices_are_sorted,
unique_indices=unique_indices)
return val_out, tangent_out
scatter_p = standard_primitive(
_scatter_shape_rule, _scatter_dtype_rule, 'scatter',
_scatter_translation_rule)
ad.primitive_jvps[scatter_p] = _scatter_jvp
batching.primitive_batchers[scatter_p] = (
partial(_scatter_batching_rule, scatter))
def _reduce_shape_rule(*args, computation, jaxpr, consts, dimensions):
operand_args, init_value_args = split_list(args, [len(args) // 2])
if any(arg.shape != () for arg in init_value_args):
init_value_shapes = [a.shape for a in init_value_args]
raise ValueError(f'Found non-scalar init_value: {init_value_shapes}')
return [
tuple(np.delete(op_arg.shape, dimensions))
for op_arg in operand_args
]
def _reduce_dtype_rule(*args, computation, jaxpr, consts, dimensions):
_, init_value_args = split_list(args, [len(args) // 2])
return [
dtypes.canonicalize_dtype(in_arg.dtype)
for in_arg in init_value_args
]
def _reduce_translation_rule(c, *values, computation, jaxpr,
consts, dimensions):
operands, init_values = split_list(values, [len(values) // 2])
if len(operands) == 1:
init_value = init_values[0]
xla_computation = _reduction_computation(c, jaxpr, consts, init_value)
out = xops.Reduce(c, operands, init_values, xla_computation, dimensions)
return xops.Tuple(c, (out,))
xla_computation = _reduction_computation(c, jaxpr, consts, init_values, singleton=False)
return xops.Reduce(c, operands, init_values, xla_computation, dimensions)
def _reduce_batch_rule(batched_args, batch_dims, *, computation, jaxpr,
consts, dimensions):
num_operands = len(batched_args) // 2
operands, init_values = split_list(batched_args, [num_operands])
operand_bdims, init_value_bdims = split_list(batch_dims, [num_operands])
if all(init_value_bdim is None for init_value_bdim in init_value_bdims):
# Assume all batch dims are the same for each of the operands
assert all(operand_bdim is not None for operand_bdim in operand_bdims)
assert all(operand_bdim == operand_bdims[0] for operand_bdim in operand_bdims)
# TODO(sharadmv): handle the case when batch dims are different across
# operands or when some are unbatched
operand_bdim = operand_bdims[0]
new_dimensions = [d + bool(d >= operand_bdim) for d in dimensions]
new_operand_bdim = operand_bdim - int(np.sum(np.less(dimensions, operand_bdim)))
new_operand_bdims = [new_operand_bdim] * num_operands
return reduce_p.bind(*(operands + init_values),
computation=computation, dimensions=tuple(new_dimensions),
consts=consts,
jaxpr=jaxpr), new_operand_bdims
else:
raise NotImplementedError # loop and stack
def _reduction_computation(c, jaxpr, consts, init_values, singleton=True):
if singleton:
init_values = [init_values]
  # The reducer takes (accumulator..., element...) parameters, each shaped
  # like the corresponding init value, hence the doubled shape list.
  shapes = safe_map(c.get_shape, init_values + init_values)
axis_env = xla.AxisEnv(1, (), ()) # no parallel primitives inside reductions
subc = xla_bridge.make_computation_builder("reduction_computation")
assert len(consts) == 0, "Reduction computations cannot have constants"
args = [xb.parameter(subc, i, shape) for i, shape in enumerate(shapes)]
out_nodes = xla.jaxpr_subcomp(subc, jaxpr, None, axis_env, consts, '', *args)
if singleton:
return subc.build(out_nodes[0])
out_nodes = xops.Tuple(subc, out_nodes)
return subc.build(out_nodes)
def _masking_defreducer(prim, identity):
masking.masking_rules[prim] = partial(_reducer_masking_rule, prim, identity)
def _reducer_masking_rule(prim, identity, padded_vals, logical_shapes,
axes, input_shape=None, **reduce_kwargs):
(padded_val,), (logical_shape,) = padded_vals, logical_shapes
padded_shape = masking.padded_shape_as_value(padded_val.shape)
masks = [broadcasted_iota(np.int32, padded_shape, i) < d
for i, d in enumerate(logical_shape) if i in axes]
mask = _reduce(operator.and_, masks)
masked_val = select(mask, padded_val, identity(padded_shape, padded_val.dtype))
prim_bind = partial(prim.bind, **reduce_kwargs)
bind = prim_bind if input_shape is None else partial(prim_bind, input_shape=padded_shape)
return bind(masked_val, axes=axes)
reduce_p = standard_primitive(_reduce_shape_rule, _reduce_dtype_rule,
'reduce', translation_rule=_reduce_translation_rule,
multiple_results=True)
batching.primitive_batchers[reduce_p] = _reduce_batch_rule
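# A usage sketch of the variadic reduce primitive registered above, via the
# public jax.lax.reduce wrapper (assuming it accepts tuples of operands and
# init values, as _reduce_translation_rule supports): an argmax-style
# (value, index) pair computed in a single reduction pass.
#
#   import jax.numpy as jnp
#   from jax import lax
#
#   def max_with_index(acc, elt):
#     (va, ia), (ve, ie) = acc, elt
#     take_acc = va >= ve
#     return (jnp.where(take_acc, va, ve), jnp.where(take_acc, ia, ie))
#
#   x = jnp.array([3., 7., 5.])
#   idx = lax.iota(jnp.int32, 3)
#   val, ind = lax.reduce((x, idx), (jnp.float32(-jnp.inf), jnp.int32(0)),
#                         max_with_index, (0,))
#   # val == 7.0, ind == 1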
def _reduce_number_dtype_rule(name, operand, *args, **kw):
if not dtypes.issubdtype(operand.dtype, np.number):
raise TypeError("{} does not accept dtype {}. Accepted dtypes are subtypes "
"of number.".format(name, np.dtype(operand.dtype).name))
return dtypes.canonicalize_dtype(operand.dtype)
def _reduce_sum_shape_rule(operand, *, axes):
return _reduce_op_shape_rule(operand, axes=axes)
def _reduce_sum_translation_rule(c, operand, *, axes):
shape = c.get_shape(operand)
dtype = shape.numpy_dtype()
scalar = ShapedArray((), dtype)
return xops.Reduce(c, [operand], [xb.constant(c, np.array(0, dtype))],
xla.primitive_subcomputation(add_p, scalar, scalar),
axes)
def _reduce_sum_transpose_rule(cotangent, operand, *, axes):
assert ad.is_undefined_primal(operand)
input_shape = operand.aval.shape
broadcast_dimensions = tuple(np.delete(np.arange(len(input_shape)), axes))
result = broadcast_in_dim(cotangent, input_shape, broadcast_dimensions)
assert result.shape == input_shape
return [result]
reduce_sum_p = standard_primitive(
_reduce_sum_shape_rule, partial(_reduce_number_dtype_rule, 'reduce_sum'),
'reduce_sum', _reduce_sum_translation_rule)
ad.deflinear2(reduce_sum_p, _reduce_sum_transpose_rule)
batching.defreducer(reduce_sum_p)
_masking_defreducer(reduce_sum_p,
lambda shape, dtype: np.broadcast_to(np.array(0, dtype), shape))
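# The linearity registered via ad.deflinear2 above means the transpose of a
# sum-reduction is a broadcast back over the reduced axes. A quick check with
# the public API:
#
#   import jax
#   import jax.numpy as jnp
#   g = jax.grad(lambda x: x.sum())(jnp.arange(6.).reshape(2, 3))
#   # g == jnp.ones((2, 3)): the scalar cotangent 1.0 is broadcast to the
#   # input shape, exactly what _reduce_sum_transpose_rule computes.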
def _reduce_op_shape_rule(operand, *, axes, input_shape=None):
del input_shape # Unused.
if len(axes) != len(set(axes)):
raise ValueError(f"duplicate value in 'axes' of reduction: {axes}")
return tuple(np.delete(operand.shape, axes))
def _reduce_prod_translation_rule(c, operand, *, axes):
dtype = c.get_shape(operand).numpy_dtype()
scalar = ShapedArray((), dtype)
return xops.Reduce(c, [operand], [xb.constant(c, np.array(1, dtype))],
xla.primitive_subcomputation(mul_p, scalar, scalar), axes)
def _reduce_prod_jvp_rule(primals, tangents, *, axes):
operand, = primals
tangent, = tangents
input_shape = np.array(operand.shape)
n = np.prod(input_shape[list(axes)])
non_axes = np.delete(np.arange(len(input_shape)), axes)
# Move the reduced axes to the front, and flatten them to 1D.
permutation = axes + tuple(non_axes)
new_shape = (n,) + tuple(input_shape[non_axes])
operand = reshape(operand, new_shape, permutation)
tangent = reshape(tangent, new_shape, permutation)
def _reduce_prod_tree(x, axis=0):
"""Reduce by repeatedly splitting the array and multiplying."""
while x.shape[axis] > 1:
n = x.shape[axis]
n1 = (n + 1) // 2
n2 = n - n1
x1 = slice_in_dim(x, 0, n1)
x2 = slice_in_dim(x, n1, None)
if n2 != n1:
paddings = [(0, 0, 0)] * len(x.shape)
paddings[axis] = (0, 1, 0)
x2 = pad(x2, _const(x, 1), paddings)
x = x1 * x2
if x.shape[axis] == 0:
return full(input_shape[non_axes], _one(x))
return squeeze(x, (axis,))
return api.jvp(_reduce_prod_tree, (operand,), (tangent,))
reduce_prod_p = standard_primitive(
_reduce_op_shape_rule, partial(_reduce_number_dtype_rule, 'reduce_prod'),
'reduce_prod', _reduce_prod_translation_rule)
ad.primitive_jvps[reduce_prod_p] = _reduce_prod_jvp_rule
batching.defreducer(reduce_prod_p)
_masking_defreducer(reduce_prod_p,
lambda shape, dtype: np.broadcast_to(np.array(1, dtype), shape))
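# The pairwise-tree JVP above yields correct product derivatives even when the
# input contains zeros (where naively dividing the total product by each
# element would fail). A quick check with the public API:
#
#   import jax
#   import jax.numpy as jnp
#   jax.grad(jnp.prod)(jnp.array([2., 3., 4.]))  # -> [12., 8., 6.]
#   jax.grad(jnp.prod)(jnp.array([0., 3., 4.]))  # -> [12., 0., 0.]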
def _reduce_chooser_shape_rule(operand, *, axes):
return tuple(np.delete(operand.shape, axes))
def _reduce_chooser_translation_rule(prim, identity, c, operand, *, axes):
dtype = c.get_shape(operand).numpy_dtype()
scalar = ShapedArray((), dtype)
return xops.Reduce(c, [operand], [xb.constant(c, identity(dtype))],
xla.primitive_subcomputation(prim, scalar, scalar), axes)
def _reduce_chooser_jvp_rule(g, ans, operand, *, axes):
# TODO(mattjj): an alternative is to use variadic reduce to compute the chosen
# locations in a single pass (rather than comparing equality) and use a
# gather, and/or even push along the chosen elements of g (b/112040122)
shape = [1 if i in axes else d for i, d in enumerate(operand.shape)]
location_indicators = convert_element_type(
_eq_meet(operand, reshape(ans, shape)), g.dtype)
counts = _reduce_sum(location_indicators, axes)
return div(_reduce_sum(mul(g, location_indicators), axes), counts)
_reduce_max_translation_rule = partial(_reduce_chooser_translation_rule, max_p,
_get_max_identity)
reduce_max_p = standard_primitive(_reduce_op_shape_rule, _input_dtype,
'reduce_max', _reduce_max_translation_rule)
ad.defjvp2(reduce_max_p, _reduce_chooser_jvp_rule)
batching.defreducer(reduce_max_p)
_masking_defreducer(reduce_max_p,
lambda shape, dtype: np.broadcast_to(np.array(-np.inf, dtype), shape))
_reduce_min_translation_rule = partial(
_reduce_chooser_translation_rule, min_p, _get_min_identity)
reduce_min_p = standard_primitive(_reduce_op_shape_rule, _input_dtype,
'reduce_min', _reduce_min_translation_rule)
ad.defjvp2(reduce_min_p, _reduce_chooser_jvp_rule)
batching.defreducer(reduce_min_p)
_masking_defreducer(reduce_min_p,
lambda shape, dtype: np.broadcast_to(np.array(np.inf, dtype), shape))
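# _reduce_chooser_jvp_rule divides the incoming tangent evenly among tied
# extrema, so gradients of max/min are well defined under ties. A quick check
# with the public API:
#
#   import jax
#   import jax.numpy as jnp
#   jax.grad(jnp.max)(jnp.array([1., 3., 3.]))  # -> [0., 0.5, 0.5]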
def _argminmax_shape_rule(operand, *, axes, index_dtype):
axis, = axes
return tuple(np.delete(operand.shape, axis))
def _argminmax_dtype_rule(operand, *, axes, index_dtype):
if not dtypes.issubdtype(index_dtype, np.integer):
raise TypeError("index_dtype must be an integer type, but got {}"
.format(np.dtype(index_dtype).name))
return index_dtype
def _argminmax_translation_rule(value_comparator, identity,
c, operand, *, axes, index_dtype):
axis, = axes
shape = c.get_shape(operand)
dtype = shape.numpy_dtype()
subc = xb.make_computation_builder("argminmax_comparator")
value_shape = xc.Shape.array_shape(shape.xla_element_type(), ())
index_shape = xc.Shape.array_shape(index_dtype, ())
x_value = xb.parameter(subc, 0, value_shape)
x_index = xb.parameter(subc, 1, index_shape)
y_value = xb.parameter(subc, 2, value_shape)
y_index = xb.parameter(subc, 3, index_shape)
which_value = value_comparator(x_value, y_value)
which_index = xops.Or(which_value, xops.And(xops.Eq(x_value, y_value),
xops.Lt(x_index, y_index)))
xops.Tuple(subc, [xops.Select(which_value, x_value, y_value),
xops.Select(which_index, x_index, y_index)])
comparator = subc.build()
iota_shape = xc.Shape.array_shape(index_dtype, shape.dimensions())
iota = xc.ops.Iota(c, iota_shape, axis)
out = xops.Reduce(
c, [operand, iota],
[xb.constant(c, identity(dtype)),
xb.constant(c, np.array(0, index_dtype))], comparator, [axis])
return xops.GetTupleElement(out, 1)
def _argminmax_gpu_translation_rule(op, a, *, axes, index_dtype):
axis, = axes
idxs = tie_in(a, broadcasted_iota(index_dtype, a.shape, axis))
maxval = np.array(dtypes.iinfo(index_dtype).max, dtype=index_dtype)
maxval = broadcast(tie_in(a, maxval), a.shape)
mask_idxs = select(eq(a, expand_dims(op(a, (axis,)), (axis,))), idxs,
maxval)
return _reduce_min(mask_idxs, (axis,))
_argmin_translation_rule = partial(_argminmax_translation_rule, xops.Lt,
_get_min_identity)
_argmax_translation_rule = partial(_argminmax_translation_rule, xops.Gt,
_get_max_identity)
argmin_p = standard_primitive(_argminmax_shape_rule, _argminmax_dtype_rule,
'argmin', _argmin_translation_rule)
batching.defreducer(argmin_p)
ad.defjvp_zero(argmin_p)
xla.backend_specific_translations['gpu'][argmin_p] = xla.lower_fun(
partial(_argminmax_gpu_translation_rule, _reduce_min),
multiple_results=False)
argmax_p = standard_primitive(_argminmax_shape_rule, _argminmax_dtype_rule,
'argmax', _argmax_translation_rule)
batching.defreducer(argmax_p)
ad.defjvp_zero(argmax_p)
xla.backend_specific_translations['gpu'][argmax_p] = xla.lower_fun(
partial(_argminmax_gpu_translation_rule, _reduce_max),
multiple_results=False)
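# The GPU lowering above avoids a variadic reduce: it compares each element
# against the broadcast extremum, replaces non-matching positions with the
# largest representable index, and takes the index minimum, which also picks
# the first occurrence on ties (matching the comparator built in
# _argminmax_translation_rule). A numpy sketch of the same idea:
#
#   import numpy as np
#   a = np.array([3, 9, 9, 1])
#   idxs = np.arange(a.size)
#   masked = np.where(a == a.max(), idxs, np.iinfo(idxs.dtype).max)
#   masked.min()  # -> 1, the first index attaining the maximum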
def _reduce_logical_shape_rule(operand, *, axes):
if operand.dtype != np.bool_:
msg = "logical reduction requires operand dtype bool, got {}."
raise TypeError(msg.format(operand.dtype))
return tuple(np.delete(operand.shape, axes))
def _reduce_logical_translation_rule(prim, identity, c, operand, *, axes):
scalar = ShapedArray((), np.bool_)
return xops.Reduce(c, [operand], [xb.constant(c, identity(np.bool_))],
xla.primitive_subcomputation(prim, scalar, scalar), axes)
_reduce_or_translation_rule = partial(_reduce_logical_translation_rule,
or_p, _get_max_identity)
reduce_or_p = standard_primitive(_reduce_logical_shape_rule, _fixed_dtype(np.bool_),
'reduce_or', _reduce_or_translation_rule)
batching.defreducer(reduce_or_p)
_reduce_and_translation_rule = partial(_reduce_logical_translation_rule,
and_p, _get_min_identity)
reduce_and_p = standard_primitive(_reduce_logical_shape_rule, _fixed_dtype(np.bool_),
'reduce_and', _reduce_and_translation_rule)
batching.defreducer(reduce_and_p)
def _reduce_window_shape_rule(operand, init_value, *, jaxpr, consts,
window_dimensions, window_strides, padding,
base_dilation, window_dilation):
if operand.dtype != init_value.dtype:
msg = ("reduce_window got inconsistent dtypes for operand and init_value: "
" got operand dtype {} and init_value dtype {}.")
raise TypeError(msg.format(operand.dtype, init_value.dtype))
if init_value.shape != ():
msg = ("reduce_window expected init_value to be a scalar but init_value "
"has shape {}.")
raise TypeError(msg.format(init_value.shape))
return _common_reduce_window_shape_rule(
operand, window_dimensions, window_strides, padding, base_dilation,
window_dilation)
def _reduce_window_translation_rule(c, operand, init_value, *, jaxpr, consts,
window_dimensions, window_strides, padding,
base_dilation, window_dilation):
xla_computation = _reduction_computation(c, jaxpr, consts, init_value)
return xops.ReduceWindowWithGeneralPadding(
operand, init_value, xla_computation, window_dimensions,
window_strides, base_dilation, window_dilation, padding)
def _generic_reduce_window_batch_rule(
batched_args, batch_dims, *, jaxpr, consts, window_dimensions,
window_strides, padding, base_dilation, window_dilation):
operand, init = batched_args
bdim, init_bdim = batch_dims
if init_bdim is not None:
raise NotImplementedError("reduce_window batching is not implemented for "
"initial values")
def reduce_window(x, window_dimensions, window_strides, padding, base_dilation,
window_dilation):
return reduce_window_p.bind(
x, init, jaxpr=jaxpr, consts=consts, window_dimensions=window_dimensions,
window_strides=window_strides, padding=padding, base_dilation=base_dilation,
window_dilation=window_dilation)
return _reduce_window_batch_rule(
reduce_window, (operand,), (bdim,), window_dimensions=window_dimensions,
window_strides=window_strides, padding=padding, base_dilation=base_dilation,
window_dilation=window_dilation)
reduce_window_p = standard_primitive(
_reduce_window_shape_rule, _input_dtype, 'reduce_window',
_reduce_window_translation_rule)
batching.primitive_batchers[reduce_window_p] = _generic_reduce_window_batch_rule
def _reduce_window_sum_shape_rule(operand, *, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
if not dtypes.issubdtype(operand.dtype, np.number):
msg = "operand to reduce_window_sum must have a number dtype, got {}"
raise TypeError(msg.format(np.dtype(operand.dtype).name))
return _common_reduce_window_shape_rule(operand, window_dimensions,
window_strides, padding, base_dilation,
window_dilation)
def _reduce_window_sum_translation_rule(c, operand, *, window_dimensions,
window_strides, padding, base_dilation,
window_dilation):
dtype = c.get_shape(operand).numpy_dtype()
scalar = ShapedArray((), dtype)
return xops.ReduceWindowWithGeneralPadding(
operand, xb.constant(c, np.array(0, dtype)),
xla.primitive_subcomputation(add_p, scalar, scalar), window_dimensions,
window_strides, base_dilation, window_dilation, padding)
def _reduce_window_sum_transpose_rule(cotangent, operand, *, window_dimensions,
window_strides, padding, base_dilation,
window_dilation):
assert ad.is_undefined_primal(operand)
input_shape = operand.aval.shape
pads = _conv_general_vjp_lhs_padding(
input_shape, window_dimensions, window_strides, cotangent.shape, padding,
base_dilation, window_dilation)
ones = [1] * len(input_shape)
padding_config = [(lo, hi, stride - 1)
for (lo, hi), stride in zip(pads, window_strides)]
pad_cotangent = pad(cotangent, _zero(cotangent), padding_config)
result = _reduce_window_sum(pad_cotangent, window_dimensions, base_dilation,
[(0, 0)] * len(input_shape),
base_dilation=ones,
window_dilation=window_dilation)
assert result.shape == input_shape, (result.shape, input_shape)
return [result]
def _reduce_window_batch_rule(reduce_window, batched_args, bdims, *,
window_dimensions, window_strides, padding,
base_dilation, window_dilation):
operand, = batched_args
bdim, = bdims
if bdim is not None:
window_dimensions = \
window_dimensions[:bdim] + (1,) + window_dimensions[bdim:]
window_strides = window_strides[:bdim] + (1,) + window_strides[bdim:]
padding = padding[:bdim] + ((0, 0),) + padding[bdim:]
base_dilation = base_dilation[:bdim] + (1,) + base_dilation[bdim:]
window_dilation = window_dilation[:bdim] + (1,) + window_dilation[bdim:]
operand = reduce_window(operand, window_dimensions, window_strides, padding,
base_dilation, window_dilation)
return operand, bdim
reduce_window_sum_p = standard_primitive(
_reduce_window_sum_shape_rule, _input_dtype, 'reduce_window_sum',
_reduce_window_sum_translation_rule)
ad.deflinear2(reduce_window_sum_p, _reduce_window_sum_transpose_rule)
batching.primitive_batchers[reduce_window_sum_p] = partial(
_reduce_window_batch_rule, _reduce_window_sum)
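# The transpose rule above (pad the cotangent, then sum-pool it) is what makes
# gradients of strided sum-pooling work. A quick check with the public API:
#
#   import jax
#   import jax.numpy as jnp
#   from jax import lax
#   f = lambda x: lax.reduce_window(x, 0., lax.add, (2,), (2,), 'VALID').sum()
#   jax.grad(f)(jnp.arange(4.))  # -> [1., 1., 1., 1.]: each input feeds
#                                # exactly one non-overlapping window.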
def _reduce_window_chooser_translation_rule(
prim, identity, c, operand, *, window_dimensions, window_strides, padding,
base_dilation, window_dilation):
dtype = c.get_shape(operand).numpy_dtype()
scalar = ShapedArray((), dtype)
return xops.ReduceWindowWithGeneralPadding(
operand, xb.constant(c, identity(dtype)),
xla.primitive_subcomputation(prim, scalar, scalar), window_dimensions,
window_strides, base_dilation, window_dilation, padding)
def _reduce_window_chooser_jvp_rule(prim, g, operand, *, window_dimensions,
window_strides, padding, base_dilation,
window_dilation):
assert prim is max_p or prim is min_p
select_prim = ge_p if prim is max_p else le_p
return _select_and_gather_add(g, operand, select_prim, window_dimensions,
window_strides, padding, base_dilation,
window_dilation)
def _common_reduce_window_shape_rule(operand, window_dimensions,
window_strides, padding, base_dilation,
window_dilation):
_check_shapelike("reduce_window", "window_dimensions", window_dimensions,
non_zero_shape=True)
_check_shapelike("reduce_window", "window_strides", window_strides,
non_zero_shape=True)
_check_shapelike("reduce_window", "base_dilation", base_dilation)
_check_shapelike("reduce_window", "window_dilation", window_dilation)
if operand.ndim != len(window_dimensions):
msg = ("reduce_window got the wrong number of window_dimensions for "
"operand: got operand shape {} with window_dimensions {}.")
raise TypeError(msg.format(operand.shape, window_dimensions))
if len(window_strides) != len(window_dimensions):
msg = ("reduce_window got inconsistent window_strides and "
"window_dimensions: got window_strides {} and window_dimensions {}.")
raise TypeError(msg.format(window_strides, window_dimensions))
if len(base_dilation) != len(window_dimensions):
msg = ("reduce_window got inconsistent base_dilation and "
"window_dimensions: got base_dilation {} and window_dimensions {}.")
raise TypeError(msg.format(base_dilation, window_dimensions))
if len(window_dilation) != len(window_dimensions):
msg = ("reduce_window got inconsistent window_dilation and "
"window_dimensions: got window_dilation {} and window_dimensions "
"{}.")
raise TypeError(msg.format(window_dilation, window_dimensions))
return reduce_window_shape_tuple(operand.shape, window_dimensions,
window_strides, padding, base_dilation,
window_dilation)
def reduce_window_shape_tuple(operand_shape, window_dimensions, window_strides,
padding, base_dilation=None,
window_dilation=None):
if base_dilation is not None:
operand_shape = _dilate_shape(operand_shape, base_dilation)
if window_dilation is not None:
window_dimensions = _dilate_shape(window_dimensions, window_dilation)
operand_padded = np.add(operand_shape, np.add(*zip(*padding)))
t = np.floor_divide(
np.subtract(operand_padded, window_dimensions), window_strides) + 1
return tuple(t)
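# Worked example of the window-shape arithmetic above: for an input of length
# 10, window 3, stride 2, and padding (1, 1) with no dilation:
#   padded = 10 + 1 + 1 = 12
#   out    = floor((12 - 3) / 2) + 1 = 5
# i.e. reduce_window_shape_tuple((10,), (3,), (2,), [(1, 1)]) == (5,)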
_reduce_window_max_translation_rule = partial(
_reduce_window_chooser_translation_rule, max_p, _get_max_identity)
reduce_window_max_p = standard_primitive(
_common_reduce_window_shape_rule, _input_dtype, 'reduce_window_max',
_reduce_window_max_translation_rule)
ad.defjvp(reduce_window_max_p, partial(_reduce_window_chooser_jvp_rule, max_p))
batching.primitive_batchers[reduce_window_max_p] = partial(
_reduce_window_batch_rule, _reduce_window_max)
_reduce_window_min_translation_rule = partial(
_reduce_window_chooser_translation_rule, min_p, _get_min_identity)
reduce_window_min_p = standard_primitive(
_common_reduce_window_shape_rule, _input_dtype, 'reduce_window_min',
_reduce_window_min_translation_rule)
ad.defjvp(reduce_window_min_p, partial(_reduce_window_chooser_jvp_rule, min_p))
_reduce_window_min_batch_rule = partial(_reduce_window_batch_rule,
_reduce_window_min)
batching.primitive_batchers[reduce_window_min_p] = partial(
_reduce_window_batch_rule, _reduce_window_min)
def _select_and_scatter_shape_rule(
operand, source, init_value, *, select_jaxpr, select_consts, scatter_jaxpr,
scatter_consts, window_dimensions, window_strides, padding):
_check_shapelike("select_and_scatter", "window_dimensions", window_dimensions)
_check_shapelike("select_and_scatter", "window_strides", window_strides)
if len(window_dimensions) != len(window_strides):
msg = ("select_and_scatter got inconsistent window_strides and "
"window_dimensions: got window_strides {} and window_dimensions {}.")
raise TypeError(msg.format(window_strides, window_dimensions))
return operand.shape
def _select_and_scatter_translation(
c, operand, source, init_value, *, select_jaxpr, select_consts, scatter_jaxpr,
scatter_consts, window_dimensions, window_strides, padding):
select = _reduction_computation(c, select_jaxpr, select_consts, init_value)
scatter = _reduction_computation(c, scatter_jaxpr, scatter_consts, init_value)
return xops.SelectAndScatterWithGeneralPadding(
operand, select, window_dimensions, window_strides, padding, source,
init_value, scatter)
select_and_scatter_p = standard_primitive(
_select_and_scatter_shape_rule, _input_dtype, 'select_and_scatter',
_select_and_scatter_translation)
def _select_and_scatter_add_shape_rule(
source, operand, *, select_prim, window_dimensions, window_strides,
padding):
return operand.shape
def _select_and_scatter_add_translation(
c, source, operand, *, select_prim, window_dimensions, window_strides,
padding):
dtype = c.get_shape(operand).numpy_dtype()
scalar = ShapedArray((), dtype)
select = xla.primitive_subcomputation(select_prim, scalar, scalar)
scatter = xla.primitive_subcomputation(add_p, scalar, scalar)
zero = xb.constant(c, np.array(0, dtype))
return xops.SelectAndScatterWithGeneralPadding(
operand, select, window_dimensions, window_strides, padding, source, zero,
scatter)
def _select_and_scatter_add_jvp(
primals, tangents, *, select_prim, window_dimensions, window_strides,
padding):
source, operand = primals
g_source, g_operand = tangents
val_out = _select_and_scatter_add(
source, operand, select_prim, window_dimensions, window_strides,
padding)
del g_operand
if type(g_source) is ad_util.Zero:
tangent_out = ad_util.Zero.from_value(val_out)
else:
tangent_out = _select_and_scatter_add(
g_source, operand, select_prim, window_dimensions,
window_strides, padding)
return val_out, tangent_out
def _select_and_scatter_add_transpose(
t, source, operand, *, select_prim, window_dimensions, window_strides,
padding):
assert ad.is_undefined_primal(source) and not ad.is_undefined_primal(operand)
ones = (1,) * len(window_dimensions)
source_t = _select_and_gather_add(t, operand, select_prim, window_dimensions,
window_strides, padding, ones, ones)
return [source_t, None]
def _select_and_scatter_add_batch_rule(
batched_args, batch_dims, *, select_prim, window_dimensions, window_strides,
padding):
source, operand = batched_args
s_bdim, o_bdim = batch_dims
size = next(a.shape[bdim] for a, bdim in zip(batched_args, batch_dims)
if bdim is not None)
source = batching.bdim_at_front(source, s_bdim, size)
operand = batching.bdim_at_front(operand, o_bdim, size)
window_dimensions = (1,) + window_dimensions
window_strides = (1,) + window_strides
padding = ((0, 0),) + padding
out = _select_and_scatter_add(source, operand, select_prim, window_dimensions,
window_strides, padding)
return out, 0
select_and_scatter_add_p = standard_primitive(
_select_and_scatter_add_shape_rule, _input_dtype, 'select_and_scatter_add',
_select_and_scatter_add_translation)
ad.primitive_transposes[select_and_scatter_add_p] = \
_select_and_scatter_add_transpose
ad.primitive_jvps[select_and_scatter_add_p] = _select_and_scatter_add_jvp
batching.primitive_batchers[select_and_scatter_add_p] = \
_select_and_scatter_add_batch_rule
def _select_and_gather_add_shape_rule(
tangents, operand, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
if tangents.shape != operand.shape:
msg = ("select_and_gather_add tangents and operand shapes must match, "
"got {} and {}.")
raise TypeError(msg.format(tangents.shape, operand.shape))
return _common_reduce_window_shape_rule(
operand, window_dimensions, window_strides, padding, base_dilation,
window_dilation)
_UINT_DTYPES = {
16: np.uint16,
32: np.uint32,
64: np.uint64,
}
_INT_DTYPES = {
16: np.int16,
32: np.int32,
64: np.int64,
}
def _select_and_gather_add_translation(
c, tangents, operand, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation, max_bits=64):
shape = c.get_shape(operand)
dtype = shape.numpy_dtype()
etype = shape.xla_element_type()
nbits = dtypes.finfo(dtype).bits
assert nbits <= max_bits
double_word_reduction = nbits * 2 <= max_bits
const = lambda c, dtype, x: xb.constant(c, np.array(x, dtype=dtype),
canonicalize_types=False)
if double_word_reduction:
# TODO(b/73062247): XLA doesn't yet implement ReduceWindow on tuples, so
    # we implement a pair-wise ReduceWindow by packing two k-bit values into a
    # 2k-bit unsigned integer using bit tricks.
word_dtype = _UINT_DTYPES[nbits]
double_word_dtype = _UINT_DTYPES[nbits * 2]
word_type = xla_client.dtype_to_etype(word_dtype)
double_word_type = xla_client.dtype_to_etype(double_word_dtype)
    # Packs two values into a single double-width word: `a` occupies the high
    # bits and `b` the low bits.
def pack(a, b):
a = xops.BitcastConvertType(a, word_type)
b = xops.BitcastConvertType(b, word_type)
a = xops.ConvertElementType(a, double_word_type)
b = xops.ConvertElementType(b, double_word_type)
a = xops.ShiftLeft(a, const(c, double_word_dtype, nbits))
return xops.Or(a, b)
    # Unpacks the first (high-order) element of a packed word.
def fst(c, t):
st = xops.ShiftRightLogical(t, const(c, double_word_dtype, nbits))
return xops.BitcastConvertType(xops.ConvertElementType(st, word_type), etype)
    # Unpacks the second (low-order) element of a packed word.
def snd(t):
return xops.BitcastConvertType(xops.ConvertElementType(t, word_type), etype)
else:
# The double-word trick above only works if we have a sufficiently large
# type. As an alternative, we can pack two half words into a single word,
# at the cost of precision.
# TODO(b/73062247): add support for tuple reductions and remove this case.
warnings.warn("Using reduced precision for gradient of reduce-window "
"min/max operator to work around missing XLA support for "
"pair-reductions. This is likely from a second or "
"higher derivative of a max-pooling operation.")
r_nbits = nbits // 2
# Drop/round the bottom mantissa bits.
nexp = dtypes.finfo(dtype).nexp
nmant = r_nbits - nexp - 1
double_word_dtype = word_dtype = _UINT_DTYPES[nbits]
word_type = xla_client.dtype_to_etype(word_dtype)
    # Packs two reduced-precision values into one word: `a` occupies the high
    # bits and `b` the low bits.
def pack(a, b):
a = xops.ReducePrecision(a, exponent_bits=nexp, mantissa_bits=nmant)
b = xops.ReducePrecision(b, exponent_bits=nexp, mantissa_bits=nmant)
a = xops.BitcastConvertType(a, word_type)
b = xops.BitcastConvertType(b, word_type)
b = xops.ShiftRightLogical(b, const(c, word_dtype, r_nbits))
return xops.Or(a, b)
    # Unpacks the first (high-order) element of a packed word.
def fst(c, t):
st = xops.And(t, const(c, word_dtype, ((1 << r_nbits) - 1) << r_nbits))
return xops.BitcastConvertType(st, etype)
    # Unpacks the second (low-order) element of a packed word.
def snd(t):
return xops.BitcastConvertType(xops.ShiftLeft(t, const(c, word_dtype, r_nbits)),
etype)
def reducer():
c = xla_bridge.make_computation_builder("select_and_gather_pair_reducer")
x = xb.parameter(c, 0,
xla_client.Shape.array_shape(np.dtype(double_word_dtype), ()))
y = xb.parameter(c, 1,
xla_client.Shape.array_shape(np.dtype(double_word_dtype), ()))
assert select_prim is ge_p or select_prim is le_p
which = xops.Ge if select_prim is ge_p else xops.Le
xops.Select(which(fst(c, x), fst(c, y)), x, y)
return c.build()
assert select_prim is ge_p or select_prim is le_p, select_prim
init = -np.inf if select_prim is ge_p else np.inf
out = xops.ReduceWindowWithGeneralPadding(
pack(operand, tangents), pack(const(c, dtype, init), const(c, dtype, 0)),
reducer(), window_dimensions, window_strides, base_dilation,
window_dilation, padding)
return snd(out)
def _select_and_gather_add_jvp(
primals, tangents, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
source, operand = primals
g_source, g_operand = tangents
val_out = _select_and_gather_add(
source, operand, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation)
del g_operand
if type(g_source) is ad_util.Zero:
tangent_out = ad_util.Zero.from_value(val_out)
else:
tangent_out = _select_and_gather_add(
g_source, operand, select_prim, window_dimensions,
window_strides, padding, base_dilation, window_dilation)
return val_out, tangent_out
def _select_and_gather_add_transpose(
t, tangents, operand, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
assert select_prim in (le_p, ge_p)
assert ad.is_undefined_primal(tangents) and not ad.is_undefined_primal(operand)
if any(d != 1 for d in window_dilation):
msg = ("VJP not implemented for select_and_gather (MaxPool) with window "
"dilation, got window_dilation={}.")
raise NotImplementedError(msg.format(window_dilation))
if type(t) is ad_util.Zero:
    return [ad_util.Zero(tangents.aval), None]
has_base_dilation = any(d != 1 for d in base_dilation)
if has_base_dilation:
select_identity = (_get_max_identity if select_prim is ge_p
else _get_min_identity)
operand = pad(operand, select_identity(operand.dtype),
tuple((0, 0, d - 1) for d in base_dilation))
result = _select_and_scatter_add(t, operand, select_prim, window_dimensions,
window_strides, padding)
if has_base_dilation:
    # Strided-slice the result back down to the undilated input shape.
    result = slice(result, (0,) * len(operand.shape), operand.shape,
                   base_dilation)
return [result, None]
def _select_and_gather_add_batching_rule(
batched_args, batch_dims, *, select_prim, window_dimensions, window_strides,
padding, base_dilation, window_dilation):
t, x = batched_args
t_bdim, x_bdim = batch_dims
size = next(a.shape[bdim] for a, bdim in zip(batched_args, batch_dims)
if bdim is not None)
t = batching.bdim_at_front(t, t_bdim, size)
x = batching.bdim_at_front(x, x_bdim, size)
window_dimensions = (1,) + window_dimensions
window_strides = (1,) + window_strides
padding = ((0, 0),) + padding
base_dilation = (1,) + base_dilation
window_dilation = (1,) + window_dilation
out = _select_and_gather_add(t, x, select_prim, window_dimensions,
window_strides, padding, base_dilation,
window_dilation)
return (out, 0)
select_and_gather_add_p = standard_primitive(
_select_and_gather_add_shape_rule, _input_dtype, 'select_and_gather_add',
_select_and_gather_add_translation)
ad.primitive_jvps[select_and_gather_add_p] = _select_and_gather_add_jvp
ad.primitive_transposes[select_and_gather_add_p] = \
_select_and_gather_add_transpose
batching.primitive_batchers[select_and_gather_add_p] = \
_select_and_gather_add_batching_rule
xla.backend_specific_translations['tpu'][select_and_gather_add_p] = partial(
_select_and_gather_add_translation,
max_bits=32)
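# A numpy sketch of the double-word packing used in
# _select_and_gather_add_translation (illustrative only; float32 pairs packed
# into uint64 words, mirroring pack/fst/snd above):
#
#   import numpy as np
#   def pack(a, b):  # a in the high 32 bits, b in the low 32 bits
#     a = a.view(np.uint32).astype(np.uint64)
#     b = b.view(np.uint32).astype(np.uint64)
#     return (a << np.uint64(32)) | b
#   def fst(t):
#     return (t >> np.uint64(32)).astype(np.uint32).view(np.float32)
#   def snd(t):
#     return t.astype(np.uint32).view(np.float32)
#
#   a = np.array([2.5], np.float32)
#   b = np.array([-1.0], np.float32)
#   t = pack(a, b)
#   assert fst(t)[0] == a[0] and snd(t)[0] == b[0]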
def _sort_abstract_eval(*args, **kwargs):
args = tuple(raise_to_shaped(arg) for arg in args)
if any(arg.shape != args[0].shape for arg in args[1:]):
shapes = " ".join(str(a.shape) for a in args)
raise TypeError(f"Arguments to sort must have equal shapes, got: {shapes}")
return args
def _float_to_int_for_sort(x):
  # Switch from a floating point value to an integer value in such a way that
  # when using the integer value to compare, we get the same result for normal
  # values, -NaN is treated as the smallest value, and NaN is treated as the
  # largest value.
  # If f is a float, and
  #   x = bit_cast<int32>(f);
  #   y = x < 0 ? int32_max - x : x;
  # then y is ordered as an int32 such that finite values have the obvious
  # order, -0 is ordered before 0, and -NaN and NaN appear at the beginning
  # and end of the ordering.
  # Note that to avoid signed overflow when computing int32_max - x, we do the
  # subtraction in unsigned arithmetic and then convert back to signed.
if x.dtype == dtypes.bfloat16:
x = convert_element_type(x, np.float32)
  nbits = np.finfo(x.dtype).bits
signed_dtype = _INT_DTYPES[nbits]
unsigned_dtype = _UINT_DTYPES[nbits]
signed = bitcast_convert_type(x, signed_dtype)
unsigned = bitcast_convert_type(x, unsigned_dtype)
flipped = bitcast_convert_type(
sub(unsigned_dtype(np.iinfo(signed_dtype).max), unsigned), signed_dtype)
return select(lt(signed, _zero(signed)), flipped, signed)
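# A numpy check of the key transform above: after the transform, plain integer
# comparison induces the total order
#   -NaN < -inf < ... < -0 < 0 < ... < inf < NaN.
#
#   import numpy as np
#   f = np.array([np.nan, np.inf, 1.0, 0.0, -0.0, -1.0, -np.inf, -np.nan],
#                dtype=np.float32)
#   s = f.view(np.int32)
#   u = f.view(np.uint32)
#   flipped = (np.uint32(np.iinfo(np.int32).max) - u).view(np.int32)
#   keys = np.where(s < 0, flipped, s)
#   f[np.argsort(keys, kind='stable')]
#   # -> -nan, -inf, -1., -0., 0., 1., inf, nan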
# Default comparator that sorts the operands lexicographically on the
# first `num_keys` arguments.
# For floating point types, a total order is created where
# -NaN < -infinity < ... < -0 < 0 < ... < infinity < NaN.
# For complex types, the (real, imag) pairs are sorted lexicographically
# (following NumPy's semantics).
# This code adds complex-number support and lexicographic ordering to the algorithm from:
# https://github.com/tensorflow/tensorflow/blob/ba43780830f09da72081fe5061c436f1c6203a92/tensorflow/compiler/xla/client/lib/comparators.h#L33
def _sort_lt_comparator(*operands, num_keys=1):
assert len(operands) >= 2 and len(operands) % 2 == 0, operands
assert len(operands) // 2 >= num_keys, (operands, num_keys)
x_keys, y_keys = [], []
for x, y in zip(operands[:2*num_keys:2], operands[1:2*num_keys:2]):
assert x.dtype == y.dtype, (x.dtype, y.dtype)
if np.issubdtype(x.dtype, np.complexfloating):
x_keys.extend([_float_to_int_for_sort(real(x)), _float_to_int_for_sort(imag(x))])
y_keys.extend([_float_to_int_for_sort(real(y)), _float_to_int_for_sort(imag(y))])
elif np.issubdtype(x.dtype, np.floating):
x_keys.append(_float_to_int_for_sort(x))
y_keys.append(_float_to_int_for_sort(y))
else:
x_keys.append(x)
y_keys.append(y)
p = None
for xk, yk in zip(x_keys[::-1], y_keys[::-1]):
p = (bitwise_or(lt(xk, yk), bitwise_and(eq(xk, yk), p)) if p is not None
else lt(xk, yk))
return p
def _sort_translation_rule(c, *operands, dimension, is_stable, num_keys):
types = [c.get_shape(x).xla_element_type() for x in operands]
subc = xla_bridge.make_computation_builder("sort_lt_comparator")
params = [xb.parameter(subc, 2 * i + j, xc.Shape.array_shape(typ, ()))
for i, typ in enumerate(types) for j in range(2)]
result = xla.lower_fun(partial(_sort_lt_comparator, num_keys=num_keys),
multiple_results=False)(subc, *params)
comparator = subc.build(result)
out = xops.Sort(c, operands, dimension=dimension, is_stable=is_stable,
comparator=comparator)
return out if len(operands) != 1 else xops.Tuple(c, [out])
def _sort_jvp(primals, tangents, *, dimension, is_stable, num_keys):
shape = primals[0].shape
iotas = []
for dim, size in enumerate(shape):
dtype = np.int32 if size < np.iinfo(np.int32).max else np.int64
iotas.append(broadcasted_iota(dtype, shape, dim))
primals = sort_p.bind(*(primals + (iotas[dimension],)), dimension=dimension,
is_stable=is_stable, num_keys=num_keys)
idx = tuple(primals[-1] if i == dimension else iotas[i]
for i in range(len(shape)))
tangents_out = tuple(t if type(t) is ad_util.Zero else t[idx] for t in tangents)
return tuple(primals[:-1]), tangents_out
def _sort_batch_rule(batched_args, batch_dims, *, dimension, is_stable, num_keys):
prototype_arg, new_bdim = next(
(a, b) for a, b in zip(batched_args, batch_dims) if b is not None)
new_args = []
for arg, bdim in zip(batched_args, batch_dims):
if bdim is None:
dims = np.delete(np.arange(prototype_arg.ndim), new_bdim)
new_args.append(broadcast_in_dim(arg, prototype_arg.shape, dims))
else:
new_args.append(batching.moveaxis(arg, bdim, new_bdim))
new_dimension = dimension + (new_bdim <= dimension)
bdims = (new_bdim,) * len(new_args)
return (sort_p.bind(*new_args, dimension=new_dimension, is_stable=is_stable, num_keys=num_keys),
bdims)
sort_p = Primitive('sort')
sort_p.multiple_results = True
sort_p.def_impl(partial(xla.apply_primitive, sort_p))
sort_p.def_abstract_eval(_sort_abstract_eval)
xla.translations[sort_p] = _sort_translation_rule
ad.primitive_jvps[sort_p] = _sort_jvp
batching.primitive_batchers[sort_p] = _sort_batch_rule
def _top_k_abstract_eval(operand, *, k):
if k < 0:
raise ValueError("k argument to top_k must be nonnegative, got {}".format(k))
if len(operand.shape) == 0:
raise TypeError("top_k operand must have >= 1 dimension, got {}"
.format(operand.shape))
shape = list(operand.shape)
if shape[-1] < k:
msg = "k argument to top_k must be no larger than minor dimension; {} vs {}"
raise ValueError(msg.format(k, shape))
shape[-1] = k
return (ShapedArray(shape, operand.dtype),
ShapedArray(shape, np.dtype(np.int32)))
def _top_k_jvp(primals, tangents, *, k):
operand, = primals
tangent, = tangents
primals_out = top_k(operand, k)
if type(tangent) is ad_util.Zero:
tangent_out = ad_util.Zero.from_value(primals_out[0])
else:
_, k_idxs = primals_out
idx_shape = k_idxs.shape
rank = len(idx_shape)
gather_index_shape = idx_shape + (1,)
gather_indices = []
for i in range(rank-1):
_iota = iota(k_idxs.dtype, idx_shape[i])
if not config.omnistaging_enabled:
_iota = tie_in(operand, _iota)
_iota = broadcast_in_dim(_iota, gather_index_shape, (i,))
gather_indices.append(_iota)
gather_indices.append(reshape(k_idxs, gather_index_shape))
gather_indices = concatenate(gather_indices, dimension=rank)
slice_sizes = (1,) * rank
dnums = GatherDimensionNumbers(
offset_dims=(),
collapsed_slice_dims=tuple(range(rank)),
start_index_map=tuple(range(rank)))
tangent_out = gather(tangent, gather_indices, dnums, slice_sizes)
return primals_out, (tangent_out, ad_util.Zero.from_value(primals_out[1]))
def _top_k_batch_rule(batched_args, batch_dims, *, k):
operand, = batched_args
bdim, = batch_dims
if bdim == operand.ndim-1:
perm = np.arange(operand.ndim)
perm[bdim-1], perm[bdim] = perm[bdim], perm[bdim-1]
top_k_v, top_k_i = top_k(transpose(operand, perm), k=k)
return (transpose(top_k_v, perm),
transpose(top_k_i, perm)), (bdim, bdim)
else:
return top_k(operand, k=k), (bdim, bdim)
top_k_p = Primitive('top_k')
top_k_p.multiple_results = True
top_k_p.def_impl(partial(xla.apply_primitive, top_k_p))
top_k_p.def_abstract_eval(_top_k_abstract_eval)
xla.translations[top_k_p] = partial(standard_translate, 'top_k')
ad.primitive_jvps[top_k_p] = _top_k_jvp
batching.primitive_batchers[top_k_p] = _top_k_batch_rule
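# Usage sketch for top_k and its JVP above (the tangent is gathered at the
# winning indices; the index output gets a symbolic zero):
#
#   import jax
#   import jax.numpy as jnp
#   from jax import lax
#   lax.top_k(jnp.array([1., 9., 3., 7.]), 2)      # -> ([9., 7.], [1, 3])
#   jax.grad(lambda x: lax.top_k(x, 2)[0].sum())(
#       jnp.array([1., 9., 3., 7.]))               # -> [0., 1., 0., 1.]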
def _stop_gradient_jvp_rule(primals, tangents):
# if we don't call stop_gradient here, we'd only peel off one autodiff tracer
x, = primals
return stop_gradient(x), ad_util.Zero.from_value(x)
def _stop_gradient_batch_rule(batched_args, batch_dims):
x, = batched_args
dim, = batch_dims
return stop_gradient(x), dim
ad.primitive_jvps[ad_util.stop_gradient_p] = _stop_gradient_jvp_rule
batching.primitive_batchers[ad_util.stop_gradient_p] = _stop_gradient_batch_rule
def create_token(x):
"""Creates an XLA token value with no preconditions for sequencing effects.
Experimental.
Args:
x: a dummy argument used to tie the CreateToken operator into a trace. The
value of `x` is ignored.
"""
# x is a dummy argument used to tie the operator into a trace.
return create_token_p.bind(stop_gradient(x))
create_token_p = Primitive("create_token")
create_token_p.def_impl(partial(xla.apply_primitive, create_token_p))
create_token_p.def_abstract_eval(lambda _: abstract_token)
xla.translations[create_token_p] = lambda c, *_: xops.CreateToken(c)
def after_all(*operands):
"""Merges one or more XLA token values. Experimental.
Wraps the XLA AfterAll operator."""
return after_all_p.bind(*operands)
def _after_all_abstract_eval(*operands):
if any(x is not abstract_token for x in operands):
raise TypeError("Arguments to after_all must be tokens")
return abstract_token
def _after_all_translation_rule(c, *operands):
return xops.AfterAll(c, operands)
after_all_p = Primitive("after_all")
after_all_p.def_impl(partial(xla.apply_primitive, after_all_p))
after_all_p.def_abstract_eval(_after_all_abstract_eval)
xla.translations[after_all_p] = _after_all_translation_rule
def infeed(token, shape=None, partitions=None):
"""Consumes an infeed value of `shape` from the host. Experimental.
`token` is used to sequence infeed and outfeed effects.
  `partitions` may be specified inside a `sharded_jit` function.
"""
flat_shapes, treedef = pytree.flatten(shape)
for shape in flat_shapes:
if not isinstance(shape, ShapedArray):
raise TypeError("shape argument to infeed must be a pytree of "
"ShapedArray values, got {}".format(shape))
if partitions is not None:
# Always replicate token.
# We specifically use type() to raise an error for PartitionSpecs.
if type(partitions) != tuple: # pylint: disable=unidiomatic-typecheck
raise ValueError(f"'partitions' argument to infeed should be a tuple, "
f"got {partitions}")
partitions = partitions + (None,)
xs_and_token = infeed_p.bind(token, shapes=tuple(flat_shapes),
partitions=partitions)
return (treedef.unflatten(xs_and_token[:-1]), xs_and_token[-1])
def _infeed_abstract_eval(token, *, shapes, partitions):
if token is not abstract_token:
raise TypeError("First argument to infeed must be a token")
return shapes + (abstract_token,)
def _infeed_translation_rule(c, token, *, shapes, partitions):
shape = tuple(shape.with_major_to_minor_layout_if_absent()
for x in shapes for shape in xla.aval_to_xla_shapes(x))
build_infeed = partial(xops.InfeedWithToken, token,
xla_client.Shape.tuple_shape(shape))
if partitions:
xs_and_token = xb.with_sharding(c, partitions, build_infeed)
else:
# Note that infeed will default to replication if inside a sharded
# computation and no sharding is specified.
xs_and_token = build_infeed()
xs = xops.GetTupleElement(xs_and_token, 0)
token = xops.GetTupleElement(xs_and_token, 1)
outs = [xops.GetTupleElement(xs, i) for i in range(len(shapes))] + [token]
return xops.Tuple(c, outs)
infeed_p = Primitive("infeed")
infeed_p.multiple_results = True
infeed_p.def_impl(partial(xla.apply_primitive, infeed_p))
infeed_p.def_abstract_eval(_infeed_abstract_eval)
xla.translations[infeed_p] = _infeed_translation_rule
def outfeed(token, xs):
"""Outfeeds value `xs` to the host. Experimental.
`token` is used to sequence infeed and outfeed effects.
"""
flat_xs, _ = pytree.flatten(xs)
return outfeed_p.bind(token, *flat_xs)
def _outfeed_abstract_eval(token, *xs):
if token is not abstract_token:
raise TypeError("First argument to outfeed must be a token")
return abstract_token
def _outfeed_translation_rule(c, token, *xs):
t = xops.Tuple(c, xs)
return xops.OutfeedWithToken(t, token, c.get_shape(t))
outfeed_p = Primitive("outfeed")
outfeed_p.def_impl(partial(xla.apply_primitive, outfeed_p))
outfeed_p.def_abstract_eval(_outfeed_abstract_eval)
xla.translations[outfeed_p] = _outfeed_translation_rule
def rng_uniform(a, b, shape):
"""Stateful PRNG generator. Experimental and its use is discouraged.
  Returns uniformly distributed random numbers in the range [a, b).
You should use jax.random for most purposes; this function exists only for
niche use cases with special performance requirements.
This API may be removed at any time.
"""
return rng_uniform_p.bind(a, b, shape=tuple(shape))
def _rng_uniform_abstract_eval(a, b, *, shape):
if a.dtype != b.dtype:
raise ValueError(
"Arguments to rng_uniform must have identical dtypes, got {} "
"and {}.".format(a.dtype, b.dtype))
if a.shape != () or b.shape != ():
raise ValueError(
"Arguments to rng_uniform must be scalars; got shapes {} and {}."
.format(a.shape, b.shape))
return ShapedArray(shape, a.dtype)
def _rng_uniform_translation_rule(c, a, b, *, shape):
xla_shape = xc.Shape.array_shape(c.get_shape(a).xla_element_type(), shape)
return xops.RngUniform(a, b, xla_shape)
rng_uniform_p = Primitive("rng_uniform")
rng_uniform_p.def_impl(partial(xla.apply_primitive, rng_uniform_p))
rng_uniform_p.def_abstract_eval(_rng_uniform_abstract_eval)
xla.translations[rng_uniform_p] = _rng_uniform_translation_rule
def _iota_abstract_eval(*, dtype, shape, dimension):
_check_shapelike("iota", "shape", shape)
if not any(dtypes.issubdtype(dtype, t) for t in _num):
msg = 'iota does not accept dtype {}. Accepted dtypes are subtypes of {}.'
typename = str(np.dtype(dtype).name)
accepted_typenames = (t.__name__ for t in _num)
raise TypeError(msg.format(typename, ', '.join(accepted_typenames)))
if not 0 <= dimension < len(shape):
raise ValueError("iota dimension must be between 0 and len(shape), got "
f"dimension={dimension} for shape {shape}")
return ShapedArray(shape, dtype)
def _iota_translation_rule(c, dtype, shape, dimension):
etype = xla_client.dtype_to_etype(dtype)
xla_shape = xc.Shape.array_shape(etype, shape)
return xops.Iota(c, xla_shape, dimension)
iota_p = Primitive('iota')
iota_p.def_impl(partial(xla.apply_primitive, iota_p))
iota_p.def_abstract_eval(_iota_abstract_eval)
xla.translations[iota_p] = _iota_translation_rule
### util
_ndim = np.ndim
def _dilate_shape(shape, dilation):
"""Utility function for computing the shape resulting from a dilation."""
if not np.all(np.greater(dilation, 0)):
msg = "All dilations must be positive, got {}."
raise TypeError(msg.format(dilation))
  dilation = (1,) * (len(shape) - len(dilation)) + tuple(dilation)
  shape = np.asarray(shape)  # so the `shape == 0` test below is elementwise
  return np.where(shape == 0, 0,
                  np.multiply(dilation, np.subtract(shape, 1)) + 1)
def _ceil_divide(x1, x2):
return -np.floor_divide(np.negative(x1), x2)
def padtype_to_pads(in_shape, window_shape, window_strides, padding):
"""Convert padding string to list of pairs of pad values."""
PaddingType = xla_client.PaddingType
if isinstance(padding, str):
mapping = {'VALID': PaddingType.VALID, 'SAME': PaddingType.SAME}
try:
padding = mapping[padding.upper()]
except KeyError as err:
msg = "Unrecognized padding type: expected 'VALID' or 'SAME', got {}."
raise RuntimeError(msg.format(padding)) from err
if padding == PaddingType.SAME:
out_shape = _ceil_divide(in_shape, window_strides)
pad_sizes = np.maximum(0, (out_shape - 1) * window_strides +
window_shape - in_shape)
return [(pad_size // 2, pad_size - pad_size // 2) for pad_size in pad_sizes]
elif padding == PaddingType.VALID:
return [(0, 0)] * len(in_shape)
else:
msg = "Unknown padding type: {}."
raise TypeError(msg.format(padding))
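# Worked example of the 'SAME' branch above: for in_shape (10,), window (3,),
# stride (2,):
#   out_shape = ceil(10 / 2) = 5
#   pad_size  = max(0, (5 - 1) * 2 + 3 - 10) = 1, split as (0, 1)
# so padtype_to_pads((10,), (3,), (2,), 'SAME') == [(0, 1)].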
def _check_same_dtypes(name, ignore_fp_precision, *ttypes):
"""Check that dtypes agree, possibly ignoring float precision."""
# the `ignore_fp_precision` flag exists because the XLA shape inference logic
# allows mixed floating point precision, but the HLO verifier often rejects it
types = list(map(np.dtype, ttypes)) # canonicalize
if ignore_fp_precision:
types = [
np.floating if dtypes.issubdtype(dtype, np.floating)
else np.complexfloating if dtypes.issubdtype(dtype, np.complexfloating)
else dtype for dtype in types]
if len({dtypes.canonicalize_dtype(t) for t in types}) != 1:
if ignore_fp_precision:
msg = ("{} requires arguments to have same dtypes up to floating point "
"precision, got {}.")
else:
msg = "{} requires arguments to have the same dtypes, got {}."
raise TypeError(msg.format(name, ", ".join(map(str, types))))
def _check_conv_shapes(name, lhs_shape, rhs_shape, window_strides):
"""Check that conv shapes are valid and are consistent with window_strides."""
if len(lhs_shape) != len(rhs_shape):
msg = "Arguments to {} must have same rank, got {} and {}."
raise TypeError(msg.format(name, len(lhs_shape), len(rhs_shape)))
if len(lhs_shape) < 2:
msg = "Arguments to {} must have rank at least 2, got {} and {}."
raise TypeError(msg.format(name, len(lhs_shape), len(rhs_shape)))
if lhs_shape[1] != rhs_shape[1]:
msg = "Arguments to {} must agree on input feature size, got {} and {}."
raise TypeError(msg.format(name, lhs_shape[1], rhs_shape[1]))
_check_shapelike(name, "window_strides", window_strides)
if not np.all(np.greater(window_strides, 0)):
msg = "All elements of window_strides must be positive, got {}."
raise TypeError(msg.format(window_strides))
if len(window_strides) != len(lhs_shape) - 2:
msg = "{} window_strides has wrong length: expected {}, got {}."
expected_length = len(lhs_shape) - 2
raise TypeError(msg.format(name, expected_length, len(window_strides)))
def conv_shape_tuple(lhs_shape, rhs_shape, strides, pads, batch_group_count=1):
"""Compute the shape tuple of a conv given input shapes in canonical order."""
if isinstance(pads, str):
pads = padtype_to_pads(lhs_shape[2:], rhs_shape[2:], strides, pads)
if len(pads) != len(lhs_shape) - 2:
msg = "Wrong number of explicit pads for convolution: expected {}, got {}."
raise TypeError(msg.format(len(lhs_shape) - 2, len(pads)))
lhs_padded = np.add(lhs_shape[2:], np.sum(np.array(pads).reshape(-1, 2),
axis=1))
out_space = np.floor_divide(
np.subtract(lhs_padded, rhs_shape[2:]), strides) + 1
out_space = np.maximum(0, out_space)
assert lhs_shape[0] % batch_group_count == 0
out_shape = (lhs_shape[0] // batch_group_count, rhs_shape[0])
return tuple(out_shape + tuple(out_space))
def conv_general_shape_tuple(lhs_shape, rhs_shape, window_strides, padding,
dimension_numbers):
lhs_perm, rhs_perm, out_perm = conv_general_permutations(dimension_numbers)
lhs_trans = np.take(lhs_shape, lhs_perm)
rhs_trans = np.take(rhs_shape, rhs_perm)
out_trans = conv_shape_tuple(lhs_trans, rhs_trans, window_strides, padding)
return tuple(np.take(out_trans, np.argsort(out_perm)))
def conv_transpose_shape_tuple(lhs_shape, rhs_shape, window_strides, padding,
dimension_numbers):
lhs_perm, rhs_perm, out_perm = conv_general_permutations(dimension_numbers)
lhs_trans = np.take(lhs_shape, lhs_perm)
rhs_trans = np.take(rhs_shape, rhs_perm)
if isinstance(padding, str):
padding = [_conv_transpose_padding(k, s, padding)
for k,s in zip(rhs_trans[2:], window_strides)]
padding = list(map(np.sum, padding))
unpad_out_space = [(i-1) * s - k + 2
for i, k, s in zip(lhs_trans[2:],
rhs_trans[2:],
window_strides)]
out_space = np.sum([unpad_out_space, padding], axis=0).tolist()
out_trans = tuple((lhs_trans[0], rhs_trans[0]) + tuple(out_space))
return tuple(np.take(out_trans, np.argsort(out_perm)))
def _check_shapelike(fun_name, arg_name, obj, non_zero_shape=False):
"""Check that `obj` is a shape-like value (e.g. tuple of nonnegative ints)."""
if not isinstance(obj, (tuple, list, np.ndarray)):
msg = "{} {} must be of type tuple/list/ndarray, got {}."
raise TypeError(msg.format(fun_name, arg_name, type(obj)))
# bool(obj) for an ndarray raises an error, so we check len
if not len(obj): # pylint: disable=g-explicit-length-test
return
obj_arr = np.array(obj)
if obj_arr.ndim != 1:
msg = "{} {} must be rank 1, got {}."
    raise TypeError(msg.format(fun_name, arg_name, obj_arr.ndim))
try:
canonicalize_shape(obj_arr)
except TypeError as err:
msg = "{} {} must have every element be an integer type, got {}."
raise TypeError(msg.format(fun_name, arg_name, tuple(map(type, obj)))) from err
lower_bound, bound_error = (
(1, "strictly positive") if non_zero_shape else (0, "nonnegative"))
if not (obj_arr >= lower_bound).all():
msg = "{} {} must have every element be {}, got {}."
raise TypeError(msg.format(fun_name, arg_name, bound_error, obj))
def _dynamic_slice_indices(operand, start_indices):
if len(start_indices) != operand.ndim:
msg = ("Length of slice indices must match number of operand dimensions ({} "
"vs {})")
raise ValueError(msg.format(len(start_indices), operand.shape))
# map int over operand.shape to raise any dynamic-shape errors
safe_map(int, operand.shape)
if not isinstance(start_indices, (tuple, list)):
if start_indices.ndim != 1:
raise ValueError("Slice indices must be a 1D sequence, got {}"
.format(start_indices.shape))
return select(lt(start_indices, _zeros(start_indices)),
add(start_indices, _const(start_indices, operand.shape)),
start_indices)
else:
return [np.asarray(i + d if i < 0 else i, getattr(i, 'dtype', dtypes.int_))
if isinstance(i, (int, np.integer))
else select(lt(i, _const(i, 0)), add(i, _const(i, d)), i)
for i, d in zip(start_indices, operand.shape)]
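# The normalization above is what lets dynamic_slice accept negative start
# indices. A quick check with the public API:
#
#   import jax.numpy as jnp
#   from jax import lax
#   x = jnp.arange(5.)
#   lax.dynamic_slice(x, (-2,), (2,))  # start -2 wraps to 3 -> [3., 4.]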
def _const(example, val):
if dtypes.is_python_scalar(example):
return dtypes.scalar_type_of(example)(val)
return np.array(val, _dtype(example))
_zeros: Callable = partial(full_like, fill_value=0)
_zero: Callable = partial(full_like, shape=(), fill_value=0)
_ones: Callable = partial(full_like, fill_value=1)
_one: Callable = partial(full_like, shape=(), fill_value=1)
_twos: Callable = partial(full_like, fill_value=2)
_two: Callable = partial(full_like, shape=(), fill_value=2)
dtype: Callable = dtypes.result_type
_dtype: Callable = dtypes.result_type
def _iscomplex(x) -> bool:
return dtypes.issubdtype(_dtype(x), np.complexfloating)
def ranges_like(*xs):
start = 0
for x in xs:
x_len = len(x)
yield range(start, start + x_len)
start += x_len
def remaining(original, *removed_lists):
removed = set(itertools.chain(*removed_lists))
return [i for i in original if i not in removed]
def _canonicalize_precision(precision):
if precision is None:
return None
if isinstance(precision, Precision) or (
isinstance(precision, tuple)
and len(precision) == 2
and all(isinstance(p, Precision) for p in precision)
):
return precision
else:
raise ValueError("Precision argument must be None, a lax.Precision value "
f"or a tuple of two lax.Precision values; got {precision}")
def conv_dimension_numbers(lhs_shape, rhs_shape, dimension_numbers
) -> ConvDimensionNumbers:
"""Converts convolution `dimension_numbers` to a `ConvDimensionNumbers`.
Args:
lhs_shape: tuple of nonnegative integers, shape of the convolution input.
rhs_shape: tuple of nonnegative integers, shape of the convolution kernel.
dimension_numbers: None or a tuple/list of strings or a ConvDimensionNumbers
object following the convolution dimension number specification format in
xla_client.py.
Returns:
A `ConvDimensionNumbers` object that represents `dimension_numbers` in the
canonical form used by lax functions.
"""
if isinstance(dimension_numbers, ConvDimensionNumbers):
return dimension_numbers
if len(lhs_shape) != len(rhs_shape):
msg = "convolution requires lhs and rhs ndim to be equal, got {} and {}."
raise TypeError(msg.format(len(lhs_shape), len(rhs_shape)))
if dimension_numbers is None:
iota = tuple(range(len(lhs_shape)))
return ConvDimensionNumbers(iota, iota, iota)
elif isinstance(dimension_numbers, (list, tuple)):
if len(dimension_numbers) != 3:
msg = "convolution dimension_numbers list/tuple must be length 3, got {}."
raise TypeError(msg.format(len(dimension_numbers)))
if not all(isinstance(elt, str) for elt in dimension_numbers):
msg = "convolution dimension_numbers elements must be strings, got {}."
raise TypeError(msg.format(tuple(map(type, dimension_numbers))))
msg = ("convolution dimension_numbers[{}] must have len equal to the ndim "
"of lhs and rhs, got {} for lhs and rhs shapes {} and {}.")
for i, elt in enumerate(dimension_numbers):
if len(elt) != len(lhs_shape):
raise TypeError(msg.format(i, len(elt), lhs_shape, rhs_shape))
lhs_spec, rhs_spec, out_spec = conv_general_permutations(dimension_numbers)
return ConvDimensionNumbers(lhs_spec, rhs_spec, out_spec)
else:
msg = "convolution dimension_numbers must be tuple/list or None, got {}."
raise TypeError(msg.format(type(dimension_numbers)))
def conv_general_permutations(dimension_numbers):
"""Utility for convolution dimension permutations relative to Conv HLO."""
lhs_spec, rhs_spec, out_spec = dimension_numbers
lhs_char, rhs_char, out_char = charpairs = ("N", "C"), ("O", "I"), ("N", "C")
for i, (a, b) in enumerate(charpairs):
if not dimension_numbers[i].count(a) == dimension_numbers[i].count(b) == 1:
msg = ("convolution dimension_numbers[{}] must contain the characters "
"'{}' and '{}' exactly once, got {}.")
raise TypeError(msg.format(i, a, b, dimension_numbers[i]))
if len(dimension_numbers[i]) != len(set(dimension_numbers[i])):
msg = ("convolution dimension_numbers[{}] cannot have duplicate "
"characters, got {}.")
raise TypeError(msg.format(i, dimension_numbers[i]))
if not (set(lhs_spec) - set(lhs_char) == set(rhs_spec) - set(rhs_char) ==
set(out_spec) - set(out_char)):
msg = ("convolution dimension_numbers elements must each have the same "
"set of spatial characters, got {}.")
raise TypeError(msg.format(dimension_numbers))
def getperm(spec, charpair):
spatial = (i for i, c in enumerate(spec) if c not in charpair)
if spec is not rhs_spec:
spatial = sorted(spatial, key=lambda i: rhs_spec.index(spec[i]))
return (spec.index(charpair[0]), spec.index(charpair[1])) + tuple(spatial)
lhs_perm, rhs_perm, out_perm = map(getperm, dimension_numbers, charpairs)
return lhs_perm, rhs_perm, out_perm
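# Worked example (the common TensorFlow-style NHWC/HWIO convention): the returned
# permutations map each spec onto the canonical (batch, feature, *spatial) /
# (out-feature, in-feature, *spatial) layouts used by the Conv HLO:
#   conv_general_permutations(("NHWC", "HWIO", "NHWC"))
#   ->  lhs_perm=(0, 3, 1, 2), rhs_perm=(3, 2, 0, 1), out_perm=(0, 3, 1, 2)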
def _conv_general_proto(dimension_numbers):
assert type(dimension_numbers) is ConvDimensionNumbers
lhs_spec, rhs_spec, out_spec = dimension_numbers
proto = xla_client.ConvolutionDimensionNumbers()
proto.input_batch_dimension = lhs_spec[0]
proto.input_feature_dimension = lhs_spec[1]
proto.output_batch_dimension = out_spec[0]
proto.output_feature_dimension = out_spec[1]
proto.kernel_output_feature_dimension = rhs_spec[0]
proto.kernel_input_feature_dimension = rhs_spec[1]
proto.input_spatial_dimensions.extend(lhs_spec[2:])
proto.kernel_spatial_dimensions.extend(rhs_spec[2:])
proto.output_spatial_dimensions.extend(out_spec[2:])
return proto
def _conv_general_vjp_lhs_padding(
in_shape, window_dimensions, window_strides, out_shape, padding,
lhs_dilation, rhs_dilation) -> List[Tuple[int, int]]:
lhs_dilated_shape = _dilate_shape(in_shape, lhs_dilation)
rhs_dilated_shape = _dilate_shape(window_dimensions, rhs_dilation)
out_dilated_shape = _dilate_shape(out_shape, window_strides)
pad_before = np.subtract(rhs_dilated_shape, [lo for lo, _ in padding]) - 1
pad_after = (np.add(lhs_dilated_shape, rhs_dilated_shape) - 1
- out_dilated_shape - pad_before)
return safe_zip(pad_before, pad_after)
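# Worked 1-D example: for a stride-1, width-3 convolution with SAME padding
# (in_shape=(5,), window_dimensions=(3,), out_shape=(5,), padding=[(1, 1)],
# no dilation), pad_before = 3 - 1 - 1 = 1 and pad_after = (5 + 3 - 1) - 5 - 1 = 1,
# so the cotangent is padded by [(1, 1)] for the transposed convolution.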
def _conv_general_vjp_rhs_padding(
in_shape, window_dimensions, window_strides, out_shape, padding,
lhs_dilation, rhs_dilation):
lhs_dilated_shape = _dilate_shape(in_shape, lhs_dilation)
rhs_dilated_shape = _dilate_shape(window_dimensions, rhs_dilation)
out_dilated_shape = _dilate_shape(out_shape, window_strides)
total_in_pad = out_dilated_shape + rhs_dilated_shape - lhs_dilated_shape - 1
return [(pad[0], tot - pad[0]) for pad, tot in zip(padding, total_in_pad)]
def _balanced_eq(x, z, y):
return div(select(_eq_meet(x, z), _ones(z), _zeros(z)),
select(_eq_meet(y, z), _twos(z), _ones(z)))
def _eq_meet(a, b):
a_dtype, b_dtype = _dtype(a), _dtype(b)
if a_dtype != b_dtype:
higher_dtype = dtypes.promote_types(a_dtype, b_dtype)
if higher_dtype == a_dtype:
a = convert_element_type(a, b_dtype)
else:
b = convert_element_type(b, a_dtype)
return eq(a, b)
def _abstractify(x):
return raise_to_shaped(core.get_aval(x))
def _check_user_dtype_supported(dtype, fun_name=None):
  # Avoid using `dtype in [...]` because of numpy dtype equality overloading.
if isinstance(dtype, type) and dtype in {bool, int, float, complex}:
return
np_dtype = np.dtype(dtype)
if np_dtype.kind not in "biufc" and np_dtype.type != dtypes.bfloat16:
msg = f"JAX only supports number and bool dtypes, got dtype {dtype}"
msg += f" in {fun_name}" if fun_name else ""
raise TypeError(msg)
if dtype is not None and np_dtype != dtypes.canonicalize_dtype(dtype):
msg = ("Explicitly requested dtype {} {} is not available, "
"and will be truncated to dtype {}. To enable more dtypes, set the "
"jax_enable_x64 configuration option or the JAX_ENABLE_X64 shell "
"environment variable. "
"See https://github.com/google/jax#current-gotchas for more.")
fun_name = f"requested in {fun_name}" if fun_name else ""
truncated_dtype = dtypes.canonicalize_dtype(dtype).name
    warnings.warn(msg.format(dtype, fun_name, truncated_dtype))
def _canonicalize_axis(axis, num_dims):
"""Canonicalize an axis in [-num_dims, num_dims) to [0, num_dims)."""
axis = operator.index(axis)
if not -num_dims <= axis < num_dims:
raise ValueError(
"axis {} is out of bounds for array of dimension {}".format(
axis, num_dims))
if axis < 0:
axis = axis + num_dims
return axis
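# For example: _canonicalize_axis(-1, 3) -> 2 and _canonicalize_axis(0, 3) -> 0,
# while _canonicalize_axis(3, 3) raises a ValueError.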
tie_in_p = Primitive('tie_in')
@config.register_omnistaging_disabler
def omnistaging_disabler() -> None:
global tie_in
def tie_in(x: Array, y: Array) -> Array:
"""Returns the value of ``y`` but with a fake data dependence on ``x``.
When staging to XLA (e.g. running under jit or pmap), values that don't depend
on computation inputs are computed op-by-op, and folded into the XLA
computation as constants.
``tie_in`` provides a way to explicitly stage values into the computation.
When staging to XLA and ``x`` is already staged, then the result of ``tie_in``
is ``y``, but staged to XLA. Downstream use of the result will also be staged
to XLA.
For example, ``lax.sin(const)`` would be constant-folded if ``const`` is
    a constant array, but ``lax.sin(lax.tie_in(x, const))`` will be staged to
XLA as long as ``x`` is staged to XLA.
"""
if config.omnistaging_enabled:
return y
else:
return tie_in_p.bind(x, y)
# If lax has already been imported, we need to monkey-patch the
# lax/__init__.py import of tie_in. If not (i.e. if this is running at lax
# module creation time) then we'll get an import error.
try:
jax.lax.tie_in = tie_in
except AttributeError:
pass
def _tie_in_transpose_rule(t, x, y):
if ad.is_undefined_primal(x):
return [ad_util.Zero(x.aval), t]
else:
return [ad_util.Zero.from_value(x), t]
def _tie_in_batch_rule(batched_args, batch_dims):
y = tie_in(*batched_args)
_, bdim_y = batch_dims
return y, bdim_y
def _tie_in_impl(x, y):
core.check_valid_jaxtype(x)
core.check_valid_jaxtype(y)
return y
def _tie_in_jvp(primals, tangents):
x, y = primals
x_dot, y_dot = tangents
if type(y_dot) is ad_util.Zero or core.get_aval(y_dot).dtype is dtypes.float0:
      return y, y_dot  # skip the tie-in in this case
else:
return ad.linear_jvp(tie_in_p, primals, tangents)
tie_in_p.def_impl(_tie_in_impl)
tie_in_p.def_abstract_eval(lambda x, y: raise_to_shaped(y))
xla.translations[tie_in_p] = lambda c, x, y: y
ad.primitive_jvps[tie_in_p] = _tie_in_jvp
ad.primitive_transposes[tie_in_p] = partial(ad.linear_transpose2, _tie_in_transpose_rule)
batching.primitive_batchers[tie_in_p] = _tie_in_batch_rule
masking.masking_rules[tie_in_p] = lambda vals, logical_shapes: vals[1]
|
from flask_restful import Resource, current_app
from server.services.campaign_service import CampaignService
from server.services.organisation_service import OrganisationService
from server.models.postgis.utils import NotFound
from server.models.postgis.campaign import Campaign
from server.services.users.authentication_service import token_auth, tm
class OrganisationsCampaignsAPI(Resource):
@token_auth.login_required
def post(self, organisation_id, campaign_id):
"""
Assigns a campaign to an organisation
---
tags:
- campaigns
produces:
- application/json
parameters:
- in: header
name: Authorization
description: Base64 encoded session token
required: true
type: string
default: Token sessionTokenHere==
- name: organisation_id
in: path
description: Unique organisation ID
required: true
type: integer
default: 1
- name: campaign_id
in: path
description: Unique campaign ID
required: true
type: integer
default: 1
responses:
200:
description: Organisation and campaign assigned successfully
401:
description: Unauthorized - Invalid credentials
403:
                description: Forbidden - user is not a manager of the organisation
404:
                description: Organisation or campaign not found
500:
description: Internal Server Error
"""
try:
if OrganisationService.can_user_manage_organisation(
organisation_id, tm.authenticated_user_id
):
if Campaign.campaign_organisation_exists(campaign_id, organisation_id):
message = "Campaign {} is already assigned to organisation {}.".format(
campaign_id, organisation_id
)
return {"Error": message}, 400
CampaignService.create_campaign_organisation(
organisation_id, campaign_id
)
message = "campaign with id {} assigned for organisation with id {}".format(
campaign_id, organisation_id
)
return {"Success": message}, 200
else:
return {"Error": "User is not a manager of the organisation"}, 403
except Exception as e:
error_msg = f"Campaign Organisation POST - unhandled error: {str(e)}"
current_app.logger.critical(error_msg)
return {"Error": error_msg}, 500
def get(self, organisation_id):
"""
Returns all campaigns related to an organisation
---
tags:
- campaigns
produces:
- application/json
parameters:
- in: header
name: Authorization
description: Base64 encoded session token
required: false
type: string
default: Token sessionTokenHere==
- name: organisation_id
in: path
              description: Unique organisation ID
required: true
type: integer
default: 1
responses:
200:
description: Success
404:
description: Organisation not found
500:
description: Internal Server Error
"""
try:
campaigns = CampaignService.get_organisation_campaigns_as_dto(
organisation_id
)
return campaigns.to_primitive(), 200
except NotFound:
return {"Error": "No campaign found"}, 404
except Exception as e:
error_msg = f"Organisation Campaigns GET - unhandled error: {str(e)}"
current_app.logger.critical(error_msg)
return {"Error": error_msg}, 500
@token_auth.login_required
def delete(self, organisation_id, campaign_id):
"""
        Unassigns a campaign from an organisation
---
tags:
- campaigns
produces:
- application/json
parameters:
- in: header
name: Authorization
description: Base64 encoded session token
required: true
type: string
default: Token sessionTokenHere==
- name: organisation_id
in: path
description: Unique organisation ID
required: true
type: integer
default: 1
- name: campaign_id
in: path
description: Unique campaign ID
required: true
type: integer
default: 1
responses:
200:
description: Organisation and campaign unassociated successfully
401:
description: Unauthorized - Invalid credentials
403:
                description: Forbidden - user is not a manager of the organisation
404:
                description: Organisation or campaign not found
500:
description: Internal Server Error
"""
try:
if OrganisationService.can_user_manage_organisation(
organisation_id, tm.authenticated_user_id
):
CampaignService.delete_organisation_campaign(
organisation_id, campaign_id
)
return (
{"Success": "Organisation and campaign unassociated successfully"},
200,
)
else:
return {"Error": "User is not a manager of the organisation"}, 403
except NotFound:
return {"Error": "Organisation Campaign Not Found"}, 404
except Exception as e:
error_msg = f"Organisation Campaigns DELETE - unhandled error: {str(e)}"
current_app.logger.critical(error_msg)
return {"Error": error_msg}, 500
|
# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
from collections import OrderedDict
import paddle.fluid as fluid
from ppcls.optimizer import LearningRateBuilder
from ppcls.optimizer import OptimizerBuilder
from ppcls.modeling import architectures
from ppcls.modeling.loss import CELoss
from ppcls.modeling.loss import MixCELoss
from ppcls.modeling.loss import JSDivLoss
from ppcls.modeling.loss import GoogLeNetLoss
from ppcls.utils.misc import AverageMeter
from ppcls.utils import logger
from paddle.fluid.incubate.fleet.collective import fleet
from paddle.fluid.incubate.fleet.collective import DistributedStrategy
from ema import ExponentialMovingAverage
def create_feeds(image_shape, use_mix=None):
"""
Create feeds as model input
Args:
image_shape(list[int]): model input shape, such as [3, 224, 224]
use_mix(bool): whether to use mix(include mixup, cutmix, fmix)
Returns:
feeds(dict): dict of model input variables
"""
feeds = OrderedDict()
feeds['image'] = fluid.data(
name="feed_image", shape=[None] + image_shape, dtype="float32")
if use_mix:
feeds['feed_y_a'] = fluid.data(
name="feed_y_a", shape=[None, 1], dtype="int64")
feeds['feed_y_b'] = fluid.data(
name="feed_y_b", shape=[None, 1], dtype="int64")
feeds['feed_lam'] = fluid.data(
name="feed_lam", shape=[None, 1], dtype="float32")
else:
feeds['label'] = fluid.data(
name="feed_label", shape=[None, 1], dtype="int64")
return feeds
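# Illustrative call (ImageNet-style shape assumed): with use_mix enabled the
# returned dict carries the mixed-label inputs instead of a plain label.
#   feeds = create_feeds([3, 224, 224], use_mix=True)
#   # -> keys: 'image' [None, 3, 224, 224], 'feed_y_a'/'feed_y_b' [None, 1]
#   #    (int64) and 'feed_lam' [None, 1] (float32)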
def create_dataloader(feeds):
"""
Create a dataloader with model input variables
Args:
feeds(dict): dict of model input variables
Returns:
        dataloader(fluid dataloader): an iterable dataloader bound to the given feeds
"""
trainer_num = int(os.environ.get('PADDLE_TRAINERS_NUM', 1))
capacity = 64 if trainer_num <= 1 else 8
dataloader = fluid.io.DataLoader.from_generator(
feed_list=feeds,
capacity=capacity,
use_double_buffer=True,
iterable=True)
return dataloader
def create_model(architecture, image, classes_num, is_train):
"""
Create a model
Args:
architecture(dict): architecture information,
name(such as ResNet50) is needed
image(variable): model input variable
classes_num(int): num of classes
Returns:
out(variable): model output variable
"""
name = architecture["name"]
params = architecture.get("params", {})
if "is_test" in params:
params['is_test'] = not is_train
model = architectures.__dict__[name](**params)
out = model.net(input=image, class_dim=classes_num)
return out
def create_loss(out,
feeds,
architecture,
classes_num=1000,
epsilon=None,
use_mix=False,
use_distillation=False):
"""
Create a loss for optimization, such as:
        1. CrossEntropy loss
        2. CrossEntropy loss with label smoothing
        3. CrossEntropy loss with mix(mixup, cutmix, fmix)
        4. CrossEntropy loss with label smoothing and (mixup, cutmix, fmix)
5. GoogLeNet loss
Args:
out(variable): model output variable
feeds(dict): dict of model input variables
architecture(dict): architecture information,
name(such as ResNet50) is needed
classes_num(int): num of classes
epsilon(float): parameter for label smoothing, 0.0 <= epsilon <= 1.0
use_mix(bool): whether to use mix(include mixup, cutmix, fmix)
Returns:
loss(variable): loss variable
"""
if architecture["name"] == "GoogLeNet":
assert len(out) == 3, "GoogLeNet should have 3 outputs"
loss = GoogLeNetLoss(class_dim=classes_num, epsilon=epsilon)
target = feeds['label']
return loss(out[0], out[1], out[2], target)
if use_distillation:
assert len(out) == 2, ("distillation output length must be 2, "
"but got {}".format(len(out)))
loss = JSDivLoss(class_dim=classes_num, epsilon=epsilon)
return loss(out[1], out[0])
if use_mix:
loss = MixCELoss(class_dim=classes_num, epsilon=epsilon)
feed_y_a = feeds['feed_y_a']
feed_y_b = feeds['feed_y_b']
feed_lam = feeds['feed_lam']
return loss(out, feed_y_a, feed_y_b, feed_lam)
else:
loss = CELoss(class_dim=classes_num, epsilon=epsilon)
target = feeds['label']
return loss(out, target)
def create_metric(out,
feeds,
architecture,
topk=5,
classes_num=1000,
use_distillation=False):
"""
Create measures of model accuracy, such as top1 and top5
Args:
out(variable): model output variable
        feeds(dict): dict of model input variables (including the label)
topk(int): usually top5
classes_num(int): num of classes
Returns:
fetchs(dict): dict of measures
"""
if architecture["name"] == "GoogLeNet":
assert len(out) == 3, "GoogLeNet should have 3 outputs"
softmax_out = out[0]
else:
# just need student label to get metrics
if use_distillation:
out = out[1]
softmax_out = fluid.layers.softmax(out, use_cudnn=False)
fetchs = OrderedDict()
# set top1 to fetchs
top1 = fluid.layers.accuracy(softmax_out, label=feeds['label'], k=1)
fetchs['top1'] = (top1, AverageMeter('top1', '.4f', need_avg=True))
# set topk to fetchs
k = min(topk, classes_num)
topk = fluid.layers.accuracy(softmax_out, label=feeds['label'], k=k)
topk_name = 'top{}'.format(k)
fetchs[topk_name] = (topk, AverageMeter(topk_name, '.4f', need_avg=True))
return fetchs
def create_fetchs(out,
feeds,
architecture,
topk=5,
classes_num=1000,
epsilon=None,
use_mix=False,
use_distillation=False):
"""
    Create fetchs as model outputs (including loss and metrics); calls
    create_loss, and also create_metric when use_mix is disabled.
Args:
out(variable): model output variable
feeds(dict): dict of model input variables.
If use mix_up, it will not include label.
architecture(dict): architecture information,
name(such as ResNet50) is needed
topk(int): usually top5
classes_num(int): num of classes
epsilon(float): parameter for label smoothing, 0.0 <= epsilon <= 1.0
use_mix(bool): whether to use mix(include mixup, cutmix, fmix)
Returns:
        fetchs(dict): dict of model outputs (including loss and metrics)
"""
fetchs = OrderedDict()
loss = create_loss(out, feeds, architecture, classes_num, epsilon, use_mix,
use_distillation)
fetchs['loss'] = (loss, AverageMeter('loss', '7.4f', need_avg=True))
if not use_mix:
metric = create_metric(out, feeds, architecture, topk, classes_num,
use_distillation)
fetchs.update(metric)
return fetchs
def create_optimizer(config):
"""
Create an optimizer using config, usually including
learning rate and regularization.
Args:
config(dict): such as
{
'LEARNING_RATE':
{'function': 'Cosine',
'params': {'lr': 0.1}
},
'OPTIMIZER':
{'function': 'Momentum',
'params':{'momentum': 0.9},
'regularizer':
{'function': 'L2', 'factor': 0.0001}
}
}
Returns:
an optimizer instance
"""
# create learning_rate instance
lr_config = config['LEARNING_RATE']
lr_config['params'].update({
'epochs': config['epochs'],
'step_each_epoch':
config['total_images'] // config['TRAIN']['batch_size'],
})
lr = LearningRateBuilder(**lr_config)()
# create optimizer instance
opt_config = config['OPTIMIZER']
opt = OptimizerBuilder(**opt_config)
return opt(lr)
def dist_optimizer(config, optimizer):
"""
Create a distributed optimizer based on a normal optimizer
Args:
config(dict):
optimizer(): a normal optimizer
Returns:
optimizer: a distributed optimizer
"""
exec_strategy = fluid.ExecutionStrategy()
exec_strategy.num_threads = 3
exec_strategy.num_iteration_per_drop_scope = 10
dist_strategy = DistributedStrategy()
dist_strategy.nccl_comm_num = 1
dist_strategy.fuse_all_reduce_ops = True
dist_strategy.exec_strategy = exec_strategy
optimizer = fleet.distributed_optimizer(optimizer, strategy=dist_strategy)
return optimizer
def mixed_precision_optimizer(config, optimizer):
use_fp16 = config.get('use_fp16', False)
amp_scale_loss = config.get('amp_scale_loss', 1.0)
use_dynamic_loss_scaling = config.get('use_dynamic_loss_scaling', False)
if use_fp16:
optimizer = fluid.contrib.mixed_precision.decorate(
optimizer,
init_loss_scaling=amp_scale_loss,
use_dynamic_loss_scaling=use_dynamic_loss_scaling)
return optimizer
def build(config, main_prog, startup_prog, is_train=True):
"""
Build a program using a model and an optimizer
1. create feeds
2. create a dataloader
3. create a model
4. create fetchs
5. create an optimizer
Args:
config(dict): config
main_prog(): main program
startup_prog(): startup program
is_train(bool): train or valid
Returns:
dataloader(): a bridge between the model and the data
        fetchs(dict): dict of model outputs (including loss and metrics)
"""
with fluid.program_guard(main_prog, startup_prog):
with fluid.unique_name.guard():
use_mix = config.get('use_mix') and is_train
use_distillation = config.get('use_distillation')
feeds = create_feeds(config.image_shape, use_mix=use_mix)
dataloader = create_dataloader(feeds.values())
out = create_model(config.ARCHITECTURE, feeds['image'],
config.classes_num, is_train)
fetchs = create_fetchs(
out,
feeds,
config.ARCHITECTURE,
config.topk,
config.classes_num,
epsilon=config.get('ls_epsilon'),
use_mix=use_mix,
use_distillation=use_distillation)
if is_train:
optimizer = create_optimizer(config)
lr = optimizer._global_learning_rate()
fetchs['lr'] = (lr, AverageMeter('lr', 'f', need_avg=False))
optimizer = mixed_precision_optimizer(config, optimizer)
optimizer = dist_optimizer(config, optimizer)
optimizer.minimize(fetchs['loss'][0])
if config.get('use_ema'):
global_steps = fluid.layers.learning_rate_scheduler._decay_step_counter(
)
ema = ExponentialMovingAverage(
config.get('ema_decay'), thres_steps=global_steps)
ema.update()
return dataloader, fetchs, ema
return dataloader, fetchs
def compile(config, program, loss_name=None):
"""
Compile the program
Args:
config(dict): config
        program(): the program to be compiled and wrapped for data-parallel execution
loss_name(str): loss name
Returns:
compiled_program(): a compiled program
"""
build_strategy = fluid.compiler.BuildStrategy()
exec_strategy = fluid.ExecutionStrategy()
exec_strategy.num_threads = 1
exec_strategy.num_iteration_per_drop_scope = 10
compiled_program = fluid.CompiledProgram(program).with_data_parallel(
loss_name=loss_name,
build_strategy=build_strategy,
exec_strategy=exec_strategy)
return compiled_program
total_step = 0
def run(dataloader,
exe,
program,
fetchs,
epoch=0,
mode='train',
vdl_writer=None):
"""
Feed data to the model and fetch the measures and loss
Args:
        dataloader(fluid dataloader): iterable feeding batches to the executor
        exe(): executor which runs the program
        program(): program (possibly compiled) to execute
        fetchs(dict): dict of measures and the loss
        epoch(int): epoch of training or validation
        mode(str): 'train', 'valid' or 'eval', used for logging only
    Returns:
        the top1 accuracy average when mode is 'valid', otherwise None
    """
fetch_list = [f[0] for f in fetchs.values()]
metric_list = [f[1] for f in fetchs.values()]
for m in metric_list:
m.reset()
batch_time = AverageMeter('elapse', '.3f')
tic = time.time()
for idx, batch in enumerate(dataloader()):
metrics = exe.run(program=program, feed=batch, fetch_list=fetch_list)
batch_time.update(time.time() - tic)
tic = time.time()
for i, m in enumerate(metrics):
metric_list[i].update(m[0], len(batch[0]))
fetchs_str = ''.join([str(m.value) + ' '
for m in metric_list] + [batch_time.value]) + 's'
if vdl_writer:
global total_step
logger.scaler('loss', metrics[0][0], total_step, vdl_writer)
total_step += 1
if mode == 'eval':
logger.info("{:s} step:{:<4d} {:s}s".format(mode, idx, fetchs_str))
else:
epoch_str = "epoch:{:<3d}".format(epoch)
step_str = "{:s} step:{:<4d}".format(mode, idx)
logger.info("{:s} {:s} {:s}".format(
logger.coloring(epoch_str, "HEADER")
if idx == 0 else epoch_str,
logger.coloring(step_str, "PURPLE"),
logger.coloring(fetchs_str, 'OKGREEN')))
end_str = ''.join([str(m.mean) + ' '
for m in metric_list] + [batch_time.total]) + 's'
if mode == 'eval':
logger.info("END {:s} {:s}s".format(mode, end_str))
else:
end_epoch_str = "END epoch:{:<3d}".format(epoch)
logger.info("{:s} {:s} {:s}".format(
logger.coloring(end_epoch_str, "RED"),
logger.coloring(mode, "PURPLE"),
logger.coloring(end_str, "OKGREEN")))
# return top1_acc in order to save the best model
if mode == 'valid':
return fetchs["top1"][1].avg
|
# -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re, urlparse, time, urllib
from resources.lib.libraries import client
from resources.lib.libraries import client2
from resources.lib.libraries import cleantitle
from resources.lib.libraries import workers
from resources.lib.libraries import control
from resources.lib.resolvers import cloudzilla
from resources.lib.resolvers import openload
from resources.lib.resolvers import uptobox
from resources.lib.resolvers import zstream
from resources.lib.resolvers import videomega
from resources.lib import resolvers
class source:
def __init__(self):
self.base_link = 'http://oneclickwatch.ws'
self.search_link = '/search/%s'
self.title = ''
def get_movie(self, imdb, title, year):
try:
query = self.search_link % urllib.quote_plus(title +' '+year)
query = urlparse.urljoin(self.base_link, query)
result = client2.http_get(query)
years = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1)]
result = client.parseDOM(result, 'h2', attrs={'class': 'title'})
result = [(client.parseDOM(i, 'a', ret='href')[0], client.parseDOM(i, 'a')[0]) for i in result]
            result = [i for i in result if cleantitle.movie(title.lower()) in cleantitle.movie(i[1]).lower()]
            result = [i for i in result if any(x in i[1] for x in years)]
            # Prefer 1080p releases, then 720p, falling back to any title match
            result2 = [i for i in result if '1080' in i[1]]
            result3 = [i for i in result if '720' in i[1].lower()]
if len(result3) > 0: result = result3
if len(result2) > 0: result = result2
url = result[0][0]
return url
except:
return
def get_show(self, imdb, tvdb, tvshowtitle, year):
try:
url = tvshowtitle
url = client.cleanHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def get_episode(self, url, imdb, tvdb, title, date, season, episode):
try:
if url == None: return
            mytitle = url.lower()
url = '%s S%02dE%02d' % (url, int(season), int(episode))
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
url = self.search_link % urllib.quote_plus(url)
query = urlparse.urljoin(self.base_link, url)
result = client2.http_get(query)
result = client.parseDOM(result, 'h2', attrs={'class': 'title'})
result = [(client.parseDOM(i, 'a', ret='href')[0], client.parseDOM(i, 'a')[0]) for i in result]
            result = [i for i in result if mytitle in i[1].lower()]
result2 = [i for i in result if '1080' in i[1].lower()]
result3 = [i for i in result if '720' in i[1].lower()]
if len(result3) > 0: result = result3
if len(result2) > 0: result = result2
url=result[0][0]
return url
except:
return
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
self.sources =[]
mylinks = []
result = client2.http_get(url)
mytitle = re.compile('<title>(.*?)</title>', re.DOTALL).findall(result)[0]
if any(word in mytitle.lower() for word in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'ts']):
quality = 'CAM'
elif '1080p' in mytitle:
quality = '1080p'
elif '720p' in mytitle:
quality = 'HD'
else:
quality = 'MQ'
links = client.parseDOM(result, 'a', attrs={'rel': 'nofollow'})
links = [i for i in links if i.startswith('http')]
for a in links:
mylinks.append([a,quality])
threads = []
for i in mylinks: threads.append(workers.Thread(self.check, i))
[i.start() for i in threads]
for i in range(0, 10 * 2):
is_alive = [x.is_alive() for x in threads]
                if not any(is_alive): break
time.sleep(1)
return self.sources
except:
return self.sources
def check(self, i):
try:
url = client.replaceHTMLCodes(i[0])
url = url.encode('utf-8')
host = urlparse.urlparse(url).netloc
host = host.replace('www.', '').replace('embed.', '')
host = host.rsplit('.', 1)[0]
host = host.lower()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
control.log("##OneClickWatch %s - url %s" % (host, i[0]))
#if host in i[2]: check = url = resolvers.request(url)
if host == 'openload': check = openload.check(url)
elif host == 'uptobox': check = uptobox.check(url)
elif host == 'cloudzilla': check = cloudzilla.check(url)
elif host == 'zstream': check = zstream.check(url)
elif host == 'videomega': check = videomega.check(url)
else: raise Exception()
            if check in (None, False): raise Exception()
self.sources.append({'source': host, 'quality': i[1], 'provider': 'Oneclickwatch', 'url': url})
except:
pass
def resolve(self, url):
try:
url = resolvers.request(url)
return url
except:
return
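# Illustrative usage sketch (hypothetical IMDb id and title; in practice the
# add-on framework drives these calls and supplies the host dictionaries):
#
#   s = source()
#   url = s.get_movie('tt0133093', 'The Matrix', '1999')
#   if url:
#       streams = s.get_sources(url, [], [], [])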
|
# -*- coding:utf-8 -*-
"""Handwritten digits recognition Graphic interface module : training done with the mnist dataset"""
# Third-party gui/system/plotting Libraries
import numpy as np
import tkinter as tk
import tkinter.font as tkFont
from tkinter import messagebox
from tkinter import filedialog
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
from PIL import ImageTk, Image
from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel
from PyQt5.QtGui import QPainter, QPixmap, QPen, QScreen
import pickle
import webbrowser
import os
import sys
sys.path.insert(1, str(os.getcwd()))
# Neural network module
import network
# ------------------------------------------------------------------------------tkinter GUI---------------------------------------------------------------------------------------------
class Interface(tk.Frame):
"""graphic interface class"""
# ------------------------------------------------------------------------------__init__------------------------------------------------------------------------------------------------
def __init__(self, window, **kwargs):
"""Displays the main menu"""
# Fonts
self.big_font_button = tkFont.Font(family='Calibri', size=20, weight='bold')
self.medium_large_font_button = tkFont.Font(family='Calibri', size=16, weight='bold')
self.medium_font_button = tkFont.Font(family='Calibri', size=14, weight='bold')
self.font_title = tkFont.Font(family='Calibri', size=36, weight='bold')
self.number_button_font = tkFont.Font(family='Calibri', size=25, weight='bold')
# Display main menu
self.main_menu(window, **kwargs)
# ------------------------------------------------------------------------------Main Menu Interface--------------------------------------------------------------------------------------
def main_menu(self, window, **kwargs):
"""Main menu Frame"""
# Frame creation
if hasattr(self, 'children'):
self.destroy()
tk.Frame.__init__(self, window, width=1180, height=620, bg="#fff2f2", **kwargs)
self.pack()
# Github Button
img_github = ImageTk.PhotoImage(Image.open("hd_recognition/assets/github.jpg").resize((50,50)))
btn_github = tk.Button(self, image=img_github, command=lambda: webbrowser.open("https://github.com/Seledriac/A-small-pedagogic-python-library-for-supervised-neural-networks/"))
btn_github.img = img_github
btn_github.grid(column=0, row=0, padx=50, pady=(0,50))
# Title
title = tk.Label(self, text="Supervised neural networks\n applied to handwritten digits recognition", bg="#fff2f2", font=self.font_title)
title.grid(column=1, row=0, pady=25)
# Readme Button
img_readme = ImageTk.PhotoImage(Image.open("hd_recognition/assets/readme.png").resize((50,50)))
btn_readme = tk.Button(self, image=img_readme, command=lambda: os.startfile("README.md"))
btn_readme.img = img_readme
btn_readme.grid(column=2, row=0, padx=60, pady=(0,50))
# Button selection frame
btns_frames = tk.LabelFrame(self, padx=50, pady=50, borderwidth=5)
btns_frames.grid(row=1, column=1, columnspan=3, pady=(65,80), padx=(0,180))
# Menu Buttons
create_model_button = tk.Button(btns_frames, text="Create a model", font=self.big_font_button, command=lambda: self.create_model(window, **kwargs))
create_model_button.grid(column=0, row=0, padx=10, pady=10)
train_model_button = tk.Button(btns_frames, text="Train a model", font=self.big_font_button, command=lambda: self.train_model(window, **kwargs))
train_model_button.grid(column = 1, row = 0, padx=10, pady=10)
evaluate_button = tk.Button(btns_frames, text="Accuracy Ladder", font=self.big_font_button, command=lambda: self.models_ladder(window, **kwargs))
evaluate_button.grid(column = 0, row = 1, padx=10, pady=10)
predict_button = tk.Button(btns_frames, text="Predict", font=self.big_font_button, command=lambda: self.choose_prediction(window, **kwargs))
predict_button.grid(column = 1, row = 1, padx=10, pady=10)
# ------------------------------------------------------------------------------Model Creation Interface------------------------------------------------------------------------------------
def create_model(self, window, **kwargs):
"""Model creation Frame"""
# Frame creation
self.destroy()
if hasattr(self, 'hidden_layers_label'):
delattr(self, 'hidden_layers_label')
tk.Frame.__init__(self, window, width=1180, height=620, bg="#fff2f2", **kwargs)
self.pack()
# Main menu Button
img_home = ImageTk.PhotoImage(Image.open("hd_recognition/assets/home.png").resize((95,50)))
btn_home = tk.Button(self, image=img_home, command=lambda: self.main_menu(window, **kwargs))
btn_home.img = img_home
btn_home.grid(column=0, row=0)
# Title
title = tk.Label(self, text="Model Creation", bg="#fff2f2", font=self.font_title)
title.grid(column=1, row=0)
# Model Validation frame
model_creation_validation_frame = tk.LabelFrame(self, borderwidth=3)
model_creation_validation_frame.grid(row=0, column=2, pady=(20,0))
model_creation_validation_label = tk.Label(model_creation_validation_frame, text="Model name", font=self.medium_font_button)
model_creation_validation_label.pack()
self.model_creation_validation_entry = tk.Entry(model_creation_validation_frame)
self.model_creation_validation_entry.pack()
model_creation_validation_button = tk.Button(model_creation_validation_frame, text="Create Model", font=self.medium_font_button, command=self.model_creation_validation)
model_creation_validation_button.pack()
# Model customization frame
creation_custom_frame = tk.LabelFrame(self, padx=50, pady=50, borderwidth=5)
creation_custom_frame.grid(row=1, column=0, columnspan=3, pady=(30,0))
# Input layer Frame
input_layer_frame = tk.LabelFrame(creation_custom_frame)
input_layer_frame.grid(row=0, column=0)
input_layer_label = tk.Label(input_layer_frame, text="Input Layer", font=self.medium_font_button)
input_layer_label.pack()
self.input_layer_number = tk.Entry(input_layer_frame)
self.input_layer_number.insert(0,784)
self.input_layer_number.pack()
# Hidden layers Frame
self.hidden_layers = []
self.hidden_layers_frame = tk.LabelFrame(creation_custom_frame)
self.hidden_layers_frame.grid(row=0, column=1)
self.add_hidden_layer()
self.add_hidden_layer()
# Output layer Frame
output_layer_frame = tk.LabelFrame(creation_custom_frame)
output_layer_frame.grid(row=0, column=2, padx=70)
output_layer_label = tk.Label(output_layer_frame, text="Output Layer", font=self.medium_font_button)
output_layer_label.pack()
self.output_layer_number = tk.Entry(output_layer_frame)
self.output_layer_number.insert(0,10)
self.output_layer_number.pack()
# Hidden layer adding/deleting buttons
add_hidden_layer_button = tk.Button(creation_custom_frame, text="Add a hidden layer", font=self.medium_font_button, command=self.add_hidden_layer)
add_hidden_layer_button.grid(column = 0, row = 1, padx=50, pady=40)
del_hidden_layer_button = tk.Button(creation_custom_frame, text="Delete the last hidden layer", font=self.medium_font_button, command=self.del_hidden_layer)
del_hidden_layer_button.grid(column = 1, row = 1, padx=50, pady=40, columnspan=2)
def add_hidden_layer(self):
"""Add a hidden layer in the model creation Frame"""
if not hasattr(self, 'hidden_layers_label'):
self.hidden_layers_label = tk.Label(self.hidden_layers_frame, text="Hidden Layer(s)", font=self.medium_font_button)
self.hidden_layers_label.grid(row=0, column=0, columnspan=10)
if len(self.hidden_layers) < 5:
new_hidden_layer = tk.Scale(self.hidden_layers_frame, from_=1, to=128, length=150)
new_hidden_layer.grid(row=1,column=len(self.hidden_layers), padx=(0,20))
self.hidden_layers.append(new_hidden_layer)
def del_hidden_layer(self):
"""Delete a hidden layer in the model creation Frame"""
if len(self.hidden_layers) > 1:
self.hidden_layers[-1].destroy()
del self.hidden_layers[-1]
elif hasattr(self, 'hidden_layers_label'):
self.hidden_layers[-1].destroy()
del self.hidden_layers[-1]
self.hidden_layers_label.destroy()
delattr(self, 'hidden_layers_label')
def model_creation_validation(self):
"""This method is executed when the model creation validation button is clicked. It creates the model, serlializes it, and shows a recap od the model in a message box to the user"""
model_name = self.model_creation_validation_entry.get()
try:
input_number = int(self.input_layer_number.get())
output_number = int(self.output_layer_number.get())
        except ValueError:
            messagebox.showerror("Error", "Error : enter a number of neurons for all the layers")
            return
if model_name and input_number and output_number:
sizes = [input_number]
msg = "Model \"{}\" successfully created.\n\nInput layer : {} neurons\n".format(str(self.model_creation_validation_entry.get()), str(input_number))
for i,layer in enumerate(self.hidden_layers):
nb_neurons = int(layer.get())
sizes.append(nb_neurons)
msg = msg + "Hidden layer {} : {} neurons\n".format(str(i + 1), str(nb_neurons))
sizes.append(output_number)
msg = msg + "Output layer : {} neurons\n\nActivation function : sigmoid (by default)".format(str(output_number))
net = network.Network(model_name, sizes)
with open("models/hd_recognition/{}.pickle".format(model_name), "wb") as fic:
pickler = pickle.Pickler(fic)
pickler.dump(net)
messagebox.showinfo("Model Info", msg)
else:
messagebox.showerror("Error", "Error : missing required fields")
# ------------------------------------------------------------------------------Model Training Interface------------------------------------------------------------------------------------
def train_model(self, window, **kwargs):
"""Model training specs Frame"""
# Frame creation
self.destroy()
tk.Frame.__init__(self, window, width=1180, height=620, bg="#fff2f2", **kwargs)
self.pack()
        # Choosing the model which we will train
self.open_model_file()
# Main menu Button
img_home = ImageTk.PhotoImage(Image.open("hd_recognition/assets/home.png").resize((95,50)))
btn_home = tk.Button(self, image=img_home, command=lambda: self.main_menu(window, **kwargs))
btn_home.img = img_home
btn_home.grid(column=0, row=0, padx=(25,0))
# Title
title = tk.Label(self, text="Model Training\n(mnist dataset)", bg="#fff2f2", font=self.font_title)
title.grid(column=1, row=0, pady=80, padx=(200,0))
# Model training validation frame
model_training_validation_frame = tk.LabelFrame(self, borderwidth=3)
model_training_validation_frame.grid(row=0, column=2, padx=(200,0), pady=(10,0))
model_training_validation_button = tk.Button(model_training_validation_frame, text="Train", font=self.medium_large_font_button, command=lambda: self.model_training(window, **kwargs))
model_training_validation_button.pack()
# Model training customization frame
training_custom_frame = tk.LabelFrame(self, padx=50, pady=50, borderwidth=5)
training_custom_frame.grid(row=1, column=0, columnspan=100, padx=(0,15))
# Epochs Frame
epochs_frame = tk.LabelFrame(training_custom_frame)
epochs_frame.grid(row=0, column=0)
epochs_label = tk.Label(epochs_frame, text="Epochs", font=self.medium_font_button)
epochs_label.pack()
self.epochs_number = tk.Entry(epochs_frame)
self.epochs_number.insert(0,3)
self.epochs_number.pack()
# Batch size Frame
batch_size_frame = tk.LabelFrame(training_custom_frame)
batch_size_frame.grid(row=0, column=2, padx=70)
batch_size_label = tk.Label(batch_size_frame, text="batch size", font=self.medium_font_button)
batch_size_label.pack()
self.batch_size_number = tk.Entry(batch_size_frame)
self.batch_size_number.insert(0,10)
self.batch_size_number.pack()
# Display weights checkbox
display_weights_frame = tk.LabelFrame(training_custom_frame)
display_weights_frame.grid(row=0, column=3)
self.display_weights_value = tk.IntVar()
display_weights_cb = tk.Checkbutton(display_weights_frame, text="Dynamically display the weights of the first layer", font=self.medium_font_button, variable=self.display_weights_value)
display_weights_cb.pack()
def model_training(self, window, **kwargs):
"""Model training Frame"""
# Training values retrieving
disp_weights = bool(self.display_weights_value.get())
try:
epochs = int(self.epochs_number.get())
batch_size = int(self.batch_size_number.get())
        except ValueError:
            messagebox.showerror("Error", "Error : please enter a numeric value for each field")
            return
if epochs and batch_size:
# Frame creation
self.destroy()
tk.Frame.__init__(self, window, width=1180, height=620, bg="#fff2f2", **kwargs)
# Main menu Button
img_home = ImageTk.PhotoImage(Image.open("hd_recognition/assets/home.png").resize((95,50)))
btn_home = tk.Button(self, image=img_home, command=lambda: self.main_menu(window, **kwargs))
btn_home.img = img_home
btn_home.grid(column=0, row=0)
# Training trigger button
doIt = tk.Button(self, text="Start the Training", command=lambda: self.start_training(epochs, batch_size, disp_weights), font=self.big_font_button)
doIt.grid(row=0, column=1, pady=20)
# Training logs textbox
textbox_frame = tk.LabelFrame(self)
textbox_frame.grid(row=1, column=0, columnspan=2)
self.output = tk.Text(textbox_frame, width=110, height=30, bg='black', fg='white')
self.output.pack(side=tk.LEFT)
# Scrollbar
scrollbar = tk.Scrollbar(textbox_frame, orient="vertical", command = self.output.yview)
scrollbar.pack(side=tk.RIGHT, fill="y")
self.output['yscrollcommand'] = scrollbar.set
self.pack()
else:
messagebox.showerror("Error", "Error : missing required fields")
def start_training(self, epochs, batch_size, disp_weights):
"""This method executes the SGD training method on a given model"""
# Importing the mnist dataset
import mnist_loader
training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
training_data = list(training_data)
validation_data = list(validation_data)
test_data = list(test_data)
# Model training via SGD
net = self.model_file
self.output.insert(tk.END, "\n" + str(net) + "\n")
self.update_idletasks()
net.SGD(training_data, epochs, batch_size, test_data=test_data, display_weights=disp_weights, gui=self)
# Model saving
with open("models/hd_recognition/{}.pickle".format(net.id), "wb") as saving:
saver = pickle.Pickler(saving)
saver.dump(net)
# Performance test of the network on the validation data
accuracy = str(100 * net.evaluate(validation_data) / 10000)
self.output.insert(tk.END, "\nTest on the validation data -> Accuracy : {0}%\n".format(accuracy))
self.update_idletasks()
self.output.see("end")
# Ladder update
with open("models/hd_recognition/accuracy_ladder.md", "a") as ladder:
adding = str(net) + " --> accuracy = " + accuracy + "\n"
ladder.write(adding)
with open("models/hd_recognition/accuracy_ladder.md", "r") as ladder:
shove_percent = ladder.read().replace("%", "")
content = [net.split("= ") for net in shove_percent.split('\n')]
content.pop()
            content_updated = sorted([(acc, net) for net, acc in content], key=lambda t: float(t[0]), reverse=True)
tostring = "%\n".join(["= ".join((net,acc)) for acc,net in content_updated]) + "%\n"
with open("models/hd_recognition/accuracy_ladder.md", "w") as ladder:
ladder.write(tostring)
# ------------------------------------------------------------------------------Models Ladder Interface------------------------------------------------------------------------------------
def models_ladder(self, window, **kwargs):
"""Models ladder frame"""
# Frame creation
self.destroy()
tk.Frame.__init__(self, window, width=1180, height=620, bg="#fff2f2", **kwargs)
# Main menu Button
img_home = ImageTk.PhotoImage(Image.open("hd_recognition/assets/home.png").resize((95,50)))
btn_home = tk.Button(self, image=img_home, command=lambda: self.main_menu(window, **kwargs))
btn_home.img = img_home
btn_home.grid(column=0, row=0)
# Ladder label
ladder_label = tk.Label(self, text="Models Accuracy Ladder", font=self.font_title, bg="#fff2f2")
ladder_label.grid(row=0, column=1, padx=(0,150), pady=20)
# Ladder textbox
textbox_frame = tk.LabelFrame(self)
textbox_frame.grid(row=1, column=0, columnspan=2)
output = tk.Text(textbox_frame, width=100, height=20, font=self.medium_font_button)
output.pack(side=tk.LEFT)
with open("models/hd_recognition/accuracy_ladder.md", "r") as ladder:
content = ladder.read()
output.insert(tk.END, content)
self.update_idletasks()
output.see("end")
# Scrollbar
scrollbar = tk.Scrollbar(textbox_frame, orient="vertical", command = output.yview)
scrollbar.pack(side=tk.RIGHT, fill="y")
output['yscrollcommand'] = scrollbar.set
self.pack()
# ------------------------------------------------------------------------------Prediction Interface---------------------------------------------------------------------------------------
def choose_prediction(self, window, **kwargs):
"""Prediction style choice frame"""
# Frame creation
self.destroy()
tk.Frame.__init__(self, window, width=1180, height=620, bg="#fff2f2", **kwargs)
self.pack()
# Opening the model which will predict
self.open_model_file()
# Main menu Button
img_home = ImageTk.PhotoImage(Image.open("hd_recognition/assets/home.png").resize((95,50)))
btn_home = tk.Button(self, image=img_home, command=lambda: self.main_menu(window, **kwargs))
btn_home.img = img_home
btn_home.grid(column=0, row=0, padx=(0,125), pady=(15,100))
# Ladder label
choice_label = tk.Label(self, text="Choose the prediction style", font=self.font_title, bg="#fff2f2")
choice_label.grid(row=0, column=1, columnspan=10, padx=(50,250), pady=50)
# Choice buttons
choice_custom = tk.Button(self, text="Predict with custom test images", font=self.big_font_button, command=lambda: self.custom_prediction_frame(window, **kwargs))
choice_custom.grid(row=1, column=1, padx=(0,0), pady=(100))
choice_live = tk.Button(self, text="Live prediction", font=self.big_font_button, command=lambda: self.live_prediction_frame(window, **kwargs))
choice_live.grid(row=1, column=2, padx=(50,200), pady=(100))
def custom_prediction_frame(self, window, **kwargs):
"""Custom images prediction frame"""
# Frame creation
self.destroy()
tk.Frame.__init__(self, window, width=1180, height=620, bg="#fff2f2", **kwargs)
self.pack()
# Main menu Button
img_home = ImageTk.PhotoImage(Image.open("hd_recognition/assets/home.png").resize((95,50)))
btn_home = tk.Button(self, image=img_home, command=lambda: self.main_menu(window, **kwargs))
btn_home.img = img_home
btn_home.grid(column=0, row=0, pady=(10,30))
# Title label
title_label = tk.Label(self, text="Custom images prediction\nChoose the number to predict", font=self.number_button_font, bg="#fff2f2")
title_label.grid(row=0, column=1, columnspan=2, padx=(0,150), pady=10)
# Number buttons Frame
number_buttons_frame = tk.LabelFrame(self, borderwidth=3, bg='white', pady=10)
number_buttons_frame.grid(row=1,column=1, columnspan=2, padx=(0,150))
# Number buttons
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="0", command=lambda: self.number_button_click(0))
btn_home.grid(column=0, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="1", command=lambda: self.number_button_click(1))
btn_home.grid(column=1, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="2", command=lambda: self.number_button_click(2))
btn_home.grid(column=2, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="3", command=lambda: self.number_button_click(3))
btn_home.grid(column=3, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="4", command=lambda: self.number_button_click(4))
btn_home.grid(column=4, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="5", command=lambda: self.number_button_click(5))
btn_home.grid(column=5, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="6", command=lambda: self.number_button_click(6))
btn_home.grid(column=6, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="7", command=lambda: self.number_button_click(7))
btn_home.grid(column=7, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="8", command=lambda: self.number_button_click(8))
btn_home.grid(column=8, row=1, padx=15)
btn_home = tk.Button(number_buttons_frame, font=self.number_button_font, text="9", command=lambda: self.number_button_click(9))
btn_home.grid(column=9, row=1, padx=15)
def number_button_click(self, number):
"""This method is executed when a number button is clicked. It displays the model's prediction on a matplotlib figure"""
# Opening the corresponding custom image
img_filename_bmp = "hd_recognition/custom_test_images/test_image_"+str(number)+".bmp"
test_image = Image.open(img_filename_bmp)
# Predicting based on the custom image
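        # The BMP is black-on-white; dividing by 255 and subtracting from 1
        # inverts it to the white-on-black, 0-1 range the mnist-trained
        # network expects, flattened to a 784x1 column vector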
image_array = 1 - (np.array(test_image).reshape(784,1) / 255)
model_activations = self.model_file.feedforward(image_array)
# Custom image display
img_filename_png = "hd_recognition/custom_test_images/test_image_"+str(number)+".png"
custom_image = ImageTk.PhotoImage(Image.open(img_filename_png))
custom_image_label = tk.Label(self, image=custom_image, relief='ridge')
custom_image_label.image=custom_image
custom_image_label.grid(row=2, column=1, padx=10, pady=(5,5))
# Prediction plot frame
prediction_frame = tk.LabelFrame(self)
prediction_frame.grid(row=2,column=2, padx=(10,150), pady=(5,5))
# Plotting the model activations
self.plot_model_activation(model_activations, prediction_frame)
def live_prediction_frame(self, window, **kwargs):
"""Live prediction of the numbers drew by the user"""
# Frame creation
self.destroy()
window.geometry("1500x800")
tk.Frame.__init__(self, window, width=1500, height=800, bg="#fff2f2", **kwargs)
self.pack()
# Main menu Button
img_home = ImageTk.PhotoImage(Image.open("hd_recognition/assets/home.png").resize((95,50)))
btn_home = tk.Button(self, image=img_home, command=lambda: self.main_menu(window, **kwargs))
btn_home.img = img_home
btn_home.grid(column=0, row=0, padx=100)
# Title
title = tk.Label(self, text="Live prediction\nDraw the number to predict", bg="#fff2f2", font=self.font_title)
title.grid(column=1, row=0, pady=80)
# Start button frame
live_prediction_starting_frame = tk.LabelFrame(self, borderwidth=3)
live_prediction_starting_frame.grid(row=0, column=2, padx=100)
live_prediction_starting_button = tk.Button(live_prediction_starting_frame, text="Start", font=self.medium_large_font_button, command=lambda: self.start_live_prediction(window))
live_prediction_starting_button.pack()
def start_live_prediction(self, window):
"""Live prediction Qt drawing window display"""
# DrawingWindow creation
App = QApplication(sys.argv)
QtWindow = DrawingWindow(App, self)
QtWindow.setWindowTitle("Digit drawing window")
QtWindow.show()
sys.exit(App.exec())
# ------------------------------------------------------------------------------Miscellaneous Methods--------------------------------------------------------------------------------------
def open_model_file(self):
"""Prompts the user to choose a model file"""
        retry = True
        while retry:
            try:
                # Model file opening prompt
                self.model_filename = filedialog.askopenfilename(initialdir="models/hd_recognition", title="Choose the model", filetypes=(("pickle files","*.pickle"), ("model files","*.model"), ("all files", "*.*")))
                assert self.model_filename
                retry = False
except:
messagebox.showerror("Error", "Error : please select a model file")
with open(self.model_filename, "rb") as fic:
unpickler = pickle.Unpickler(fic)
self.model_file = unpickler.load()
def plot_model_activation(self, model_activations, frame):
"""Plots the current model activations in a given frame (in a prediction context)"""
fig = Figure(figsize = (4, 4))
fig.clf()
fig.add_subplot(111).plot(range(10), model_activations)
fig.suptitle("corresponding model activations")
axes = fig.gca()
axes.set_xlabel("digit")
axes.set_ylabel("activation")
axes.set_ylim([0, 1])
axes.set_xticks(range(10))
axes.set_yticks(np.array(range(11))/10)
canvas = FigureCanvasTkAgg(fig, master=frame)
canvas.draw()
canvas.flush_events()
canvas.get_tk_widget().grid(row=0, column=1)
self.annot_max(range(10), model_activations, axes)
def annot_max(x, y, ax):
"""Max network activation anotation for a number image"""
xmax = x[np.argmax(y)]
ymax = y.max()
text = "digit = {}, activation = {:.3f}".format(xmax,ymax)
if xmax <= 4:
orientation = str((1 / abs(5 - (xmax + 1))) / 10)
else:
orientation = str(-(1 / abs(5 - (xmax + 1))) / 10)
bbox_props = dict(boxstyle="square,pad=0.3", fc="w", ec="k", lw=1)
arrowprops=dict(arrowstyle="-|>",connectionstyle="arc3,rad="+orientation)
kw = dict(xycoords='data',textcoords="axes fraction",
arrowprops=arrowprops, bbox=bbox_props, ha="right", va="top")
# ax.annotate(text, xy=(xmax, ymax), xytext=(xmax/10 - 0.1, ymax - 0.1), **kw)
ax.annotate(text, xy=(xmax, ymax), xytext=(0.8, 0.5), **kw)
annot_max = staticmethod(annot_max)
# ------------------------------------------------------------------------------PyQt drawing window----------------------------------------------------------------------------------------
class DrawingWindow(QMainWindow):
"""Drawing window for live model prediction"""
def __init__(self, App, tkinter_root):
"""Initialization of the Drawing Window : we create a label centered in the window, in which we put a blank pixmap"""
super().__init__()
self.label = QLabel()
self.blank()
self.setCentralWidget(self.label)
self.App = App
self.tkinter_root = tkinter_root
self.last_x, self.last_y = None, None
def blank(self):
"""This method clears the QtWindow, setting the content of the centered label to a white pixmap"""
self.label.setPixmap(QPixmap("hd_recognition/assets/white.png"))
def mouseMoveEvent(self, e):
"""This method is executed while the click button is held"""
if self.last_x is None:
self.last_x = e.x()
self.last_y = e.y()
return
painter = QPainter(self.label.pixmap())
painter.drawLine(self.last_x, self.last_y, e.x(), e.y())
painter.end()
self.update()
# Updating the origin for next time
self.last_x = e.x()
self.last_y = e.y()
# Saving the screenshot and compressing it to a 28x28 image
QScreen.grabWindow(self.App.primaryScreen(), self.winId()).save("hd_recognition/tmp/screenshot.png", 'png')
resize_img = Image.open("hd_recognition/tmp/screenshot.png")
resize_img = resize_img.resize((28,28))
resize_img.save("hd_recognition/tmp/screenshot.png", 'png')
# Converting from standard png to greyscale
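        # The screenshot is RGB(A); keeping only the first (red) channel of
        # each pixel yields greyscale values, which are then inverted and
        # scaled to the 0-1, white-on-black format the network was trained on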
img_array = np.array(Image.open("hd_recognition/tmp/screenshot.png"))
img_array = np.array([[pixel[0] for pixel in line] for line in img_array])
image_array = 1 - (img_array.reshape(784,1) / 255)
# Predicting the number
model_activations = self.tkinter_root.model_file.feedforward(image_array)
# Prediction plot frame
prediction_frame = tk.LabelFrame(self.tkinter_root)
prediction_frame.grid(row=2,column=2)
# Plotting the model activations
self.tkinter_root.plot_model_activation(model_activations, prediction_frame)
def mouseReleaseEvent(self, e):
self.last_x = None
self.last_y = None
# -----------------------------------------------------------------------------Tkinter Window creation-------------------------------------------------------------------------------------
window = tk.Tk()
window.geometry("1180x620")
window.title("Neural Networks")
window.configure(bg="#fff2f2")
interface = Interface(window)
interface.mainloop()
|
import json
import getpass
import shortuuid # type: ignore
from datetime import datetime
from functools import lru_cache
from collections import defaultdict
from typing import Any, Dict, Generator, Generic, List, Optional, Set, Tuple, Union
from followthemoney.types import registry
from nomenklatura.entity import CE
from nomenklatura.judgement import Judgement
from nomenklatura.util import PathLike, is_qid
StrIdent = Union[str, "Identifier"]
Pair = Tuple["Identifier", "Identifier"]
class ResolverLogicError(Exception):
pass
class Identifier(object):
PREFIX = "NK-"
__slots__ = ("id", "canonical", "weight")
def __init__(self, id: str):
self.id = id
self.weight: int = 1
if self.id.startswith(self.PREFIX):
self.weight = 2
elif is_qid(id):
self.weight = 3
self.canonical = self.weight > 1
def __eq__(self, other: Any) -> bool:
return str(self) == str(other)
def __lt__(self, other: Any) -> bool:
return (self.weight, self.id) < (other.weight, other.id)
def __str__(self) -> str:
return self.id
def __hash__(self) -> int:
return hash(self.id)
def __len__(self) -> int:
return len(self.id)
def __repr__(self) -> str:
return f"<I({self.id})>"
@classmethod
def get(cls, id: StrIdent) -> "Identifier":
if isinstance(id, str):
return cls(id)
return id
@classmethod
def pair(cls, left_id: StrIdent, right_id: StrIdent) -> Pair:
left = cls.get(left_id)
right = cls.get(right_id)
if left == right:
raise ResolverLogicError()
return (max(left, right), min(left, right))
@classmethod
def make(cls, value: Optional[str] = None) -> "Identifier":
key = value or shortuuid.uuid()
return cls.get(f"{cls.PREFIX}{key}")
class Edge(object):
__slots__ = ("key", "source", "target", "judgement", "score", "user", "timestamp")
def __init__(
self,
left_id: StrIdent,
right_id: StrIdent,
judgement: Judgement = Judgement.NO_JUDGEMENT,
score: Optional[float] = None,
user: Optional[str] = None,
timestamp: Optional[str] = None,
):
self.key = Identifier.pair(left_id, right_id)
self.target, self.source = self.key
self.judgement = judgement
self.score = score
self.user = user
self.timestamp = timestamp
def other(self, cur: Identifier) -> Identifier:
if cur == self.target:
return self.source
return self.target
def to_line(self) -> str:
row = [
self.target.id,
self.source.id,
self.judgement.value,
self.score,
self.user,
self.timestamp,
]
return json.dumps(row) + "\n"
def __str__(self) -> str:
return self.to_line()
def __hash__(self) -> int:
return hash(self.key)
def __eq__(self, other: Any) -> bool:
return hash(self) == hash(other)
def __lt__(self, other: Any) -> bool:
return bool(self.key < other.key)
def __repr__(self) -> str:
return f"<E({self.target.id}, {self.source.id}, {self.judgement.value})>"
@classmethod
def from_line(cls, line: str) -> "Edge":
data = json.loads(line)
return cls(
data[0],
data[1],
judgement=Judgement(data[2]),
score=data[3],
user=data[4],
timestamp=data[5],
)
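# Illustrative line format for to_line()/from_line(): a JSON array of
# [target, source, judgement, score, user, timestamp]; hypothetical example:
#   ["NK-abc", "xyz", "positive", null, "alice", "2021-01-01T00:00"]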
class Resolver(Generic[CE]):
UNDECIDED = (Judgement.NO_JUDGEMENT, Judgement.UNSURE)
def __init__(self, path: Optional[PathLike] = None) -> None:
self.path = path
self.edges: Dict[Pair, Edge] = {}
self.nodes: Dict[Identifier, Set[Edge]] = defaultdict(set)
def get_edge(self, left_id: StrIdent, right_id: StrIdent) -> Optional[Edge]:
key = Identifier.pair(left_id, right_id)
return self.edges.get(key)
def _traverse(self, node: Identifier, seen: Set[Identifier]) -> Set[Identifier]:
connected = set([node])
if node in seen:
return connected
seen.add(node)
for edge in self.nodes.get(node, []):
if edge.judgement == Judgement.POSITIVE:
other = edge.other(node)
rec = self._traverse(other, seen)
connected.update(rec)
return connected
@lru_cache(maxsize=None)
def connected(self, node: Identifier) -> Set[Identifier]:
return self._traverse(node, set())
def get_canonical(self, entity_id: StrIdent) -> str:
"""Return the canonical identifier for the given entity ID."""
node = Identifier.get(entity_id)
best = max(self.connected(node))
if best.canonical:
return best.id
return node.id
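    # Example (hypothetical IDs): once "a" and "b" have been merged under the
    # canonical ID "NK-1", get_canonical("a") == get_canonical("b") == "NK-1",
    # while an ID with no positive links resolves to itself.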
def canonicals(self) -> Generator[Identifier, None, None]:
"""Return all the canonical cluster identifiers."""
for node in self.nodes.keys():
if not node.canonical:
continue
canonical = self.get_canonical(node)
if canonical == node.id:
yield node
def get_referents(
self, canonical_id: StrIdent, canonicals: bool = True
) -> Set[str]:
"""Get all the non-canonical entity identifiers which refer to a given
canonical identifier."""
node = Identifier.get(canonical_id)
referents: Set[str] = set()
for connected in self.connected(node):
if not canonicals and connected.canonical:
continue
if connected == node:
continue
referents.add(connected.id)
return referents
def get_resolved_edge(
self, left_id: StrIdent, right_id: StrIdent
) -> Optional[Edge]:
(left, right) = Identifier.pair(left_id, right_id)
left_connected = self.connected(left)
right_connected = self.connected(right)
for e in left_connected:
for o in right_connected:
edge = self.edges.get(Identifier.pair(e, o))
if edge is None:
continue
return edge
return None
def get_judgement(self, entity_id: StrIdent, other_id: StrIdent) -> Judgement:
"""Get the existing decision between two entities with dedupe factored in."""
entity = Identifier.get(entity_id)
other = Identifier.get(other_id)
if entity == other:
return Judgement.POSITIVE
if is_qid(entity.id) and is_qid(other.id):
return Judgement.NEGATIVE
entity_connected = self.connected(entity)
if other in entity_connected:
return Judgement.POSITIVE
other_connected = self.connected(other)
for e in entity_connected:
for o in other_connected:
edge = self.edges.get(Identifier.pair(e, o))
if edge is None:
continue
if edge.judgement == Judgement.NEGATIVE:
return edge.judgement
return Judgement.NO_JUDGEMENT
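    # Example (hypothetical IDs): with a POSITIVE edge a<->b and a NEGATIVE
    # edge b<->c, get_judgement("a", "c") is NEGATIVE: the negative edge is
    # found between the connected clusters {a, b} and {c}.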
def check_candidate(self, left: StrIdent, right: StrIdent) -> bool:
"""Check if the two IDs could be merged, i.e. if there's no existing
judgement."""
judgement = self.get_judgement(left, right)
return judgement == Judgement.NO_JUDGEMENT
def _get_suggested(self) -> List[Edge]:
"""Get all NO_JUDGEMENT edges in descending order of score."""
edges_all = self.edges.values()
candidates = (e for e in edges_all if e.judgement == Judgement.NO_JUDGEMENT)
        return sorted(candidates, key=lambda e: e.score or -1.0, reverse=True)
def get_candidates(
self, limit: int = 100
) -> Generator[Tuple[str, str, Optional[float]], None, None]:
returned = 0
for edge in self._get_suggested():
if not self.check_candidate(edge.source, edge.target):
continue
yield edge.target.id, edge.source.id, edge.score
returned += 1
if returned >= limit:
break
def suggest(
self, left_id: StrIdent, right_id: StrIdent, score: float
) -> Identifier:
"""Make a NO_JUDGEMENT link between two identifiers to suggest that a user
should make a decision about whether they are the same or not."""
edge = self.get_edge(left_id, right_id)
if edge is not None:
if edge.judgement in self.UNDECIDED:
edge.score = score
return edge.target
return self.decide(left_id, right_id, Judgement.NO_JUDGEMENT, score=score)
def decide(
self,
left_id: StrIdent,
right_id: StrIdent,
judgement: Judgement,
user: Optional[str] = None,
score: Optional[float] = None,
) -> Identifier:
edge = self.get_edge(left_id, right_id)
if edge is None:
edge = Edge(left_id, right_id, judgement=judgement)
# Canonicalise positive matches, i.e. make both identifiers refer to a
# canonical identifier, instead of making a direct link.
if judgement == Judgement.POSITIVE:
connected = set(self.connected(edge.target))
connected.update(self.connected(edge.source))
target = max(connected)
if not target.canonical:
canonical = Identifier.make()
self._remove(edge)
self.decide(edge.source, canonical, judgement=judgement, user=user)
self.decide(edge.target, canonical, judgement=judgement, user=user)
return canonical
edge.judgement = judgement
edge.timestamp = datetime.utcnow().isoformat()[:16]
edge.user = user or getpass.getuser()
edge.score = score or edge.score
self._register(edge)
return edge.target
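    # Example (hypothetical IDs): decide("a", "b", Judgement.POSITIVE) mints a
    # fresh "NK-..." canonical identifier, re-points both "a" and "b" at it,
    # and returns that canonical Identifier rather than linking a<->b directly.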
def _register(self, edge: Edge) -> None:
if edge.judgement != Judgement.NO_JUDGEMENT:
edge.score = None
self.edges[edge.key] = edge
self.nodes[edge.source].add(edge)
self.nodes[edge.target].add(edge)
self.connected.cache_clear()
def _remove(self, edge: Edge) -> None:
"""Remove an edge from the graph."""
self.edges.pop(edge.key, None)
for node in (edge.source, edge.target):
if node in self.nodes:
self.nodes[node].discard(edge)
def explode(self, node_id: StrIdent) -> Set[str]:
"""Dissolve all edges linked to the cluster to which the node belongs.
This is the hard way to make sure we re-do context once we realise
there's been a mistake."""
node = Identifier.get(node_id)
affected: Set[str] = set()
for part in self.connected(node):
affected.add(str(part))
edges = self.nodes.get(part)
if edges is None:
continue
for edge in list(edges):
if edge.judgement != Judgement.NO_JUDGEMENT:
self._remove(edge)
self.connected.cache_clear()
return affected
def prune(self, keep: int = 0) -> None:
"""Remove suggested (i.e. NO_JUDGEMENT) edges, keep only the n with the
highest score. This also checks if a transitive judgement has been
established in the mean time and removes those candidates."""
kept = 0
for edge in self._get_suggested():
judgement = self.get_judgement(edge.source, edge.target)
if judgement != Judgement.NO_JUDGEMENT:
self._remove(edge)
if kept >= keep:
self._remove(edge)
kept += 1
self.connected.cache_clear()
def apply(self, proxy: CE) -> CE:
"""Replace all entity references in a given proxy with their canonical
identifiers. This is essentially the harmonisation post de-dupe."""
canonical_id = self.get_canonical(proxy.id)
if canonical_id != proxy.id:
proxy.referents = self.get_referents(canonical_id)
proxy.id = canonical_id
for prop in proxy.iterprops():
if prop.type != registry.entity:
continue
for value in proxy.pop(prop):
canonical = self.get_canonical(value)
proxy.unsafe_add(prop, canonical, cleaned=True)
return proxy
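    # Example (hypothetical IDs): if proxy.id is "a" and "a" was merged under
    # "NK-1", apply() rewrites proxy.id to "NK-1", stores the replaced IDs in
    # proxy.referents, and canonicalises entity-typed property values as well.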
def save(self) -> None:
"""Store the resolver adjacency list to a plain text JSON list."""
if self.path is None:
raise RuntimeError("Resolver has no path")
edges = sorted(self.edges.values())
with open(self.path, "w") as fh:
for edge in edges:
fh.write(edge.to_line())
@classmethod
def load(cls, path: PathLike) -> "Resolver[CE]":
resolver = cls(path=path)
if not path.exists():
return resolver
with open(path, "r") as fh:
while True:
line = fh.readline()
if not line:
break
edge = Edge.from_line(line)
resolver._register(edge)
return resolver
def __repr__(self) -> str:
path = self.path.name if self.path is not None else ":memory:"
return f"<Resolver({path!r}, {len(self.edges)})>"
|
'''
OAuth2.0 client credentials flow plugin for HTTPie.
'''
import sys
from httpie.plugins import AuthPlugin
from httpie.cli.definition import parser as httpie_args_parser
from urllib.request import Request, urlopen
from urllib.parse import urlencode
from urllib.error import HTTPError
import json
from base64 import b64encode
class OAuth2ClientCredentials:
def __init__(self, client_id, client_secret):
if not client_id:
raise ValueError('client_id is required.')
self.client_id = client_id
if not client_secret:
raise ValueError('client_secret is required.')
self.client_secret = client_secret
options = httpie_args_parser.args
if not options.token_endpoint:
raise ValueError('token_endpoint is required.')
self.token_endpoint = options.token_endpoint
self.token_request_type = options.token_request_type
self.scope = options.scope
self.print_token_response = options.print_token_response
def __call__(self, request):
token_response = self.__get_token()
token_type = token_response.get('token_type', 'Bearer')
token = token_response.get('access_token', '')
request.headers['Authorization'] = '%s %s' % (token_type, token)
return request
def __get_token(self):
req_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
post_params = {'grant_type': 'client_credentials'}
if self.scope:
post_params['scope'] = self.scope
post_data = None
if self.token_request_type == 'basic':
credentials = u'%s:%s' % (self.client_id, self.client_secret)
token = b64encode(credentials.encode('utf8')).strip().decode('latin1')
            req_headers['Authorization'] = 'Basic %s' % token
post_data = urlencode(post_params).encode()
else:
post_params['client_id'] = self.client_id
post_params['client_secret'] = self.client_secret
if self.token_request_type == 'form':
post_data = urlencode(post_params).encode()
elif self.token_request_type == 'json':
req_headers = {'Content-Type': 'application/json'}
post_data = json.dumps(post_params).encode("utf-8")
else:
                raise ValueError('token-request-type has an invalid value.')
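        # Shape of the token request per type (illustrative):
        #   basic: Authorization: Basic base64(client_id:client_secret) header,
        #          form body "grant_type=client_credentials"
        #   form:  form body with grant_type, client_id and client_secret
        #   json:  JSON body {"grant_type": "client_credentials", ...}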
# Execute token request.
try:
res = urlopen(Request(self.token_endpoint, method='POST', headers=req_headers, data=post_data))
res_body = json.loads(res.read())
if self.print_token_response:
sys.stdout.write(f'token_response: \n========== \n{json.dumps(res_body, indent=2)}\n==========\n')
return res_body
except HTTPError as e:
if self.print_token_response:
                sys.stderr.write(f'oauth2 error response:\nstatus={e.code}\n')
res_body = e.read()
try:
res_body = json.loads(res_body)
sys.stderr.write(f'token_error_response: \n========== \n{json.dumps(res_body, indent=2)}\n==========\n')
            except ValueError:
sys.stderr.write(f'error_response: \n========== \n{res_body}\n==========\n')
raise e
class OAuth2ClientCredentialsPlugin(AuthPlugin):
    name = 'OAuth2.0 client credentials flow.'
auth_type = 'oauth2-client-credentials'
description = 'Set the Bearer token obtained in the OAuth2.0 client_credentials flow to the Authorization header.'
    params = httpie_args_parser.add_argument_group(title='OAuth2.0 client credentials flow options')
params.add_argument(
'--token-endpoint',
default=None,
metavar='TOKEN_ENDPOINT_URL',
        help='OAuth 2.0 token endpoint URL'
)
    params.add_argument(
        '--token-request-type',
        default='basic',
        choices=('basic', 'form', 'json'),
        help='How client credentials are sent in the token request.'
    )
params.add_argument(
'--scope',
default=None,
metavar='OAUTH2_SCOPE',
help='OAuth 2.0 Scopes'
)
params.add_argument(
'--print-token-response',
dest='print_token_response',
action='store_true',
default=False,
help='print oauth2 token response.'
)
def get_auth(self, username=None, password=None):
        '''Build the auth handler that sets the Authorization header.
        Args:
            username str: client_id
            password str: client_secret
        Returns:
            OAuth2ClientCredentials: a callable that adds the Authorization
            header to the prepared request object.
        '''
return OAuth2ClientCredentials(username, password)
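# Example invocation (illustrative; the URL and credentials are placeholders):
#   http -A oauth2-client-credentials -a CLIENT_ID:CLIENT_SECRET \
#       --token-endpoint https://auth.example.com/oauth/token \
#       --scope read GET https://api.example.com/resource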
|