column             type            values
partition          stringclasses   3 values
func_name          stringlengths   1 – 134
docstring          stringlengths   1 – 46.9k
path               stringlengths   4 – 223
original_string    stringlengths   75 – 104k
code               stringlengths   75 – 104k
docstring_tokens   listlengths     1 – 1.97k
repo               stringlengths   7 – 55
language           stringclasses   1 value
url                stringlengths   87 – 315
code_tokens        listlengths     19 – 28.4k
sha                stringlengths   40 – 40
valid
Kron2SumCov.listen
Listen to parameters change. Parameters ---------- func : callable Function to be called when a parameter changes.
glimix_core/cov/_kron2sum.py
def listen(self, func):
    """
    Listen to parameters change.

    Parameters
    ----------
    func : callable
        Function to be called when a parameter changes.
    """
    self._C0.listen(func)
    self._C1.listen(func)
[ "Listen", "to", "parameters", "change", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L161-L171
[ "def", "listen", "(", "self", ",", "func", ")", ":", "self", ".", "_C0", ".", "listen", "(", "func", ")", "self", ".", "_C1", ".", "listen", "(", "func", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
Kron2SumCov._LhD
Implements Lₕ and D. Returns ------- Lh : ndarray Uₕᵀ S₁⁻½ U₁ᵀ. D : ndarray (Sₕ ⊗ Sₓ + Iₕₓ)⁻¹.
glimix_core/cov/_kron2sum.py
def _LhD(self):
    """
    Implements Lₕ and D.

    Returns
    -------
    Lh : ndarray
        Uₕᵀ S₁⁻½ U₁ᵀ.
    D : ndarray
        (Sₕ ⊗ Sₓ + Iₕₓ)⁻¹.
    """
    from numpy_sugar.linalg import ddot

    self._init_svd()
    if self._cache["LhD"] is not None:
        return self._cache["LhD"]
    S1, U1 = self.C1.eigh()
    U1S1 = ddot(U1, 1 / sqrt(S1))
    Sh, Uh = eigh(U1S1.T @ self.C0.value() @ U1S1)
    self._cache["LhD"] = {
        "Lh": (U1S1 @ Uh).T,
        "D": 1 / (kron(Sh, self._Sx) + 1),
        "De": 1 / (kron(Sh, self._Sxe) + 1),
    }
    return self._cache["LhD"]
[ "Implements", "Lₕ", "and", "D", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L188-L212
[ "def", "_LhD", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "ddot", "self", ".", "_init_svd", "(", ")", "if", "self", ".", "_cache", "[", "\"LhD\"", "]", "is", "not", "None", ":", "return", "self", ".", "_cache", "[", "\"LhD\"", "]", "S1", ",", "U1", "=", "self", ".", "C1", ".", "eigh", "(", ")", "U1S1", "=", "ddot", "(", "U1", ",", "1", "/", "sqrt", "(", "S1", ")", ")", "Sh", ",", "Uh", "=", "eigh", "(", "U1S1", ".", "T", "@", "self", ".", "C0", ".", "value", "(", ")", "@", "U1S1", ")", "self", ".", "_cache", "[", "\"LhD\"", "]", "=", "{", "\"Lh\"", ":", "(", "U1S1", "@", "Uh", ")", ".", "T", ",", "\"D\"", ":", "1", "/", "(", "kron", "(", "Sh", ",", "self", ".", "_Sx", ")", "+", "1", ")", ",", "\"De\"", ":", "1", "/", "(", "kron", "(", "Sh", ",", "self", ".", "_Sxe", ")", "+", "1", ")", ",", "}", "return", "self", ".", "_cache", "[", "\"LhD\"", "]" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
Kron2SumCov.value
Covariance matrix K = C₀ ⊗ GGᵀ + C₁ ⊗ I. Returns ------- K : ndarray C₀ ⊗ GGᵀ + C₁ ⊗ I.
glimix_core/cov/_kron2sum.py
def value(self):
    """
    Covariance matrix K = C₀ ⊗ GGᵀ + C₁ ⊗ I.

    Returns
    -------
    K : ndarray
        C₀ ⊗ GGᵀ + C₁ ⊗ I.
    """
    C0 = self._C0.value()
    C1 = self._C1.value()
    return kron(C0, self._GG) + kron(C1, self._I)
[ "Covariance", "matrix", "K", "=", "C₀", "⊗", "GGᵀ", "+", "C₁", "⊗", "I", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L253-L264
[ "def", "value", "(", "self", ")", ":", "C0", "=", "self", ".", "_C0", ".", "value", "(", ")", "C1", "=", "self", ".", "_C1", ".", "value", "(", ")", "return", "kron", "(", "C0", ",", "self", ".", "_GG", ")", "+", "kron", "(", "C1", ",", "self", ".", "_I", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
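A minimal NumPy sketch of the structure in the record above: K = C₀ ⊗ GGᵀ + C₁ ⊗ I built from toy PSD matrices. The matrices here are illustrative stand-ins, not the library's own C₀/C₁ parameterization.

import numpy as np

rng = np.random.RandomState(0)
n, p, r = 4, 3, 2                       # samples, traits, rank of G
G = rng.randn(n, r)
A0, A1 = rng.randn(p, p), rng.randn(p, p)
C0 = A0 @ A0.T                          # toy PSD matrix playing the role of C₀
C1 = A1 @ A1.T + p * np.eye(p)          # toy positive-definite C₁

K = np.kron(C0, G @ G.T) + np.kron(C1, np.eye(n))

assert np.allclose(K, K.T)                   # K is symmetric
assert np.all(np.linalg.eigvalsh(K) > 0)     # and positive definite since C₁ ≻ 0
print(K.shape)                               # (n·p, n·p) = (12, 12)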
valid
Kron2SumCov.gradient
Gradient of K. Returns ------- C0 : ndarray Derivative of C₀ over its parameters. C1 : ndarray Derivative of C₁ over its parameters.
glimix_core/cov/_kron2sum.py
def gradient(self):
    """
    Gradient of K.

    Returns
    -------
    C0 : ndarray
        Derivative of C₀ over its parameters.
    C1 : ndarray
        Derivative of C₁ over its parameters.
    """
    self._init_svd()
    C0 = self._C0.gradient()["Lu"].T
    C1 = self._C1.gradient()["Lu"].T
    grad = {"C0.Lu": kron(C0, self._X).T, "C1.Lu": kron(C1, self._I).T}
    return grad
[ "Gradient", "of", "K", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L266-L281
[ "def", "gradient", "(", "self", ")", ":", "self", ".", "_init_svd", "(", ")", "C0", "=", "self", ".", "_C0", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", ".", "T", "C1", "=", "self", ".", "_C1", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", ".", "T", "grad", "=", "{", "\"C0.Lu\"", ":", "kron", "(", "C0", ",", "self", ".", "_X", ")", ".", "T", ",", "\"C1.Lu\"", ":", "kron", "(", "C1", ",", "self", ".", "_I", ")", ".", "T", "}", "return", "grad" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
Kron2SumCov.gradient_dot
Implements ∂K⋅v. Parameters ---------- v : array_like Vector from ∂K⋅v. Returns ------- C0.Lu : ndarray ∂K⋅v, where the gradient is taken over the C₀ parameters. C1.Lu : ndarray ∂K⋅v, where the gradient is taken over the C₁ parameters.
glimix_core/cov/_kron2sum.py
def gradient_dot(self, v):
    """
    Implements ∂K⋅v.

    Parameters
    ----------
    v : array_like
        Vector from ∂K⋅v.

    Returns
    -------
    C0.Lu : ndarray
        ∂K⋅v, where the gradient is taken over the C₀ parameters.
    C1.Lu : ndarray
        ∂K⋅v, where the gradient is taken over the C₁ parameters.
    """
    self._init_svd()
    V = unvec(v, (self.G.shape[0], -1) + v.shape[1:])
    r = {}

    C = self._C0.gradient()["Lu"]
    r["C0.Lu"] = tensordot(V.T @ self.G @ self.G.T, C, axes=([-2], [0]))
    r["C0.Lu"] = r["C0.Lu"].reshape(V.shape[2:] + (-1,) + (C.shape[-1],), order="F")

    C = self._C1.gradient()["Lu"]
    r["C1.Lu"] = tensordot(V.T, C, axes=([-2], [0]))
    r["C1.Lu"] = r["C1.Lu"].reshape(V.shape[2:] + (-1,) + (C.shape[-1],), order="F")

    return r
[ "Implements", "∂K⋅v", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L283-L311
[ "def", "gradient_dot", "(", "self", ",", "v", ")", ":", "self", ".", "_init_svd", "(", ")", "V", "=", "unvec", "(", "v", ",", "(", "self", ".", "G", ".", "shape", "[", "0", "]", ",", "-", "1", ")", "+", "v", ".", "shape", "[", "1", ":", "]", ")", "r", "=", "{", "}", "C", "=", "self", ".", "_C0", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", "r", "[", "\"C0.Lu\"", "]", "=", "tensordot", "(", "V", ".", "T", "@", "self", ".", "G", "@", "self", ".", "G", ".", "T", ",", "C", ",", "axes", "=", "(", "[", "-", "2", "]", ",", "[", "0", "]", ")", ")", "r", "[", "\"C0.Lu\"", "]", "=", "r", "[", "\"C0.Lu\"", "]", ".", "reshape", "(", "V", ".", "shape", "[", "2", ":", "]", "+", "(", "-", "1", ",", ")", "+", "(", "C", ".", "shape", "[", "-", "1", "]", ",", ")", ",", "order", "=", "\"F\"", ")", "C", "=", "self", ".", "_C1", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", "r", "[", "\"C1.Lu\"", "]", "=", "tensordot", "(", "V", ".", "T", ",", "C", ",", "axes", "=", "(", "[", "-", "2", "]", ",", "[", "0", "]", ")", ")", "r", "[", "\"C1.Lu\"", "]", "=", "r", "[", "\"C1.Lu\"", "]", ".", "reshape", "(", "V", ".", "shape", "[", "2", ":", "]", "+", "(", "-", "1", ",", ")", "+", "(", "C", ".", "shape", "[", "-", "1", "]", ",", ")", ",", "order", "=", "\"F\"", ")", "return", "r" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
Kron2SumCov.solve
Implements the product K⁻¹⋅v. Parameters ---------- v : array_like Array to be multiplied. Returns ------- x : ndarray Solution x to the equation K⋅x = y.
glimix_core/cov/_kron2sum.py
def solve(self, v):
    """
    Implements the product K⁻¹⋅v.

    Parameters
    ----------
    v : array_like
        Array to be multiplied.

    Returns
    -------
    x : ndarray
        Solution x to the equation K⋅x = y.
    """
    from numpy_sugar.linalg import ddot

    self._init_svd()
    L = kron(self.Lh, self.Lx)
    return L.T @ ddot(self.D, L @ v, left=True)
[ "Implements", "the", "product", "K⁻¹⋅v", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L313-L331
[ "def", "solve", "(", "self", ",", "v", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "ddot", "self", ".", "_init_svd", "(", ")", "L", "=", "kron", "(", "self", ".", "Lh", ",", "self", ".", "Lx", ")", "return", "L", ".", "T", "@", "ddot", "(", "self", ".", "D", ",", "L", "@", "v", ",", "left", "=", "True", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
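The solve in the record above rests on the factorization from Kron2SumCov._LhD: taking Lₓ from an eigendecomposition of GGᵀ (assumed here to be what _init_svd provides) and Lₕ as built in _LhD, L = Lₕ ⊗ Lₓ satisfies L K Lᵀ = Sₕ ⊗ Sₓ + I, so K⁻¹ = Lᵀ·diag(D)·L. A toy NumPy check of that identity, mirroring the formulas rather than calling the library:

import numpy as np
from numpy.linalg import eigh, solve

rng = np.random.RandomState(1)
n, p, r = 5, 2, 3
G = rng.randn(n, r)
A0, A1 = rng.randn(p, p), rng.randn(p, p)
C0 = A0 @ A0.T                              # toy C₀ (PSD)
C1 = A1 @ A1.T + p * np.eye(p)              # toy C₁ (positive definite)
K = np.kron(C0, G @ G.T) + np.kron(C1, np.eye(n))

Sx, Ux = eigh(G @ G.T)
Lx = Ux.T                                   # Lx·GGᵀ·Lxᵀ = diag(Sx), Lx·Lxᵀ = I
S1, U1 = eigh(C1)
U1S1 = U1 / np.sqrt(S1)                     # same effect as ddot(U1, 1/sqrt(S1))
Sh, Uh = eigh(U1S1.T @ C0 @ U1S1)
Lh = (U1S1 @ Uh).T                          # Lh·C₁·Lhᵀ = I, Lh·C₀·Lhᵀ = diag(Sh)
D = 1 / (np.kron(Sh, Sx) + 1)

L = np.kron(Lh, Lx)
v = rng.randn(n * p)
assert np.allclose(L.T @ (D * (L @ v)), solve(K, v))   # K⁻¹·v = Lᵀ·diag(D)·L·v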
valid
Kron2SumCov.logdet
Implements log|K| = - log|D| + n⋅log|C₁|. Returns ------- logdet : float Log-determinant of K.
glimix_core/cov/_kron2sum.py
def logdet(self):
    """
    Implements log|K| = - log|D| + n⋅log|C₁|.

    Returns
    -------
    logdet : float
        Log-determinant of K.
    """
    self._init_svd()
    return -log(self._De).sum() + self.G.shape[0] * self.C1.logdet()
[ "Implements", "log|K|", "=", "-", "log|D|", "+", "n⋅log|C₁|", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L333-L343
[ "def", "logdet", "(", "self", ")", ":", "self", ".", "_init_svd", "(", ")", "return", "-", "log", "(", "self", ".", "_De", ")", ".", "sum", "(", ")", "+", "self", ".", "G", ".", "shape", "[", "0", "]", "*", "self", ".", "C1", ".", "logdet", "(", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
Kron2SumCov.logdet_gradient
Implements ∂log|K| = Tr[K⁻¹∂K]. It can be shown that:: ∂log|K| = diag(D)ᵀdiag(L(∂K)Lᵀ) = diag(D)ᵀ(diag(Lₕ∂C₀Lₕᵀ)⊗diag(LₓGGᵀLₓᵀ)), when the derivative is over the parameters of C₀. Similarly, ∂log|K| = diag(D)ᵀdiag(L(∂K)Lᵀ) = diag(D)ᵀ(diag(Lₕ∂C₁Lₕᵀ)⊗diag(I)), over the parameters of C₁. Returns ------- C0 : ndarray Derivative of C₀ over its parameters. C1 : ndarray Derivative of C₁ over its parameters.
glimix_core/cov/_kron2sum.py
def logdet_gradient(self):
    """
    Implements ∂log|K| = Tr[K⁻¹∂K].

    It can be shown that::

        ∂log|K| = diag(D)ᵀdiag(L(∂K)Lᵀ)
                = diag(D)ᵀ(diag(Lₕ∂C₀Lₕᵀ)⊗diag(LₓGGᵀLₓᵀ)),

    when the derivative is over the parameters of C₀. Similarly,

        ∂log|K| = diag(D)ᵀdiag(L(∂K)Lᵀ)
                = diag(D)ᵀ(diag(Lₕ∂C₁Lₕᵀ)⊗diag(I)),

    over the parameters of C₁.

    Returns
    -------
    C0 : ndarray
        Derivative of C₀ over its parameters.
    C1 : ndarray
        Derivative of C₁ over its parameters.
    """
    from numpy_sugar.linalg import dotd

    self._init_svd()
    dC0 = self._C0.gradient()["Lu"]
    grad_C0 = zeros_like(self._C0.Lu)
    for i in range(self._C0.Lu.shape[0]):
        t = kron(dotd(self.Lh, dC0[..., i] @ self.Lh.T), self._diag_LxGGLxe)
        grad_C0[i] = (self._De * t).sum()

    dC1 = self._C1.gradient()["Lu"]
    grad_C1 = zeros_like(self._C1.Lu)
    p = self._Sxe.shape[0]
    np = self._G.shape[0] - p
    for i in range(self._C1.Lu.shape[0]):
        t = (dotd(self.Lh, dC1[..., i] @ self.Lh.T) * np).sum()
        t1 = kron(dotd(self.Lh, dC1[..., i] @ self.Lh.T), eye(p))
        t += (self._De * t1).sum()
        grad_C1[i] = t

    return {"C0.Lu": grad_C0, "C1.Lu": grad_C1}
[ "Implements", "∂log|K|", "=", "Tr", "[", "K⁻¹∂K", "]", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L345-L386
[ "def", "logdet_gradient", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "dotd", "self", ".", "_init_svd", "(", ")", "dC0", "=", "self", ".", "_C0", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", "grad_C0", "=", "zeros_like", "(", "self", ".", "_C0", ".", "Lu", ")", "for", "i", "in", "range", "(", "self", ".", "_C0", ".", "Lu", ".", "shape", "[", "0", "]", ")", ":", "t", "=", "kron", "(", "dotd", "(", "self", ".", "Lh", ",", "dC0", "[", "...", ",", "i", "]", "@", "self", ".", "Lh", ".", "T", ")", ",", "self", ".", "_diag_LxGGLxe", ")", "grad_C0", "[", "i", "]", "=", "(", "self", ".", "_De", "*", "t", ")", ".", "sum", "(", ")", "dC1", "=", "self", ".", "_C1", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", "grad_C1", "=", "zeros_like", "(", "self", ".", "_C1", ".", "Lu", ")", "p", "=", "self", ".", "_Sxe", ".", "shape", "[", "0", "]", "np", "=", "self", ".", "_G", ".", "shape", "[", "0", "]", "-", "p", "for", "i", "in", "range", "(", "self", ".", "_C1", ".", "Lu", ".", "shape", "[", "0", "]", ")", ":", "t", "=", "(", "dotd", "(", "self", ".", "Lh", ",", "dC1", "[", "...", ",", "i", "]", "@", "self", ".", "Lh", ".", "T", ")", "*", "np", ")", ".", "sum", "(", ")", "t1", "=", "kron", "(", "dotd", "(", "self", ".", "Lh", ",", "dC1", "[", "...", ",", "i", "]", "@", "self", ".", "Lh", ".", "T", ")", ",", "eye", "(", "p", ")", ")", "t", "+=", "(", "self", ".", "_De", "*", "t1", ")", ".", "sum", "(", ")", "grad_C1", "[", "i", "]", "=", "t", "return", "{", "\"C0.Lu\"", ":", "grad_C0", ",", "\"C1.Lu\"", ":", "grad_C1", "}" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
Kron2SumCov.LdKL_dot
Implements L(∂K)Lᵀv. The array v can have one or two dimensions and the first dimension has to have size n⋅p. Let vec(V) = v. We have L(∂K)Lᵀ⋅v = ((Lₕ∂C₀Lₕᵀ) ⊗ (LₓGGᵀLₓᵀ))vec(V) = vec(LₓGGᵀLₓᵀVLₕ∂C₀Lₕᵀ), when the derivative is over the parameters of C₀. Similarly, L(∂K)Lᵀv = ((Lₕ∂C₁Lₕᵀ) ⊗ (LₓLₓᵀ))vec(V) = vec(LₓLₓᵀVLₕ∂C₁Lₕᵀ), over the parameters of C₁.
glimix_core/cov/_kron2sum.py
def LdKL_dot(self, v, v1=None):
    """
    Implements L(∂K)Lᵀv.

    The array v can have one or two dimensions and the first dimension has to
    have size n⋅p.

    Let vec(V) = v. We have

        L(∂K)Lᵀ⋅v = ((Lₕ∂C₀Lₕᵀ) ⊗ (LₓGGᵀLₓᵀ))vec(V) = vec(LₓGGᵀLₓᵀVLₕ∂C₀Lₕᵀ),

    when the derivative is over the parameters of C₀. Similarly,

        L(∂K)Lᵀv = ((Lₕ∂C₁Lₕᵀ) ⊗ (LₓLₓᵀ))vec(V) = vec(LₓLₓᵀVLₕ∂C₁Lₕᵀ),

    over the parameters of C₁.
    """
    self._init_svd()

    def dot(a, b):
        r = tensordot(a, b, axes=([1], [0]))
        if a.ndim > b.ndim:
            return r.transpose([0, 2, 1])
        return r

    Lh = self.Lh
    V = unvec(v, (self.Lx.shape[0], -1) + v.shape[1:])
    LdKL_dot = {
        "C0.Lu": empty((v.shape[0],) + v.shape[1:] + (self._C0.Lu.shape[0],)),
        "C1.Lu": empty((v.shape[0],) + v.shape[1:] + (self._C1.Lu.shape[0],)),
    }

    dC0 = self._C0.gradient()["Lu"]
    for i in range(self._C0.Lu.shape[0]):
        t = dot(self._LxG, dot(self._LxG.T, dot(V, Lh @ dC0[..., i] @ Lh.T)))
        LdKL_dot["C0.Lu"][..., i] = t.reshape((-1,) + t.shape[2:], order="F")

    dC1 = self._C1.gradient()["Lu"]
    for i in range(self._C1.Lu.shape[0]):
        t = dot(V, Lh @ dC1[..., i] @ Lh.T)
        LdKL_dot["C1.Lu"][..., i] = t.reshape((-1,) + t.shape[2:], order="F")

    return LdKL_dot
[ "Implements", "L", "(", "∂K", ")", "Lᵀv", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_kron2sum.py#L388-L430
[ "def", "LdKL_dot", "(", "self", ",", "v", ",", "v1", "=", "None", ")", ":", "self", ".", "_init_svd", "(", ")", "def", "dot", "(", "a", ",", "b", ")", ":", "r", "=", "tensordot", "(", "a", ",", "b", ",", "axes", "=", "(", "[", "1", "]", ",", "[", "0", "]", ")", ")", "if", "a", ".", "ndim", ">", "b", ".", "ndim", ":", "return", "r", ".", "transpose", "(", "[", "0", ",", "2", ",", "1", "]", ")", "return", "r", "Lh", "=", "self", ".", "Lh", "V", "=", "unvec", "(", "v", ",", "(", "self", ".", "Lx", ".", "shape", "[", "0", "]", ",", "-", "1", ")", "+", "v", ".", "shape", "[", "1", ":", "]", ")", "LdKL_dot", "=", "{", "\"C0.Lu\"", ":", "empty", "(", "(", "v", ".", "shape", "[", "0", "]", ",", ")", "+", "v", ".", "shape", "[", "1", ":", "]", "+", "(", "self", ".", "_C0", ".", "Lu", ".", "shape", "[", "0", "]", ",", ")", ")", ",", "\"C1.Lu\"", ":", "empty", "(", "(", "v", ".", "shape", "[", "0", "]", ",", ")", "+", "v", ".", "shape", "[", "1", ":", "]", "+", "(", "self", ".", "_C1", ".", "Lu", ".", "shape", "[", "0", "]", ",", ")", ")", ",", "}", "dC0", "=", "self", ".", "_C0", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", "for", "i", "in", "range", "(", "self", ".", "_C0", ".", "Lu", ".", "shape", "[", "0", "]", ")", ":", "t", "=", "dot", "(", "self", ".", "_LxG", ",", "dot", "(", "self", ".", "_LxG", ".", "T", ",", "dot", "(", "V", ",", "Lh", "@", "dC0", "[", "...", ",", "i", "]", "@", "Lh", ".", "T", ")", ")", ")", "LdKL_dot", "[", "\"C0.Lu\"", "]", "[", "...", ",", "i", "]", "=", "t", ".", "reshape", "(", "(", "-", "1", ",", ")", "+", "t", ".", "shape", "[", "2", ":", "]", ",", "order", "=", "\"F\"", ")", "dC1", "=", "self", ".", "_C1", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", "for", "i", "in", "range", "(", "self", ".", "_C1", ".", "Lu", ".", "shape", "[", "0", "]", ")", ":", "t", "=", "dot", "(", "V", ",", "Lh", "@", "dC1", "[", "...", ",", "i", "]", "@", "Lh", ".", "T", ")", "LdKL_dot", "[", "\"C1.Lu\"", "]", "[", "...", ",", "i", "]", "=", "t", ".", "reshape", "(", "(", "-", "1", ",", ")", "+", "t", ".", "shape", "[", "2", ":", "]", ",", "order", "=", "\"F\"", ")", "return", "LdKL_dot" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
rsolve
Robust solve Ax=y.
glimix_core/_util/solve.py
def rsolve(A, y):
    """
    Robust solve Ax=y.
    """
    from numpy_sugar.linalg import rsolve as _rsolve

    try:
        beta = _rsolve(A, y)
    except LinAlgError:
        msg = "Could not converge to solve Ax=y."
        msg += " Setting x to zero."
        warnings.warn(msg, RuntimeWarning)
        beta = zeros(A.shape[0])

    return beta
[ "Robust", "solve", "Ax", "=", "y", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/_util/solve.py#L37-L51
[ "def", "rsolve", "(", "A", ",", "y", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "rsolve", "as", "_rsolve", "try", ":", "beta", "=", "_rsolve", "(", "A", ",", "y", ")", "except", "LinAlgError", ":", "msg", "=", "\"Could not converge to solve Ax=y.\"", "msg", "+=", "\" Setting x to zero.\"", "warnings", ".", "warn", "(", "msg", ",", "RuntimeWarning", ")", "beta", "=", "zeros", "(", "A", ".", "shape", "[", "0", "]", ")", "return", "beta" ]
cddd0994591d100499cc41c1f480ddd575e7a980
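The record above wraps numpy_sugar's rsolve with a warn-and-zero fallback. A rough standalone analogue of the same pattern using only numpy.linalg.lstsq; rsolve_fallback is a hypothetical helper sketching the fallback idea, not the numpy_sugar implementation.

import warnings
import numpy as np
from numpy.linalg import LinAlgError, lstsq

def rsolve_fallback(A, y):
    # Least-squares solve of Ax = y; fall back to x = 0 if it fails to converge.
    try:
        x, *_ = lstsq(A, y, rcond=None)
    except LinAlgError:
        warnings.warn("Could not converge to solve Ax=y. Setting x to zero.",
                      RuntimeWarning)
        x = np.zeros(A.shape[1])
    return x

A = np.array([[1.0, 2.0], [2.0, 4.0]])      # singular, but lstsq still handles it
print(rsolve_fallback(A, np.array([1.0, 2.0])))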
valid
multivariate_normal
Draw random samples from a multivariate normal distribution. Parameters ---------- random : np.random.RandomState instance Random state. mean : array_like Mean of the n-dimensional distribution. cov : array_like Covariance matrix of the distribution. It must be symmetric and positive-definite for proper sampling. Returns ------- out : ndarray The drawn sample.
glimix_core/_util/random.py
def multivariate_normal(random, mean, cov):
    """
    Draw random samples from a multivariate normal distribution.

    Parameters
    ----------
    random : np.random.RandomState instance
        Random state.
    mean : array_like
        Mean of the n-dimensional distribution.
    cov : array_like
        Covariance matrix of the distribution. It must be symmetric and
        positive-definite for proper sampling.

    Returns
    -------
    out : ndarray
        The drawn sample.
    """
    from numpy.linalg import cholesky

    L = cholesky(cov)
    return L @ random.randn(L.shape[0]) + mean
[ "Draw", "random", "samples", "from", "a", "multivariate", "normal", "distribution", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/_util/random.py#L1-L23
[ "def", "multivariate_normal", "(", "random", ",", "mean", ",", "cov", ")", ":", "from", "numpy", ".", "linalg", "import", "cholesky", "L", "=", "cholesky", "(", "cov", ")", "return", "L", "@", "random", ".", "randn", "(", "L", ".", "shape", "[", "0", "]", ")", "+", "mean" ]
cddd0994591d100499cc41c1f480ddd575e7a980
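A short usage sketch of the Cholesky recipe in the record above, with a hand-picked symmetric positive-definite covariance; plain NumPy only, without importing the private module.

import numpy as np

random = np.random.RandomState(0)
mean = np.array([1.0, -1.0, 0.5])
cov = np.array([[2.0, 0.5, 0.0],
                [0.5, 1.0, 0.3],
                [0.0, 0.3, 1.5]])           # symmetric positive definite

L = np.linalg.cholesky(cov)                 # same recipe as multivariate_normal
sample = L @ random.randn(3) + mean
print(sample)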
valid
SumCov.gradient
Sum of covariance function derivatives. Returns ------- dict ∂K₀ + ∂K₁ + ⋯
glimix_core/cov/_sum.py
def gradient(self):
    """
    Sum of covariance function derivatives.

    Returns
    -------
    dict
        ∂K₀ + ∂K₁ + ⋯
    """
    grad = {}
    for i, f in enumerate(self._covariances):
        for varname, g in f.gradient().items():
            grad[f"{self._name}[{i}].{varname}"] = g
    return grad
[ "Sum", "of", "covariance", "function", "derivatives", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_sum.py#L57-L70
[ "def", "gradient", "(", "self", ")", ":", "grad", "=", "{", "}", "for", "i", ",", "f", "in", "enumerate", "(", "self", ".", "_covariances", ")", ":", "for", "varname", ",", "g", "in", "f", ".", "gradient", "(", ")", ".", "items", "(", ")", ":", "grad", "[", "f\"{self._name}[{i}].{varname}\"", "]", "=", "g", "return", "grad" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LinearCov.value
Covariance matrix. Returns ------- K : ndarray s⋅XXᵀ.
glimix_core/cov/_linear.py
def value(self):
    """
    Covariance matrix.

    Returns
    -------
    K : ndarray
        s⋅XXᵀ.
    """
    X = self.X
    return self.scale * (X @ X.T)
[ "Covariance", "matrix", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_linear.py#L79-L89
[ "def", "value", "(", "self", ")", ":", "X", "=", "self", ".", "X", "return", "self", ".", "scale", "*", "(", "X", "@", "X", ".", "T", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
KronMean.B
Effect-sizes parameter, B.
glimix_core/mean/_kron.py
def B(self):
    """
    Effect-sizes parameter, B.
    """
    return unvec(self._vecB.value, (self.X.shape[1], self.A.shape[0]))
[ "Effect", "-", "sizes", "parameter", "B", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/mean/_kron.py#L94-L98
[ "def", "B", "(", "self", ")", ":", "return", "unvec", "(", "self", ".", "_vecB", ".", "value", ",", "(", "self", ".", "X", ".", "shape", "[", "1", "]", ",", "self", ".", "A", ".", "shape", "[", "0", "]", ")", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
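KronMean.B above reshapes the stored vec(B) back into a matrix. Assuming unvec is the column-major inverse of vec (a Fortran-order reshape), the Kronecker mean M = XBAᵀ satisfies vec(M) = (A ⊗ X)vec(B). A small NumPy check of that identity, with hypothetical vec/unvec helpers standing in for the library's:

import numpy as np

def vec(M):
    return M.reshape(-1, order="F")          # column-major flattening

def unvec(x, shape):
    return x.reshape(shape, order="F")       # assumed behaviour of unvec

rng = np.random.RandomState(0)
n, c, p = 5, 3, 2                            # samples, covariates, traits
X = rng.randn(n, c)                          # covariate design
A = rng.randn(p, p)                          # trait design
vecB = rng.randn(c * p)

B = unvec(vecB, (c, p))                      # mirrors B() above
M = X @ B @ A.T

assert np.allclose(vec(M), np.kron(A, X) @ vecB)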
valid
bernoulli_sample
r"""Bernoulli likelihood sampling. Sample according to .. math:: \mathbf y \sim \prod_{i=1}^n \text{Bernoulli}(\mu_i = \text{logit}(z_i)) \mathcal N(~ o \mathbf 1 + \mathbf a^\intercal \boldsymbol\alpha; ~ (h^2 - v_c)\mathrm G^\intercal\mathrm G + (1-h^2-v_c)\mathrm I ~) using the canonical Logit link function to define the conditional Bernoulli mean :math:`\mu_i`. The causal :math:`\mathbf a` covariates and the corresponding effect-sizes are randomly draw according to the following idea. The ``causal_variants``, if given, are first mean-zero and std-one normalized and then having its elements divided by the squared-root the the number of variances:: causal_variants = _stdnorm(causal_variants, axis=0) causal_variants /= sqrt(causal_variants.shape[1]) The causal effect-sizes :math:`\boldsymbol\alpha` are draw from :math:`\{-1, +1\}` and subsequently normalized for mean-zero and std-one"" Parameters ---------- random_state : random_state Set the initial random state. Example ------- .. doctest:: >>> from glimix_core.random import bernoulli_sample >>> from numpy.random import RandomState >>> offset = 5 >>> G = [[1, -1], [2, 1]] >>> bernoulli_sample(offset, G, random_state=RandomState(0)) array([1., 1.])
glimix_core/random/_canonical.py
def bernoulli_sample(
    offset,
    G,
    heritability=0.5,
    causal_variants=None,
    causal_variance=0,
    random_state=None,
):
    r"""Bernoulli likelihood sampling.

    Sample according to

    .. math::

        \mathbf y \sim \prod_{i=1}^n
        \text{Bernoulli}(\mu_i = \text{logit}(z_i))
        \mathcal N(~ o \mathbf 1 + \mathbf a^\intercal \boldsymbol\alpha;
        ~ (h^2 - v_c)\mathrm G^\intercal\mathrm G + (1-h^2-v_c)\mathrm I ~)

    using the canonical Logit link function to define the conditional Bernoulli
    mean :math:`\mu_i`.

    The causal :math:`\mathbf a` covariates and the corresponding effect-sizes
    are randomly drawn according to the following idea. The ``causal_variants``,
    if given, are first mean-zero and std-one normalized and then have their
    elements divided by the square root of the number of variants::

        causal_variants = _stdnorm(causal_variants, axis=0)
        causal_variants /= sqrt(causal_variants.shape[1])

    The causal effect-sizes :math:`\boldsymbol\alpha` are drawn from
    :math:`\{-1, +1\}` and subsequently normalized for mean-zero and std-one.

    Parameters
    ----------
    random_state : random_state
        Set the initial random state.

    Example
    -------

    .. doctest::

        >>> from glimix_core.random import bernoulli_sample
        >>> from numpy.random import RandomState
        >>> offset = 5
        >>> G = [[1, -1], [2, 1]]
        >>> bernoulli_sample(offset, G, random_state=RandomState(0))
        array([1., 1.])
    """
    link = LogitLink()
    mean, cov = _mean_cov(
        offset, G, heritability, causal_variants, causal_variance, random_state
    )
    lik = BernoulliProdLik(link)
    sampler = GGPSampler(lik, mean, cov)

    return sampler.sample(random_state)
[ "r", "Bernoulli", "likelihood", "sampling", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/random/_canonical.py#L10-L68
[ "def", "bernoulli_sample", "(", "offset", ",", "G", ",", "heritability", "=", "0.5", ",", "causal_variants", "=", "None", ",", "causal_variance", "=", "0", ",", "random_state", "=", "None", ",", ")", ":", "link", "=", "LogitLink", "(", ")", "mean", ",", "cov", "=", "_mean_cov", "(", "offset", ",", "G", ",", "heritability", ",", "causal_variants", ",", "causal_variance", ",", "random_state", ")", "lik", "=", "BernoulliProdLik", "(", "link", ")", "sampler", "=", "GGPSampler", "(", "lik", ",", "mean", ",", "cov", ")", "return", "sampler", ".", "sample", "(", "random_state", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
poisson_sample
Poisson likelihood sampling. Parameters ---------- random_state : random_state Set the initial random state. Example ------- .. doctest:: >>> from glimix_core.random import poisson_sample >>> from numpy.random import RandomState >>> offset = -0.5 >>> G = [[0.5, -1], [2, 1]] >>> poisson_sample(offset, G, random_state=RandomState(0)) array([0, 6])
glimix_core/random/_canonical.py
def poisson_sample(
    offset,
    G,
    heritability=0.5,
    causal_variants=None,
    causal_variance=0,
    random_state=None,
):
    """Poisson likelihood sampling.

    Parameters
    ----------
    random_state : random_state
        Set the initial random state.

    Example
    -------

    .. doctest::

        >>> from glimix_core.random import poisson_sample
        >>> from numpy.random import RandomState
        >>> offset = -0.5
        >>> G = [[0.5, -1], [2, 1]]
        >>> poisson_sample(offset, G, random_state=RandomState(0))
        array([0, 6])
    """
    mean, cov = _mean_cov(
        offset, G, heritability, causal_variants, causal_variance, random_state
    )
    link = LogLink()
    lik = PoissonProdLik(link)
    sampler = GGPSampler(lik, mean, cov)

    return sampler.sample(random_state)
[ "Poisson", "likelihood", "sampling", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/random/_canonical.py#L110-L144
[ "def", "poisson_sample", "(", "offset", ",", "G", ",", "heritability", "=", "0.5", ",", "causal_variants", "=", "None", ",", "causal_variance", "=", "0", ",", "random_state", "=", "None", ",", ")", ":", "mean", ",", "cov", "=", "_mean_cov", "(", "offset", ",", "G", ",", "heritability", ",", "causal_variants", ",", "causal_variance", ",", "random_state", ")", "link", "=", "LogLink", "(", ")", "lik", "=", "PoissonProdLik", "(", "link", ")", "sampler", "=", "GGPSampler", "(", "lik", ",", "mean", ",", "cov", ")", "return", "sampler", ".", "sample", "(", "random_state", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
PosteriorLinearKernel.L
r"""Cholesky decomposition of :math:`\mathrm B`. .. math:: \mathrm B = \mathrm Q^{\intercal}\tilde{\mathrm{T}}\mathrm Q + \mathrm{S}^{-1}
glimix_core/_ep/posterior_linear_kernel.py
def L(self):
    r"""Cholesky decomposition of :math:`\mathrm B`.

    .. math::

        \mathrm B = \mathrm Q^{\intercal}\tilde{\mathrm{T}}\mathrm Q
            + \mathrm{S}^{-1}
    """
    from numpy_sugar.linalg import ddot, sum2diag

    if self._L_cache is not None:
        return self._L_cache

    s = self._cov["scale"]
    d = self._cov["delta"]
    Q = self._cov["QS"][0][0]
    S = self._cov["QS"][1]

    ddot(self.A * self._site.tau, Q, left=True, out=self._NxR)
    B = dot(Q.T, self._NxR, out=self._RxR)
    B *= 1 - d
    sum2diag(B, 1.0 / S / s, out=B)
    self._L_cache = _cho_factor(B)
    return self._L_cache
[ "r", "Cholesky", "decomposition", "of", ":", "math", ":", "\\", "mathrm", "B", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/_ep/posterior_linear_kernel.py#L65-L88
[ "def", "L", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "ddot", ",", "sum2diag", "if", "self", ".", "_L_cache", "is", "not", "None", ":", "return", "self", ".", "_L_cache", "s", "=", "self", ".", "_cov", "[", "\"scale\"", "]", "d", "=", "self", ".", "_cov", "[", "\"delta\"", "]", "Q", "=", "self", ".", "_cov", "[", "\"QS\"", "]", "[", "0", "]", "[", "0", "]", "S", "=", "self", ".", "_cov", "[", "\"QS\"", "]", "[", "1", "]", "ddot", "(", "self", ".", "A", "*", "self", ".", "_site", ".", "tau", ",", "Q", ",", "left", "=", "True", ",", "out", "=", "self", ".", "_NxR", ")", "B", "=", "dot", "(", "Q", ".", "T", ",", "self", ".", "_NxR", ",", "out", "=", "self", ".", "_RxR", ")", "B", "*=", "1", "-", "d", "sum2diag", "(", "B", ",", "1.0", "/", "S", "/", "s", ",", "out", "=", "B", ")", "self", ".", "_L_cache", "=", "_cho_factor", "(", "B", ")", "return", "self", ".", "_L_cache" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
ExpFamGP.fit
r"""Maximise the marginal likelihood. Parameters ---------- verbose : bool ``True`` for progress output; ``False`` otherwise. Defaults to ``True``. factr : float, optional The iteration stops when ``(f^k - f^{k+1})/max{|f^k|,|f^{k+1}|,1} <= factr * eps``, where ``eps`` is the machine precision. pgtol : float, optional The iteration will stop when ``max{|proj g_i | i = 1, ..., n} <= pgtol`` where ``pg_i`` is the i-th component of the projected gradient. Notes ----- Please, refer to :func:`scipy.optimize.fmin_l_bfgs_b` for further information about ``factr`` and ``pgtol``.
glimix_core/ggp/_expfam.py
def fit(self, verbose=True, factr=1e5, pgtol=1e-7):
    r"""Maximise the marginal likelihood.

    Parameters
    ----------
    verbose : bool
        ``True`` for progress output; ``False`` otherwise.
        Defaults to ``True``.
    factr : float, optional
        The iteration stops when
        ``(f^k - f^{k+1})/max{|f^k|,|f^{k+1}|,1} <= factr * eps``, where ``eps``
        is the machine precision.
    pgtol : float, optional
        The iteration will stop when ``max{|proj g_i | i = 1, ..., n} <= pgtol``
        where ``pg_i`` is the i-th component of the projected gradient.

    Notes
    -----
    Please, refer to :func:`scipy.optimize.fmin_l_bfgs_b` for further
    information about ``factr`` and ``pgtol``.
    """
    self._maximize(verbose=verbose, factr=factr, pgtol=pgtol)
[ "r", "Maximise", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/ggp/_expfam.py#L83-L104
[ "def", "fit", "(", "self", ",", "verbose", "=", "True", ",", "factr", "=", "1e5", ",", "pgtol", "=", "1e-7", ")", ":", "self", ".", "_maximize", "(", "verbose", "=", "verbose", ",", "factr", "=", "factr", ",", "pgtol", "=", "pgtol", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
GLMM.covariance
r"""Covariance of the prior. Returns ------- :class:`numpy.ndarray` :math:`v_0 \mathrm K + v_1 \mathrm I`.
glimix_core/glmm/_glmm.py
def covariance(self):
    r"""Covariance of the prior.

    Returns
    -------
    :class:`numpy.ndarray`
        :math:`v_0 \mathrm K + v_1 \mathrm I`.
    """
    from numpy_sugar.linalg import ddot, sum2diag

    Q0 = self._QS[0][0]
    S0 = self._QS[1]
    return sum2diag(dot(ddot(Q0, self.v0 * S0), Q0.T), self.v1)
[ "r", "Covariance", "of", "the", "prior", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/glmm/_glmm.py#L127-L139
[ "def", "covariance", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "ddot", ",", "sum2diag", "Q0", "=", "self", ".", "_QS", "[", "0", "]", "[", "0", "]", "S0", "=", "self", ".", "_QS", "[", "1", "]", "return", "sum2diag", "(", "dot", "(", "ddot", "(", "Q0", ",", "self", ".", "v0", "*", "S0", ")", ",", "Q0", ".", "T", ")", ",", "self", ".", "v1", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
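Assuming the stored QS pair is an eigendecomposition-style factorization of K (K = Q₀·diag(S₀)·Q₀ᵀ), the expression in the record above is just v₀K + v₁I. A toy NumPy check under that assumption:

import numpy as np
from numpy.linalg import eigh

rng = np.random.RandomState(0)
n = 6
A = rng.randn(n, 3)
K = A @ A.T                                  # toy PSD "kinship" matrix
v0, v1 = 0.7, 0.3

S0, Q0 = eigh(K)                             # assumed role of the stored (Q0, S0)
lhs = Q0 @ np.diag(v0 * S0) @ Q0.T + v1 * np.eye(n)
rhs = v0 * K + v1 * np.eye(n)
assert np.allclose(lhs, rhs)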
valid
GLMM.fit
r"""Maximise the marginal likelihood. Parameters ---------- verbose : bool ``True`` for progress output; ``False`` otherwise. Defaults to ``True``. factr : float, optional The iteration stops when ``(f^k - f^{k+1})/max{|f^k|,|f^{k+1}|,1} <= factr * eps``, where ``eps`` is the machine precision. pgtol : float, optional The iteration will stop when ``max{|proj g_i | i = 1, ..., n} <= pgtol`` where ``pg_i`` is the i-th component of the projected gradient. Notes ----- Please, refer to :func:`scipy.optimize.fmin_l_bfgs_b` for further information about ``factr`` and ``pgtol``.
glimix_core/glmm/_glmm.py
def fit(self, verbose=True, factr=1e5, pgtol=1e-7):
    r"""Maximise the marginal likelihood.

    Parameters
    ----------
    verbose : bool
        ``True`` for progress output; ``False`` otherwise.
        Defaults to ``True``.
    factr : float, optional
        The iteration stops when
        ``(f^k - f^{k+1})/max{|f^k|,|f^{k+1}|,1} <= factr * eps``, where ``eps``
        is the machine precision.
    pgtol : float, optional
        The iteration will stop when ``max{|proj g_i | i = 1, ..., n} <= pgtol``
        where ``pg_i`` is the i-th component of the projected gradient.

    Notes
    -----
    Please, refer to :func:`scipy.optimize.fmin_l_bfgs_b` for further
    information about ``factr`` and ``pgtol``.
    """
    self._verbose = verbose
    self._maximize(verbose=verbose, factr=factr, pgtol=pgtol)
    self._verbose = False
[ "r", "Maximise", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/glmm/_glmm.py#L169-L192
[ "def", "fit", "(", "self", ",", "verbose", "=", "True", ",", "factr", "=", "1e5", ",", "pgtol", "=", "1e-7", ")", ":", "self", ".", "_verbose", "=", "verbose", "self", ".", "_maximize", "(", "verbose", "=", "verbose", ",", "factr", "=", "factr", ",", "pgtol", "=", "pgtol", ")", "self", ".", "_verbose", "=", "False" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
GLMM.posteriori_mean
r""" Mean of the estimated posteriori. This is also the maximum a posteriori estimation of the latent variable.
glimix_core/glmm/_glmm.py
def posteriori_mean(self):
    r"""Mean of the estimated posteriori.

    This is also the maximum a posteriori estimation of the latent variable.
    """
    from numpy_sugar.linalg import rsolve

    Sigma = self.posteriori_covariance()
    eta = self._ep._posterior.eta
    return dot(Sigma, eta + rsolve(GLMM.covariance(self), self.mean()))
[ "r", "Mean", "of", "the", "estimated", "posteriori", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/glmm/_glmm.py#L220-L229
[ "def", "posteriori_mean", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "rsolve", "Sigma", "=", "self", ".", "posteriori_covariance", "(", ")", "eta", "=", "self", ".", "_ep", ".", "_posterior", ".", "eta", "return", "dot", "(", "Sigma", ",", "eta", "+", "rsolve", "(", "GLMM", ".", "covariance", "(", "self", ")", ",", "self", ".", "mean", "(", ")", ")", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
GLMM.posteriori_covariance
r""" Covariance of the estimated posteriori.
glimix_core/glmm/_glmm.py
def posteriori_covariance(self):
    r"""Covariance of the estimated posteriori."""
    K = GLMM.covariance(self)
    tau = self._ep._posterior.tau
    return pinv(pinv(K) + diag(1 / tau))
[ "r", "Covariance", "of", "the", "estimated", "posteriori", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/glmm/_glmm.py#L231-L235
[ "def", "posteriori_covariance", "(", "self", ")", ":", "K", "=", "GLMM", ".", "covariance", "(", "self", ")", "tau", "=", "self", ".", "_ep", ".", "_posterior", ".", "tau", "return", "pinv", "(", "pinv", "(", "K", ")", "+", "diag", "(", "1", "/", "tau", ")", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
_bstar_1effect
Same as :func:`_bstar_set` but for single-effect.
glimix_core/lmm/_lmm_scan.py
def _bstar_1effect(beta, alpha, yTBy, yTBX, yTBM, XTBX, XTBM, MTBM):
    """
    Same as :func:`_bstar_set` but for single-effect.
    """
    from numpy_sugar import epsilon
    from numpy_sugar.linalg import dotd
    from numpy import sum

    r = full(MTBM[0].shape[0], yTBy)
    r -= 2 * add.reduce([dot(i, beta) for i in yTBX])
    r -= 2 * add.reduce([i * alpha for i in yTBM])
    r += add.reduce([dotd(beta.T, dot(i, beta)) for i in XTBX])
    r += add.reduce([dotd(beta.T, i * alpha) for i in XTBM])
    r += add.reduce([sum(alpha * i * beta, axis=0) for i in XTBM])
    r += add.reduce([alpha * i.ravel() * alpha for i in MTBM])
    return clip(r, epsilon.tiny, inf)
[ "Same", "as", ":", "func", ":", "_bstar_set", "but", "for", "single", "-", "effect", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm_scan.py#L536-L551
[ "def", "_bstar_1effect", "(", "beta", ",", "alpha", ",", "yTBy", ",", "yTBX", ",", "yTBM", ",", "XTBX", ",", "XTBM", ",", "MTBM", ")", ":", "from", "numpy_sugar", "import", "epsilon", "from", "numpy_sugar", ".", "linalg", "import", "dotd", "from", "numpy", "import", "sum", "r", "=", "full", "(", "MTBM", "[", "0", "]", ".", "shape", "[", "0", "]", ",", "yTBy", ")", "r", "-=", "2", "*", "add", ".", "reduce", "(", "[", "dot", "(", "i", ",", "beta", ")", "for", "i", "in", "yTBX", "]", ")", "r", "-=", "2", "*", "add", ".", "reduce", "(", "[", "i", "*", "alpha", "for", "i", "in", "yTBM", "]", ")", "r", "+=", "add", ".", "reduce", "(", "[", "dotd", "(", "beta", ".", "T", ",", "dot", "(", "i", ",", "beta", ")", ")", "for", "i", "in", "XTBX", "]", ")", "r", "+=", "add", ".", "reduce", "(", "[", "dotd", "(", "beta", ".", "T", ",", "i", "*", "alpha", ")", "for", "i", "in", "XTBM", "]", ")", "r", "+=", "add", ".", "reduce", "(", "[", "sum", "(", "alpha", "*", "i", "*", "beta", ",", "axis", "=", "0", ")", "for", "i", "in", "XTBM", "]", ")", "r", "+=", "add", ".", "reduce", "(", "[", "alpha", "*", "i", ".", "ravel", "(", ")", "*", "alpha", "for", "i", "in", "MTBM", "]", ")", "return", "clip", "(", "r", ",", "epsilon", ".", "tiny", ",", "inf", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
_bstar_set
Compute -2𝐲ᵀBEⱼ𝐛ⱼ + (𝐛ⱼEⱼ)ᵀBEⱼ𝐛ⱼ. For 𝐛ⱼ = [𝜷ⱼᵀ 𝜶ⱼᵀ]ᵀ.
glimix_core/lmm/_lmm_scan.py
def _bstar_set(beta, alpha, yTBy, yTBX, yTBM, XTBX, XTBM, MTBM):
    """
    Compute -2𝐲ᵀBEⱼ𝐛ⱼ + (𝐛ⱼEⱼ)ᵀBEⱼ𝐛ⱼ.

    For 𝐛ⱼ = [𝜷ⱼᵀ 𝜶ⱼᵀ]ᵀ.
    """
    from numpy_sugar import epsilon

    r = yTBy
    r -= 2 * add.reduce([i @ beta for i in yTBX])
    r -= 2 * add.reduce([i @ alpha for i in yTBM])
    r += add.reduce([beta.T @ i @ beta for i in XTBX])
    r += 2 * add.reduce([beta.T @ i @ alpha for i in XTBM])
    r += add.reduce([alpha.T @ i @ alpha for i in MTBM])
    return clip(r, epsilon.tiny, inf)
[ "Compute", "-", "2𝐲ᵀBEⱼ𝐛ⱼ", "+", "(", "𝐛ⱼEⱼ", ")", "ᵀBEⱼ𝐛ⱼ", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm_scan.py#L554-L568
[ "def", "_bstar_set", "(", "beta", ",", "alpha", ",", "yTBy", ",", "yTBX", ",", "yTBM", ",", "XTBX", ",", "XTBM", ",", "MTBM", ")", ":", "from", "numpy_sugar", "import", "epsilon", "r", "=", "yTBy", "r", "-=", "2", "*", "add", ".", "reduce", "(", "[", "i", "@", "beta", "for", "i", "in", "yTBX", "]", ")", "r", "-=", "2", "*", "add", ".", "reduce", "(", "[", "i", "@", "alpha", "for", "i", "in", "yTBM", "]", ")", "r", "+=", "add", ".", "reduce", "(", "[", "beta", ".", "T", "@", "i", "@", "beta", "for", "i", "in", "XTBX", "]", ")", "r", "+=", "2", "*", "add", ".", "reduce", "(", "[", "beta", ".", "T", "@", "i", "@", "alpha", "for", "i", "in", "XTBM", "]", ")", "r", "+=", "add", ".", "reduce", "(", "[", "alpha", ".", "T", "@", "i", "@", "alpha", "for", "i", "in", "MTBM", "]", ")", "return", "clip", "(", "r", ",", "epsilon", ".", "tiny", ",", "inf", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
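The terms accumulated by _bstar_set add up to the residual quadratic form (𝐲 − X𝜷 − M𝜶)ᵀB(𝐲 − X𝜷 − M𝜶). A toy NumPy check of that identity, with each precomputed piece wrapped in a single-element list for simplicity:

import numpy as np

rng = np.random.RandomState(0)
n, c, m = 7, 3, 2
y = rng.randn(n)
X = rng.randn(n, c)
M = rng.randn(n, m)
W = rng.randn(n, n)
B = W @ W.T + np.eye(n)                 # any positive-definite weight matrix
beta = rng.randn(c)
alpha = rng.randn(m)

yTBy = y @ B @ y
yTBX = [y @ B @ X]
yTBM = [y @ B @ M]
XTBX = [X.T @ B @ X]
XTBM = [X.T @ B @ M]
MTBM = [M.T @ B @ M]

r = yTBy
r -= 2 * sum(i @ beta for i in yTBX)
r -= 2 * sum(i @ alpha for i in yTBM)
r += sum(beta.T @ i @ beta for i in XTBX)
r += 2 * sum(beta.T @ i @ alpha for i in XTBM)
r += sum(alpha.T @ i @ alpha for i in MTBM)

resid = y - X @ beta - M @ alpha
assert np.allclose(r, resid @ B @ resid)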
valid
FastScanner.null_lml
Log of the marginal likelihood for the null hypothesis. It is implemented as :: 2·log(p(Y)) = -n·log(2𝜋s) - log|D| - n, Returns ------- lml : float Log of the marginal likelihood.
glimix_core/lmm/_lmm_scan.py
def null_lml(self):
    """
    Log of the marginal likelihood for the null hypothesis.

    It is implemented as ::

        2·log(p(Y)) = -n·log(2𝜋s) - log|D| - n,

    Returns
    -------
    lml : float
        Log of the marginal likelihood.
    """
    n = self._nsamples
    scale = self.null_scale
    return (self._static_lml() - n * log(scale)) / 2
[ "Log", "of", "the", "marginal", "likelihood", "for", "the", "null", "hypothesis", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm_scan.py#L114-L129
[ "def", "null_lml", "(", "self", ")", ":", "n", "=", "self", ".", "_nsamples", "scale", "=", "self", ".", "null_scale", "return", "(", "self", ".", "_static_lml", "(", ")", "-", "n", "*", "log", "(", "scale", ")", ")", "/", "2" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
FastScanner.null_beta
Optimal 𝜷 according to the marginal likelihood. It is computed by solving the equation :: (XᵀBX)𝜷 = XᵀB𝐲. Returns ------- beta : ndarray Optimal 𝜷.
glimix_core/lmm/_lmm_scan.py
def null_beta(self):
    """
    Optimal 𝜷 according to the marginal likelihood.

    It is computed by solving the equation ::

        (XᵀBX)𝜷 = XᵀB𝐲.

    Returns
    -------
    beta : ndarray
        Optimal 𝜷.
    """
    ETBE = self._ETBE
    yTBX = self._yTBX

    A = sum(i.XTBX for i in ETBE)
    b = sum(yTBX)

    return rsolve(A, b)
[ "Optimal", "𝜷", "according", "to", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm_scan.py#L133-L151
[ "def", "null_beta", "(", "self", ")", ":", "ETBE", "=", "self", ".", "_ETBE", "yTBX", "=", "self", ".", "_yTBX", "A", "=", "sum", "(", "i", ".", "XTBX", "for", "i", "in", "ETBE", ")", "b", "=", "sum", "(", "yTBX", ")", "return", "rsolve", "(", "A", ",", "b", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
FastScanner.null_beta_covariance
Covariance of the optimal 𝜷 according to the marginal likelihood. Returns ------- beta_covariance : ndarray (Xᵀ(s(K + vI))⁻¹X)⁻¹.
glimix_core/lmm/_lmm_scan.py
def null_beta_covariance(self):
    """
    Covariance of the optimal 𝜷 according to the marginal likelihood.

    Returns
    -------
    beta_covariance : ndarray
        (Xᵀ(s(K + vI))⁻¹X)⁻¹.
    """
    A = sum(i @ j.T for (i, j) in zip(self._XTQDi, self._XTQ))
    return self.null_scale * pinv(A)
[ "Covariance", "of", "the", "optimal", "𝜷", "according", "to", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm_scan.py#L155-L165
[ "def", "null_beta_covariance", "(", "self", ")", ":", "A", "=", "sum", "(", "i", "@", "j", ".", "T", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "self", ".", "_XTQDi", ",", "self", ".", "_XTQ", ")", ")", "return", "self", ".", "null_scale", "*", "pinv", "(", "A", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
FastScanner.null_scale
Optimal s according to the marginal likelihood. The optimal s is given by :: s = n⁻¹𝐲ᵀB(𝐲 - X𝜷), where 𝜷 is optimal. Returns ------- scale : float Optimal scale.
glimix_core/lmm/_lmm_scan.py
def null_scale(self):
    """
    Optimal s according to the marginal likelihood.

    The optimal s is given by ::

        s = n⁻¹𝐲ᵀB(𝐲 - X𝜷),

    where 𝜷 is optimal.

    Returns
    -------
    scale : float
        Optimal scale.
    """
    n = self._nsamples
    beta = self.null_beta
    sqrdot = self._yTBy - dot(sum(self._yTBX), beta)
    return sqrdot / n
[ "Optimal", "s", "according", "to", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm_scan.py#L182-L200
[ "def", "null_scale", "(", "self", ")", ":", "n", "=", "self", ".", "_nsamples", "beta", "=", "self", ".", "null_beta", "sqrdot", "=", "self", ".", "_yTBy", "-", "dot", "(", "sum", "(", "self", ".", "_yTBX", ")", ",", "beta", ")", "return", "sqrdot", "/", "n" ]
cddd0994591d100499cc41c1f480ddd575e7a980
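Taken together, null_beta and null_scale above are weighted least squares: 𝜷 solves (XᵀBX)𝜷 = XᵀB𝐲 and s = n⁻¹𝐲ᵀB(𝐲 − X𝜷). A dense NumPy restatement on toy data; B is formed explicitly here only for readability, which is not how the scanner computes it.

import numpy as np

rng = np.random.RandomState(0)
n, c = 50, 3
X = rng.randn(n, c)                          # covariates
y = rng.randn(n)                             # outcome
A = rng.randn(n, n)
B = np.linalg.inv(A @ A.T / n + np.eye(n))   # a positive-definite weight matrix

beta = np.linalg.solve(X.T @ B @ X, X.T @ B @ y)   # (XᵀBX)𝜷 = XᵀB𝐲
scale = y @ B @ (y - X @ beta) / n                 # s = n⁻¹𝐲ᵀB(𝐲 - X𝜷)
print(beta, scale)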
valid
FastScanner.fast_scan
LMLs, fixed-effect sizes, and scales for single-marker scan. Parameters ---------- M : array_like Matrix of fixed-effects across columns. verbose : bool, optional ``True`` for progress information; ``False`` otherwise. Defaults to ``True``. Returns ------- lmls : ndarray Log of the marginal likelihoods. effsizes0 : ndarray Covariate fixed-effect sizes. effsizes1 : ndarray Candidate set fixed-effect sizes. scales : ndarray Scales.
glimix_core/lmm/_lmm_scan.py
def fast_scan(self, M, verbose=True): """ LMLs, fixed-effect sizes, and scales for single-marker scan. Parameters ---------- M : array_like Matrix of fixed-effects across columns. verbose : bool, optional ``True`` for progress information; ``False`` otherwise. Defaults to ``True``. Returns ------- lmls : ndarray Log of the marginal likelihoods. effsizes0 : ndarray Covariate fixed-effect sizes. effsizes1 : ndarray Candidate set fixed-effect sizes. scales : ndarray Scales. """ from tqdm import tqdm if M.ndim != 2: raise ValueError("`M` array must be bidimensional.") p = M.shape[1] lmls = empty(p) effsizes0 = empty((p, self._XTQ[0].shape[0])) effsizes0_se = empty((p, self._XTQ[0].shape[0])) effsizes1 = empty(p) effsizes1_se = empty(p) scales = empty(p) if verbose: nchunks = min(p, 30) else: nchunks = min(p, 1) chunk_size = (p + nchunks - 1) // nchunks for i in tqdm(range(nchunks), desc="Scanning", disable=not verbose): start = i * chunk_size stop = min(start + chunk_size, M.shape[1]) r = self._fast_scan_chunk(M[:, start:stop]) lmls[start:stop] = r["lml"] effsizes0[start:stop, :] = r["effsizes0"] effsizes0_se[start:stop, :] = r["effsizes0_se"] effsizes1[start:stop] = r["effsizes1"] effsizes1_se[start:stop] = r["effsizes1_se"] scales[start:stop] = r["scale"] return { "lml": lmls, "effsizes0": effsizes0, "effsizes0_se": effsizes0_se, "effsizes1": effsizes1, "effsizes1_se": effsizes1_se, "scale": scales, }
def fast_scan(self, M, verbose=True): """ LMLs, fixed-effect sizes, and scales for single-marker scan. Parameters ---------- M : array_like Matrix of fixed-effects across columns. verbose : bool, optional ``True`` for progress information; ``False`` otherwise. Defaults to ``True``. Returns ------- lmls : ndarray Log of the marginal likelihoods. effsizes0 : ndarray Covariate fixed-effect sizes. effsizes1 : ndarray Candidate set fixed-effect sizes. scales : ndarray Scales. """ from tqdm import tqdm if M.ndim != 2: raise ValueError("`M` array must be bidimensional.") p = M.shape[1] lmls = empty(p) effsizes0 = empty((p, self._XTQ[0].shape[0])) effsizes0_se = empty((p, self._XTQ[0].shape[0])) effsizes1 = empty(p) effsizes1_se = empty(p) scales = empty(p) if verbose: nchunks = min(p, 30) else: nchunks = min(p, 1) chunk_size = (p + nchunks - 1) // nchunks for i in tqdm(range(nchunks), desc="Scanning", disable=not verbose): start = i * chunk_size stop = min(start + chunk_size, M.shape[1]) r = self._fast_scan_chunk(M[:, start:stop]) lmls[start:stop] = r["lml"] effsizes0[start:stop, :] = r["effsizes0"] effsizes0_se[start:stop, :] = r["effsizes0_se"] effsizes1[start:stop] = r["effsizes1"] effsizes1_se[start:stop] = r["effsizes1_se"] scales[start:stop] = r["scale"] return { "lml": lmls, "effsizes0": effsizes0, "effsizes0_se": effsizes0_se, "effsizes1": effsizes1, "effsizes1_se": effsizes1_se, "scale": scales, }
[ "LMLs", "fixed", "-", "effect", "sizes", "and", "scales", "for", "single", "-", "marker", "scan", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm_scan.py#L202-L265
[ "def", "fast_scan", "(", "self", ",", "M", ",", "verbose", "=", "True", ")", ":", "from", "tqdm", "import", "tqdm", "if", "M", ".", "ndim", "!=", "2", ":", "raise", "ValueError", "(", "\"`M` array must be bidimensional.\"", ")", "p", "=", "M", ".", "shape", "[", "1", "]", "lmls", "=", "empty", "(", "p", ")", "effsizes0", "=", "empty", "(", "(", "p", ",", "self", ".", "_XTQ", "[", "0", "]", ".", "shape", "[", "0", "]", ")", ")", "effsizes0_se", "=", "empty", "(", "(", "p", ",", "self", ".", "_XTQ", "[", "0", "]", ".", "shape", "[", "0", "]", ")", ")", "effsizes1", "=", "empty", "(", "p", ")", "effsizes1_se", "=", "empty", "(", "p", ")", "scales", "=", "empty", "(", "p", ")", "if", "verbose", ":", "nchunks", "=", "min", "(", "p", ",", "30", ")", "else", ":", "nchunks", "=", "min", "(", "p", ",", "1", ")", "chunk_size", "=", "(", "p", "+", "nchunks", "-", "1", ")", "//", "nchunks", "for", "i", "in", "tqdm", "(", "range", "(", "nchunks", ")", ",", "desc", "=", "\"Scanning\"", ",", "disable", "=", "not", "verbose", ")", ":", "start", "=", "i", "*", "chunk_size", "stop", "=", "min", "(", "start", "+", "chunk_size", ",", "M", ".", "shape", "[", "1", "]", ")", "r", "=", "self", ".", "_fast_scan_chunk", "(", "M", "[", ":", ",", "start", ":", "stop", "]", ")", "lmls", "[", "start", ":", "stop", "]", "=", "r", "[", "\"lml\"", "]", "effsizes0", "[", "start", ":", "stop", ",", ":", "]", "=", "r", "[", "\"effsizes0\"", "]", "effsizes0_se", "[", "start", ":", "stop", ",", ":", "]", "=", "r", "[", "\"effsizes0_se\"", "]", "effsizes1", "[", "start", ":", "stop", "]", "=", "r", "[", "\"effsizes1\"", "]", "effsizes1_se", "[", "start", ":", "stop", "]", "=", "r", "[", "\"effsizes1_se\"", "]", "scales", "[", "start", ":", "stop", "]", "=", "r", "[", "\"scale\"", "]", "return", "{", "\"lml\"", ":", "lmls", ",", "\"effsizes0\"", ":", "effsizes0", ",", "\"effsizes0_se\"", ":", "effsizes0_se", ",", "\"effsizes1\"", ":", "effsizes1", ",", "\"effsizes1_se\"", ":", "effsizes1_se", ",", "\"scale\"", ":", "scales", ",", "}" ]
cddd0994591d100499cc41c1f480ddd575e7a980
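A hedged usage sketch of the single-marker scan in the record above. The construction chain (economic_qs_linear, LMM, fit, get_fast_scanner) is assumed from the wider glimix-core and numpy-sugar API and is not part of this record; only the returned dictionary keys follow fast_scan's own documentation.

import numpy as np
from numpy_sugar.linalg import economic_qs_linear   # assumed helper for the QS decomposition
from glimix_core.lmm import LMM                     # assumed model entry point

rng = np.random.RandomState(1)
n = 200
y = rng.randn(n)                                    # outcome
X = np.ones((n, 1))                                 # covariates (intercept only)
G = rng.randn(n, 50)                                # genotypes defining the kinship
M = rng.randn(n, 300)                               # candidate markers, one per column

lmm = LMM(y, X, economic_qs_linear(G))              # assumed constructor signature
lmm.fit(verbose=False)                              # assumed fitting call
scanner = lmm.get_fast_scanner()                    # FastScanner under the null model
r = scanner.fast_scan(M, verbose=False)
best = int(np.argmax(r["lml"]))
print(best, r["lml"][best], r["effsizes1"][best], r["scale"][best])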
valid
FastScanner.scan
LML, fixed-effect sizes, and scale of the candidate set. Parameters ---------- M : array_like Fixed-effects set. Returns ------- lml : float Log of the marginal likelihood. effsizes0 : ndarray Covariates fixed-effect sizes. effsizes0_se : ndarray Covariates fixed-effect size standard errors. effsizes1 : ndarray Candidate set fixed-effect sizes. effsizes1_se : ndarray Candidate fixed-effect size standard errors. scale : ndarray Optimal scale.
glimix_core/lmm/_lmm_scan.py
def scan(self, M): """ LML, fixed-effect sizes, and scale of the candidate set. Parameters ---------- M : array_like Fixed-effects set. Returns ------- lml : float Log of the marginal likelihood. effsizes0 : ndarray Covariates fixed-effect sizes. effsizes0_se : ndarray Covariates fixed-effect size standard errors. effsizes1 : ndarray Candidate set fixed-effect sizes. effsizes1_se : ndarray Candidate fixed-effect size standard errors. scale : ndarray Optimal scale. """ from numpy_sugar.linalg import ddot from numpy_sugar import is_all_finite M = asarray(M, float) if M.shape[1] == 0: return { "lml": self.null_lml(), "effsizes0": self.null_beta, "effsizes0_se": self.null_beta_se, "effsizes1": empty((0)), "effsizes1_se": empty((0)), "scale": self.null_scale, } if not is_all_finite(M): raise ValueError("M parameter has non-finite elements.") MTQ = [dot(M.T, Q) for Q in self._QS[0] if Q.size > 0] yTBM = [dot(i, j.T) for (i, j) in zip(self._yTQDi, MTQ)] XTBM = [dot(i, j.T) for (i, j) in zip(self._XTQDi, MTQ)] D = self._D MTBM = [ddot(i, 1 / j) @ i.T for i, j in zip(MTQ, D) if j.min() > 0] return self._multicovariate_set(yTBM, XTBM, MTBM)
def scan(self, M): """ LML, fixed-effect sizes, and scale of the candidate set. Parameters ---------- M : array_like Fixed-effects set. Returns ------- lml : float Log of the marginal likelihood. effsizes0 : ndarray Covariates fixed-effect sizes. effsizes0_se : ndarray Covariates fixed-effect size standard errors. effsizes1 : ndarray Candidate set fixed-effect sizes. effsizes1_se : ndarray Candidate fixed-effect size standard errors. scale : ndarray Optimal scale. """ from numpy_sugar.linalg import ddot from numpy_sugar import is_all_finite M = asarray(M, float) if M.shape[1] == 0: return { "lml": self.null_lml(), "effsizes0": self.null_beta, "effsizes0_se": self.null_beta_se, "effsizes1": empty((0)), "effsizes1_se": empty((0)), "scale": self.null_scale, } if not is_all_finite(M): raise ValueError("M parameter has non-finite elements.") MTQ = [dot(M.T, Q) for Q in self._QS[0] if Q.size > 0] yTBM = [dot(i, j.T) for (i, j) in zip(self._yTQDi, MTQ)] XTBM = [dot(i, j.T) for (i, j) in zip(self._XTQDi, MTQ)] D = self._D MTBM = [ddot(i, 1 / j) @ i.T for i, j in zip(MTQ, D) if j.min() > 0] return self._multicovariate_set(yTBM, XTBM, MTBM)
[ "LML", "fixed", "-", "effect", "sizes", "and", "scale", "of", "the", "candidate", "set", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm_scan.py#L267-L315
[ "def", "scan", "(", "self", ",", "M", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "ddot", "from", "numpy_sugar", "import", "is_all_finite", "M", "=", "asarray", "(", "M", ",", "float", ")", "if", "M", ".", "shape", "[", "1", "]", "==", "0", ":", "return", "{", "\"lml\"", ":", "self", ".", "null_lml", "(", ")", ",", "\"effsizes0\"", ":", "self", ".", "null_beta", ",", "\"effsizes0_se\"", ":", "self", ".", "null_beta_se", ",", "\"effsizes1\"", ":", "empty", "(", "(", "0", ")", ")", ",", "\"effsizes1_se\"", ":", "empty", "(", "(", "0", ")", ")", ",", "\"scale\"", ":", "self", ".", "null_scale", ",", "}", "if", "not", "is_all_finite", "(", "M", ")", ":", "raise", "ValueError", "(", "\"M parameter has non-finite elements.\"", ")", "MTQ", "=", "[", "dot", "(", "M", ".", "T", ",", "Q", ")", "for", "Q", "in", "self", ".", "_QS", "[", "0", "]", "if", "Q", ".", "size", ">", "0", "]", "yTBM", "=", "[", "dot", "(", "i", ",", "j", ".", "T", ")", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "self", ".", "_yTQDi", ",", "MTQ", ")", "]", "XTBM", "=", "[", "dot", "(", "i", ",", "j", ".", "T", ")", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "self", ".", "_XTQDi", ",", "MTQ", ")", "]", "D", "=", "self", ".", "_D", "MTBM", "=", "[", "ddot", "(", "i", ",", "1", "/", "j", ")", "@", "i", ".", "T", "for", "i", ",", "j", "in", "zip", "(", "MTQ", ",", "D", ")", "if", "j", ".", "min", "(", ")", ">", "0", "]", "return", "self", ".", "_multicovariate_set", "(", "yTBM", ",", "XTBM", ",", "MTBM", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
KronFastScanner.null_lml
Log of the marginal likelihood for the null hypothesis. It is implemented as :: 2·log(p(Y)) = -n·p·log(2𝜋s) - log|K| - n·p, for which s and 𝚩 are optimal. Returns ------- lml : float Log of the marginal likelihood.
glimix_core/lmm/_kron2sum_scan.py
def null_lml(self): """ Log of the marginal likelihood for the null hypothesis. It is implemented as :: 2·log(p(Y)) = -n·p·log(2𝜋s) - log|K| - n·p, for which s and 𝚩 are optimal. Returns ------- lml : float Log of the marginal likelihood. """ np = self._nsamples * self._ntraits scale = self.null_scale return self._static_lml() / 2 - np * safe_log(scale) / 2 - np / 2
def null_lml(self): """ Log of the marginal likelihood for the null hypothesis. It is implemented as :: 2·log(p(Y)) = -n·p·log(2𝜋s) - log|K| - n·p, for which s and 𝚩 are optimal. Returns ------- lml : float Log of the marginal likelihood. """ np = self._nsamples * self._ntraits scale = self.null_scale return self._static_lml() / 2 - np * safe_log(scale) / 2 - np / 2
[ "Log", "of", "the", "marginal", "likelihood", "for", "the", "null", "hypothesis", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_kron2sum_scan.py#L60-L77
[ "def", "null_lml", "(", "self", ")", ":", "np", "=", "self", ".", "_nsamples", "*", "self", ".", "_ntraits", "scale", "=", "self", ".", "null_scale", "return", "self", ".", "_static_lml", "(", ")", "/", "2", "-", "np", "*", "safe_log", "(", "scale", ")", "/", "2", "-", "np", "/", "2" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
KronFastScanner.null_scale
Optimal s according to the marginal likelihood. The optimal s is given by s = (n·p)⁻¹𝐲ᵀK⁻¹(𝐲 - 𝐦), where 𝐦 = (A ⊗ X)vec(𝚩) and 𝚩 is optimal. Returns ------- scale : float Optimal scale.
glimix_core/lmm/_kron2sum_scan.py
def null_scale(self): """ Optimal s according to the marginal likelihood. The optimal s is given by s = (n·p)⁻¹𝐲ᵀK⁻¹(𝐲 - 𝐦), where 𝐦 = (A ⊗ X)vec(𝚩) and 𝚩 is optimal. Returns ------- scale : float Optimal scale. """ np = self._nsamples * self._ntraits b = vec(self.null_beta) mKiy = b.T @ self._MKiy sqrtdot = self._yKiy - mKiy scale = sqrtdot / np return scale
def null_scale(self): """ Optimal s according to the marginal likelihood. The optimal s is given by s = (n·p)⁻¹𝐲ᵀK⁻¹(𝐲 - 𝐦), where 𝐦 = (A ⊗ X)vec(𝚩) and 𝚩 is optimal. Returns ------- scale : float Optimal scale. """ np = self._nsamples * self._ntraits b = vec(self.null_beta) mKiy = b.T @ self._MKiy sqrtdot = self._yKiy - mKiy scale = sqrtdot / np return scale
[ "Optimal", "s", "according", "to", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_kron2sum_scan.py#L126-L146
[ "def", "null_scale", "(", "self", ")", ":", "np", "=", "self", ".", "_nsamples", "*", "self", ".", "_ntraits", "b", "=", "vec", "(", "self", ".", "null_beta", ")", "mKiy", "=", "b", ".", "T", "@", "self", ".", "_MKiy", "sqrtdot", "=", "self", ".", "_yKiy", "-", "mKiy", "scale", "=", "sqrtdot", "/", "np", "return", "scale" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
KronFastScanner.scan
LML, fixed-effect sizes, and scale of the candidate set. Parameters ---------- A1 : (p, e) array_like Trait-by-environments design matrix. X1 : (n, m) array_like Variants set matrix. Returns ------- lml : float Log of the marginal likelihood for the set. effsizes0 : (c, p) ndarray Fixed-effect sizes for the covariates. effsizes0_se : (c, p) ndarray Fixed-effect size standard errors for the covariates. effsizes1 : (m, e) ndarray Fixed-effect sizes for the candidates. effsizes1_se : (m, e) ndarray Fixed-effect size standard errors for the candidates. scale : float Optimal scale.
glimix_core/lmm/_kron2sum_scan.py
def scan(self, A1, X1): """ LML, fixed-effect sizes, and scale of the candidate set. Parameters ---------- A1 : (p, e) array_like Trait-by-environments design matrix. X1 : (n, m) array_like Variants set matrix. Returns ------- lml : float Log of the marginal likelihood for the set. effsizes0 : (c, p) ndarray Fixed-effect sizes for the covariates. effsizes0_se : (c, p) ndarray Fixed-effect size standard errors for the covariates. effsizes1 : (m, e) ndarray Fixed-effect sizes for the candidates. effsizes1_se : (m, e) ndarray Fixed-effect size standard errors for the candidates. scale : float Optimal scale. """ from numpy import empty from numpy.linalg import multi_dot from numpy_sugar import epsilon, is_all_finite from scipy.linalg import cho_solve A1 = asarray(A1, float) X1 = asarray(X1, float) if not is_all_finite(A1): raise ValueError("A1 parameter has non-finite elements.") if not is_all_finite(X1): raise ValueError("X1 parameter has non-finite elements.") if A1.shape[1] == 0: beta_se = sqrt(self.null_beta_covariance.diagonal()) return { "lml": self.null_lml(), "effsizes0": unvec(self.null_beta, (self._ncovariates, -1)), "effsizes0_se": unvec(beta_se, (self._ncovariates, -1)), "effsizes1": empty((0,)), "effsizes1_se": empty((0,)), "scale": self.null_scale, } X1X1 = X1.T @ X1 XX1 = self._X.T @ X1 AWA1 = self._WA.T @ A1 A1W = A1.T @ self._W GX1 = self._G.T @ X1 MRiM1 = kron(AWA1, XX1) M1RiM1 = kron(A1W @ A1, X1X1) M1Riy = vec(multi_dot([X1.T, self._Y, A1W.T])) XRiM1 = kron(self._WL0.T @ A1, GX1) ZiXRiM1 = cho_solve(self._Lz, XRiM1) MRiXZiXRiM1 = self._XRiM.T @ ZiXRiM1 M1RiXZiXRiM1 = XRiM1.T @ ZiXRiM1 M1RiXZiXRiy = XRiM1.T @ self._ZiXRiy T0 = [[self._MRiM, MRiM1], [MRiM1.T, M1RiM1]] T1 = [[self._MRiXZiXRiM, MRiXZiXRiM1], [MRiXZiXRiM1.T, M1RiXZiXRiM1]] T2 = [self._MRiy, M1Riy] T3 = [self._MRiXZiXRiy, M1RiXZiXRiy] MKiM = block(T0) - block(T1) MKiy = block(T2) - block(T3) beta = rsolve(MKiM, MKiy) mKiy = beta.T @ MKiy cp = self._ntraits * self._ncovariates effsizes0 = unvec(beta[:cp], (self._ncovariates, self._ntraits)) effsizes1 = unvec(beta[cp:], (X1.shape[1], A1.shape[1])) np = self._nsamples * self._ntraits sqrtdot = self._yKiy - mKiy scale = clip(sqrtdot / np, epsilon.tiny, inf) lml = self._static_lml() / 2 - np * safe_log(scale) / 2 - np / 2 effsizes_se = sqrt(clip(scale * pinv(MKiM).diagonal(), epsilon.tiny, inf)) effsizes0_se = unvec(effsizes_se[:cp], (self._ncovariates, self._ntraits)) effsizes1_se = unvec(effsizes_se[cp:], (X1.shape[1], A1.shape[1])) return { "lml": lml, "effsizes0": effsizes0, "effsizes1": effsizes1, "scale": scale, "effsizes0_se": effsizes0_se, "effsizes1_se": effsizes1_se, }
def scan(self, A1, X1): """ LML, fixed-effect sizes, and scale of the candidate set. Parameters ---------- A1 : (p, e) array_like Trait-by-environments design matrix. X1 : (n, m) array_like Variants set matrix. Returns ------- lml : float Log of the marginal likelihood for the set. effsizes0 : (c, p) ndarray Fixed-effect sizes for the covariates. effsizes0_se : (c, p) ndarray Fixed-effect size standard errors for the covariates. effsizes1 : (m, e) ndarray Fixed-effect sizes for the candidates. effsizes1_se : (m, e) ndarray Fixed-effect size standard errors for the candidates. scale : float Optimal scale. """ from numpy import empty from numpy.linalg import multi_dot from numpy_sugar import epsilon, is_all_finite from scipy.linalg import cho_solve A1 = asarray(A1, float) X1 = asarray(X1, float) if not is_all_finite(A1): raise ValueError("A1 parameter has non-finite elements.") if not is_all_finite(X1): raise ValueError("X1 parameter has non-finite elements.") if A1.shape[1] == 0: beta_se = sqrt(self.null_beta_covariance.diagonal()) return { "lml": self.null_lml(), "effsizes0": unvec(self.null_beta, (self._ncovariates, -1)), "effsizes0_se": unvec(beta_se, (self._ncovariates, -1)), "effsizes1": empty((0,)), "effsizes1_se": empty((0,)), "scale": self.null_scale, } X1X1 = X1.T @ X1 XX1 = self._X.T @ X1 AWA1 = self._WA.T @ A1 A1W = A1.T @ self._W GX1 = self._G.T @ X1 MRiM1 = kron(AWA1, XX1) M1RiM1 = kron(A1W @ A1, X1X1) M1Riy = vec(multi_dot([X1.T, self._Y, A1W.T])) XRiM1 = kron(self._WL0.T @ A1, GX1) ZiXRiM1 = cho_solve(self._Lz, XRiM1) MRiXZiXRiM1 = self._XRiM.T @ ZiXRiM1 M1RiXZiXRiM1 = XRiM1.T @ ZiXRiM1 M1RiXZiXRiy = XRiM1.T @ self._ZiXRiy T0 = [[self._MRiM, MRiM1], [MRiM1.T, M1RiM1]] T1 = [[self._MRiXZiXRiM, MRiXZiXRiM1], [MRiXZiXRiM1.T, M1RiXZiXRiM1]] T2 = [self._MRiy, M1Riy] T3 = [self._MRiXZiXRiy, M1RiXZiXRiy] MKiM = block(T0) - block(T1) MKiy = block(T2) - block(T3) beta = rsolve(MKiM, MKiy) mKiy = beta.T @ MKiy cp = self._ntraits * self._ncovariates effsizes0 = unvec(beta[:cp], (self._ncovariates, self._ntraits)) effsizes1 = unvec(beta[cp:], (X1.shape[1], A1.shape[1])) np = self._nsamples * self._ntraits sqrtdot = self._yKiy - mKiy scale = clip(sqrtdot / np, epsilon.tiny, inf) lml = self._static_lml() / 2 - np * safe_log(scale) / 2 - np / 2 effsizes_se = sqrt(clip(scale * pinv(MKiM).diagonal(), epsilon.tiny, inf)) effsizes0_se = unvec(effsizes_se[:cp], (self._ncovariates, self._ntraits)) effsizes1_se = unvec(effsizes_se[cp:], (X1.shape[1], A1.shape[1])) return { "lml": lml, "effsizes0": effsizes0, "effsizes1": effsizes1, "scale": scale, "effsizes0_se": effsizes0_se, "effsizes1_se": effsizes1_se, }
[ "LML", "fixed", "-", "effect", "sizes", "and", "scale", "of", "the", "candidate", "set", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_kron2sum_scan.py#L148-L246
[ "def", "scan", "(", "self", ",", "A1", ",", "X1", ")", ":", "from", "numpy", "import", "empty", "from", "numpy", ".", "linalg", "import", "multi_dot", "from", "numpy_sugar", "import", "epsilon", ",", "is_all_finite", "from", "scipy", ".", "linalg", "import", "cho_solve", "A1", "=", "asarray", "(", "A1", ",", "float", ")", "X1", "=", "asarray", "(", "X1", ",", "float", ")", "if", "not", "is_all_finite", "(", "A1", ")", ":", "raise", "ValueError", "(", "\"A1 parameter has non-finite elements.\"", ")", "if", "not", "is_all_finite", "(", "X1", ")", ":", "raise", "ValueError", "(", "\"X1 parameter has non-finite elements.\"", ")", "if", "A1", ".", "shape", "[", "1", "]", "==", "0", ":", "beta_se", "=", "sqrt", "(", "self", ".", "null_beta_covariance", ".", "diagonal", "(", ")", ")", "return", "{", "\"lml\"", ":", "self", ".", "null_lml", "(", ")", ",", "\"effsizes0\"", ":", "unvec", "(", "self", ".", "null_beta", ",", "(", "self", ".", "_ncovariates", ",", "-", "1", ")", ")", ",", "\"effsizes0_se\"", ":", "unvec", "(", "beta_se", ",", "(", "self", ".", "_ncovariates", ",", "-", "1", ")", ")", ",", "\"effsizes1\"", ":", "empty", "(", "(", "0", ",", ")", ")", ",", "\"effsizes1_se\"", ":", "empty", "(", "(", "0", ",", ")", ")", ",", "\"scale\"", ":", "self", ".", "null_scale", ",", "}", "X1X1", "=", "X1", ".", "T", "@", "X1", "XX1", "=", "self", ".", "_X", ".", "T", "@", "X1", "AWA1", "=", "self", ".", "_WA", ".", "T", "@", "A1", "A1W", "=", "A1", ".", "T", "@", "self", ".", "_W", "GX1", "=", "self", ".", "_G", ".", "T", "@", "X1", "MRiM1", "=", "kron", "(", "AWA1", ",", "XX1", ")", "M1RiM1", "=", "kron", "(", "A1W", "@", "A1", ",", "X1X1", ")", "M1Riy", "=", "vec", "(", "multi_dot", "(", "[", "X1", ".", "T", ",", "self", ".", "_Y", ",", "A1W", ".", "T", "]", ")", ")", "XRiM1", "=", "kron", "(", "self", ".", "_WL0", ".", "T", "@", "A1", ",", "GX1", ")", "ZiXRiM1", "=", "cho_solve", "(", "self", ".", "_Lz", ",", "XRiM1", ")", "MRiXZiXRiM1", "=", "self", ".", "_XRiM", ".", "T", "@", "ZiXRiM1", "M1RiXZiXRiM1", "=", "XRiM1", ".", "T", "@", "ZiXRiM1", "M1RiXZiXRiy", "=", "XRiM1", ".", "T", "@", "self", ".", "_ZiXRiy", "T0", "=", "[", "[", "self", ".", "_MRiM", ",", "MRiM1", "]", ",", "[", "MRiM1", ".", "T", ",", "M1RiM1", "]", "]", "T1", "=", "[", "[", "self", ".", "_MRiXZiXRiM", ",", "MRiXZiXRiM1", "]", ",", "[", "MRiXZiXRiM1", ".", "T", ",", "M1RiXZiXRiM1", "]", "]", "T2", "=", "[", "self", ".", "_MRiy", ",", "M1Riy", "]", "T3", "=", "[", "self", ".", "_MRiXZiXRiy", ",", "M1RiXZiXRiy", "]", "MKiM", "=", "block", "(", "T0", ")", "-", "block", "(", "T1", ")", "MKiy", "=", "block", "(", "T2", ")", "-", "block", "(", "T3", ")", "beta", "=", "rsolve", "(", "MKiM", ",", "MKiy", ")", "mKiy", "=", "beta", ".", "T", "@", "MKiy", "cp", "=", "self", ".", "_ntraits", "*", "self", ".", "_ncovariates", "effsizes0", "=", "unvec", "(", "beta", "[", ":", "cp", "]", ",", "(", "self", ".", "_ncovariates", ",", "self", ".", "_ntraits", ")", ")", "effsizes1", "=", "unvec", "(", "beta", "[", "cp", ":", "]", ",", "(", "X1", ".", "shape", "[", "1", "]", ",", "A1", ".", "shape", "[", "1", "]", ")", ")", "np", "=", "self", ".", "_nsamples", "*", "self", ".", "_ntraits", "sqrtdot", "=", "self", ".", "_yKiy", "-", "mKiy", "scale", "=", "clip", "(", "sqrtdot", "/", "np", ",", "epsilon", ".", "tiny", ",", "inf", ")", "lml", "=", "self", ".", "_static_lml", "(", ")", "/", "2", "-", "np", "*", "safe_log", "(", "scale", ")", "/", "2", "-", "np", "/", "2", "effsizes_se", "=", "sqrt", "(", "clip", "(", "scale", "*", "pinv", "(", 
"MKiM", ")", ".", "diagonal", "(", ")", ",", "epsilon", ".", "tiny", ",", "inf", ")", ")", "effsizes0_se", "=", "unvec", "(", "effsizes_se", "[", ":", "cp", "]", ",", "(", "self", ".", "_ncovariates", ",", "self", ".", "_ntraits", ")", ")", "effsizes1_se", "=", "unvec", "(", "effsizes_se", "[", "cp", ":", "]", ",", "(", "X1", ".", "shape", "[", "1", "]", ",", "A1", ".", "shape", "[", "1", "]", ")", ")", "return", "{", "\"lml\"", ":", "lml", ",", "\"effsizes0\"", ":", "effsizes0", ",", "\"effsizes1\"", ":", "effsizes1", ",", "\"scale\"", ":", "scale", ",", "\"effsizes0_se\"", ":", "effsizes0_se", ",", "\"effsizes1_se\"", ":", "effsizes1_se", ",", "}" ]
cddd0994591d100499cc41c1f480ddd575e7a980
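The scan in the record above assembles its blocks with Kronecker products and column-stacking vec(·), e.g. vec(X1ᵀ Y A1Wᵀ); the algebra rests on the identity vec(AXB) = (Bᵀ ⊗ A)vec(X). A quick numerical check of that identity on arbitrary matrices:

import numpy as np

rng = np.random.RandomState(2)
A = rng.randn(3, 4)
Xm = rng.randn(4, 2)
B = rng.randn(2, 5)

def vec(M):
    return M.reshape(-1, order="F")            # column-stacking vec(·)

lhs = vec(A @ Xm @ B)
rhs = np.kron(B.T, A) @ vec(Xm)
print(np.allclose(lhs, rhs))                   # True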
valid
GGPSampler.sample
r"""Sample from the specified distribution. Parameters ---------- random_state : random_state Set the initial random state. Returns ------- numpy.ndarray Sample.
glimix_core/random/_ggp.py
def sample(self, random_state=None): r"""Sample from the specified distribution. Parameters ---------- random_state : random_state Set the initial random state. Returns ------- numpy.ndarray Sample. """ from numpy_sugar import epsilon from numpy_sugar.linalg import sum2diag from numpy_sugar.random import multivariate_normal if random_state is None: random_state = RandomState() m = self._mean.value() K = self._cov.value().copy() sum2diag(K, +epsilon.small, out=K) return self._lik.sample(multivariate_normal(m, K, random_state), random_state)
def sample(self, random_state=None): r"""Sample from the specified distribution. Parameters ---------- random_state : random_state Set the initial random state. Returns ------- numpy.ndarray Sample. """ from numpy_sugar import epsilon from numpy_sugar.linalg import sum2diag from numpy_sugar.random import multivariate_normal if random_state is None: random_state = RandomState() m = self._mean.value() K = self._cov.value().copy() sum2diag(K, +epsilon.small, out=K) return self._lik.sample(multivariate_normal(m, K, random_state), random_state)
[ "r", "Sample", "from", "the", "specified", "distribution", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/random/_ggp.py#L52-L77
[ "def", "sample", "(", "self", ",", "random_state", "=", "None", ")", ":", "from", "numpy_sugar", "import", "epsilon", "from", "numpy_sugar", ".", "linalg", "import", "sum2diag", "from", "numpy_sugar", ".", "random", "import", "multivariate_normal", "if", "random_state", "is", "None", ":", "random_state", "=", "RandomState", "(", ")", "m", "=", "self", ".", "_mean", ".", "value", "(", ")", "K", "=", "self", ".", "_cov", ".", "value", "(", ")", ".", "copy", "(", ")", "sum2diag", "(", "K", ",", "+", "epsilon", ".", "small", ",", "out", "=", "K", ")", "return", "self", ".", "_lik", ".", "sample", "(", "multivariate_normal", "(", "m", ",", "K", ",", "random_state", ")", ",", "random_state", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
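The sampler in the record above nudges the covariance diagonal by a small epsilon before drawing the latent Gaussian and then pushes that draw through the likelihood. A plain NumPy sketch of the jitter-plus-draw step on a made-up mean and covariance (the likelihood mapping is omitted):

import numpy as np

rng = np.random.RandomState(3)
n = 30
m = np.zeros(n)                                # mean vector
G = rng.randn(n, 5)
K = G @ G.T                                    # rank-deficient covariance
K[np.diag_indices_from(K)] += 1e-8             # the role of sum2diag(K, epsilon.small)
latent = rng.multivariate_normal(m, K)         # latent Gaussian sample
print(latent[:5])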
valid
economic_qs_zeros
Eigen decomposition of a zero matrix.
glimix_core/_util/eigen.py
def economic_qs_zeros(n): """Eigen decomposition of a zero matrix.""" Q0 = empty((n, 0)) Q1 = eye(n) S0 = empty(0) return ((Q0, Q1), S0)
def economic_qs_zeros(n): """Eigen decomposition of a zero matrix.""" Q0 = empty((n, 0)) Q1 = eye(n) S0 = empty(0) return ((Q0, Q1), S0)
[ "Eigen", "decomposition", "of", "a", "zero", "matrix", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/_util/eigen.py#L4-L11
[ "def", "economic_qs_zeros", "(", "n", ")", ":", "Q0", "=", "empty", "(", "(", "n", ",", "0", ")", ")", "Q1", "=", "eye", "(", "n", ")", "S0", "=", "empty", "(", "0", ")", "return", "(", "(", "Q0", ",", "Q1", ")", ",", "S0", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
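A short sketch of the structure returned by the function above for n = 4: Q0 has no columns, Q1 is the identity, and S0 is empty, i.e. the economic eigendecomposition of a zero matrix has no non-zero eigenvalues.

import numpy as np

n = 4
Q0 = np.empty((n, 0))                          # eigenvectors for non-zero eigenvalues: none
Q1 = np.eye(n)                                 # eigenvectors spanning the null space
S0 = np.empty(0)                               # non-zero eigenvalues: empty
QS = ((Q0, Q1), S0)
print(QS[0][0].shape, QS[0][1].shape, QS[1].shape)   # (4, 0) (4, 4) (0,)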
valid
Kron2Sum.get_fast_scanner
Return :class:`.FastScanner` for association scan. Returns ------- :class:`.FastScanner` Instance of a class designed to perform very fast association scan.
glimix_core/lmm/_kron2sum.py
def get_fast_scanner(self): """ Return :class:`.FastScanner` for association scan. Returns ------- :class:`.FastScanner` Instance of a class designed to perform very fast association scan. """ terms = self._terms return KronFastScanner(self._Y, self._mean.A, self._mean.X, self._cov.Ge, terms)
def get_fast_scanner(self): """ Return :class:`.FastScanner` for association scan. Returns ------- :class:`.FastScanner` Instance of a class designed to perform very fast association scan. """ terms = self._terms return KronFastScanner(self._Y, self._mean.A, self._mean.X, self._cov.Ge, terms)
[ "Return", ":", "class", ":", ".", "FastScanner", "for", "association", "scan", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_kron2sum.py#L140-L150
[ "def", "get_fast_scanner", "(", "self", ")", ":", "terms", "=", "self", ".", "_terms", "return", "KronFastScanner", "(", "self", ".", "_Y", ",", "self", ".", "_mean", ".", "A", ",", "self", ".", "_mean", ".", "X", ",", "self", ".", "_cov", ".", "Ge", ",", "terms", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
Kron2Sum.lml
Log of the marginal likelihood. Let 𝐲 = vec(Y), M = A⊗X, and H = MᵀK⁻¹M. The restricted log of the marginal likelihood is given by [R07]_:: 2⋅log(p(𝐲)) = -(n⋅p - c⋅p) log(2π) + log(|MᵀM|) - log(|K|) - log(|H|) - (𝐲-𝐦)ᵀ K⁻¹ (𝐲-𝐦), where 𝐦 = M𝛃 for 𝛃 = H⁻¹MᵀK⁻¹𝐲. For implementation purpose, let X = (L₀ ⊗ G) and R = (L₁ ⊗ I)(L₁ ⊗ I)ᵀ. The covariance can be written as:: K = XXᵀ + R. From the Woodbury matrix identity, we have 𝐲ᵀK⁻¹𝐲 = 𝐲ᵀR⁻¹𝐲 - 𝐲ᵀR⁻¹XZ⁻¹XᵀR⁻¹𝐲, where Z = I + XᵀR⁻¹X. Note that R⁻¹ = (U₁S₁⁻¹U₁ᵀ) ⊗ I and :: XᵀR⁻¹𝐲 = (L₀ᵀW ⊗ Gᵀ)𝐲 = vec(GᵀYWL₀), where W = U₁S₁⁻¹U₁ᵀ. The term GᵀY can be calculated only once and it will form a r×p matrix. We similarly have :: XᵀR⁻¹M = (L₀ᵀWA) ⊗ (GᵀX), for which GᵀX is pre-computed. The log-determinant of the covariance matrix is given by log(|K|) = log(|Z|) - log(|R⁻¹|) = log(|Z|) - 2·n·log(|U₁S₁⁻½|). The log of the marginal likelihood can be rewritten as:: 2⋅log(p(𝐲)) = -(n⋅p - c⋅p) log(2π) + log(|MᵀM|) - log(|Z|) + 2·n·log(|U₁S₁⁻½|) - log(|MᵀR⁻¹M - MᵀR⁻¹XZ⁻¹XᵀR⁻¹M|) - 𝐲ᵀR⁻¹𝐲 + (𝐲ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐲) - 𝐦ᵀR⁻¹𝐦 + (𝐦ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐦) + 2𝐲ᵀR⁻¹𝐦 - 2(𝐲ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐦). Returns ------- lml : float Log of the marginal likelihood. References ---------- .. [R07] LaMotte, L. R. (2007). A direct derivation of the REML likelihood function. Statistical Papers, 48(2), 321-327.
glimix_core/lmm/_kron2sum.py
def lml(self): """ Log of the marginal likelihood. Let 𝐲 = vec(Y), M = A⊗X, and H = MᵀK⁻¹M. The restricted log of the marginal likelihood is given by [R07]_:: 2⋅log(p(𝐲)) = -(n⋅p - c⋅p) log(2π) + log(|MᵀM|) - log(|K|) - log(|H|) - (𝐲-𝐦)ᵀ K⁻¹ (𝐲-𝐦), where 𝐦 = M𝛃 for 𝛃 = H⁻¹MᵀK⁻¹𝐲. For implementation purpose, let X = (L₀ ⊗ G) and R = (L₁ ⊗ I)(L₁ ⊗ I)ᵀ. The covariance can be written as:: K = XXᵀ + R. From the Woodbury matrix identity, we have 𝐲ᵀK⁻¹𝐲 = 𝐲ᵀR⁻¹𝐲 - 𝐲ᵀR⁻¹XZ⁻¹XᵀR⁻¹𝐲, where Z = I + XᵀR⁻¹X. Note that R⁻¹ = (U₁S₁⁻¹U₁ᵀ) ⊗ I and :: XᵀR⁻¹𝐲 = (L₀ᵀW ⊗ Gᵀ)𝐲 = vec(GᵀYWL₀), where W = U₁S₁⁻¹U₁ᵀ. The term GᵀY can be calculated only once and it will form a r×p matrix. We similarly have :: XᵀR⁻¹M = (L₀ᵀWA) ⊗ (GᵀX), for which GᵀX is pre-computed. The log-determinant of the covariance matrix is given by log(|K|) = log(|Z|) - log(|R⁻¹|) = log(|Z|) - 2·n·log(|U₁S₁⁻½|). The log of the marginal likelihood can be rewritten as:: 2⋅log(p(𝐲)) = -(n⋅p - c⋅p) log(2π) + log(|MᵀM|) - log(|Z|) + 2·n·log(|U₁S₁⁻½|) - log(|MᵀR⁻¹M - MᵀR⁻¹XZ⁻¹XᵀR⁻¹M|) - 𝐲ᵀR⁻¹𝐲 + (𝐲ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐲) - 𝐦ᵀR⁻¹𝐦 + (𝐦ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐦) + 2𝐲ᵀR⁻¹𝐦 - 2(𝐲ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐦). Returns ------- lml : float Log of the marginal likelihood. References ---------- .. [R07] LaMotte, L. R. (2007). A direct derivation of the REML likelihood function. Statistical Papers, 48(2), 321-327. """ terms = self._terms yKiy = terms["yKiy"] mKiy = terms["mKiy"] mKim = terms["mKim"] lml = -self._df * log2pi + self._logdet_MM - self._logdetK lml -= self._logdetH lml += -yKiy - mKim + 2 * mKiy return lml / 2
def lml(self): """ Log of the marginal likelihood. Let 𝐲 = vec(Y), M = A⊗X, and H = MᵀK⁻¹M. The restricted log of the marginal likelihood is given by [R07]_:: 2⋅log(p(𝐲)) = -(n⋅p - c⋅p) log(2π) + log(|MᵀM|) - log(|K|) - log(|H|) - (𝐲-𝐦)ᵀ K⁻¹ (𝐲-𝐦), where 𝐦 = M𝛃 for 𝛃 = H⁻¹MᵀK⁻¹𝐲. For implementation purpose, let X = (L₀ ⊗ G) and R = (L₁ ⊗ I)(L₁ ⊗ I)ᵀ. The covariance can be written as:: K = XXᵀ + R. From the Woodbury matrix identity, we have 𝐲ᵀK⁻¹𝐲 = 𝐲ᵀR⁻¹𝐲 - 𝐲ᵀR⁻¹XZ⁻¹XᵀR⁻¹𝐲, where Z = I + XᵀR⁻¹X. Note that R⁻¹ = (U₁S₁⁻¹U₁ᵀ) ⊗ I and :: XᵀR⁻¹𝐲 = (L₀ᵀW ⊗ Gᵀ)𝐲 = vec(GᵀYWL₀), where W = U₁S₁⁻¹U₁ᵀ. The term GᵀY can be calculated only once and it will form a r×p matrix. We similarly have :: XᵀR⁻¹M = (L₀ᵀWA) ⊗ (GᵀX), for which GᵀX is pre-computed. The log-determinant of the covariance matrix is given by log(|K|) = log(|Z|) - log(|R⁻¹|) = log(|Z|) - 2·n·log(|U₁S₁⁻½|). The log of the marginal likelihood can be rewritten as:: 2⋅log(p(𝐲)) = -(n⋅p - c⋅p) log(2π) + log(|MᵀM|) - log(|Z|) + 2·n·log(|U₁S₁⁻½|) - log(|MᵀR⁻¹M - MᵀR⁻¹XZ⁻¹XᵀR⁻¹M|) - 𝐲ᵀR⁻¹𝐲 + (𝐲ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐲) - 𝐦ᵀR⁻¹𝐦 + (𝐦ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐦) + 2𝐲ᵀR⁻¹𝐦 - 2(𝐲ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐦). Returns ------- lml : float Log of the marginal likelihood. References ---------- .. [R07] LaMotte, L. R. (2007). A direct derivation of the REML likelihood function. Statistical Papers, 48(2), 321-327. """ terms = self._terms yKiy = terms["yKiy"] mKiy = terms["mKiy"] mKim = terms["mKim"] lml = -self._df * log2pi + self._logdet_MM - self._logdetK lml -= self._logdetH lml += -yKiy - mKim + 2 * mKiy return lml / 2
[ "Log", "of", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_kron2sum.py#L293-L357
[ "def", "lml", "(", "self", ")", ":", "terms", "=", "self", ".", "_terms", "yKiy", "=", "terms", "[", "\"yKiy\"", "]", "mKiy", "=", "terms", "[", "\"mKiy\"", "]", "mKim", "=", "terms", "[", "\"mKim\"", "]", "lml", "=", "-", "self", ".", "_df", "*", "log2pi", "+", "self", ".", "_logdet_MM", "-", "self", ".", "_logdetK", "lml", "-=", "self", ".", "_logdetH", "lml", "+=", "-", "yKiy", "-", "mKim", "+", "2", "*", "mKiy", "return", "lml", "/", "2" ]
cddd0994591d100499cc41c1f480ddd575e7a980
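The derivation in the record above hinges on the Woodbury step 𝐲ᵀK⁻¹𝐲 = 𝐲ᵀR⁻¹𝐲 - (𝐲ᵀR⁻¹X)Z⁻¹(XᵀR⁻¹𝐲) with K = XXᵀ + R and Z = I + XᵀR⁻¹X. A small numerical check of that identity on random matrices, unrelated to the class internals:

import numpy as np

rng = np.random.RandomState(4)
n, r = 40, 6
X = rng.randn(n, r)
A = rng.randn(n, n)
R = A @ A.T + np.eye(n)                        # positive-definite R
K = X @ X.T + R
y = rng.randn(n)

Ri = np.linalg.inv(R)
Z = np.eye(r) + X.T @ Ri @ X
lhs = y @ np.linalg.solve(K, y)                # yᵀK⁻¹y computed directly
rhs = y @ Ri @ y - (y @ Ri @ X) @ np.linalg.solve(Z, X.T @ Ri @ y)
print(np.isclose(lhs, rhs))                    # True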
valid
Kron2Sum._lml_gradient
Gradient of the log of the marginal likelihood. Let 𝐲 = vec(Y), 𝕂 = K⁻¹∂(K)K⁻¹, and H = MᵀK⁻¹M. The gradient is given by:: 2⋅∂log(p(𝐲)) = -tr(K⁻¹∂K) - tr(H⁻¹∂H) + 𝐲ᵀ𝕂𝐲 - 𝐦ᵀ𝕂(2⋅𝐲-𝐦) - 2⋅(𝐦-𝐲)ᵀK⁻¹∂(𝐦). Observe that ∂𝛃 = -H⁻¹(∂H)𝛃 - H⁻¹Mᵀ𝕂𝐲 and ∂H = -Mᵀ𝕂M. Let Z = I + XᵀR⁻¹X and 𝓡 = R⁻¹(∂K)R⁻¹. We use Woodbury matrix identity to write :: 𝐲ᵀ𝕂𝐲 = 𝐲ᵀ𝓡𝐲 - 2(𝐲ᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (𝐲ᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) Mᵀ𝕂M = Mᵀ𝓡M - 2(Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) Mᵀ𝕂𝐲 = Mᵀ𝓡𝐲 - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡𝐲) - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) H⁻¹ = MᵀR⁻¹M - (MᵀR⁻¹X)Z⁻¹(XᵀR⁻¹M), where we have used parentheses to separate expressions that we will compute separately. For example, we have :: 𝐲ᵀ𝓡𝐲 = 𝐲ᵀ(U₁S₁⁻¹U₁ᵀ ⊗ I)(∂C₀ ⊗ GGᵀ)(U₁S₁⁻¹U₁ᵀ ⊗ I)𝐲 = 𝐲ᵀ(U₁S₁⁻¹U₁ᵀ∂C₀ ⊗ G)(U₁S₁⁻¹U₁ᵀ ⊗ Gᵀ)𝐲 = vec(GᵀYU₁S₁⁻¹U₁ᵀ∂C₀)ᵀvec(GᵀYU₁S₁⁻¹U₁ᵀ), when the derivative is over the parameters of C₀. Otherwise, we have 𝐲ᵀ𝓡𝐲 = vec(YU₁S₁⁻¹U₁ᵀ∂C₁)ᵀvec(YU₁S₁⁻¹U₁ᵀ). The above equations can be more compactly written as 𝐲ᵀ𝓡𝐲 = vec(EᵢᵀYW∂Cᵢ)ᵀvec(EᵢᵀYW), where W = U₁S₁⁻¹U₁ᵀ, E₀ = G, and E₁ = I. We will now just state the results for the other instances of the aBc form, which follow similar derivations:: Xᵀ𝓡X = (L₀ᵀW∂CᵢWL₀) ⊗ (GᵀEᵢEᵢᵀG) Mᵀ𝓡y = (AᵀW∂Cᵢ⊗XᵀEᵢ)vec(EᵢᵀYW) = vec(XᵀEᵢEᵢᵀYW∂CᵢWA) Mᵀ𝓡X = AᵀW∂CᵢWL₀ ⊗ XᵀEᵢEᵢᵀG Mᵀ𝓡M = AᵀW∂CᵢWA ⊗ XᵀEᵢEᵢᵀX Xᵀ𝓡𝐲 = GᵀEᵢEᵢᵀYW∂CᵢWL₀ From Woodbury matrix identity and Kronecker product properties we have :: tr(K⁻¹∂K) = tr[W∂Cᵢ]tr[EᵢEᵢᵀ] - tr[Z⁻¹(Xᵀ𝓡X)] tr(H⁻¹∂H) = - tr[(MᵀR⁻¹M)(Mᵀ𝕂M)] + tr[(MᵀR⁻¹X)Z⁻¹(XᵀR⁻¹M)(Mᵀ𝕂M)] Note also that :: ∂𝛃 = H⁻¹Mᵀ𝕂M𝛃 - H⁻¹Mᵀ𝕂𝐲. Returns ------- C0.Lu : ndarray Gradient of the log of the marginal likelihood over C₀ parameters. C1.Lu : ndarray Gradient of the log of the marginal likelihood over C₁ parameters.
glimix_core/lmm/_kron2sum.py
def _lml_gradient(self): """ Gradient of the log of the marginal likelihood. Let 𝐲 = vec(Y), 𝕂 = K⁻¹∂(K)K⁻¹, and H = MᵀK⁻¹M. The gradient is given by:: 2⋅∂log(p(𝐲)) = -tr(K⁻¹∂K) - tr(H⁻¹∂H) + 𝐲ᵀ𝕂𝐲 - 𝐦ᵀ𝕂(2⋅𝐲-𝐦) - 2⋅(𝐦-𝐲)ᵀK⁻¹∂(𝐦). Observe that ∂𝛃 = -H⁻¹(∂H)𝛃 - H⁻¹Mᵀ𝕂𝐲 and ∂H = -Mᵀ𝕂M. Let Z = I + XᵀR⁻¹X and 𝓡 = R⁻¹(∂K)R⁻¹. We use Woodbury matrix identity to write :: 𝐲ᵀ𝕂𝐲 = 𝐲ᵀ𝓡𝐲 - 2(𝐲ᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (𝐲ᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) Mᵀ𝕂M = Mᵀ𝓡M - 2(Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) Mᵀ𝕂𝐲 = Mᵀ𝓡𝐲 - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡𝐲) - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) H⁻¹ = MᵀR⁻¹M - (MᵀR⁻¹X)Z⁻¹(XᵀR⁻¹M), where we have used parentheses to separate expressions that we will compute separately. For example, we have :: 𝐲ᵀ𝓡𝐲 = 𝐲ᵀ(U₁S₁⁻¹U₁ᵀ ⊗ I)(∂C₀ ⊗ GGᵀ)(U₁S₁⁻¹U₁ᵀ ⊗ I)𝐲 = 𝐲ᵀ(U₁S₁⁻¹U₁ᵀ∂C₀ ⊗ G)(U₁S₁⁻¹U₁ᵀ ⊗ Gᵀ)𝐲 = vec(GᵀYU₁S₁⁻¹U₁ᵀ∂C₀)ᵀvec(GᵀYU₁S₁⁻¹U₁ᵀ), when the derivative is over the parameters of C₀. Otherwise, we have 𝐲ᵀ𝓡𝐲 = vec(YU₁S₁⁻¹U₁ᵀ∂C₁)ᵀvec(YU₁S₁⁻¹U₁ᵀ). The above equations can be more compactly written as 𝐲ᵀ𝓡𝐲 = vec(EᵢᵀYW∂Cᵢ)ᵀvec(EᵢᵀYW), where W = U₁S₁⁻¹U₁ᵀ, E₀ = G, and E₁ = I. We will now just state the results for the other instances of the aBc form, which follow similar derivations:: Xᵀ𝓡X = (L₀ᵀW∂CᵢWL₀) ⊗ (GᵀEᵢEᵢᵀG) Mᵀ𝓡y = (AᵀW∂Cᵢ⊗XᵀEᵢ)vec(EᵢᵀYW) = vec(XᵀEᵢEᵢᵀYW∂CᵢWA) Mᵀ𝓡X = AᵀW∂CᵢWL₀ ⊗ XᵀEᵢEᵢᵀG Mᵀ𝓡M = AᵀW∂CᵢWA ⊗ XᵀEᵢEᵢᵀX Xᵀ𝓡𝐲 = GᵀEᵢEᵢᵀYW∂CᵢWL₀ From Woodbury matrix identity and Kronecker product properties we have :: tr(K⁻¹∂K) = tr[W∂Cᵢ]tr[EᵢEᵢᵀ] - tr[Z⁻¹(Xᵀ𝓡X)] tr(H⁻¹∂H) = - tr[(MᵀR⁻¹M)(Mᵀ𝕂M)] + tr[(MᵀR⁻¹X)Z⁻¹(XᵀR⁻¹M)(Mᵀ𝕂M)] Note also that :: ∂𝛃 = H⁻¹Mᵀ𝕂M𝛃 - H⁻¹Mᵀ𝕂𝐲. Returns ------- C0.Lu : ndarray Gradient of the log of the marginal likelihood over C₀ parameters. C1.Lu : ndarray Gradient of the log of the marginal likelihood over C₁ parameters. 
""" from scipy.linalg import cho_solve terms = self._terms dC0 = self._cov.C0.gradient()["Lu"] dC1 = self._cov.C1.gradient()["Lu"] b = terms["b"] W = terms["W"] Lh = terms["Lh"] Lz = terms["Lz"] WA = terms["WA"] WL0 = terms["WL0"] YW = terms["YW"] MRiM = terms["MRiM"] MRiy = terms["MRiy"] XRiM = terms["XRiM"] XRiy = terms["XRiy"] ZiXRiM = terms["ZiXRiM"] ZiXRiy = terms["ZiXRiy"] WdC0 = _mdot(W, dC0) WdC1 = _mdot(W, dC1) AWdC0 = _mdot(WA.T, dC0) AWdC1 = _mdot(WA.T, dC1) # Mᵀ𝓡M MR0M = _mkron(_mdot(AWdC0, WA), self._XGGX) MR1M = _mkron(_mdot(AWdC1, WA), self._XX) # Mᵀ𝓡X MR0X = _mkron(_mdot(AWdC0, WL0), self._XGGG) MR1X = _mkron(_mdot(AWdC1, WL0), self._GX.T) # Mᵀ𝓡𝐲 = (AᵀW∂Cᵢ⊗XᵀEᵢ)vec(EᵢᵀYW) = vec(XᵀEᵢEᵢᵀYW∂CᵢWA) MR0y = vec(_mdot(self._XGGY, _mdot(WdC0, WA))) MR1y = vec(_mdot(self._XY, WdC1, WA)) # Xᵀ𝓡X XR0X = _mkron(_mdot(WL0.T, dC0, WL0), self._GGGG) XR1X = _mkron(_mdot(WL0.T, dC1, WL0), self._GG) # Xᵀ𝓡𝐲 XR0y = vec(_mdot(self._GGGY, WdC0, WL0)) XR1y = vec(_mdot(self._GY, WdC1, WL0)) # 𝐲ᵀ𝓡𝐲 = vec(EᵢᵀYW∂Cᵢ)ᵀvec(EᵢᵀYW) yR0y = vec(_mdot(self._GY, WdC0)).T @ vec(self._GY @ W) yR1y = (YW.T * _mdot(self._Y, WdC1).T).T.sum(axis=(0, 1)) ZiXR0X = cho_solve(Lz, XR0X) ZiXR1X = cho_solve(Lz, XR1X) ZiXR0y = cho_solve(Lz, XR0y) ZiXR1y = cho_solve(Lz, XR1y) # Mᵀ𝕂y = Mᵀ𝓡𝐲 - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡𝐲) - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) # + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) MK0y = MR0y - _mdot(XRiM.T, ZiXR0y) - _mdot(MR0X, ZiXRiy) MK0y += _mdot(XRiM.T, ZiXR0X, ZiXRiy) MK1y = MR1y - _mdot(XRiM.T, ZiXR1y) - _mdot(MR1X, ZiXRiy) MK1y += _mdot(XRiM.T, ZiXR1X, ZiXRiy) # 𝐲ᵀ𝕂𝐲 = 𝐲ᵀ𝓡𝐲 - 2(𝐲ᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (𝐲ᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) yK0y = yR0y - 2 * XR0y.T @ ZiXRiy + ZiXRiy.T @ _mdot(XR0X, ZiXRiy) yK1y = yR1y - 2 * XR1y.T @ ZiXRiy + ZiXRiy.T @ _mdot(XR1X, ZiXRiy) # Mᵀ𝕂M = Mᵀ𝓡M - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡M) # + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) MR0XZiXRiM = _mdot(MR0X, ZiXRiM) MK0M = MR0M - MR0XZiXRiM - MR0XZiXRiM.transpose([1, 0, 2]) MK0M += _mdot(ZiXRiM.T, XR0X, ZiXRiM) MR1XZiXRiM = _mdot(MR1X, ZiXRiM) MK1M = MR1M - MR1XZiXRiM - MR1XZiXRiM.transpose([1, 0, 2]) MK1M += _mdot(ZiXRiM.T, XR1X, ZiXRiM) MK0m = _mdot(MK0M, b) mK0y = b.T @ MK0y mK0m = b.T @ MK0m MK1m = _mdot(MK1M, b) mK1y = b.T @ MK1y mK1m = b.T @ MK1m XRim = XRiM @ b MRim = MRiM @ b db = {"C0.Lu": cho_solve(Lh, MK0m - MK0y), "C1.Lu": cho_solve(Lh, MK1m - MK1y)} grad = { "C0.Lu": -trace(WdC0) * self._trGG + trace(ZiXR0X), "C1.Lu": -trace(WdC1) * self.nsamples + trace(ZiXR1X), } if self._restricted: grad["C0.Lu"] += cho_solve(Lh, MK0M).diagonal().sum(1) grad["C1.Lu"] += cho_solve(Lh, MK1M).diagonal().sum(1) mKiM = MRim.T - XRim.T @ ZiXRiM yKiM = MRiy.T - XRiy.T @ ZiXRiM grad["C0.Lu"] += yK0y - 2 * mK0y + mK0m - 2 * _mdot(mKiM, db["C0.Lu"]) grad["C0.Lu"] += 2 * _mdot(yKiM, db["C0.Lu"]) grad["C1.Lu"] += yK1y - 2 * mK1y + mK1m - 2 * _mdot(mKiM, db["C1.Lu"]) grad["C1.Lu"] += 2 * _mdot(yKiM, db["C1.Lu"]) grad["C0.Lu"] /= 2 grad["C1.Lu"] /= 2 return grad
def _lml_gradient(self): """ Gradient of the log of the marginal likelihood. Let 𝐲 = vec(Y), 𝕂 = K⁻¹∂(K)K⁻¹, and H = MᵀK⁻¹M. The gradient is given by:: 2⋅∂log(p(𝐲)) = -tr(K⁻¹∂K) - tr(H⁻¹∂H) + 𝐲ᵀ𝕂𝐲 - 𝐦ᵀ𝕂(2⋅𝐲-𝐦) - 2⋅(𝐦-𝐲)ᵀK⁻¹∂(𝐦). Observe that ∂𝛃 = -H⁻¹(∂H)𝛃 - H⁻¹Mᵀ𝕂𝐲 and ∂H = -Mᵀ𝕂M. Let Z = I + XᵀR⁻¹X and 𝓡 = R⁻¹(∂K)R⁻¹. We use Woodbury matrix identity to write :: 𝐲ᵀ𝕂𝐲 = 𝐲ᵀ𝓡𝐲 - 2(𝐲ᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (𝐲ᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) Mᵀ𝕂M = Mᵀ𝓡M - 2(Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) Mᵀ𝕂𝐲 = Mᵀ𝓡𝐲 - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡𝐲) - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) H⁻¹ = MᵀR⁻¹M - (MᵀR⁻¹X)Z⁻¹(XᵀR⁻¹M), where we have used parentheses to separate expressions that we will compute separately. For example, we have :: 𝐲ᵀ𝓡𝐲 = 𝐲ᵀ(U₁S₁⁻¹U₁ᵀ ⊗ I)(∂C₀ ⊗ GGᵀ)(U₁S₁⁻¹U₁ᵀ ⊗ I)𝐲 = 𝐲ᵀ(U₁S₁⁻¹U₁ᵀ∂C₀ ⊗ G)(U₁S₁⁻¹U₁ᵀ ⊗ Gᵀ)𝐲 = vec(GᵀYU₁S₁⁻¹U₁ᵀ∂C₀)ᵀvec(GᵀYU₁S₁⁻¹U₁ᵀ), when the derivative is over the parameters of C₀. Otherwise, we have 𝐲ᵀ𝓡𝐲 = vec(YU₁S₁⁻¹U₁ᵀ∂C₁)ᵀvec(YU₁S₁⁻¹U₁ᵀ). The above equations can be more compactly written as 𝐲ᵀ𝓡𝐲 = vec(EᵢᵀYW∂Cᵢ)ᵀvec(EᵢᵀYW), where W = U₁S₁⁻¹U₁ᵀ, E₀ = G, and E₁ = I. We will now just state the results for the other instances of the aBc form, which follow similar derivations:: Xᵀ𝓡X = (L₀ᵀW∂CᵢWL₀) ⊗ (GᵀEᵢEᵢᵀG) Mᵀ𝓡y = (AᵀW∂Cᵢ⊗XᵀEᵢ)vec(EᵢᵀYW) = vec(XᵀEᵢEᵢᵀYW∂CᵢWA) Mᵀ𝓡X = AᵀW∂CᵢWL₀ ⊗ XᵀEᵢEᵢᵀG Mᵀ𝓡M = AᵀW∂CᵢWA ⊗ XᵀEᵢEᵢᵀX Xᵀ𝓡𝐲 = GᵀEᵢEᵢᵀYW∂CᵢWL₀ From Woodbury matrix identity and Kronecker product properties we have :: tr(K⁻¹∂K) = tr[W∂Cᵢ]tr[EᵢEᵢᵀ] - tr[Z⁻¹(Xᵀ𝓡X)] tr(H⁻¹∂H) = - tr[(MᵀR⁻¹M)(Mᵀ𝕂M)] + tr[(MᵀR⁻¹X)Z⁻¹(XᵀR⁻¹M)(Mᵀ𝕂M)] Note also that :: ∂𝛃 = H⁻¹Mᵀ𝕂M𝛃 - H⁻¹Mᵀ𝕂𝐲. Returns ------- C0.Lu : ndarray Gradient of the log of the marginal likelihood over C₀ parameters. C1.Lu : ndarray Gradient of the log of the marginal likelihood over C₁ parameters. 
""" from scipy.linalg import cho_solve terms = self._terms dC0 = self._cov.C0.gradient()["Lu"] dC1 = self._cov.C1.gradient()["Lu"] b = terms["b"] W = terms["W"] Lh = terms["Lh"] Lz = terms["Lz"] WA = terms["WA"] WL0 = terms["WL0"] YW = terms["YW"] MRiM = terms["MRiM"] MRiy = terms["MRiy"] XRiM = terms["XRiM"] XRiy = terms["XRiy"] ZiXRiM = terms["ZiXRiM"] ZiXRiy = terms["ZiXRiy"] WdC0 = _mdot(W, dC0) WdC1 = _mdot(W, dC1) AWdC0 = _mdot(WA.T, dC0) AWdC1 = _mdot(WA.T, dC1) # Mᵀ𝓡M MR0M = _mkron(_mdot(AWdC0, WA), self._XGGX) MR1M = _mkron(_mdot(AWdC1, WA), self._XX) # Mᵀ𝓡X MR0X = _mkron(_mdot(AWdC0, WL0), self._XGGG) MR1X = _mkron(_mdot(AWdC1, WL0), self._GX.T) # Mᵀ𝓡𝐲 = (AᵀW∂Cᵢ⊗XᵀEᵢ)vec(EᵢᵀYW) = vec(XᵀEᵢEᵢᵀYW∂CᵢWA) MR0y = vec(_mdot(self._XGGY, _mdot(WdC0, WA))) MR1y = vec(_mdot(self._XY, WdC1, WA)) # Xᵀ𝓡X XR0X = _mkron(_mdot(WL0.T, dC0, WL0), self._GGGG) XR1X = _mkron(_mdot(WL0.T, dC1, WL0), self._GG) # Xᵀ𝓡𝐲 XR0y = vec(_mdot(self._GGGY, WdC0, WL0)) XR1y = vec(_mdot(self._GY, WdC1, WL0)) # 𝐲ᵀ𝓡𝐲 = vec(EᵢᵀYW∂Cᵢ)ᵀvec(EᵢᵀYW) yR0y = vec(_mdot(self._GY, WdC0)).T @ vec(self._GY @ W) yR1y = (YW.T * _mdot(self._Y, WdC1).T).T.sum(axis=(0, 1)) ZiXR0X = cho_solve(Lz, XR0X) ZiXR1X = cho_solve(Lz, XR1X) ZiXR0y = cho_solve(Lz, XR0y) ZiXR1y = cho_solve(Lz, XR1y) # Mᵀ𝕂y = Mᵀ𝓡𝐲 - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡𝐲) - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) # + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) MK0y = MR0y - _mdot(XRiM.T, ZiXR0y) - _mdot(MR0X, ZiXRiy) MK0y += _mdot(XRiM.T, ZiXR0X, ZiXRiy) MK1y = MR1y - _mdot(XRiM.T, ZiXR1y) - _mdot(MR1X, ZiXRiy) MK1y += _mdot(XRiM.T, ZiXR1X, ZiXRiy) # 𝐲ᵀ𝕂𝐲 = 𝐲ᵀ𝓡𝐲 - 2(𝐲ᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (𝐲ᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) yK0y = yR0y - 2 * XR0y.T @ ZiXRiy + ZiXRiy.T @ _mdot(XR0X, ZiXRiy) yK1y = yR1y - 2 * XR1y.T @ ZiXRiy + ZiXRiy.T @ _mdot(XR1X, ZiXRiy) # Mᵀ𝕂M = Mᵀ𝓡M - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡M) # + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) MR0XZiXRiM = _mdot(MR0X, ZiXRiM) MK0M = MR0M - MR0XZiXRiM - MR0XZiXRiM.transpose([1, 0, 2]) MK0M += _mdot(ZiXRiM.T, XR0X, ZiXRiM) MR1XZiXRiM = _mdot(MR1X, ZiXRiM) MK1M = MR1M - MR1XZiXRiM - MR1XZiXRiM.transpose([1, 0, 2]) MK1M += _mdot(ZiXRiM.T, XR1X, ZiXRiM) MK0m = _mdot(MK0M, b) mK0y = b.T @ MK0y mK0m = b.T @ MK0m MK1m = _mdot(MK1M, b) mK1y = b.T @ MK1y mK1m = b.T @ MK1m XRim = XRiM @ b MRim = MRiM @ b db = {"C0.Lu": cho_solve(Lh, MK0m - MK0y), "C1.Lu": cho_solve(Lh, MK1m - MK1y)} grad = { "C0.Lu": -trace(WdC0) * self._trGG + trace(ZiXR0X), "C1.Lu": -trace(WdC1) * self.nsamples + trace(ZiXR1X), } if self._restricted: grad["C0.Lu"] += cho_solve(Lh, MK0M).diagonal().sum(1) grad["C1.Lu"] += cho_solve(Lh, MK1M).diagonal().sum(1) mKiM = MRim.T - XRim.T @ ZiXRiM yKiM = MRiy.T - XRiy.T @ ZiXRiM grad["C0.Lu"] += yK0y - 2 * mK0y + mK0m - 2 * _mdot(mKiM, db["C0.Lu"]) grad["C0.Lu"] += 2 * _mdot(yKiM, db["C0.Lu"]) grad["C1.Lu"] += yK1y - 2 * mK1y + mK1m - 2 * _mdot(mKiM, db["C1.Lu"]) grad["C1.Lu"] += 2 * _mdot(yKiM, db["C1.Lu"]) grad["C0.Lu"] /= 2 grad["C1.Lu"] /= 2 return grad
[ "Gradient", "of", "the", "log", "of", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_kron2sum.py#L523-L691
[ "def", "_lml_gradient", "(", "self", ")", ":", "from", "scipy", ".", "linalg", "import", "cho_solve", "terms", "=", "self", ".", "_terms", "dC0", "=", "self", ".", "_cov", ".", "C0", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", "dC1", "=", "self", ".", "_cov", ".", "C1", ".", "gradient", "(", ")", "[", "\"Lu\"", "]", "b", "=", "terms", "[", "\"b\"", "]", "W", "=", "terms", "[", "\"W\"", "]", "Lh", "=", "terms", "[", "\"Lh\"", "]", "Lz", "=", "terms", "[", "\"Lz\"", "]", "WA", "=", "terms", "[", "\"WA\"", "]", "WL0", "=", "terms", "[", "\"WL0\"", "]", "YW", "=", "terms", "[", "\"YW\"", "]", "MRiM", "=", "terms", "[", "\"MRiM\"", "]", "MRiy", "=", "terms", "[", "\"MRiy\"", "]", "XRiM", "=", "terms", "[", "\"XRiM\"", "]", "XRiy", "=", "terms", "[", "\"XRiy\"", "]", "ZiXRiM", "=", "terms", "[", "\"ZiXRiM\"", "]", "ZiXRiy", "=", "terms", "[", "\"ZiXRiy\"", "]", "WdC0", "=", "_mdot", "(", "W", ",", "dC0", ")", "WdC1", "=", "_mdot", "(", "W", ",", "dC1", ")", "AWdC0", "=", "_mdot", "(", "WA", ".", "T", ",", "dC0", ")", "AWdC1", "=", "_mdot", "(", "WA", ".", "T", ",", "dC1", ")", "# Mᵀ𝓡M", "MR0M", "=", "_mkron", "(", "_mdot", "(", "AWdC0", ",", "WA", ")", ",", "self", ".", "_XGGX", ")", "MR1M", "=", "_mkron", "(", "_mdot", "(", "AWdC1", ",", "WA", ")", ",", "self", ".", "_XX", ")", "# Mᵀ𝓡X", "MR0X", "=", "_mkron", "(", "_mdot", "(", "AWdC0", ",", "WL0", ")", ",", "self", ".", "_XGGG", ")", "MR1X", "=", "_mkron", "(", "_mdot", "(", "AWdC1", ",", "WL0", ")", ",", "self", ".", "_GX", ".", "T", ")", "# Mᵀ𝓡𝐲 = (AᵀW∂Cᵢ⊗XᵀEᵢ)vec(EᵢᵀYW) = vec(XᵀEᵢEᵢᵀYW∂CᵢWA)", "MR0y", "=", "vec", "(", "_mdot", "(", "self", ".", "_XGGY", ",", "_mdot", "(", "WdC0", ",", "WA", ")", ")", ")", "MR1y", "=", "vec", "(", "_mdot", "(", "self", ".", "_XY", ",", "WdC1", ",", "WA", ")", ")", "# Xᵀ𝓡X", "XR0X", "=", "_mkron", "(", "_mdot", "(", "WL0", ".", "T", ",", "dC0", ",", "WL0", ")", ",", "self", ".", "_GGGG", ")", "XR1X", "=", "_mkron", "(", "_mdot", "(", "WL0", ".", "T", ",", "dC1", ",", "WL0", ")", ",", "self", ".", "_GG", ")", "# Xᵀ𝓡𝐲", "XR0y", "=", "vec", "(", "_mdot", "(", "self", ".", "_GGGY", ",", "WdC0", ",", "WL0", ")", ")", "XR1y", "=", "vec", "(", "_mdot", "(", "self", ".", "_GY", ",", "WdC1", ",", "WL0", ")", ")", "# 𝐲ᵀ𝓡𝐲 = vec(EᵢᵀYW∂Cᵢ)ᵀvec(EᵢᵀYW)", "yR0y", "=", "vec", "(", "_mdot", "(", "self", ".", "_GY", ",", "WdC0", ")", ")", ".", "T", "@", "vec", "(", "self", ".", "_GY", "@", "W", ")", "yR1y", "=", "(", "YW", ".", "T", "*", "_mdot", "(", "self", ".", "_Y", ",", "WdC1", ")", ".", "T", ")", ".", "T", ".", "sum", "(", "axis", "=", "(", "0", ",", "1", ")", ")", "ZiXR0X", "=", "cho_solve", "(", "Lz", ",", "XR0X", ")", "ZiXR1X", "=", "cho_solve", "(", "Lz", ",", "XR1X", ")", "ZiXR0y", "=", "cho_solve", "(", "Lz", ",", "XR0y", ")", "ZiXR1y", "=", "cho_solve", "(", "Lz", ",", "XR1y", ")", "# Mᵀ𝕂y = Mᵀ𝓡𝐲 - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡𝐲) - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲)", "# + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲)", "MK0y", "=", "MR0y", "-", "_mdot", "(", "XRiM", ".", "T", ",", "ZiXR0y", ")", "-", "_mdot", "(", "MR0X", ",", "ZiXRiy", ")", "MK0y", "+=", "_mdot", "(", "XRiM", ".", "T", ",", "ZiXR0X", ",", "ZiXRiy", ")", "MK1y", "=", "MR1y", "-", "_mdot", "(", "XRiM", ".", "T", ",", "ZiXR1y", ")", "-", "_mdot", "(", "MR1X", ",", "ZiXRiy", ")", "MK1y", "+=", "_mdot", "(", "XRiM", ".", "T", ",", "ZiXR1X", ",", "ZiXRiy", ")", "# 𝐲ᵀ𝕂𝐲 = 𝐲ᵀ𝓡𝐲 - 2(𝐲ᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲) + (𝐲ᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹𝐲)", "yK0y", "=", "yR0y", "-", "2", "*", "XR0y", ".", "T", "@", "ZiXRiy", "+", "ZiXRiy", ".", "T", "@", "_mdot", "(", "XR0X", ",", "ZiXRiy", ")", "yK1y", "=", "yR1y", "-", 
"2", "*", "XR1y", ".", "T", "@", "ZiXRiy", "+", "ZiXRiy", ".", "T", "@", "_mdot", "(", "XR1X", ",", "ZiXRiy", ")", "# Mᵀ𝕂M = Mᵀ𝓡M - (Mᵀ𝓡X)Z⁻¹(XᵀR⁻¹M) - (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡M)", "# + (MᵀR⁻¹X)Z⁻¹(Xᵀ𝓡X)Z⁻¹(XᵀR⁻¹M)", "MR0XZiXRiM", "=", "_mdot", "(", "MR0X", ",", "ZiXRiM", ")", "MK0M", "=", "MR0M", "-", "MR0XZiXRiM", "-", "MR0XZiXRiM", ".", "transpose", "(", "[", "1", ",", "0", ",", "2", "]", ")", "MK0M", "+=", "_mdot", "(", "ZiXRiM", ".", "T", ",", "XR0X", ",", "ZiXRiM", ")", "MR1XZiXRiM", "=", "_mdot", "(", "MR1X", ",", "ZiXRiM", ")", "MK1M", "=", "MR1M", "-", "MR1XZiXRiM", "-", "MR1XZiXRiM", ".", "transpose", "(", "[", "1", ",", "0", ",", "2", "]", ")", "MK1M", "+=", "_mdot", "(", "ZiXRiM", ".", "T", ",", "XR1X", ",", "ZiXRiM", ")", "MK0m", "=", "_mdot", "(", "MK0M", ",", "b", ")", "mK0y", "=", "b", ".", "T", "@", "MK0y", "mK0m", "=", "b", ".", "T", "@", "MK0m", "MK1m", "=", "_mdot", "(", "MK1M", ",", "b", ")", "mK1y", "=", "b", ".", "T", "@", "MK1y", "mK1m", "=", "b", ".", "T", "@", "MK1m", "XRim", "=", "XRiM", "@", "b", "MRim", "=", "MRiM", "@", "b", "db", "=", "{", "\"C0.Lu\"", ":", "cho_solve", "(", "Lh", ",", "MK0m", "-", "MK0y", ")", ",", "\"C1.Lu\"", ":", "cho_solve", "(", "Lh", ",", "MK1m", "-", "MK1y", ")", "}", "grad", "=", "{", "\"C0.Lu\"", ":", "-", "trace", "(", "WdC0", ")", "*", "self", ".", "_trGG", "+", "trace", "(", "ZiXR0X", ")", ",", "\"C1.Lu\"", ":", "-", "trace", "(", "WdC1", ")", "*", "self", ".", "nsamples", "+", "trace", "(", "ZiXR1X", ")", ",", "}", "if", "self", ".", "_restricted", ":", "grad", "[", "\"C0.Lu\"", "]", "+=", "cho_solve", "(", "Lh", ",", "MK0M", ")", ".", "diagonal", "(", ")", ".", "sum", "(", "1", ")", "grad", "[", "\"C1.Lu\"", "]", "+=", "cho_solve", "(", "Lh", ",", "MK1M", ")", ".", "diagonal", "(", ")", ".", "sum", "(", "1", ")", "mKiM", "=", "MRim", ".", "T", "-", "XRim", ".", "T", "@", "ZiXRiM", "yKiM", "=", "MRiy", ".", "T", "-", "XRiy", ".", "T", "@", "ZiXRiM", "grad", "[", "\"C0.Lu\"", "]", "+=", "yK0y", "-", "2", "*", "mK0y", "+", "mK0m", "-", "2", "*", "_mdot", "(", "mKiM", ",", "db", "[", "\"C0.Lu\"", "]", ")", "grad", "[", "\"C0.Lu\"", "]", "+=", "2", "*", "_mdot", "(", "yKiM", ",", "db", "[", "\"C0.Lu\"", "]", ")", "grad", "[", "\"C1.Lu\"", "]", "+=", "yK1y", "-", "2", "*", "mK1y", "+", "mK1m", "-", "2", "*", "_mdot", "(", "mKiM", ",", "db", "[", "\"C1.Lu\"", "]", ")", "grad", "[", "\"C1.Lu\"", "]", "+=", "2", "*", "_mdot", "(", "yKiM", ",", "db", "[", "\"C1.Lu\"", "]", ")", "grad", "[", "\"C0.Lu\"", "]", "/=", "2", "grad", "[", "\"C1.Lu\"", "]", "/=", "2", "return", "grad" ]
cddd0994591d100499cc41c1f480ddd575e7a980
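Analytic gradients like the one in the record above are usually validated against central finite differences. A generic, self-contained sketch of that check for any scalar function and its claimed gradient; the toy function at the end is only an illustration.

import numpy as np

def check_gradient(f, grad, params, step=1e-6):
    # Largest absolute difference between the analytic gradient and a
    # central finite-difference estimate of it.
    params = np.asarray(params, float)
    g = np.asarray(grad(params), float)
    fd = np.empty_like(params)
    for i in range(params.size):
        up, dn = params.copy(), params.copy()
        up[i] += step
        dn[i] -= step
        fd[i] = (f(up) - f(dn)) / (2 * step)
    return np.max(np.abs(g - fd))

# toy example: f(x) = -0.5 xᵀx has gradient -x
print(check_gradient(lambda x: -0.5 * x @ x, lambda x: -x, np.array([0.3, -1.2])))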
valid
GLMMExpFam.gradient
r"""Gradient of the log of the marginal likelihood. Returns ------- dict Map between variables to their gradient values.
glimix_core/glmm/_expfam.py
def gradient(self): r"""Gradient of the log of the marginal likelihood. Returns ------- dict Map between variables to their gradient values. """ self._update_approx() g = self._ep.lml_derivatives(self._X) ed = exp(-self.logitdelta) es = exp(self.logscale) grad = dict() grad["logitdelta"] = g["delta"] * (ed / (1 + ed)) / (1 + ed) grad["logscale"] = g["scale"] * es grad["beta"] = g["mean"] return grad
def gradient(self): r"""Gradient of the log of the marginal likelihood. Returns ------- dict Map between variables to their gradient values. """ self._update_approx() g = self._ep.lml_derivatives(self._X) ed = exp(-self.logitdelta) es = exp(self.logscale) grad = dict() grad["logitdelta"] = g["delta"] * (ed / (1 + ed)) / (1 + ed) grad["logscale"] = g["scale"] * es grad["beta"] = g["mean"] return grad
[ "r", "Gradient", "of", "the", "log", "of", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/glmm/_expfam.py#L127-L146
[ "def", "gradient", "(", "self", ")", ":", "self", ".", "_update_approx", "(", ")", "g", "=", "self", ".", "_ep", ".", "lml_derivatives", "(", "self", ".", "_X", ")", "ed", "=", "exp", "(", "-", "self", ".", "logitdelta", ")", "es", "=", "exp", "(", "self", ".", "logscale", ")", "grad", "=", "dict", "(", ")", "grad", "[", "\"logitdelta\"", "]", "=", "g", "[", "\"delta\"", "]", "*", "(", "ed", "/", "(", "1", "+", "ed", ")", ")", "/", "(", "1", "+", "ed", ")", "grad", "[", "\"logscale\"", "]", "=", "g", "[", "\"scale\"", "]", "*", "es", "grad", "[", "\"beta\"", "]", "=", "g", "[", "\"mean\"", "]", "return", "grad" ]
cddd0994591d100499cc41c1f480ddd575e7a980
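The chain rule in the record above maps a gradient over delta to one over logitdelta through the factor (e^{-l}/(1+e^{-l}))/(1+e^{-l}), i.e. the derivative of the logistic function, and over logscale through the factor e^{logscale}. A finite-difference check of the delta factor:

import numpy as np

def delta(logitdelta):
    return 1.0 / (1.0 + np.exp(-logitdelta))   # logistic reparameterization

l = 0.7
ed = np.exp(-l)
analytic = (ed / (1 + ed)) / (1 + ed)          # factor used in grad["logitdelta"]
h = 1e-6
numeric = (delta(l + h) - delta(l - h)) / (2 * h)
print(np.isclose(analytic, numeric))           # True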
valid
LRFreeFormCov.gradient
Derivative of the covariance matrix over the lower triangular, flat part of L. It is equal to ∂K/∂Lᵢⱼ = ALᵀ + LAᵀ, where Aᵢⱼ is an n×m matrix of zeros except at [Aᵢⱼ]ᵢⱼ=1. Returns ------- Lu : ndarray Derivative of K over the lower-triangular, flat part of L.
glimix_core/cov/_lrfree.py
def gradient(self): """ Derivative of the covariance matrix over the lower triangular, flat part of L. It is equal to ∂K/∂Lᵢⱼ = ALᵀ + LAᵀ, where Aᵢⱼ is an n×m matrix of zeros except at [Aᵢⱼ]ᵢⱼ=1. Returns ------- Lu : ndarray Derivative of K over the lower-triangular, flat part of L. """ L = self.L n = self.L.shape[0] grad = {"Lu": zeros((n, n, n * self._L.shape[1]))} for ii in range(self._L.shape[0] * self._L.shape[1]): row = ii // self._L.shape[1] col = ii % self._L.shape[1] grad["Lu"][row, :, ii] = L[:, col] grad["Lu"][:, row, ii] += L[:, col] return grad
def gradient(self): """ Derivative of the covariance matrix over the lower triangular, flat part of L. It is equal to ∂K/∂Lᵢⱼ = ALᵀ + LAᵀ, where Aᵢⱼ is an n×m matrix of zeros except at [Aᵢⱼ]ᵢⱼ=1. Returns ------- Lu : ndarray Derivative of K over the lower-triangular, flat part of L. """ L = self.L n = self.L.shape[0] grad = {"Lu": zeros((n, n, n * self._L.shape[1]))} for ii in range(self._L.shape[0] * self._L.shape[1]): row = ii // self._L.shape[1] col = ii % self._L.shape[1] grad["Lu"][row, :, ii] = L[:, col] grad["Lu"][:, row, ii] += L[:, col] return grad
[ "Derivative", "of", "the", "covariance", "matrix", "over", "the", "lower", "triangular", "flat", "part", "of", "L", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/cov/_lrfree.py#L129-L153
[ "def", "gradient", "(", "self", ")", ":", "L", "=", "self", ".", "L", "n", "=", "self", ".", "L", ".", "shape", "[", "0", "]", "grad", "=", "{", "\"Lu\"", ":", "zeros", "(", "(", "n", ",", "n", ",", "n", "*", "self", ".", "_L", ".", "shape", "[", "1", "]", ")", ")", "}", "for", "ii", "in", "range", "(", "self", ".", "_L", ".", "shape", "[", "0", "]", "*", "self", ".", "_L", ".", "shape", "[", "1", "]", ")", ":", "row", "=", "ii", "//", "self", ".", "_L", ".", "shape", "[", "1", "]", "col", "=", "ii", "%", "self", ".", "_L", ".", "shape", "[", "1", "]", "grad", "[", "\"Lu\"", "]", "[", "row", ",", ":", ",", "ii", "]", "=", "L", "[", ":", ",", "col", "]", "grad", "[", "\"Lu\"", "]", "[", ":", ",", "row", ",", "ii", "]", "+=", "L", "[", ":", ",", "col", "]", "return", "grad" ]
cddd0994591d100499cc41c1f480ddd575e7a980
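A finite-difference check of the derivative stated in the record above, ∂(LLᵀ)/∂Lᵢⱼ = ALᵀ + LAᵀ with A the single-entry matrix that is 1 at (i, j) and 0 elsewhere; L here is an arbitrary n×m matrix, not the class parameter.

import numpy as np

rng = np.random.RandomState(5)
n, m = 4, 2
L = rng.randn(n, m)

i, j = 2, 1                                    # pick one entry of L
A = np.zeros((n, m))
A[i, j] = 1.0
analytic = A @ L.T + L @ A.T                   # ∂(LLᵀ)/∂Lᵢⱼ

h = 1e-6
Lp, Lm = L.copy(), L.copy()
Lp[i, j] += h
Lm[i, j] -= h
numeric = (Lp @ Lp.T - Lm @ Lm.T) / (2 * h)
print(np.allclose(analytic, numeric))          # True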
valid
LMM.beta
Fixed-effect sizes. Returns ------- effect-sizes : numpy.ndarray Optimal fixed-effect sizes. Notes ----- Setting the derivative of log(p(𝐲)) over effect sizes equal to zero leads to solutions 𝜷 from equation :: (QᵀX)ᵀD⁻¹(QᵀX)𝜷 = (QᵀX)ᵀD⁻¹(Qᵀ𝐲).
glimix_core/lmm/_lmm.py
def beta(self): """ Fixed-effect sizes. Returns ------- effect-sizes : numpy.ndarray Optimal fixed-effect sizes. Notes ----- Setting the derivative of log(p(𝐲)) over effect sizes equal to zero leads to solutions 𝜷 from equation :: (QᵀX)ᵀD⁻¹(QᵀX)𝜷 = (QᵀX)ᵀD⁻¹(Qᵀ𝐲). """ from numpy_sugar.linalg import rsolve return rsolve(self._X["VT"], rsolve(self._X["tX"], self.mean()))
def beta(self): """ Fixed-effect sizes. Returns ------- effect-sizes : numpy.ndarray Optimal fixed-effect sizes. Notes ----- Setting the derivative of log(p(𝐲)) over effect sizes equal to zero leads to solutions 𝜷 from equation :: (QᵀX)ᵀD⁻¹(QᵀX)𝜷 = (QᵀX)ᵀD⁻¹(Qᵀ𝐲). """ from numpy_sugar.linalg import rsolve return rsolve(self._X["VT"], rsolve(self._X["tX"], self.mean()))
[ "Fixed", "-", "effect", "sizes", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L181-L199
[ "def", "beta", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "rsolve", "return", "rsolve", "(", "self", ".", "_X", "[", "\"VT\"", "]", ",", "rsolve", "(", "self", ".", "_X", "[", "\"tX\"", "]", ",", "self", ".", "mean", "(", ")", ")", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
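A NumPy sketch of the normal equations quoted in the record above, (QᵀX)ᵀD⁻¹(QᵀX)𝜷 = (QᵀX)ᵀD⁻¹(Qᵀ𝐲); Q, D, X, and 𝐲 below are random stand-ins rather than the class internals.

import numpy as np

rng = np.random.RandomState(6)
n, c = 60, 3
X = rng.randn(n, c)
y = rng.randn(n)
Q, _ = np.linalg.qr(rng.randn(n, n))           # orthogonal rotation
D = rng.rand(n) + 0.1                          # positive diagonal of the rotated covariance

QtX = Q.T @ X
Qty = Q.T @ y
lhs = QtX.T @ (QtX / D[:, None])               # (QᵀX)ᵀ D⁻¹ (QᵀX)
rhs = QtX.T @ (Qty / D)                        # (QᵀX)ᵀ D⁻¹ (Qᵀy)
beta = np.linalg.solve(lhs, rhs)
print(beta)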
valid
LMM.beta_covariance
Estimates the covariance-matrix of the optimal beta. Returns ------- beta-covariance : ndarray (Xᵀ(s((1-𝛿)K + 𝛿I))⁻¹X)⁻¹. References ---------- .. Rencher, A. C., & Schaalje, G. B. (2008). Linear models in statistics. John Wiley & Sons.
glimix_core/lmm/_lmm.py
def beta_covariance(self): """ Estimates the covariance-matrix of the optimal beta. Returns ------- beta-covariance : ndarray (Xᵀ(s((1-𝛿)K + 𝛿I))⁻¹X)⁻¹. References ---------- .. Rencher, A. C., & Schaalje, G. B. (2008). Linear models in statistics. John Wiley & Sons. """ from numpy_sugar.linalg import ddot tX = self._X["tX"] Q = concatenate(self._QS[0], axis=1) S0 = self._QS[1] D = self.v0 * S0 + self.v1 D = D.tolist() + [self.v1] * (len(self._y) - len(D)) D = asarray(D) A = inv(tX.T @ (Q @ ddot(1 / D, Q.T @ tX))) VT = self._X["VT"] H = lstsq(VT, A, rcond=None)[0] return lstsq(VT, H.T, rcond=None)[0]
def beta_covariance(self): """ Estimates the covariance-matrix of the optimal beta. Returns ------- beta-covariance : ndarray (Xᵀ(s((1-𝛿)K + 𝛿I))⁻¹X)⁻¹. References ---------- .. Rencher, A. C., & Schaalje, G. B. (2008). Linear models in statistics. John Wiley & Sons. """ from numpy_sugar.linalg import ddot tX = self._X["tX"] Q = concatenate(self._QS[0], axis=1) S0 = self._QS[1] D = self.v0 * S0 + self.v1 D = D.tolist() + [self.v1] * (len(self._y) - len(D)) D = asarray(D) A = inv(tX.T @ (Q @ ddot(1 / D, Q.T @ tX))) VT = self._X["VT"] H = lstsq(VT, A, rcond=None)[0] return lstsq(VT, H.T, rcond=None)[0]
[ "Estimates", "the", "covariance", "-", "matrix", "of", "the", "optimal", "beta", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L209-L234
[ "def", "beta_covariance", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "ddot", "tX", "=", "self", ".", "_X", "[", "\"tX\"", "]", "Q", "=", "concatenate", "(", "self", ".", "_QS", "[", "0", "]", ",", "axis", "=", "1", ")", "S0", "=", "self", ".", "_QS", "[", "1", "]", "D", "=", "self", ".", "v0", "*", "S0", "+", "self", ".", "v1", "D", "=", "D", ".", "tolist", "(", ")", "+", "[", "self", ".", "v1", "]", "*", "(", "len", "(", "self", ".", "_y", ")", "-", "len", "(", "D", ")", ")", "D", "=", "asarray", "(", "D", ")", "A", "=", "inv", "(", "tX", ".", "T", "@", "(", "Q", "@", "ddot", "(", "1", "/", "D", ",", "Q", ".", "T", "@", "tX", ")", ")", ")", "VT", "=", "self", ".", "_X", "[", "\"VT\"", "]", "H", "=", "lstsq", "(", "VT", ",", "A", ",", "rcond", "=", "None", ")", "[", "0", "]", "return", "lstsq", "(", "VT", ",", "H", ".", "T", ",", "rcond", "=", "None", ")", "[", "0", "]" ]
cddd0994591d100499cc41c1f480ddd575e7a980
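The quoted formula (Xᵀ(s((1-𝛿)K + 𝛿I))⁻¹X)⁻¹ can be evaluated directly for small problems; a dense numpy sketch with placeholder X, K, scale and ratio (the real class goes through its economic QS decomposition instead):

import numpy as np

rng = np.random.default_rng(2)
n, p = 8, 2
X = rng.normal(size=(n, p))
G = rng.normal(size=(n, n))
K = G @ G.T                      # placeholder covariance between samples
s, delta = 1.3, 0.4              # placeholder scale and variance ratio

Sigma = s * ((1 - delta) * K + delta * np.eye(n))
beta_cov = np.linalg.inv(X.T @ np.linalg.solve(Sigma, X))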
valid
LMM.fix
Disable parameter optimization. Parameters ---------- param : str Possible values are ``"delta"``, ``"beta"``, and ``"scale"``.
glimix_core/lmm/_lmm.py
def fix(self, param): """ Disable parameter optimization. Parameters ---------- param : str Possible values are ``"delta"``, ``"beta"``, and ``"scale"``. """ if param == "delta": super()._fix("logistic") else: self._fix[param] = True
def fix(self, param): """ Disable parameter optimization. Parameters ---------- param : str Possible values are ``"delta"``, ``"beta"``, and ``"scale"``. """ if param == "delta": super()._fix("logistic") else: self._fix[param] = True
[ "Disable", "parameter", "optimization", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L236-L248
[ "def", "fix", "(", "self", ",", "param", ")", ":", "if", "param", "==", "\"delta\"", ":", "super", "(", ")", ".", "_fix", "(", "\"logistic\"", ")", "else", ":", "self", ".", "_fix", "[", "param", "]", "=", "True" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM.unfix
Enable parameter optimization. Parameters ---------- param : str Possible values are ``"delta"``, ``"beta"``, and ``"scale"``.
glimix_core/lmm/_lmm.py
def unfix(self, param): """ Enable parameter optimization. Parameters ---------- param : str Possible values are ``"delta"``, ``"beta"``, and ``"scale"``. """ if param == "delta": self._unfix("logistic") else: self._fix[param] = False
def unfix(self, param): """ Enable parameter optimization. Parameters ---------- param : str Possible values are ``"delta"``, ``"beta"``, and ``"scale"``. """ if param == "delta": self._unfix("logistic") else: self._fix[param] = False
[ "Enable", "parameter", "optimization", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L250-L262
[ "def", "unfix", "(", "self", ",", "param", ")", ":", "if", "param", "==", "\"delta\"", ":", "self", ".", "_unfix", "(", "\"logistic\"", ")", "else", ":", "self", ".", "_fix", "[", "param", "]", "=", "False" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM.fit
Maximise the marginal likelihood. Parameters ---------- verbose : bool, optional ``True`` for progress output; ``False`` otherwise. Defaults to ``True``.
glimix_core/lmm/_lmm.py
def fit(self, verbose=True): """ Maximise the marginal likelihood. Parameters ---------- verbose : bool, optional ``True`` for progress output; ``False`` otherwise. Defaults to ``True``. """ if not self._isfixed("logistic"): self._maximize_scalar(desc="LMM", rtol=1e-6, atol=1e-6, verbose=verbose) if not self._fix["beta"]: self._update_beta() if not self._fix["scale"]: self._update_scale()
def fit(self, verbose=True): """ Maximise the marginal likelihood. Parameters ---------- verbose : bool, optional ``True`` for progress output; ``False`` otherwise. Defaults to ``True``. """ if not self._isfixed("logistic"): self._maximize_scalar(desc="LMM", rtol=1e-6, atol=1e-6, verbose=verbose) if not self._fix["beta"]: self._update_beta() if not self._fix["scale"]: self._update_scale()
[ "Maximise", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L288-L305
[ "def", "fit", "(", "self", ",", "verbose", "=", "True", ")", ":", "if", "not", "self", ".", "_isfixed", "(", "\"logistic\"", ")", ":", "self", ".", "_maximize_scalar", "(", "desc", "=", "\"LMM\"", ",", "rtol", "=", "1e-6", ",", "atol", "=", "1e-6", ",", "verbose", "=", "verbose", ")", "if", "not", "self", ".", "_fix", "[", "\"beta\"", "]", ":", "self", ".", "_update_beta", "(", ")", "if", "not", "self", ".", "_fix", "[", "\"scale\"", "]", ":", "self", ".", "_update_scale", "(", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
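A hedged end-to-end usage sketch for fit. It assumes the public constructor LMM(y, X, QS) and the helper economic_qs_linear from numpy_sugar (names taken from the glimix-core documentation, not from this record); the simulated data are arbitrary:

import numpy as np
from numpy_sugar.linalg import economic_qs_linear
from glimix_core.lmm import LMM

rng = np.random.default_rng(3)
n = 50
X = np.ones((n, 1))                    # intercept-only covariates
G = rng.normal(size=(n, 10))           # defines K = GGᵀ
y = rng.normal(size=n)

QS = economic_qs_linear(G)             # economic eigendecomposition of GGᵀ
lmm = LMM(y, X, QS)
lmm.fit(verbose=False)                 # maximises the marginal likelihood
print(lmm.lml(), lmm.beta, lmm.delta)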
valid
LMM.get_fast_scanner
Return :class:`.FastScanner` for association scan. Returns ------- fast-scanner : :class:`.FastScanner` Instance of a class designed to perform very fast association scan.
glimix_core/lmm/_lmm.py
def get_fast_scanner(self): """ Return :class:`.FastScanner` for association scan. Returns ------- fast-scanner : :class:`.FastScanner` Instance of a class designed to perform very fast association scan. """ v0 = self.v0 v1 = self.v1 QS = (self._QS[0], v0 * self._QS[1]) return FastScanner(self._y, self.X, QS, v1)
def get_fast_scanner(self): """ Return :class:`.FastScanner` for association scan. Returns ------- fast-scanner : :class:`.FastScanner` Instance of a class designed to perform very fast association scan. """ v0 = self.v0 v1 = self.v1 QS = (self._QS[0], v0 * self._QS[1]) return FastScanner(self._y, self.X, QS, v1)
[ "Return", ":", "class", ":", ".", "FastScanner", "for", "association", "scan", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L307-L319
[ "def", "get_fast_scanner", "(", "self", ")", ":", "v0", "=", "self", ".", "v0", "v1", "=", "self", ".", "v1", "QS", "=", "(", "self", ".", "_QS", "[", "0", "]", ",", "v0", "*", "self", ".", "_QS", "[", "1", "]", ")", "return", "FastScanner", "(", "self", ".", "_y", ",", "self", ".", "X", ",", "QS", ",", "v1", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM.value
Internal use only.
glimix_core/lmm/_lmm.py
def value(self): """ Internal use only. """ if not self._fix["beta"]: self._update_beta() if not self._fix["scale"]: self._update_scale() return self.lml()
def value(self): """ Internal use only. """ if not self._fix["beta"]: self._update_beta() if not self._fix["scale"]: self._update_scale() return self.lml()
[ "Internal", "use", "only", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L321-L331
[ "def", "value", "(", "self", ")", ":", "if", "not", "self", ".", "_fix", "[", "\"beta\"", "]", ":", "self", ".", "_update_beta", "(", ")", "if", "not", "self", ".", "_fix", "[", "\"scale\"", "]", ":", "self", ".", "_update_scale", "(", ")", "return", "self", ".", "lml", "(", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM.lml
Log of the marginal likelihood. Returns ------- lml : float Log of the marginal likelihood. Notes ----- The log of the marginal likelihood is given by :: 2⋅log(p(𝐲)) = -n⋅log(2π) - n⋅log(s) - log|D| - (Qᵀ𝐲)ᵀs⁻¹D⁻¹(Qᵀ𝐲) + (Qᵀ𝐲)ᵀs⁻¹D⁻¹(QᵀX𝜷)/2 - (QᵀX𝜷)ᵀs⁻¹D⁻¹(QᵀX𝜷). By using the optimal 𝜷, the log of the marginal likelihood can be rewritten as:: 2⋅log(p(𝐲)) = -n⋅log(2π) - n⋅log(s) - log|D| + (Qᵀ𝐲)ᵀs⁻¹D⁻¹Qᵀ(X𝜷-𝐲). In the extreme case where 𝜷 is such that 𝐲 = X𝜷, the maximum is attained as s→0. For optimals 𝜷 and s, the log of the marginal likelihood can be further simplified to :: 2⋅log(p(𝐲; 𝜷, s)) = -n⋅log(2π) - n⋅log s - log|D| - n.
glimix_core/lmm/_lmm.py
def lml(self): """ Log of the marginal likelihood. Returns ------- lml : float Log of the marginal likelihood. Notes ----- The log of the marginal likelihood is given by :: 2⋅log(p(𝐲)) = -n⋅log(2π) - n⋅log(s) - log|D| - (Qᵀ𝐲)ᵀs⁻¹D⁻¹(Qᵀ𝐲) + (Qᵀ𝐲)ᵀs⁻¹D⁻¹(QᵀX𝜷)/2 - (QᵀX𝜷)ᵀs⁻¹D⁻¹(QᵀX𝜷). By using the optimal 𝜷, the log of the marginal likelihood can be rewritten as:: 2⋅log(p(𝐲)) = -n⋅log(2π) - n⋅log(s) - log|D| + (Qᵀ𝐲)ᵀs⁻¹D⁻¹Qᵀ(X𝜷-𝐲). In the extreme case where 𝜷 is such that 𝐲 = X𝜷, the maximum is attained as s→0. For optimals 𝜷 and s, the log of the marginal likelihood can be further simplified to :: 2⋅log(p(𝐲; 𝜷, s)) = -n⋅log(2π) - n⋅log s - log|D| - n. """ reml = (self._logdetXX() - self._logdetH()) / 2 if self._optimal["scale"]: lml = self._lml_optimal_scale() else: lml = self._lml_arbitrary_scale() return lml + reml
def lml(self): """ Log of the marginal likelihood. Returns ------- lml : float Log of the marginal likelihood. Notes ----- The log of the marginal likelihood is given by :: 2⋅log(p(𝐲)) = -n⋅log(2π) - n⋅log(s) - log|D| - (Qᵀ𝐲)ᵀs⁻¹D⁻¹(Qᵀ𝐲) + (Qᵀ𝐲)ᵀs⁻¹D⁻¹(QᵀX𝜷)/2 - (QᵀX𝜷)ᵀs⁻¹D⁻¹(QᵀX𝜷). By using the optimal 𝜷, the log of the marginal likelihood can be rewritten as:: 2⋅log(p(𝐲)) = -n⋅log(2π) - n⋅log(s) - log|D| + (Qᵀ𝐲)ᵀs⁻¹D⁻¹Qᵀ(X𝜷-𝐲). In the extreme case where 𝜷 is such that 𝐲 = X𝜷, the maximum is attained as s→0. For optimals 𝜷 and s, the log of the marginal likelihood can be further simplified to :: 2⋅log(p(𝐲; 𝜷, s)) = -n⋅log(2π) - n⋅log s - log|D| - n. """ reml = (self._logdetXX() - self._logdetH()) / 2 if self._optimal["scale"]: lml = self._lml_optimal_scale() else: lml = self._lml_arbitrary_scale() return lml + reml
[ "Log", "of", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L353-L388
[ "def", "lml", "(", "self", ")", ":", "reml", "=", "(", "self", ".", "_logdetXX", "(", ")", "-", "self", ".", "_logdetH", "(", ")", ")", "/", "2", "if", "self", ".", "_optimal", "[", "\"scale\"", "]", ":", "lml", "=", "self", ".", "_lml_optimal_scale", "(", ")", "else", ":", "lml", "=", "self", ".", "_lml_arbitrary_scale", "(", ")", "return", "lml", "+", "reml" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM.delta
Variance ratio between ``K`` and ``I``.
glimix_core/lmm/_lmm.py
def delta(self): """ Variance ratio between ``K`` and ``I``. """ v = float(self._logistic.value) if v > 0.0: v = 1 / (1 + exp(-v)) else: v = exp(v) v = v / (v + 1.0) return min(max(v, epsilon.tiny), 1 - epsilon.tiny)
def delta(self): """ Variance ratio between ``K`` and ``I``. """ v = float(self._logistic.value) if v > 0.0: v = 1 / (1 + exp(-v)) else: v = exp(v) v = v / (v + 1.0) return min(max(v, epsilon.tiny), 1 - epsilon.tiny)
[ "Variance", "ratio", "between", "K", "and", "I", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L403-L416
[ "def", "delta", "(", "self", ")", ":", "v", "=", "float", "(", "self", ".", "_logistic", ".", "value", ")", "if", "v", ">", "0.0", ":", "v", "=", "1", "/", "(", "1", "+", "exp", "(", "-", "v", ")", ")", "else", ":", "v", "=", "exp", "(", "v", ")", "v", "=", "v", "/", "(", "v", "+", "1.0", ")", "return", "min", "(", "max", "(", "v", ",", "epsilon", ".", "tiny", ")", ",", "1", "-", "epsilon", ".", "tiny", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
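The branch in delta above is the standard numerically stable logistic transform (only ever exponentiate a non-positive argument). A standalone sketch of the same idea:

import math

def stable_sigmoid(v: float) -> float:
    # 1 / (1 + exp(-v)) without overflowing math.exp() for large |v|.
    if v > 0.0:
        return 1.0 / (1.0 + math.exp(-v))
    e = math.exp(v)
    return e / (e + 1.0)

assert abs(stable_sigmoid(700.0) - 1.0) < 1e-12
assert stable_sigmoid(-700.0) > 0.0    # naive 1/(1+math.exp(700)) would raise OverflowError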
valid
LMM._logdetXX
log(|XᵀX|).
glimix_core/lmm/_lmm.py
def _logdetXX(self): """ log(|XᵀX|). """ if not self._restricted: return 0.0 ldet = slogdet(self._X["tX"].T @ self._X["tX"]) if ldet[0] != 1.0: raise ValueError("The determinant of XᵀX should be positive.") return ldet[1]
def _logdetXX(self): """ log(|XᵀX|). """ if not self._restricted: return 0.0 ldet = slogdet(self._X["tX"].T @ self._X["tX"]) if ldet[0] != 1.0: raise ValueError("The determinant of XᵀX should be positive.") return ldet[1]
[ "log", "(", "|XᵀX|", ")", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L489-L499
[ "def", "_logdetXX", "(", "self", ")", ":", "if", "not", "self", ".", "_restricted", ":", "return", "0.0", "ldet", "=", "slogdet", "(", "self", ".", "_X", "[", "\"tX\"", "]", ".", "T", "@", "self", ".", "_X", "[", "\"tX\"", "]", ")", "if", "ldet", "[", "0", "]", "!=", "1.0", ":", "raise", "ValueError", "(", "\"The determinant of XᵀX should be positive.\")", "", "return", "ldet", "[", "1", "]" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM._logdetH
log(|H|) for H = s⁻¹XᵀQD⁻¹QᵀX.
glimix_core/lmm/_lmm.py
def _logdetH(self): """ log(|H|) for H = s⁻¹XᵀQD⁻¹QᵀX. """ if not self._restricted: return 0.0 ldet = slogdet(sum(self._XTQDiQTX) / self.scale) if ldet[0] != 1.0: raise ValueError("The determinant of H should be positive.") return ldet[1]
def _logdetH(self): """ log(|H|) for H = s⁻¹XᵀQD⁻¹QᵀX. """ if not self._restricted: return 0.0 ldet = slogdet(sum(self._XTQDiQTX) / self.scale) if ldet[0] != 1.0: raise ValueError("The determinant of H should be positive.") return ldet[1]
[ "log", "(", "|H|", ")", "for", "H", "=", "s⁻¹XᵀQD⁻¹QᵀX", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L501-L510
[ "def", "_logdetH", "(", "self", ")", ":", "if", "not", "self", ".", "_restricted", ":", "return", "0.0", "ldet", "=", "slogdet", "(", "sum", "(", "self", ".", "_XTQDiQTX", ")", "/", "self", ".", "scale", ")", "if", "ldet", "[", "0", "]", "!=", "1.0", ":", "raise", "ValueError", "(", "\"The determinant of H should be positive.\"", ")", "return", "ldet", "[", "1", "]" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM._lml_optimal_scale
Log of the marginal likelihood for optimal scale. Implementation for unrestricted LML:: Returns ------- lml : float Log of the marginal likelihood.
glimix_core/lmm/_lmm.py
def _lml_optimal_scale(self): """ Log of the marginal likelihood for optimal scale. Implementation for unrestricted LML:: Returns ------- lml : float Log of the marginal likelihood. """ assert self._optimal["scale"] n = len(self._y) lml = -self._df * log2pi - self._df - n * log(self.scale) lml -= sum(npsum(log(D)) for D in self._D) return lml / 2
def _lml_optimal_scale(self): """ Log of the marginal likelihood for optimal scale. Implementation for unrestricted LML:: Returns ------- lml : float Log of the marginal likelihood. """ assert self._optimal["scale"] n = len(self._y) lml = -self._df * log2pi - self._df - n * log(self.scale) lml -= sum(npsum(log(D)) for D in self._D) return lml / 2
[ "Log", "of", "the", "marginal", "likelihood", "for", "optimal", "scale", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L512-L528
[ "def", "_lml_optimal_scale", "(", "self", ")", ":", "assert", "self", ".", "_optimal", "[", "\"scale\"", "]", "n", "=", "len", "(", "self", ".", "_y", ")", "lml", "=", "-", "self", ".", "_df", "*", "log2pi", "-", "self", ".", "_df", "-", "n", "*", "log", "(", "self", ".", "scale", ")", "lml", "-=", "sum", "(", "npsum", "(", "log", "(", "D", ")", ")", "for", "D", "in", "self", ".", "_D", ")", "return", "lml", "/", "2" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM._lml_arbitrary_scale
Log of the marginal likelihood for arbitrary scale. Returns ------- lml : float Log of the marginal likelihood.
glimix_core/lmm/_lmm.py
def _lml_arbitrary_scale(self): """ Log of the marginal likelihood for arbitrary scale. Returns ------- lml : float Log of the marginal likelihood. """ s = self.scale D = self._D n = len(self._y) lml = -self._df * log2pi - n * log(s) lml -= sum(npsum(log(d)) for d in D) d = (mTQ - yTQ for (mTQ, yTQ) in zip(self._mTQ, self._yTQ)) lml -= sum((i / j) @ i for (i, j) in zip(d, D)) / s return lml / 2
def _lml_arbitrary_scale(self): """ Log of the marginal likelihood for arbitrary scale. Returns ------- lml : float Log of the marginal likelihood. """ s = self.scale D = self._D n = len(self._y) lml = -self._df * log2pi - n * log(s) lml -= sum(npsum(log(d)) for d in D) d = (mTQ - yTQ for (mTQ, yTQ) in zip(self._mTQ, self._yTQ)) lml -= sum((i / j) @ i for (i, j) in zip(d, D)) / s return lml / 2
[ "Log", "of", "the", "marginal", "likelihood", "for", "arbitrary", "scale", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L530-L547
[ "def", "_lml_arbitrary_scale", "(", "self", ")", ":", "s", "=", "self", ".", "scale", "D", "=", "self", ".", "_D", "n", "=", "len", "(", "self", ".", "_y", ")", "lml", "=", "-", "self", ".", "_df", "*", "log2pi", "-", "n", "*", "log", "(", "s", ")", "lml", "-=", "sum", "(", "npsum", "(", "log", "(", "d", ")", ")", "for", "d", "in", "D", ")", "d", "=", "(", "mTQ", "-", "yTQ", "for", "(", "mTQ", ",", "yTQ", ")", "in", "zip", "(", "self", ".", "_mTQ", ",", "self", ".", "_yTQ", ")", ")", "lml", "-=", "sum", "(", "(", "i", "/", "j", ")", "@", "i", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "d", ",", "D", ")", ")", "/", "s", "return", "lml", "/", "2" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
LMM._df
Degrees of freedom.
glimix_core/lmm/_lmm.py
def _df(self): """ Degrees of freedom. """ if not self._restricted: return self.nsamples return self.nsamples - self._X["tX"].shape[1]
def _df(self): """ Degrees of freedom. """ if not self._restricted: return self.nsamples return self.nsamples - self._X["tX"].shape[1]
[ "Degrees", "of", "freedom", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/lmm/_lmm.py#L550-L556
[ "def", "_df", "(", "self", ")", ":", "if", "not", "self", ".", "_restricted", ":", "return", "self", ".", "nsamples", "return", "self", ".", "nsamples", "-", "self", ".", "_X", "[", "\"tX\"", "]", ".", "shape", "[", "1", "]" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
GLMMNormal.get_fast_scanner
r"""Return :class:`glimix_core.lmm.FastScanner` for the current delta.
glimix_core/glmm/_normal.py
def get_fast_scanner(self): r"""Return :class:`glimix_core.lmm.FastScanner` for the current delta.""" from numpy_sugar.linalg import ddot, economic_qs, sum2diag y = self.eta / self.tau if self._QS is None: K = eye(y.shape[0]) / self.tau else: Q0 = self._QS[0][0] S0 = self._QS[1] K = dot(ddot(Q0, self.v0 * S0), Q0.T) K = sum2diag(K, 1 / self.tau) return FastScanner(y, self._X, economic_qs(K), self.v1)
def get_fast_scanner(self): r"""Return :class:`glimix_core.lmm.FastScanner` for the current delta.""" from numpy_sugar.linalg import ddot, economic_qs, sum2diag y = self.eta / self.tau if self._QS is None: K = eye(y.shape[0]) / self.tau else: Q0 = self._QS[0][0] S0 = self._QS[1] K = dot(ddot(Q0, self.v0 * S0), Q0.T) K = sum2diag(K, 1 / self.tau) return FastScanner(y, self._X, economic_qs(K), self.v1)
[ "r", "Return", ":", "class", ":", "glimix_core", ".", "lmm", ".", "FastScanner", "for", "the", "current", "delta", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/glmm/_normal.py#L97-L112
[ "def", "get_fast_scanner", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "ddot", ",", "economic_qs", ",", "sum2diag", "y", "=", "self", ".", "eta", "/", "self", ".", "tau", "if", "self", ".", "_QS", "is", "None", ":", "K", "=", "eye", "(", "y", ".", "shape", "[", "0", "]", ")", "/", "self", ".", "tau", "else", ":", "Q0", "=", "self", ".", "_QS", "[", "0", "]", "[", "0", "]", "S0", "=", "self", ".", "_QS", "[", "1", "]", "K", "=", "dot", "(", "ddot", "(", "Q0", ",", "self", ".", "v0", "*", "S0", ")", ",", "Q0", ".", "T", ")", "K", "=", "sum2diag", "(", "K", ",", "1", "/", "self", ".", "tau", ")", "return", "FastScanner", "(", "y", ",", "self", ".", "_X", ",", "economic_qs", "(", "K", ")", ",", "self", ".", "v1", ")" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
GLMMNormal.value
r"""Log of the marginal likelihood. Formally, .. math:: - \frac{n}{2}\log{2\pi} - \frac{1}{2} \log{\left| v_0 \mathrm K + v_1 \mathrm I + \tilde{\Sigma} \right|} - \frac{1}{2} \left(\tilde{\boldsymbol\mu} - \mathrm X\boldsymbol\beta\right)^{\intercal} \left( v_0 \mathrm K + v_1 \mathrm I + \tilde{\Sigma} \right)^{-1} \left(\tilde{\boldsymbol\mu} - \mathrm X\boldsymbol\beta\right) Returns ------- float :math:`\log{p(\tilde{\boldsymbol\mu})}`
glimix_core/glmm/_normal.py
def value(self): r"""Log of the marginal likelihood. Formally, .. math:: - \frac{n}{2}\log{2\pi} - \frac{1}{2} \log{\left| v_0 \mathrm K + v_1 \mathrm I + \tilde{\Sigma} \right|} - \frac{1}{2} \left(\tilde{\boldsymbol\mu} - \mathrm X\boldsymbol\beta\right)^{\intercal} \left( v_0 \mathrm K + v_1 \mathrm I + \tilde{\Sigma} \right)^{-1} \left(\tilde{\boldsymbol\mu} - \mathrm X\boldsymbol\beta\right) Returns ------- float :math:`\log{p(\tilde{\boldsymbol\mu})}` """ from numpy_sugar.linalg import ddot, sum2diag if self._cache["value"] is not None: return self._cache["value"] scale = exp(self.logscale) delta = 1 / (1 + exp(-self.logitdelta)) v0 = scale * (1 - delta) v1 = scale * delta mu = self.eta / self.tau n = len(mu) if self._QS is None: K = zeros((n, n)) else: Q0 = self._QS[0][0] S0 = self._QS[1] K = dot(ddot(Q0, S0), Q0.T) A = sum2diag(sum2diag(v0 * K, v1), 1 / self.tau) m = mu - self.mean() v = -n * log(2 * pi) v -= slogdet(A)[1] v -= dot(m, solve(A, m)) self._cache["value"] = v / 2 return self._cache["value"]
def value(self): r"""Log of the marginal likelihood. Formally, .. math:: - \frac{n}{2}\log{2\pi} - \frac{1}{2} \log{\left| v_0 \mathrm K + v_1 \mathrm I + \tilde{\Sigma} \right|} - \frac{1}{2} \left(\tilde{\boldsymbol\mu} - \mathrm X\boldsymbol\beta\right)^{\intercal} \left( v_0 \mathrm K + v_1 \mathrm I + \tilde{\Sigma} \right)^{-1} \left(\tilde{\boldsymbol\mu} - \mathrm X\boldsymbol\beta\right) Returns ------- float :math:`\log{p(\tilde{\boldsymbol\mu})}` """ from numpy_sugar.linalg import ddot, sum2diag if self._cache["value"] is not None: return self._cache["value"] scale = exp(self.logscale) delta = 1 / (1 + exp(-self.logitdelta)) v0 = scale * (1 - delta) v1 = scale * delta mu = self.eta / self.tau n = len(mu) if self._QS is None: K = zeros((n, n)) else: Q0 = self._QS[0][0] S0 = self._QS[1] K = dot(ddot(Q0, S0), Q0.T) A = sum2diag(sum2diag(v0 * K, v1), 1 / self.tau) m = mu - self.mean() v = -n * log(2 * pi) v -= slogdet(A)[1] v -= dot(m, solve(A, m)) self._cache["value"] = v / 2 return self._cache["value"]
[ "r", "Log", "of", "the", "marginal", "likelihood", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/glmm/_normal.py#L175-L226
[ "def", "value", "(", "self", ")", ":", "from", "numpy_sugar", ".", "linalg", "import", "ddot", ",", "sum2diag", "if", "self", ".", "_cache", "[", "\"value\"", "]", "is", "not", "None", ":", "return", "self", ".", "_cache", "[", "\"value\"", "]", "scale", "=", "exp", "(", "self", ".", "logscale", ")", "delta", "=", "1", "/", "(", "1", "+", "exp", "(", "-", "self", ".", "logitdelta", ")", ")", "v0", "=", "scale", "*", "(", "1", "-", "delta", ")", "v1", "=", "scale", "*", "delta", "mu", "=", "self", ".", "eta", "/", "self", ".", "tau", "n", "=", "len", "(", "mu", ")", "if", "self", ".", "_QS", "is", "None", ":", "K", "=", "zeros", "(", "(", "n", ",", "n", ")", ")", "else", ":", "Q0", "=", "self", ".", "_QS", "[", "0", "]", "[", "0", "]", "S0", "=", "self", ".", "_QS", "[", "1", "]", "K", "=", "dot", "(", "ddot", "(", "Q0", ",", "S0", ")", ",", "Q0", ".", "T", ")", "A", "=", "sum2diag", "(", "sum2diag", "(", "v0", "*", "K", ",", "v1", ")", ",", "1", "/", "self", ".", "tau", ")", "m", "=", "mu", "-", "self", ".", "mean", "(", ")", "v", "=", "-", "n", "*", "log", "(", "2", "*", "pi", ")", "v", "-=", "slogdet", "(", "A", ")", "[", "1", "]", "v", "-=", "dot", "(", "m", ",", "solve", "(", "A", ",", "m", ")", ")", "self", ".", "_cache", "[", "\"value\"", "]", "=", "v", "/", "2", "return", "self", ".", "_cache", "[", "\"value\"", "]" ]
cddd0994591d100499cc41c1f480ddd575e7a980
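The expression in the docstring is a multivariate-normal log-density with covariance v₀K + v₁I + Σ̃ (here Σ̃ = diag(1/τ)). A generic numpy sketch with placeholder inputs rather than the class internals:

import numpy as np

rng = np.random.default_rng(4)
n = 6
G = rng.normal(size=(n, n))
K = G @ G.T                            # placeholder sample covariance
v0, v1 = 0.7, 0.3                      # placeholder variance components
tau = rng.uniform(1.0, 2.0, size=n)    # site precisions, Σ̃ = diag(1/τ)
mu_tilde = rng.normal(size=n)          # pseudo-observations η/τ
mean = np.zeros(n)                     # placeholder X𝜷

A = v0 * K + v1 * np.eye(n) + np.diag(1.0 / tau)
m = mu_tilde - mean
lml = -0.5 * (n * np.log(2 * np.pi) + np.linalg.slogdet(A)[1] + m @ np.linalg.solve(A, m))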
valid
Posterior._initialize
r"""Initialize the mean and covariance of the posterior. Given that :math:`\tilde{\mathrm T}` is a matrix of zeros right before the first EP iteration, we have .. math:: \boldsymbol\mu = \mathrm K^{-1} \mathbf m ~\text{ and }~ \Sigma = \mathrm K as the initial posterior mean and covariance.
glimix_core/_ep/posterior.py
def _initialize(self): r"""Initialize the mean and covariance of the posterior. Given that :math:`\tilde{\mathrm T}` is a matrix of zeros right before the first EP iteration, we have .. math:: \boldsymbol\mu = \mathrm K^{-1} \mathbf m ~\text{ and }~ \Sigma = \mathrm K as the initial posterior mean and covariance. """ if self._mean is None or self._cov is None: return Q = self._cov["QS"][0][0] S = self._cov["QS"][1] if S.size > 0: self.tau[:] = 1 / npsum((Q * sqrt(S)) ** 2, axis=1) else: self.tau[:] = 0.0 self.eta[:] = self._mean self.eta[:] *= self.tau
def _initialize(self): r"""Initialize the mean and covariance of the posterior. Given that :math:`\tilde{\mathrm T}` is a matrix of zeros right before the first EP iteration, we have .. math:: \boldsymbol\mu = \mathrm K^{-1} \mathbf m ~\text{ and }~ \Sigma = \mathrm K as the initial posterior mean and covariance. """ if self._mean is None or self._cov is None: return Q = self._cov["QS"][0][0] S = self._cov["QS"][1] if S.size > 0: self.tau[:] = 1 / npsum((Q * sqrt(S)) ** 2, axis=1) else: self.tau[:] = 0.0 self.eta[:] = self._mean self.eta[:] *= self.tau
[ "r", "Initialize", "the", "mean", "and", "covariance", "of", "the", "posterior", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/_ep/posterior.py#L63-L87
[ "def", "_initialize", "(", "self", ")", ":", "if", "self", ".", "_mean", "is", "None", "or", "self", ".", "_cov", "is", "None", ":", "return", "Q", "=", "self", ".", "_cov", "[", "\"QS\"", "]", "[", "0", "]", "[", "0", "]", "S", "=", "self", ".", "_cov", "[", "\"QS\"", "]", "[", "1", "]", "if", "S", ".", "size", ">", "0", ":", "self", ".", "tau", "[", ":", "]", "=", "1", "/", "npsum", "(", "(", "Q", "*", "sqrt", "(", "S", ")", ")", "**", "2", ",", "axis", "=", "1", ")", "else", ":", "self", ".", "tau", "[", ":", "]", "=", "0.0", "self", ".", "eta", "[", ":", "]", "=", "self", ".", "_mean", "self", ".", "eta", "[", ":", "]", "*=", "self", ".", "tau" ]
cddd0994591d100499cc41c1f480ddd575e7a980
valid
Posterior.L
r"""Cholesky decomposition of :math:`\mathrm B`. .. math:: \mathrm B = \mathrm Q^{\intercal}\tilde{\mathrm{T}}\mathrm Q + \mathrm{S}^{-1}
glimix_core/_ep/posterior.py
def L(self): r"""Cholesky decomposition of :math:`\mathrm B`. .. math:: \mathrm B = \mathrm Q^{\intercal}\tilde{\mathrm{T}}\mathrm Q + \mathrm{S}^{-1} """ from scipy.linalg import cho_factor from numpy_sugar.linalg import ddot, sum2diag if self._L_cache is not None: return self._L_cache Q = self._cov["QS"][0][0] S = self._cov["QS"][1] B = dot(Q.T, ddot(self._site.tau, Q, left=True)) sum2diag(B, 1.0 / S, out=B) self._L_cache = cho_factor(B, lower=True)[0] return self._L_cache
def L(self): r"""Cholesky decomposition of :math:`\mathrm B`. .. math:: \mathrm B = \mathrm Q^{\intercal}\tilde{\mathrm{T}}\mathrm Q + \mathrm{S}^{-1} """ from scipy.linalg import cho_factor from numpy_sugar.linalg import ddot, sum2diag if self._L_cache is not None: return self._L_cache Q = self._cov["QS"][0][0] S = self._cov["QS"][1] B = dot(Q.T, ddot(self._site.tau, Q, left=True)) sum2diag(B, 1.0 / S, out=B) self._L_cache = cho_factor(B, lower=True)[0] return self._L_cache
[ "r", "Cholesky", "decomposition", "of", ":", "math", ":", "\\", "mathrm", "B", "." ]
limix/glimix-core
python
https://github.com/limix/glimix-core/blob/cddd0994591d100499cc41c1f480ddd575e7a980/glimix_core/_ep/posterior.py#L107-L126
[ "def", "L", "(", "self", ")", ":", "from", "scipy", ".", "linalg", "import", "cho_factor", "from", "numpy_sugar", ".", "linalg", "import", "ddot", ",", "sum2diag", "if", "self", ".", "_L_cache", "is", "not", "None", ":", "return", "self", ".", "_L_cache", "Q", "=", "self", ".", "_cov", "[", "\"QS\"", "]", "[", "0", "]", "[", "0", "]", "S", "=", "self", ".", "_cov", "[", "\"QS\"", "]", "[", "1", "]", "B", "=", "dot", "(", "Q", ".", "T", ",", "ddot", "(", "self", ".", "_site", ".", "tau", ",", "Q", ",", "left", "=", "True", ")", ")", "sum2diag", "(", "B", ",", "1.0", "/", "S", ",", "out", "=", "B", ")", "self", ".", "_L_cache", "=", "cho_factor", "(", "B", ",", "lower", "=", "True", ")", "[", "0", "]", "return", "self", ".", "_L_cache" ]
cddd0994591d100499cc41c1f480ddd575e7a980
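The factorised matrix is B = QᵀT̃Q + S⁻¹; a compact numpy/scipy sketch with placeholder Q, S and site precisions, reusing the same cho_factor call:

import numpy as np
from scipy.linalg import cho_factor, cho_solve

rng = np.random.default_rng(5)
n, r = 7, 3
Q, _ = np.linalg.qr(rng.normal(size=(n, r)))   # orthonormal columns
S = rng.uniform(0.5, 1.5, size=r)              # positive eigenvalues
tau = rng.uniform(0.1, 2.0, size=n)            # site precisions, T̃ = diag(τ)

B = Q.T @ (tau[:, None] * Q) + np.diag(1.0 / S)
L, lower = cho_factor(B, lower=True)
x = cho_solve((L, lower), np.ones(r))          # e.g. solve B x = 𝟏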
valid
build_engine_session
Build an engine and a session. :param str connection: An RFC-1738 database connection string :param bool echo: Turn on echoing SQL :param Optional[bool] autoflush: Defaults to True if not specified in kwargs or configuration. :param Optional[bool] autocommit: Defaults to False if not specified in kwargs or configuration. :param Optional[bool] expire_on_commit: Defaults to False if not specified in kwargs or configuration. :param scopefunc: Scoped function to pass to :func:`sqlalchemy.orm.scoped_session` :rtype: tuple[Engine,Session] From the Flask-SQLAlchemy documentation: An extra key ``'scopefunc'`` can be set on the ``options`` dict to specify a custom scope function. If it's not provided, Flask's app context stack identity is used. This will ensure that sessions are created and removed with the request/response cycle, and should be fine in most cases.
src/bio2bel/manager/connection_manager.py
def build_engine_session(connection, echo=False, autoflush=None, autocommit=None, expire_on_commit=None, scopefunc=None): """Build an engine and a session. :param str connection: An RFC-1738 database connection string :param bool echo: Turn on echoing SQL :param Optional[bool] autoflush: Defaults to True if not specified in kwargs or configuration. :param Optional[bool] autocommit: Defaults to False if not specified in kwargs or configuration. :param Optional[bool] expire_on_commit: Defaults to False if not specified in kwargs or configuration. :param scopefunc: Scoped function to pass to :func:`sqlalchemy.orm.scoped_session` :rtype: tuple[Engine,Session] From the Flask-SQLAlchemy documentation: An extra key ``'scopefunc'`` can be set on the ``options`` dict to specify a custom scope function. If it's not provided, Flask's app context stack identity is used. This will ensure that sessions are created and removed with the request/response cycle, and should be fine in most cases. """ if connection is None: raise ValueError('can not build engine when connection is None') engine = create_engine(connection, echo=echo) autoflush = autoflush if autoflush is not None else False autocommit = autocommit if autocommit is not None else False expire_on_commit = expire_on_commit if expire_on_commit is not None else True log.debug('auto flush: %s, auto commit: %s, expire on commmit: %s', autoflush, autocommit, expire_on_commit) #: A SQLAlchemy session maker session_maker = sessionmaker( bind=engine, autoflush=autoflush, autocommit=autocommit, expire_on_commit=expire_on_commit, ) #: A SQLAlchemy session object session = scoped_session( session_maker, scopefunc=scopefunc ) return engine, session
def build_engine_session(connection, echo=False, autoflush=None, autocommit=None, expire_on_commit=None, scopefunc=None): """Build an engine and a session. :param str connection: An RFC-1738 database connection string :param bool echo: Turn on echoing SQL :param Optional[bool] autoflush: Defaults to True if not specified in kwargs or configuration. :param Optional[bool] autocommit: Defaults to False if not specified in kwargs or configuration. :param Optional[bool] expire_on_commit: Defaults to False if not specified in kwargs or configuration. :param scopefunc: Scoped function to pass to :func:`sqlalchemy.orm.scoped_session` :rtype: tuple[Engine,Session] From the Flask-SQLAlchemy documentation: An extra key ``'scopefunc'`` can be set on the ``options`` dict to specify a custom scope function. If it's not provided, Flask's app context stack identity is used. This will ensure that sessions are created and removed with the request/response cycle, and should be fine in most cases. """ if connection is None: raise ValueError('can not build engine when connection is None') engine = create_engine(connection, echo=echo) autoflush = autoflush if autoflush is not None else False autocommit = autocommit if autocommit is not None else False expire_on_commit = expire_on_commit if expire_on_commit is not None else True log.debug('auto flush: %s, auto commit: %s, expire on commmit: %s', autoflush, autocommit, expire_on_commit) #: A SQLAlchemy session maker session_maker = sessionmaker( bind=engine, autoflush=autoflush, autocommit=autocommit, expire_on_commit=expire_on_commit, ) #: A SQLAlchemy session object session = scoped_session( session_maker, scopefunc=scopefunc ) return engine, session
[ "Build", "an", "engine", "and", "a", "session", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/connection_manager.py#L105-L150
[ "def", "build_engine_session", "(", "connection", ",", "echo", "=", "False", ",", "autoflush", "=", "None", ",", "autocommit", "=", "None", ",", "expire_on_commit", "=", "None", ",", "scopefunc", "=", "None", ")", ":", "if", "connection", "is", "None", ":", "raise", "ValueError", "(", "'can not build engine when connection is None'", ")", "engine", "=", "create_engine", "(", "connection", ",", "echo", "=", "echo", ")", "autoflush", "=", "autoflush", "if", "autoflush", "is", "not", "None", "else", "False", "autocommit", "=", "autocommit", "if", "autocommit", "is", "not", "None", "else", "False", "expire_on_commit", "=", "expire_on_commit", "if", "expire_on_commit", "is", "not", "None", "else", "True", "log", ".", "debug", "(", "'auto flush: %s, auto commit: %s, expire on commmit: %s'", ",", "autoflush", ",", "autocommit", ",", "expire_on_commit", ")", "#: A SQLAlchemy session maker", "session_maker", "=", "sessionmaker", "(", "bind", "=", "engine", ",", "autoflush", "=", "autoflush", ",", "autocommit", "=", "autocommit", ",", "expire_on_commit", "=", "expire_on_commit", ",", ")", "#: A SQLAlchemy session object", "session", "=", "scoped_session", "(", "session_maker", ",", "scopefunc", "=", "scopefunc", ")", "return", "engine", ",", "session" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
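A minimal usage sketch; the in-memory SQLite URL is a placeholder (any RFC-1738 connection string works) and the import path is inferred from the record's path field:

from bio2bel.manager.connection_manager import build_engine_session

engine, session = build_engine_session('sqlite://', echo=False)
# ... use the scoped session ...
session.remove()      # discard the current scoped session
engine.dispose()      # release the connection pool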
valid
ConnectionManager._get_connection
Get a default connection string. Wraps :func:`bio2bel.utils.get_connection` and passing this class's :data:`module_name` to it.
src/bio2bel/manager/connection_manager.py
def _get_connection(cls, connection: Optional[str] = None) -> str: """Get a default connection string. Wraps :func:`bio2bel.utils.get_connection` and passing this class's :data:`module_name` to it. """ return get_connection(cls.module_name, connection=connection)
def _get_connection(cls, connection: Optional[str] = None) -> str: """Get a default connection string. Wraps :func:`bio2bel.utils.get_connection` and passing this class's :data:`module_name` to it. """ return get_connection(cls.module_name, connection=connection)
[ "Get", "a", "default", "connection", "string", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/connection_manager.py#L82-L87
[ "def", "_get_connection", "(", "cls", ",", "connection", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "str", ":", "return", "get_connection", "(", "cls", ".", "module_name", ",", "connection", "=", "connection", ")" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
setup_smtp_factory
expects a dictionary with 'mail.' keys to create an appropriate smtplib.SMTP instance
application/briefkasten/notifications.py
def setup_smtp_factory(**settings): """ expects a dictionary with 'mail.' keys to create an appropriate smtplib.SMTP instance""" return CustomSMTP( host=settings.get('mail.host', 'localhost'), port=int(settings.get('mail.port', 25)), user=settings.get('mail.user'), password=settings.get('mail.password'), timeout=float(settings.get('mail.timeout', 60)), )
def setup_smtp_factory(**settings): """ expects a dictionary with 'mail.' keys to create an appropriate smtplib.SMTP instance""" return CustomSMTP( host=settings.get('mail.host', 'localhost'), port=int(settings.get('mail.port', 25)), user=settings.get('mail.user'), password=settings.get('mail.password'), timeout=float(settings.get('mail.timeout', 60)), )
[ "expects", "a", "dictionary", "with", "mail", ".", "keys", "to", "create", "an", "appropriate", "smtplib", ".", "SMTP", "instance" ]
ZeitOnline/briefkasten
python
https://github.com/ZeitOnline/briefkasten/blob/ce6b6eeb89196014fe21d68614c20059d02daa11/application/briefkasten/notifications.py#L26-L34
[ "def", "setup_smtp_factory", "(", "*", "*", "settings", ")", ":", "return", "CustomSMTP", "(", "host", "=", "settings", ".", "get", "(", "'mail.host'", ",", "'localhost'", ")", ",", "port", "=", "int", "(", "settings", ".", "get", "(", "'mail.port'", ",", "25", ")", ")", ",", "user", "=", "settings", ".", "get", "(", "'mail.user'", ")", ",", "password", "=", "settings", ".", "get", "(", "'mail.password'", ")", ",", "timeout", "=", "float", "(", "settings", ".", "get", "(", "'mail.timeout'", ",", "60", ")", ")", ",", ")" ]
ce6b6eeb89196014fe21d68614c20059d02daa11
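A usage sketch; the 'mail.*' keys mirror the settings.get calls above (the function unpacks the dict as **settings, which CPython accepts for dotted keys collected into **kwargs), and the host and credentials are placeholders:

from briefkasten.notifications import setup_smtp_factory   # path per the record above

settings = {
    'mail.host': 'smtp.example.org',
    'mail.port': '587',
    'mail.user': 'drop-notifier',
    'mail.password': 'not-a-real-password',
    'mail.timeout': '30',
}
smtp = setup_smtp_factory(**settings)
# later: smtp.begin(); smtp.sendmail(...); smtp.quit()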
valid
sendMultiPart
a helper method that composes and sends an email with attachments requires a pre-configured smtplib.SMTP instance
application/briefkasten/notifications.py
def sendMultiPart(smtp, gpg_context, sender, recipients, subject, text, attachments): """ a helper method that composes and sends an email with attachments requires a pre-configured smtplib.SMTP instance""" sent = 0 for to in recipients: if not to.startswith('<'): uid = '<%s>' % to else: uid = to if not checkRecipient(gpg_context, uid): continue msg = MIMEMultipart() msg['From'] = sender msg['To'] = to msg['Subject'] = subject msg["Date"] = formatdate(localtime=True) msg.preamble = u'This is an email in encrypted multipart format.' attach = MIMEText(str(gpg_context.encrypt(text.encode('utf-8'), uid, always_trust=True))) attach.set_charset('UTF-8') msg.attach(attach) for attachment in attachments: with open(attachment, 'rb') as fp: attach = MIMEBase('application', 'octet-stream') attach.set_payload(str(gpg_context.encrypt_file(fp, uid, always_trust=True))) attach.add_header('Content-Disposition', 'attachment', filename=basename('%s.pgp' % attachment)) msg.attach(attach) # TODO: need to catch exception? # yes :-) we need to adjust the status accordingly (>500 so it will be destroyed) smtp.begin() smtp.sendmail(sender, to, msg.as_string()) smtp.quit() sent += 1 return sent
def sendMultiPart(smtp, gpg_context, sender, recipients, subject, text, attachments): """ a helper method that composes and sends an email with attachments requires a pre-configured smtplib.SMTP instance""" sent = 0 for to in recipients: if not to.startswith('<'): uid = '<%s>' % to else: uid = to if not checkRecipient(gpg_context, uid): continue msg = MIMEMultipart() msg['From'] = sender msg['To'] = to msg['Subject'] = subject msg["Date"] = formatdate(localtime=True) msg.preamble = u'This is an email in encrypted multipart format.' attach = MIMEText(str(gpg_context.encrypt(text.encode('utf-8'), uid, always_trust=True))) attach.set_charset('UTF-8') msg.attach(attach) for attachment in attachments: with open(attachment, 'rb') as fp: attach = MIMEBase('application', 'octet-stream') attach.set_payload(str(gpg_context.encrypt_file(fp, uid, always_trust=True))) attach.add_header('Content-Disposition', 'attachment', filename=basename('%s.pgp' % attachment)) msg.attach(attach) # TODO: need to catch exception? # yes :-) we need to adjust the status accordingly (>500 so it will be destroyed) smtp.begin() smtp.sendmail(sender, to, msg.as_string()) smtp.quit() sent += 1 return sent
[ "a", "helper", "method", "that", "composes", "and", "sends", "an", "email", "with", "attachments", "requires", "a", "pre", "-", "configured", "smtplib", ".", "SMTP", "instance" ]
ZeitOnline/briefkasten
python
https://github.com/ZeitOnline/briefkasten/blob/ce6b6eeb89196014fe21d68614c20059d02daa11/application/briefkasten/notifications.py#L44-L83
[ "def", "sendMultiPart", "(", "smtp", ",", "gpg_context", ",", "sender", ",", "recipients", ",", "subject", ",", "text", ",", "attachments", ")", ":", "sent", "=", "0", "for", "to", "in", "recipients", ":", "if", "not", "to", ".", "startswith", "(", "'<'", ")", ":", "uid", "=", "'<%s>'", "%", "to", "else", ":", "uid", "=", "to", "if", "not", "checkRecipient", "(", "gpg_context", ",", "uid", ")", ":", "continue", "msg", "=", "MIMEMultipart", "(", ")", "msg", "[", "'From'", "]", "=", "sender", "msg", "[", "'To'", "]", "=", "to", "msg", "[", "'Subject'", "]", "=", "subject", "msg", "[", "\"Date\"", "]", "=", "formatdate", "(", "localtime", "=", "True", ")", "msg", ".", "preamble", "=", "u'This is an email in encrypted multipart format.'", "attach", "=", "MIMEText", "(", "str", "(", "gpg_context", ".", "encrypt", "(", "text", ".", "encode", "(", "'utf-8'", ")", ",", "uid", ",", "always_trust", "=", "True", ")", ")", ")", "attach", ".", "set_charset", "(", "'UTF-8'", ")", "msg", ".", "attach", "(", "attach", ")", "for", "attachment", "in", "attachments", ":", "with", "open", "(", "attachment", ",", "'rb'", ")", "as", "fp", ":", "attach", "=", "MIMEBase", "(", "'application'", ",", "'octet-stream'", ")", "attach", ".", "set_payload", "(", "str", "(", "gpg_context", ".", "encrypt_file", "(", "fp", ",", "uid", ",", "always_trust", "=", "True", ")", ")", ")", "attach", ".", "add_header", "(", "'Content-Disposition'", ",", "'attachment'", ",", "filename", "=", "basename", "(", "'%s.pgp'", "%", "attachment", ")", ")", "msg", ".", "attach", "(", "attach", ")", "# TODO: need to catch exception?", "# yes :-) we need to adjust the status accordingly (>500 so it will be destroyed)", "smtp", ".", "begin", "(", ")", "smtp", ".", "sendmail", "(", "sender", ",", "to", ",", "msg", ".", "as_string", "(", ")", ")", "smtp", ".", "quit", "(", ")", "sent", "+=", "1", "return", "sent" ]
ce6b6eeb89196014fe21d68614c20059d02daa11
valid
CustomSMTP.begin
connects and optionally authenticates a connection.
application/briefkasten/notifications.py
def begin(self): """ connects and optionally authenticates a connection.""" self.connect(self.host, self.port) if self.user: self.starttls() self.login(self.user, self.password)
def begin(self): """ connects and optionally authenticates a connection.""" self.connect(self.host, self.port) if self.user: self.starttls() self.login(self.user, self.password)
[ "connects", "and", "optionally", "authenticates", "a", "connection", "." ]
ZeitOnline/briefkasten
python
https://github.com/ZeitOnline/briefkasten/blob/ce6b6eeb89196014fe21d68614c20059d02daa11/application/briefkasten/notifications.py#L18-L23
[ "def", "begin", "(", "self", ")", ":", "self", ".", "connect", "(", "self", ".", "host", ",", "self", ".", "port", ")", "if", "self", ".", "user", ":", "self", ".", "starttls", "(", ")", "self", ".", "login", "(", "self", ".", "user", ",", "self", ".", "password", ")" ]
ce6b6eeb89196014fe21d68614c20059d02daa11
valid
make_downloader
Make a function that downloads the data for you, or uses a cached version at the given path. :param url: The URL of some data :param path: The path of the cached data, or where data is cached if it does not already exist :return: A function that downloads the data and returns the path of the data
src/bio2bel/downloading.py
def make_downloader(url: str, path: str) -> Callable[[bool], str]: # noqa: D202 """Make a function that downloads the data for you, or uses a cached version at the given path. :param url: The URL of some data :param path: The path of the cached data, or where data is cached if it does not already exist :return: A function that downloads the data and returns the path of the data """ def download_data(force_download: bool = False) -> str: """Download the data. :param force_download: If true, overwrites a previously cached file """ if os.path.exists(path) and not force_download: log.info('using cached data at %s', path) else: log.info('downloading %s to %s', url, path) urlretrieve(url, path) return path return download_data
def make_downloader(url: str, path: str) -> Callable[[bool], str]: # noqa: D202 """Make a function that downloads the data for you, or uses a cached version at the given path. :param url: The URL of some data :param path: The path of the cached data, or where data is cached if it does not already exist :return: A function that downloads the data and returns the path of the data """ def download_data(force_download: bool = False) -> str: """Download the data. :param force_download: If true, overwrites a previously cached file """ if os.path.exists(path) and not force_download: log.info('using cached data at %s', path) else: log.info('downloading %s to %s', url, path) urlretrieve(url, path) return path return download_data
[ "Make", "a", "function", "that", "downloads", "the", "data", "for", "you", "or", "uses", "a", "cached", "version", "at", "the", "given", "path", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/downloading.py#L22-L43
[ "def", "make_downloader", "(", "url", ":", "str", ",", "path", ":", "str", ")", "->", "Callable", "[", "[", "bool", "]", ",", "str", "]", ":", "# noqa: D202", "def", "download_data", "(", "force_download", ":", "bool", "=", "False", ")", "->", "str", ":", "\"\"\"Download the data.\n\n :param force_download: If true, overwrites a previously cached file\n \"\"\"", "if", "os", ".", "path", ".", "exists", "(", "path", ")", "and", "not", "force_download", ":", "log", ".", "info", "(", "'using cached data at %s'", ",", "path", ")", "else", ":", "log", ".", "info", "(", "'downloading %s to %s'", ",", "url", ",", "path", ")", "urlretrieve", "(", "url", ",", "path", ")", "return", "path", "return", "download_data" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
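Usage sketch with a placeholder URL and cache path (the import path is inferred from the record's path field):

import os
from bio2bel.downloading import make_downloader

url = 'https://example.org/data.tsv.gz'                        # placeholder
path = os.path.join('/tmp', 'bio2bel-example', 'data.tsv.gz')  # placeholder
os.makedirs(os.path.dirname(path), exist_ok=True)

download_example = make_downloader(url, path)
local_path = download_example()                    # downloads once, then reuses the cache
refreshed = download_example(force_download=True)  # overwrites the cached copy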
valid
make_df_getter
Build a function that handles downloading tabular data and parsing it into a pandas DataFrame. :param data_url: The URL of the data :param data_path: The path where the data should get stored :param kwargs: Any other arguments to pass to :func:`pandas.read_csv`
src/bio2bel/downloading.py
def make_df_getter(data_url: str, data_path: str, **kwargs) -> Callable[[Optional[str], bool, bool], pd.DataFrame]: """Build a function that handles downloading tabular data and parsing it into a pandas DataFrame. :param data_url: The URL of the data :param data_path: The path where the data should get stored :param kwargs: Any other arguments to pass to :func:`pandas.read_csv` """ download_function = make_downloader(data_url, data_path) def get_df(url: Optional[str] = None, cache: bool = True, force_download: bool = False) -> pd.DataFrame: """Get the data as a pandas DataFrame. :param url: The URL (or file path) to download. :param cache: If true, the data is downloaded to the file system, else it is loaded from the internet :param force_download: If true, overwrites a previously cached file """ if url is None and cache: url = download_function(force_download=force_download) return pd.read_csv( url or data_url, **kwargs ) return get_df
def make_df_getter(data_url: str, data_path: str, **kwargs) -> Callable[[Optional[str], bool, bool], pd.DataFrame]: """Build a function that handles downloading tabular data and parsing it into a pandas DataFrame. :param data_url: The URL of the data :param data_path: The path where the data should get stored :param kwargs: Any other arguments to pass to :func:`pandas.read_csv` """ download_function = make_downloader(data_url, data_path) def get_df(url: Optional[str] = None, cache: bool = True, force_download: bool = False) -> pd.DataFrame: """Get the data as a pandas DataFrame. :param url: The URL (or file path) to download. :param cache: If true, the data is downloaded to the file system, else it is loaded from the internet :param force_download: If true, overwrites a previously cached file """ if url is None and cache: url = download_function(force_download=force_download) return pd.read_csv( url or data_url, **kwargs ) return get_df
[ "Build", "a", "function", "that", "handles", "downloading", "tabular", "data", "and", "parsing", "it", "into", "a", "pandas", "DataFrame", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/downloading.py#L46-L70
[ "def", "make_df_getter", "(", "data_url", ":", "str", ",", "data_path", ":", "str", ",", "*", "*", "kwargs", ")", "->", "Callable", "[", "[", "Optional", "[", "str", "]", ",", "bool", ",", "bool", "]", ",", "pd", ".", "DataFrame", "]", ":", "download_function", "=", "make_downloader", "(", "data_url", ",", "data_path", ")", "def", "get_df", "(", "url", ":", "Optional", "[", "str", "]", "=", "None", ",", "cache", ":", "bool", "=", "True", ",", "force_download", ":", "bool", "=", "False", ")", "->", "pd", ".", "DataFrame", ":", "\"\"\"Get the data as a pandas DataFrame.\n\n :param url: The URL (or file path) to download.\n :param cache: If true, the data is downloaded to the file system, else it is loaded from the internet\n :param force_download: If true, overwrites a previously cached file\n \"\"\"", "if", "url", "is", "None", "and", "cache", ":", "url", "=", "download_function", "(", "force_download", "=", "force_download", ")", "return", "pd", ".", "read_csv", "(", "url", "or", "data_url", ",", "*", "*", "kwargs", ")", "return", "get_df" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
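Usage sketch showing the keyword forwarding to pandas.read_csv; the URL and cache path are placeholders and the import path is inferred from the record's path field:

from bio2bel.downloading import make_df_getter

get_example_df = make_df_getter(
    'https://example.org/table.tsv.gz',   # placeholder URL
    '/tmp/table.tsv.gz',                  # placeholder cache path
    sep='\t',
    compression='gzip',
)
df = get_example_df()   # first call downloads, later calls read the cached file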
valid
TypedUrnGenerator.generate
Generate a :term:`URI` based on parameters passed. :param id: The id of the concept or collection. :param type: What we're generating a :term:`URI` for: `concept` or `collection`. :rtype: string
skosprovider/uri.py
def generate(self, **kwargs): ''' Generate a :term:`URI` based on parameters passed. :param id: The id of the concept or collection. :param type: What we're generating a :term:`URI` for: `concept` or `collection`. :rtype: string ''' if kwargs['type'] not in ['concept', 'collection']: raise ValueError('Type %s is invalid' % kwargs['type']) return ( self.pattern % (self.vocabulary_id, kwargs['type'], kwargs['id']) ).lower()
def generate(self, **kwargs): ''' Generate a :term:`URI` based on parameters passed. :param id: The id of the concept or collection. :param type: What we're generating a :term:`URI` for: `concept` or `collection`. :rtype: string ''' if kwargs['type'] not in ['concept', 'collection']: raise ValueError('Type %s is invalid' % kwargs['type']) return ( self.pattern % (self.vocabulary_id, kwargs['type'], kwargs['id']) ).lower()
[ "Generate", "a", ":", "term", ":", "URI", "based", "on", "parameters", "passed", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/uri.py#L115-L128
[ "def", "generate", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", "[", "'type'", "]", "not", "in", "[", "'concept'", ",", "'collection'", "]", ":", "raise", "ValueError", "(", "'Type %s is invalid'", "%", "kwargs", "[", "'type'", "]", ")", "return", "(", "self", ".", "pattern", "%", "(", "self", ".", "vocabulary_id", ",", "kwargs", "[", "'type'", "]", ",", "kwargs", "[", "'id'", "]", ")", ")", ".", "lower", "(", ")" ]
7304a37953978ca8227febc2d3cc2b2be178f215
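The method reduces to pattern % (vocabulary_id, type, id) followed by lower(); a self-contained sketch of that step with a hypothetical URN pattern (the real pattern is defined on the generator class):

pattern = 'urn:x-skosprovider:%s:%s:%s'   # hypothetical pattern
vocabulary_id = 'TREES'
uri = (pattern % (vocabulary_id, 'concept', 12)).lower()
assert uri == 'urn:x-skosprovider:trees:concept:12'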
valid
has_address
Determine whether the packet has an "address" encoded into it. There exists an undocumented bug/edge case in the spec - some packets with 0x82 as _start_, still encode the address into the packet, and thus throws off decoding. This edge case is handled explicitly.
nessclient/packet.py
def has_address(start: int, data_length: int) -> bool: """ Determine whether the packet has an "address" encoded into it. There exists an undocumented bug/edge case in the spec - some packets with 0x82 as _start_, still encode the address into the packet, and thus throws off decoding. This edge case is handled explicitly. """ return bool(0x01 & start) or (start == 0x82 and data_length == 16)
def has_address(start: int, data_length: int) -> bool: """ Determine whether the packet has an "address" encoded into it. There exists an undocumented bug/edge case in the spec - some packets with 0x82 as _start_, still encode the address into the packet, and thus throws off decoding. This edge case is handled explicitly. """ return bool(0x01 & start) or (start == 0x82 and data_length == 16)
[ "Determine", "whether", "the", "packet", "has", "an", "address", "encoded", "into", "it", ".", "There", "exists", "an", "undocumented", "bug", "/", "edge", "case", "in", "the", "spec", "-", "some", "packets", "with", "0x82", "as", "_start_", "still", "encode", "the", "address", "into", "the", "packet", "and", "thus", "throws", "off", "decoding", ".", "This", "edge", "case", "is", "handled", "explicitly", "." ]
nickw444/nessclient
python
https://github.com/nickw444/nessclient/blob/9a2e3d450448312f56e708b8c7adeaef878cc28a/nessclient/packet.py#L164-L171
[ "def", "has_address", "(", "start", ":", "int", ",", "data_length", ":", "int", ")", "->", "bool", ":", "return", "bool", "(", "0x01", "&", "start", ")", "or", "(", "start", "==", "0x82", "and", "data_length", "==", "16", ")" ]
9a2e3d450448312f56e708b8c7adeaef878cc28a
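A quick check of the documented behaviour, including the 0x82/length-16 edge case (import path per the record's path field):

from nessclient.packet import has_address

assert has_address(0x83, 8)        # low bit of start set → address present
assert not has_address(0x82, 8)    # even start, ordinary length → no address
assert has_address(0x82, 16)       # the documented 0x82 edge case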
valid
decode_timestamp
Decode timestamp using bespoke decoder. Cannot use simple strptime since the ness panel contains a bug that P199E zone and state updates emitted on the hour cause a minute value of `60` to be sent, causing strptime to fail. This decoder handles this edge case.
nessclient/packet.py
def decode_timestamp(data: str) -> datetime.datetime: """ Decode timestamp using bespoke decoder. Cannot use simple strptime since the ness panel contains a bug that P199E zone and state updates emitted on the hour cause a minute value of `60` to be sent, causing strptime to fail. This decoder handles this edge case. """ year = 2000 + int(data[0:2]) month = int(data[2:4]) day = int(data[4:6]) hour = int(data[6:8]) minute = int(data[8:10]) second = int(data[10:12]) if minute == 60: minute = 0 hour += 1 return datetime.datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second)
def decode_timestamp(data: str) -> datetime.datetime: """ Decode timestamp using bespoke decoder. Cannot use simple strptime since the ness panel contains a bug that P199E zone and state updates emitted on the hour cause a minute value of `60` to be sent, causing strptime to fail. This decoder handles this edge case. """ year = 2000 + int(data[0:2]) month = int(data[2:4]) day = int(data[4:6]) hour = int(data[6:8]) minute = int(data[8:10]) second = int(data[10:12]) if minute == 60: minute = 0 hour += 1 return datetime.datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second)
[ "Decode", "timestamp", "using", "bespoke", "decoder", ".", "Cannot", "use", "simple", "strptime", "since", "the", "ness", "panel", "contains", "a", "bug", "that", "P199E", "zone", "and", "state", "updates", "emitted", "on", "the", "hour", "cause", "a", "minute", "value", "of", "60", "to", "be", "sent", "causing", "strptime", "to", "fail", ".", "This", "decoder", "handles", "this", "edge", "case", "." ]
nickw444/nessclient
python
https://github.com/nickw444/nessclient/blob/9a2e3d450448312f56e708b8c7adeaef878cc28a/nessclient/packet.py#L186-L205
[ "def", "decode_timestamp", "(", "data", ":", "str", ")", "->", "datetime", ".", "datetime", ":", "year", "=", "2000", "+", "int", "(", "data", "[", "0", ":", "2", "]", ")", "month", "=", "int", "(", "data", "[", "2", ":", "4", "]", ")", "day", "=", "int", "(", "data", "[", "4", ":", "6", "]", ")", "hour", "=", "int", "(", "data", "[", "6", ":", "8", "]", ")", "minute", "=", "int", "(", "data", "[", "8", ":", "10", "]", ")", "second", "=", "int", "(", "data", "[", "10", ":", "12", "]", ")", "if", "minute", "==", "60", ":", "minute", "=", "0", "hour", "+=", "1", "return", "datetime", ".", "datetime", "(", "year", "=", "year", ",", "month", "=", "month", ",", "day", "=", "day", ",", "hour", "=", "hour", ",", "minute", "=", "minute", ",", "second", "=", "second", ")" ]
9a2e3d450448312f56e708b8c7adeaef878cc28a
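A short usage sketch for the decoder in the record above. It assumes the nessclient package is installed and that decode_timestamp is importable from nessclient.packet, as the record's path suggests; the timestamp strings are made up for illustration.

import datetime

from nessclient.packet import decode_timestamp

# A regular timestamp, encoded as YYMMDDHHMMSS: 2019-01-02 03:04:05.
assert decode_timestamp('190102030405') == datetime.datetime(2019, 1, 2, 3, 4, 5)

# The panel's on-the-hour quirk: a minute value of 60 is rolled into the next hour.
assert decode_timestamp('190102036000') == datetime.datetime(2019, 1, 2, 4, 0, 0)

The rollover only touches the minute and hour, so a 60-minute value arriving at hour 23 would produce hour 24 and raise ValueError; the sketch inherits that behaviour from the code shown in the record.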
valid
create_application
Create a Flask application.
src/bio2bel/web/application.py
def create_application(connection: Optional[str] = None) -> Flask: """Create a Flask application.""" app = Flask(__name__) flask_bootstrap.Bootstrap(app) Admin(app) connection = connection or DEFAULT_CACHE_CONNECTION engine, session = build_engine_session(connection) for name, add_admin in add_admins.items(): url = '/{}'.format(name) add_admin(app, session, url=url, endpoint=name, name=name) log.debug('added %s - %s to %s', name, add_admin, url) app.register_blueprint(ui) return app
def create_application(connection: Optional[str] = None) -> Flask: """Create a Flask application.""" app = Flask(__name__) flask_bootstrap.Bootstrap(app) Admin(app) connection = connection or DEFAULT_CACHE_CONNECTION engine, session = build_engine_session(connection) for name, add_admin in add_admins.items(): url = '/{}'.format(name) add_admin(app, session, url=url, endpoint=name, name=name) log.debug('added %s - %s to %s', name, add_admin, url) app.register_blueprint(ui) return app
[ "Create", "a", "Flask", "application", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/web/application.py#L55-L72
[ "def", "create_application", "(", "connection", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "Flask", ":", "app", "=", "Flask", "(", "__name__", ")", "flask_bootstrap", ".", "Bootstrap", "(", "app", ")", "Admin", "(", "app", ")", "connection", "=", "connection", "or", "DEFAULT_CACHE_CONNECTION", "engine", ",", "session", "=", "build_engine_session", "(", "connection", ")", "for", "name", ",", "add_admin", "in", "add_admins", ".", "items", "(", ")", ":", "url", "=", "'/{}'", ".", "format", "(", "name", ")", "add_admin", "(", "app", ",", "session", ",", "url", "=", "url", ",", "endpoint", "=", "name", ",", "name", "=", "name", ")", "log", ".", "debug", "(", "'added %s - %s to %s'", ",", "name", ",", "add_admin", ",", "url", ")", "app", ".", "register_blueprint", "(", "ui", ")", "return", "app" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
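A minimal usage sketch for the factory in the record above, assuming bio2bel and its web dependencies (Flask, Flask-Admin, Flask-Bootstrap) are installed; the sqlite:// connection string is a placeholder rather than a value taken from the record.

from bio2bel.web.application import create_application

# Build the Flask app against a throwaway in-memory database.
app = create_application(connection='sqlite://')

if __name__ == '__main__':
    # Serves the registered admin views plus the ui blueprint.
    app.run(debug=True)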
valid
Registry.register_provider
Register a :class:`skosprovider.providers.VocabularyProvider`. :param skosprovider.providers.VocabularyProvider provider: The provider to register. :raises RegistryException: A provider with this id or uri has already been registered.
skosprovider/registry.py
def register_provider(self, provider): ''' Register a :class:`skosprovider.providers.VocabularyProvider`. :param skosprovider.providers.VocabularyProvider provider: The provider to register. :raises RegistryException: A provider with this id or uri has already been registered. ''' if provider.get_vocabulary_id() in self.providers: raise RegistryException( 'A provider with this id has already been registered.' ) self.providers[provider.get_vocabulary_id()] = provider if provider.concept_scheme.uri in self.concept_scheme_uri_map: raise RegistryException( 'A provider with URI %s has already been registered.' % provider.concept_scheme.uri ) self.concept_scheme_uri_map[provider.concept_scheme.uri] = provider.get_vocabulary_id()
def register_provider(self, provider): ''' Register a :class:`skosprovider.providers.VocabularyProvider`. :param skosprovider.providers.VocabularyProvider provider: The provider to register. :raises RegistryException: A provider with this id or uri has already been registered. ''' if provider.get_vocabulary_id() in self.providers: raise RegistryException( 'A provider with this id has already been registered.' ) self.providers[provider.get_vocabulary_id()] = provider if provider.concept_scheme.uri in self.concept_scheme_uri_map: raise RegistryException( 'A provider with URI %s has already been registered.' % provider.concept_scheme.uri ) self.concept_scheme_uri_map[provider.concept_scheme.uri] = provider.get_vocabulary_id()
[ "Register", "a", ":", "class", ":", "skosprovider", ".", "providers", ".", "VocabularyProvider", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/registry.py#L37-L55
[ "def", "register_provider", "(", "self", ",", "provider", ")", ":", "if", "provider", ".", "get_vocabulary_id", "(", ")", "in", "self", ".", "providers", ":", "raise", "RegistryException", "(", "'A provider with this id has already been registered.'", ")", "self", ".", "providers", "[", "provider", ".", "get_vocabulary_id", "(", ")", "]", "=", "provider", "if", "provider", ".", "concept_scheme", ".", "uri", "in", "self", ".", "concept_scheme_uri_map", ":", "raise", "RegistryException", "(", "'A provider with URI %s has already been registered.'", "%", "provider", ".", "concept_scheme", ".", "uri", ")", "self", ".", "concept_scheme_uri_map", "[", "provider", ".", "concept_scheme", ".", "uri", "]", "=", "provider", ".", "get_vocabulary_id", "(", ")" ]
7304a37953978ca8227febc2d3cc2b2be178f215
valid
Registry.remove_provider
Remove the provider with the given id or :term:`URI`. :param str id: The identifier for the provider. :returns: A :class:`skosprovider.providers.VocabularyProvider` or `False` if the id is unknown.
skosprovider/registry.py
def remove_provider(self, id): ''' Remove the provider with the given id or :term:`URI`. :param str id: The identifier for the provider. :returns: A :class:`skosprovider.providers.VocabularyProvider` or `False` if the id is unknown. ''' if id in self.providers: p = self.providers.get(id, False) del self.providers[id] del self.concept_scheme_uri_map[p.concept_scheme.uri] return p elif id in self.concept_scheme_uri_map: id = self.concept_scheme_uri_map[id] return self.remove_provider(id) else: return False
def remove_provider(self, id): ''' Remove the provider with the given id or :term:`URI`. :param str id: The identifier for the provider. :returns: A :class:`skosprovider.providers.VocabularyProvider` or `False` if the id is unknown. ''' if id in self.providers: p = self.providers.get(id, False) del self.providers[id] del self.concept_scheme_uri_map[p.concept_scheme.uri] return p elif id in self.concept_scheme_uri_map: id = self.concept_scheme_uri_map[id] return self.remove_provider(id) else: return False
[ "Remove", "the", "provider", "with", "the", "given", "id", "or", ":", "term", ":", "URI", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/registry.py#L57-L74
[ "def", "remove_provider", "(", "self", ",", "id", ")", ":", "if", "id", "in", "self", ".", "providers", ":", "p", "=", "self", ".", "providers", ".", "get", "(", "id", ",", "False", ")", "del", "self", ".", "providers", "[", "id", "]", "del", "self", ".", "concept_scheme_uri_map", "[", "p", ".", "concept_scheme", ".", "uri", "]", "return", "p", "elif", "id", "in", "self", ".", "concept_scheme_uri_map", ":", "id", "=", "self", ".", "concept_scheme_uri_map", "[", "id", "]", "return", "self", ".", "remove_provider", "(", "id", ")", "else", ":", "return", "False" ]
7304a37953978ca8227febc2d3cc2b2be178f215
valid
Registry.get_provider
Get a provider by id or :term:`uri`. :param str id: The identifier for the provider. This can either be the id with which it was registered or the :term:`uri` of the conceptscheme that the provider services. :returns: A :class:`skosprovider.providers.VocabularyProvider` or `False` if the id or uri is unknown.
skosprovider/registry.py
def get_provider(self, id): ''' Get a provider by id or :term:`uri`. :param str id: The identifier for the provider. This can either be the id with which it was registered or the :term:`uri` of the conceptscheme that the provider services. :returns: A :class:`skosprovider.providers.VocabularyProvider` or `False` if the id or uri is unknown. ''' if id in self.providers: return self.providers.get(id, False) elif is_uri(id) and id in self.concept_scheme_uri_map: return self.providers.get(self.concept_scheme_uri_map[id], False) return False
def get_provider(self, id): ''' Get a provider by id or :term:`uri`. :param str id: The identifier for the provider. This can either be the id with which it was registered or the :term:`uri` of the conceptscheme that the provider services. :returns: A :class:`skosprovider.providers.VocabularyProvider` or `False` if the id or uri is unknown. ''' if id in self.providers: return self.providers.get(id, False) elif is_uri(id) and id in self.concept_scheme_uri_map: return self.providers.get(self.concept_scheme_uri_map[id], False) return False
[ "Get", "a", "provider", "by", "id", "or", ":", "term", ":", "uri", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/registry.py#L76-L90
[ "def", "get_provider", "(", "self", ",", "id", ")", ":", "if", "id", "in", "self", ".", "providers", ":", "return", "self", ".", "providers", ".", "get", "(", "id", ",", "False", ")", "elif", "is_uri", "(", "id", ")", "and", "id", "in", "self", ".", "concept_scheme_uri_map", ":", "return", "self", ".", "providers", ".", "get", "(", "self", ".", "concept_scheme_uri_map", "[", "id", "]", ",", "False", ")", "return", "False" ]
7304a37953978ca8227febc2d3cc2b2be178f215
valid
Registry.get_providers
Get all providers registered. If keyword `ids` is present, get only the providers with these ids. If keyword `subject` is present, get only the providers that have this subject. .. code-block:: python # Get all providers with subject 'biology' registry.get_providers(subject='biology') # Get all providers with id 1 or 2 registry.get_providers(ids=[1,2]) # Get all providers with id 1 or 2 and subject 'biology' registry.get_providers(ids=[1,2], subject='biology') :param list ids: Only return providers with one of the Ids or :term:`URIs <uri>`. :param str subject: Only return providers with this subject. :returns: A list of :class:`providers <skosprovider.providers.VocabularyProvider>`
skosprovider/registry.py
def get_providers(self, **kwargs): '''Get all providers registered. If keyword `ids` is present, get only the providers with these ids. If keys `subject` is present, get only the providers that have this subject. .. code-block:: python # Get all providers with subject 'biology' registry.get_providers(subject='biology') # Get all providers with id 1 or 2 registry.get_providers(ids=[1,2]) # Get all providers with id 1 or 2 and subject 'biology' registry.get_providers(ids=[1,2], subject='biology'] :param list ids: Only return providers with one of the Ids or :term:`URIs <uri>`. :param str subject: Only return providers with this subject. :returns: A list of :class:`providers <skosprovider.providers.VocabularyProvider>` ''' if 'ids' in kwargs: ids = [self.concept_scheme_uri_map.get(id, id) for id in kwargs['ids']] providers = [ self.providers[k] for k in self.providers.keys() if k in ids ] else: providers = list(self.providers.values()) if 'subject' in kwargs: providers = [p for p in providers if kwargs['subject'] in p.metadata['subject']] return providers
def get_providers(self, **kwargs): '''Get all providers registered. If keyword `ids` is present, get only the providers with these ids. If keys `subject` is present, get only the providers that have this subject. .. code-block:: python # Get all providers with subject 'biology' registry.get_providers(subject='biology') # Get all providers with id 1 or 2 registry.get_providers(ids=[1,2]) # Get all providers with id 1 or 2 and subject 'biology' registry.get_providers(ids=[1,2], subject='biology'] :param list ids: Only return providers with one of the Ids or :term:`URIs <uri>`. :param str subject: Only return providers with this subject. :returns: A list of :class:`providers <skosprovider.providers.VocabularyProvider>` ''' if 'ids' in kwargs: ids = [self.concept_scheme_uri_map.get(id, id) for id in kwargs['ids']] providers = [ self.providers[k] for k in self.providers.keys() if k in ids ] else: providers = list(self.providers.values()) if 'subject' in kwargs: providers = [p for p in providers if kwargs['subject'] in p.metadata['subject']] return providers
[ "Get", "all", "providers", "registered", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/registry.py#L92-L123
[ "def", "get_providers", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "'ids'", "in", "kwargs", ":", "ids", "=", "[", "self", ".", "concept_scheme_uri_map", ".", "get", "(", "id", ",", "id", ")", "for", "id", "in", "kwargs", "[", "'ids'", "]", "]", "providers", "=", "[", "self", ".", "providers", "[", "k", "]", "for", "k", "in", "self", ".", "providers", ".", "keys", "(", ")", "if", "k", "in", "ids", "]", "else", ":", "providers", "=", "list", "(", "self", ".", "providers", ".", "values", "(", ")", ")", "if", "'subject'", "in", "kwargs", ":", "providers", "=", "[", "p", "for", "p", "in", "providers", "if", "kwargs", "[", "'subject'", "]", "in", "p", ".", "metadata", "[", "'subject'", "]", "]", "return", "providers" ]
7304a37953978ca8227febc2d3cc2b2be178f215
valid
Registry.find
Launch a query across all or a selection of providers. .. code-block:: python # Find anything that has a label of church in any provider. registry.find({'label': 'church'}) # Find anything that has a label of church with the BUILDINGS provider. # Attention, this syntax was deprecated in version 0.3.0 registry.find({'label': 'church'}, providers=['BUILDINGS']) # Find anything that has a label of church with the BUILDINGS provider. registry.find({'label': 'church'}, providers={'ids': ['BUILDINGS']}) # Find anything that has a label of church with a provider # marked with the subject 'architecture'. registry.find({'label': 'church'}, providers={'subject': 'architecture'}) # Find anything that has a label of church in any provider. # If possible, display the results with a Dutch label. registry.find({'label': 'church'}, language='nl') :param dict query: The query parameters that will be passed on to each :meth:`~skosprovider.providers.VocabularyProvider.find` method of the selected :class:`providers <skosprovider.providers.VocabularyProvider>`. :param dict providers: Optional. If present, it should be a dictionary. This dictionary can contain any of the keyword arguments available to the :meth:`get_providers` method. The query will then only be passed to the providers conforming to these arguments. :param string language: Optional. If present, it should be a :term:`language-tag`. This language-tag is passed on to the underlying providers and used when selecting the label to display for each concept. :returns: a list of :class:`dict`. Each dict has two keys: id and concepts.
skosprovider/registry.py
def find(self, query, **kwargs): '''Launch a query across all or a selection of providers. .. code-block:: python # Find anything that has a label of church in any provider. registry.find({'label': 'church'}) # Find anything that has a label of church with the BUILDINGS provider. # Attention, this syntax was deprecated in version 0.3.0 registry.find({'label': 'church'}, providers=['BUILDINGS']) # Find anything that has a label of church with the BUILDINGS provider. registry.find({'label': 'church'}, providers={'ids': ['BUILDINGS']}) # Find anything that has a label of church with a provider # marked with the subject 'architecture'. registry.find({'label': 'church'}, providers={'subject': 'architecture'}) # Find anything that has a label of church in any provider. # If possible, display the results with a Dutch label. registry.find({'label': 'church'}, language='nl') :param dict query: The query parameters that will be passed on to each :meth:`~skosprovider.providers.VocabularyProvider.find` method of the selected. :class:`providers <skosprovider.providers.VocabularyProvider>`. :param dict providers: Optional. If present, it should be a dictionary. This dictionary can contain any of the keyword arguments available to the :meth:`get_providers` method. The query will then only be passed to the providers confirming to these arguments. :param string language: Optional. If present, it should be a :term:`language-tag`. This language-tag is passed on to the underlying providers and used when selecting the label to display for each concept. :returns: a list of :class:`dict`. Each dict has two keys: id and concepts. ''' if 'providers' not in kwargs: providers = self.get_providers() else: pargs = kwargs['providers'] if isinstance(pargs, list): providers = self.get_providers(ids=pargs) else: providers = self.get_providers(**pargs) kwarguments = {} if 'language' in kwargs: kwarguments['language'] = kwargs['language'] return [{'id': p.get_vocabulary_id(), 'concepts': p.find(query, **kwarguments)} for p in providers]
def find(self, query, **kwargs): '''Launch a query across all or a selection of providers. .. code-block:: python # Find anything that has a label of church in any provider. registry.find({'label': 'church'}) # Find anything that has a label of church with the BUILDINGS provider. # Attention, this syntax was deprecated in version 0.3.0 registry.find({'label': 'church'}, providers=['BUILDINGS']) # Find anything that has a label of church with the BUILDINGS provider. registry.find({'label': 'church'}, providers={'ids': ['BUILDINGS']}) # Find anything that has a label of church with a provider # marked with the subject 'architecture'. registry.find({'label': 'church'}, providers={'subject': 'architecture'}) # Find anything that has a label of church in any provider. # If possible, display the results with a Dutch label. registry.find({'label': 'church'}, language='nl') :param dict query: The query parameters that will be passed on to each :meth:`~skosprovider.providers.VocabularyProvider.find` method of the selected. :class:`providers <skosprovider.providers.VocabularyProvider>`. :param dict providers: Optional. If present, it should be a dictionary. This dictionary can contain any of the keyword arguments available to the :meth:`get_providers` method. The query will then only be passed to the providers confirming to these arguments. :param string language: Optional. If present, it should be a :term:`language-tag`. This language-tag is passed on to the underlying providers and used when selecting the label to display for each concept. :returns: a list of :class:`dict`. Each dict has two keys: id and concepts. ''' if 'providers' not in kwargs: providers = self.get_providers() else: pargs = kwargs['providers'] if isinstance(pargs, list): providers = self.get_providers(ids=pargs) else: providers = self.get_providers(**pargs) kwarguments = {} if 'language' in kwargs: kwarguments['language'] = kwargs['language'] return [{'id': p.get_vocabulary_id(), 'concepts': p.find(query, **kwarguments)} for p in providers]
[ "Launch", "a", "query", "across", "all", "or", "a", "selection", "of", "providers", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/registry.py#L125-L175
[ "def", "find", "(", "self", ",", "query", ",", "*", "*", "kwargs", ")", ":", "if", "'providers'", "not", "in", "kwargs", ":", "providers", "=", "self", ".", "get_providers", "(", ")", "else", ":", "pargs", "=", "kwargs", "[", "'providers'", "]", "if", "isinstance", "(", "pargs", ",", "list", ")", ":", "providers", "=", "self", ".", "get_providers", "(", "ids", "=", "pargs", ")", "else", ":", "providers", "=", "self", ".", "get_providers", "(", "*", "*", "pargs", ")", "kwarguments", "=", "{", "}", "if", "'language'", "in", "kwargs", ":", "kwarguments", "[", "'language'", "]", "=", "kwargs", "[", "'language'", "]", "return", "[", "{", "'id'", ":", "p", ".", "get_vocabulary_id", "(", ")", ",", "'concepts'", ":", "p", ".", "find", "(", "query", ",", "*", "*", "kwarguments", ")", "}", "for", "p", "in", "providers", "]" ]
7304a37953978ca8227febc2d3cc2b2be178f215
valid
Registry.get_all
Get all concepts from all providers. .. code-block:: python # get all concepts in all providers. registry.get_all() # get all concepts in all providers. # If possible, display the results with a Dutch label. registry.get_all(language='nl') :param string language: Optional. If present, it should be a :term:`language-tag`. This language-tag is passed on to the underlying providers and used when selecting the label to display for each concept. :returns: a list of :class:`dict`. Each dict has two keys: id and concepts.
skosprovider/registry.py
def get_all(self, **kwargs): '''Get all concepts from all providers. .. code-block:: python # get all concepts in all providers. registry.get_all() # get all concepts in all providers. # If possible, display the results with a Dutch label. registry.get_all(language='nl') :param string language: Optional. If present, it should be a :term:`language-tag`. This language-tag is passed on to the underlying providers and used when selecting the label to display for each concept. :returns: a list of :class:`dict`. Each dict has two keys: id and concepts. ''' kwarguments = {} if 'language' in kwargs: kwarguments['language'] = kwargs['language'] return [{'id': p.get_vocabulary_id(), 'concepts': p.get_all(**kwarguments)} for p in self.providers.values()]
def get_all(self, **kwargs): '''Get all concepts from all providers. .. code-block:: python # get all concepts in all providers. registry.get_all() # get all concepts in all providers. # If possible, display the results with a Dutch label. registry.get_all(language='nl') :param string language: Optional. If present, it should be a :term:`language-tag`. This language-tag is passed on to the underlying providers and used when selecting the label to display for each concept. :returns: a list of :class:`dict`. Each dict has two keys: id and concepts. ''' kwarguments = {} if 'language' in kwargs: kwarguments['language'] = kwargs['language'] return [{'id': p.get_vocabulary_id(), 'concepts': p.get_all(**kwarguments)} for p in self.providers.values()]
[ "Get", "all", "concepts", "from", "all", "providers", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/registry.py#L177-L201
[ "def", "get_all", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwarguments", "=", "{", "}", "if", "'language'", "in", "kwargs", ":", "kwarguments", "[", "'language'", "]", "=", "kwargs", "[", "'language'", "]", "return", "[", "{", "'id'", ":", "p", ".", "get_vocabulary_id", "(", ")", ",", "'concepts'", ":", "p", ".", "get_all", "(", "*", "*", "kwarguments", ")", "}", "for", "p", "in", "self", ".", "providers", ".", "values", "(", ")", "]" ]
7304a37953978ca8227febc2d3cc2b2be178f215
valid
Registry.get_by_uri
Get a concept or collection by its uri. Returns a single concept or collection if one exists with this uri. Returns False otherwise. :param string uri: The uri to find a concept or collection for. :raises ValueError: The uri is invalid. :rtype: :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection`
skosprovider/registry.py
def get_by_uri(self, uri): '''Get a concept or collection by its uri. Returns a single concept or collection if one exists with this uri. Returns False otherwise. :param string uri: The uri to find a concept or collection for. :raises ValueError: The uri is invalid. :rtype: :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection` ''' if not is_uri(uri): raise ValueError('%s is not a valid URI.' % uri) # Check if there's a provider that's more likely to have the URI csuris = [csuri for csuri in self.concept_scheme_uri_map.keys() if uri.startswith(csuri)] for csuri in csuris: c = self.get_provider(csuri).get_by_uri(uri) if c: return c # Check all providers for p in self.providers.values(): c = p.get_by_uri(uri) if c: return c return False
def get_by_uri(self, uri): '''Get a concept or collection by its uri. Returns a single concept or collection if one exists with this uri. Returns False otherwise. :param string uri: The uri to find a concept or collection for. :raises ValueError: The uri is invalid. :rtype: :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection` ''' if not is_uri(uri): raise ValueError('%s is not a valid URI.' % uri) # Check if there's a provider that's more likely to have the URI csuris = [csuri for csuri in self.concept_scheme_uri_map.keys() if uri.startswith(csuri)] for csuri in csuris: c = self.get_provider(csuri).get_by_uri(uri) if c: return c # Check all providers for p in self.providers.values(): c = p.get_by_uri(uri) if c: return c return False
[ "Get", "a", "concept", "or", "collection", "by", "its", "uri", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/registry.py#L203-L227
[ "def", "get_by_uri", "(", "self", ",", "uri", ")", ":", "if", "not", "is_uri", "(", "uri", ")", ":", "raise", "ValueError", "(", "'%s is not a valid URI.'", "%", "uri", ")", "# Check if there's a provider that's more likely to have the URI", "csuris", "=", "[", "csuri", "for", "csuri", "in", "self", ".", "concept_scheme_uri_map", ".", "keys", "(", ")", "if", "uri", ".", "startswith", "(", "csuri", ")", "]", "for", "csuri", "in", "csuris", ":", "c", "=", "self", ".", "get_provider", "(", "csuri", ")", ".", "get_by_uri", "(", "uri", ")", "if", "c", ":", "return", "c", "# Check all providers", "for", "p", "in", "self", ".", "providers", ".", "values", "(", ")", ":", "c", "=", "p", ".", "get_by_uri", "(", "uri", ")", "if", "c", ":", "return", "c", "return", "False" ]
7304a37953978ca8227febc2d3cc2b2be178f215
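The Registry records above compose into a simple lifecycle: register a provider, look it up by id or by concept-scheme URI, resolve a concept URI, and remove the provider again. The sketch below assumes skosprovider's DictionaryProvider and ConceptScheme accept the arguments shown, as in the upstream library; the TREES vocabulary is invented for illustration.

from skosprovider.registry import Registry
from skosprovider.providers import DictionaryProvider
from skosprovider.skos import ConceptScheme

trees = DictionaryProvider(
    {'id': 'TREES', 'subject': ['biology']},
    [{'id': '1', 'uri': 'http://example.com/trees/1',
      'labels': [{'type': 'prefLabel', 'language': 'en', 'label': 'Oak'}]}],
    concept_scheme=ConceptScheme('http://example.com/trees'),
)

registry = Registry()
registry.register_provider(trees)

# Lookup works both by registered id and by concept-scheme URI.
assert registry.get_provider('TREES') is trees
assert registry.get_provider('http://example.com/trees') is trees

# get_by_uri walks the registered providers to resolve a concept URI.
oak = registry.get_by_uri('http://example.com/trees/1')
assert oak.id == '1'

# Removing by id returns the provider; unknown ids then return False.
assert registry.remove_provider('TREES') is trees
assert registry.get_provider('TREES') is False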
valid
ExtensionImporter.find_module
Find a module if its name starts with :code:`self.group` and is registered.
src/bio2bel/exthook.py
def find_module(self, fullname, path=None): """Find a module if its name starts with :code:`self.group` and is registered.""" if not fullname.startswith(self._group_with_dot): return end_name = fullname[len(self._group_with_dot):] for entry_point in iter_entry_points(group=self.group, name=None): if entry_point.name == end_name: return self
def find_module(self, fullname, path=None): """Find a module if its name starts with :code:`self.group` and is registered.""" if not fullname.startswith(self._group_with_dot): return end_name = fullname[len(self._group_with_dot):] for entry_point in iter_entry_points(group=self.group, name=None): if entry_point.name == end_name: return self
[ "Find", "a", "module", "if", "its", "name", "starts", "with", ":", "code", ":", "self", ".", "group", "and", "is", "registered", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/exthook.py#L35-L42
[ "def", "find_module", "(", "self", ",", "fullname", ",", "path", "=", "None", ")", ":", "if", "not", "fullname", ".", "startswith", "(", "self", ".", "_group_with_dot", ")", ":", "return", "end_name", "=", "fullname", "[", "len", "(", "self", ".", "_group_with_dot", ")", ":", "]", "for", "entry_point", "in", "iter_entry_points", "(", "group", "=", "self", ".", "group", ",", "name", "=", "None", ")", ":", "if", "entry_point", ".", "name", "==", "end_name", ":", "return", "self" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
ExtensionImporter.load_module
Load a module if its name starts with :code:`self.group` and is registered.
src/bio2bel/exthook.py
def load_module(self, fullname): """Load a module if its name starts with :code:`self.group` and is registered.""" if fullname in sys.modules: return sys.modules[fullname] end_name = fullname[len(self._group_with_dot):] for entry_point in iter_entry_points(group=self.group, name=end_name): mod = entry_point.load() sys.modules[fullname] = mod return mod
def load_module(self, fullname): """Load a module if its name starts with :code:`self.group` and is registered.""" if fullname in sys.modules: return sys.modules[fullname] end_name = fullname[len(self._group_with_dot):] for entry_point in iter_entry_points(group=self.group, name=end_name): mod = entry_point.load() sys.modules[fullname] = mod return mod
[ "Load", "a", "module", "if", "its", "name", "starts", "with", ":", "code", ":", "self", ".", "group", "and", "is", "registered", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/exthook.py#L44-L52
[ "def", "load_module", "(", "self", ",", "fullname", ")", ":", "if", "fullname", "in", "sys", ".", "modules", ":", "return", "sys", ".", "modules", "[", "fullname", "]", "end_name", "=", "fullname", "[", "len", "(", "self", ".", "_group_with_dot", ")", ":", "]", "for", "entry_point", "in", "iter_entry_points", "(", "group", "=", "self", ".", "group", ",", "name", "=", "end_name", ")", ":", "mod", "=", "entry_point", ".", "load", "(", ")", "sys", ".", "modules", "[", "fullname", "]", "=", "mod", "return", "mod" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
upload_theme
upload and/or update the theme with the current git state
deployment/appserver.py
def upload_theme(): """ upload and/or update the theme with the current git state""" get_vars() with fab.settings(): local_theme_path = path.abspath( path.join(fab.env['config_base'], fab.env.instance.config['local_theme_path'])) rsync( '-av', '--delete', '%s/' % local_theme_path, '{{host_string}}:{themes_dir}/{ploy_theme_name}'.format(**AV) ) briefkasten_ctl('restart')
def upload_theme(): """ upload and/or update the theme with the current git state""" get_vars() with fab.settings(): local_theme_path = path.abspath( path.join(fab.env['config_base'], fab.env.instance.config['local_theme_path'])) rsync( '-av', '--delete', '%s/' % local_theme_path, '{{host_string}}:{themes_dir}/{ploy_theme_name}'.format(**AV) ) briefkasten_ctl('restart')
[ "upload", "and", "/", "or", "update", "the", "theme", "with", "the", "current", "git", "state" ]
ZeitOnline/briefkasten
python
https://github.com/ZeitOnline/briefkasten/blob/ce6b6eeb89196014fe21d68614c20059d02daa11/deployment/appserver.py#L25-L38
[ "def", "upload_theme", "(", ")", ":", "get_vars", "(", ")", "with", "fab", ".", "settings", "(", ")", ":", "local_theme_path", "=", "path", ".", "abspath", "(", "path", ".", "join", "(", "fab", ".", "env", "[", "'config_base'", "]", ",", "fab", ".", "env", ".", "instance", ".", "config", "[", "'local_theme_path'", "]", ")", ")", "rsync", "(", "'-av'", ",", "'--delete'", ",", "'%s/'", "%", "local_theme_path", ",", "'{{host_string}}:{themes_dir}/{ploy_theme_name}'", ".", "format", "(", "*", "*", "AV", ")", ")", "briefkasten_ctl", "(", "'restart'", ")" ]
ce6b6eeb89196014fe21d68614c20059d02daa11
valid
upload_pgp_keys
upload and/or update the PGP keys for editors, import them into PGP
deployment/appserver.py
def upload_pgp_keys(): """ upload and/or update the PGP keys for editors, import them into PGP""" get_vars() upload_target = '/tmp/pgp_pubkeys.tmp' with fab.settings(fab.hide('running')): fab.run('rm -rf %s' % upload_target) fab.run('mkdir %s' % upload_target) local_key_path = path.join(fab.env['config_base'], fab.env.instance.config['local_pgpkey_path']) remote_key_path = '/var/briefkasten/pgp_pubkeys/'.format(**AV) rsync('-av', local_key_path, '{host_string}:%s' % upload_target) fab.run('chown -R %s %s' % (AV['appuser'], remote_key_path)) fab.run('chmod 700 %s' % remote_key_path) with fab.shell_env(GNUPGHOME=remote_key_path): fab.sudo('''gpg --import %s/*.*''' % upload_target, user=AV['appuser'], shell_escape=False) fab.run('rm -rf %s' % upload_target)
def upload_pgp_keys(): """ upload and/or update the PGP keys for editors, import them into PGP""" get_vars() upload_target = '/tmp/pgp_pubkeys.tmp' with fab.settings(fab.hide('running')): fab.run('rm -rf %s' % upload_target) fab.run('mkdir %s' % upload_target) local_key_path = path.join(fab.env['config_base'], fab.env.instance.config['local_pgpkey_path']) remote_key_path = '/var/briefkasten/pgp_pubkeys/'.format(**AV) rsync('-av', local_key_path, '{host_string}:%s' % upload_target) fab.run('chown -R %s %s' % (AV['appuser'], remote_key_path)) fab.run('chmod 700 %s' % remote_key_path) with fab.shell_env(GNUPGHOME=remote_key_path): fab.sudo('''gpg --import %s/*.*''' % upload_target, user=AV['appuser'], shell_escape=False) fab.run('rm -rf %s' % upload_target)
[ "upload", "and", "/", "or", "update", "the", "PGP", "keys", "for", "editors", "import", "them", "into", "PGP" ]
ZeitOnline/briefkasten
python
https://github.com/ZeitOnline/briefkasten/blob/ce6b6eeb89196014fe21d68614c20059d02daa11/deployment/appserver.py#L42-L57
[ "def", "upload_pgp_keys", "(", ")", ":", "get_vars", "(", ")", "upload_target", "=", "'/tmp/pgp_pubkeys.tmp'", "with", "fab", ".", "settings", "(", "fab", ".", "hide", "(", "'running'", ")", ")", ":", "fab", ".", "run", "(", "'rm -rf %s'", "%", "upload_target", ")", "fab", ".", "run", "(", "'mkdir %s'", "%", "upload_target", ")", "local_key_path", "=", "path", ".", "join", "(", "fab", ".", "env", "[", "'config_base'", "]", ",", "fab", ".", "env", ".", "instance", ".", "config", "[", "'local_pgpkey_path'", "]", ")", "remote_key_path", "=", "'/var/briefkasten/pgp_pubkeys/'", ".", "format", "(", "*", "*", "AV", ")", "rsync", "(", "'-av'", ",", "local_key_path", ",", "'{host_string}:%s'", "%", "upload_target", ")", "fab", ".", "run", "(", "'chown -R %s %s'", "%", "(", "AV", "[", "'appuser'", "]", ",", "remote_key_path", ")", ")", "fab", ".", "run", "(", "'chmod 700 %s'", "%", "remote_key_path", ")", "with", "fab", ".", "shell_env", "(", "GNUPGHOME", "=", "remote_key_path", ")", ":", "fab", ".", "sudo", "(", "'''gpg --import %s/*.*'''", "%", "upload_target", ",", "user", "=", "AV", "[", "'appuser'", "]", ",", "shell_escape", "=", "False", ")", "fab", ".", "run", "(", "'rm -rf %s'", "%", "upload_target", ")" ]
ce6b6eeb89196014fe21d68614c20059d02daa11
valid
upload_backend
Build the backend and upload it to the remote server at the given index
deployment/appserver.py
def upload_backend(index='dev', user=None): """ Build the backend and upload it to the remote server at the given index """ get_vars() use_devpi(index=index) with fab.lcd('../application'): fab.local('make upload')
def upload_backend(index='dev', user=None): """ Build the backend and upload it to the remote server at the given index """ get_vars() use_devpi(index=index) with fab.lcd('../application'): fab.local('make upload')
[ "Build", "the", "backend", "and", "upload", "it", "to", "the", "remote", "server", "at", "the", "given", "index" ]
ZeitOnline/briefkasten
python
https://github.com/ZeitOnline/briefkasten/blob/ce6b6eeb89196014fe21d68614c20059d02daa11/deployment/appserver.py#L61-L68
[ "def", "upload_backend", "(", "index", "=", "'dev'", ",", "user", "=", "None", ")", ":", "get_vars", "(", ")", "use_devpi", "(", "index", "=", "index", ")", "with", "fab", ".", "lcd", "(", "'../application'", ")", ":", "fab", ".", "local", "(", "'make upload'", ")" ]
ce6b6eeb89196014fe21d68614c20059d02daa11
valid
update_backend
Install the backend from the given devpi index at the given version on the target host and restart the service. If version is None, it defaults to the latest version Optionally, build and upload the application first from local sources. This requires a full backend development environment on the machine running this command (pyramid etc.)
deployment/appserver.py
def update_backend(use_pypi=False, index='dev', build=True, user=None, version=None): """ Install the backend from the given devpi index at the given version on the target host and restart the service. If version is None, it defaults to the latest version Optionally, build and upload the application first from local sources. This requires a full backend development environment on the machine running this command (pyramid etc.) """ get_vars() if value_asbool(build): upload_backend(index=index, user=user) with fab.cd('{apphome}'.format(**AV)): if value_asbool(use_pypi): command = 'bin/pip install --upgrade briefkasten' else: command = 'bin/pip install --upgrade --pre -i {ploy_default_publish_devpi}/briefkasten/{index}/+simple/ briefkasten'.format( index=index, user=user, **AV) if version: command = '%s==%s' % (command, version) fab.sudo(command) briefkasten_ctl('restart')
def update_backend(use_pypi=False, index='dev', build=True, user=None, version=None): """ Install the backend from the given devpi index at the given version on the target host and restart the service. If version is None, it defaults to the latest version Optionally, build and upload the application first from local sources. This requires a full backend development environment on the machine running this command (pyramid etc.) """ get_vars() if value_asbool(build): upload_backend(index=index, user=user) with fab.cd('{apphome}'.format(**AV)): if value_asbool(use_pypi): command = 'bin/pip install --upgrade briefkasten' else: command = 'bin/pip install --upgrade --pre -i {ploy_default_publish_devpi}/briefkasten/{index}/+simple/ briefkasten'.format( index=index, user=user, **AV) if version: command = '%s==%s' % (command, version) fab.sudo(command) briefkasten_ctl('restart')
[ "Install", "the", "backend", "from", "the", "given", "devpi", "index", "at", "the", "given", "version", "on", "the", "target", "host", "and", "restart", "the", "service", "." ]
ZeitOnline/briefkasten
python
https://github.com/ZeitOnline/briefkasten/blob/ce6b6eeb89196014fe21d68614c20059d02daa11/deployment/appserver.py#L84-L108
[ "def", "update_backend", "(", "use_pypi", "=", "False", ",", "index", "=", "'dev'", ",", "build", "=", "True", ",", "user", "=", "None", ",", "version", "=", "None", ")", ":", "get_vars", "(", ")", "if", "value_asbool", "(", "build", ")", ":", "upload_backend", "(", "index", "=", "index", ",", "user", "=", "user", ")", "with", "fab", ".", "cd", "(", "'{apphome}'", ".", "format", "(", "*", "*", "AV", ")", ")", ":", "if", "value_asbool", "(", "use_pypi", ")", ":", "command", "=", "'bin/pip install --upgrade briefkasten'", "else", ":", "command", "=", "'bin/pip install --upgrade --pre -i {ploy_default_publish_devpi}/briefkasten/{index}/+simple/ briefkasten'", ".", "format", "(", "index", "=", "index", ",", "user", "=", "user", ",", "*", "*", "AV", ")", "if", "version", ":", "command", "=", "'%s==%s'", "%", "(", "command", ",", "version", ")", "fab", ".", "sudo", "(", "command", ")", "briefkasten_ctl", "(", "'restart'", ")" ]
ce6b6eeb89196014fe21d68614c20059d02daa11
valid
VocabularyProvider._sort
Returns a sorted version of a list of concepts. Will leave the original list unsorted. :param list concepts: A list of concepts and collections. :param string sort: What to sort on: `id`, `label` or `sortlabel` :param string language: Language to use when sorting on `label` or `sortlabel`. :param boolean reverse: Reverse the sort order? :rtype: list
skosprovider/providers.py
def _sort(self, concepts, sort=None, language='any', reverse=False): ''' Returns a sorted version of a list of concepts. Will leave the original list unsorted. :param list concepts: A list of concepts and collections. :param string sort: What to sort on: `id`, `label` or `sortlabel` :param string language: Language to use when sorting on `label` or `sortlabel`. :param boolean reverse: Reverse the sort order? :rtype: list ''' sorted = copy.copy(concepts) if sort: sorted.sort(key=methodcaller('_sortkey', sort, language), reverse=reverse) return sorted
def _sort(self, concepts, sort=None, language='any', reverse=False): ''' Returns a sorted version of a list of concepts. Will leave the original list unsorted. :param list concepts: A list of concepts and collections. :param string sort: What to sort on: `id`, `label` or `sortlabel` :param string language: Language to use when sorting on `label` or `sortlabel`. :param boolean reverse: Reverse the sort order? :rtype: list ''' sorted = copy.copy(concepts) if sort: sorted.sort(key=methodcaller('_sortkey', sort, language), reverse=reverse) return sorted
[ "Returns", "a", "sorted", "version", "of", "a", "list", "of", "concepts", ".", "Will", "leave", "the", "original", "list", "unsorted", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/providers.py#L121-L136
[ "def", "_sort", "(", "self", ",", "concepts", ",", "sort", "=", "None", ",", "language", "=", "'any'", ",", "reverse", "=", "False", ")", ":", "sorted", "=", "copy", ".", "copy", "(", "concepts", ")", "if", "sort", ":", "sorted", ".", "sort", "(", "key", "=", "methodcaller", "(", "'_sortkey'", ",", "sort", ",", "language", ")", ",", "reverse", "=", "reverse", ")", "return", "sorted" ]
7304a37953978ca8227febc2d3cc2b2be178f215
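The sorting trick in the record above delegates the sort key to each object's _sortkey method via operator.methodcaller, leaving the input list untouched. A self-contained sketch of the same technique, using sorted in place of copy-then-sort and a toy Item class standing in for Concept/Collection:

from operator import methodcaller

class Item:
    def __init__(self, id, label):
        self.id = id
        self._label = label

    def _sortkey(self, key='id', language='any'):
        # Mirror the provider contract: return whatever the caller asked to sort on.
        return self._label if key in ('label', 'sortlabel') else str(self.id)

items = [Item(2, 'birch'), Item(1, 'oak')]

by_label = sorted(items, key=methodcaller('_sortkey', 'label', 'en'))
assert [i._label for i in by_label] == ['birch', 'oak']
# The original list is left unsorted, exactly like _sort above.
assert [i.id for i in items] == [2, 1]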
valid
MemoryProvider._include_in_find
:param c: A :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection`. :param query: A dict that can be used to express a query. :rtype: boolean
skosprovider/providers.py
def _include_in_find(self, c, query): ''' :param c: A :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection`. :param query: A dict that can be used to express a query. :rtype: boolean ''' include = True if include and 'type' in query: include = query['type'] == c.type if include and 'label' in query: def finder(l, query): if not self.case_insensitive: return l.label.find(query['label']) else: return l.label.upper().find(query['label'].upper()) include = any([finder(l, query) >= 0 for l in c.labels]) if include and 'collection' in query: coll = self.get_by_id(query['collection']['id']) if not coll or not isinstance(coll, Collection): raise ValueError( 'You are searching for items in an unexisting collection.' ) if 'depth' in query['collection'] and query['collection']['depth'] == 'all': members = self.expand(coll.id) else: members = coll.members include = any([True for id in members if str(id) == str(c.id)]) return include
def _include_in_find(self, c, query): ''' :param c: A :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection`. :param query: A dict that can be used to express a query. :rtype: boolean ''' include = True if include and 'type' in query: include = query['type'] == c.type if include and 'label' in query: def finder(l, query): if not self.case_insensitive: return l.label.find(query['label']) else: return l.label.upper().find(query['label'].upper()) include = any([finder(l, query) >= 0 for l in c.labels]) if include and 'collection' in query: coll = self.get_by_id(query['collection']['id']) if not coll or not isinstance(coll, Collection): raise ValueError( 'You are searching for items in an unexisting collection.' ) if 'depth' in query['collection'] and query['collection']['depth'] == 'all': members = self.expand(coll.id) else: members = coll.members include = any([True for id in members if str(id) == str(c.id)]) return include
[ ":", "param", "c", ":", "A", ":", "class", ":", "skosprovider", ".", "skos", ".", "Concept", "or", ":", "class", ":", "skosprovider", ".", "skos", ".", "Collection", ".", ":", "param", "query", ":", "A", "dict", "that", "can", "be", "used", "to", "express", "a", "query", ".", ":", "rtype", ":", "boolean" ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/providers.py#L458-L486
[ "def", "_include_in_find", "(", "self", ",", "c", ",", "query", ")", ":", "include", "=", "True", "if", "include", "and", "'type'", "in", "query", ":", "include", "=", "query", "[", "'type'", "]", "==", "c", ".", "type", "if", "include", "and", "'label'", "in", "query", ":", "def", "finder", "(", "l", ",", "query", ")", ":", "if", "not", "self", ".", "case_insensitive", ":", "return", "l", ".", "label", ".", "find", "(", "query", "[", "'label'", "]", ")", "else", ":", "return", "l", ".", "label", ".", "upper", "(", ")", ".", "find", "(", "query", "[", "'label'", "]", ".", "upper", "(", ")", ")", "include", "=", "any", "(", "[", "finder", "(", "l", ",", "query", ")", ">=", "0", "for", "l", "in", "c", ".", "labels", "]", ")", "if", "include", "and", "'collection'", "in", "query", ":", "coll", "=", "self", ".", "get_by_id", "(", "query", "[", "'collection'", "]", "[", "'id'", "]", ")", "if", "not", "coll", "or", "not", "isinstance", "(", "coll", ",", "Collection", ")", ":", "raise", "ValueError", "(", "'You are searching for items in an unexisting collection.'", ")", "if", "'depth'", "in", "query", "[", "'collection'", "]", "and", "query", "[", "'collection'", "]", "[", "'depth'", "]", "==", "'all'", ":", "members", "=", "self", ".", "expand", "(", "coll", ".", "id", ")", "else", ":", "members", "=", "coll", ".", "members", "include", "=", "any", "(", "[", "True", "for", "id", "in", "members", "if", "str", "(", "id", ")", "==", "str", "(", "c", ".", "id", ")", "]", ")", "return", "include" ]
7304a37953978ca8227febc2d3cc2b2be178f215
valid
MemoryProvider._get_find_dict
Return a dict that can be used in the return list of the :meth:`find` method. :param c: A :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection`. :rtype: dict
skosprovider/providers.py
def _get_find_dict(self, c, **kwargs): ''' Return a dict that can be used in the return list of the :meth:`find` method. :param c: A :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection`. :rtype: dict ''' language = self._get_language(**kwargs) return { 'id': c.id, 'uri': c.uri, 'type': c.type, 'label': None if c.label() is None else c.label(language).label }
def _get_find_dict(self, c, **kwargs): ''' Return a dict that can be used in the return list of the :meth:`find` method. :param c: A :class:`skosprovider.skos.Concept` or :class:`skosprovider.skos.Collection`. :rtype: dict ''' language = self._get_language(**kwargs) return { 'id': c.id, 'uri': c.uri, 'type': c.type, 'label': None if c.label() is None else c.label(language).label }
[ "Return", "a", "dict", "that", "can", "be", "used", "in", "the", "return", "list", "of", "the", ":", "meth", ":", "find", "method", "." ]
koenedaele/skosprovider
python
https://github.com/koenedaele/skosprovider/blob/7304a37953978ca8227febc2d3cc2b2be178f215/skosprovider/providers.py#L488-L503
[ "def", "_get_find_dict", "(", "self", ",", "c", ",", "*", "*", "kwargs", ")", ":", "language", "=", "self", ".", "_get_language", "(", "*", "*", "kwargs", ")", "return", "{", "'id'", ":", "c", ".", "id", ",", "'uri'", ":", "c", ".", "uri", ",", "'type'", ":", "c", ".", "type", ",", "'label'", ":", "None", "if", "c", ".", "label", "(", ")", "is", "None", "else", "c", ".", "label", "(", "language", ")", ".", "label", "}" ]
7304a37953978ca8227febc2d3cc2b2be178f215
valid
Client.update
Force update of alarm status and zones
nessclient/client.py
async def update(self) -> None: """Force update of alarm status and zones""" _LOGGER.debug("Requesting state update from server (S00, S14)") await asyncio.gather( # List unsealed Zones self.send_command('S00'), # Arming status update self.send_command('S14'), )
async def update(self) -> None: """Force update of alarm status and zones""" _LOGGER.debug("Requesting state update from server (S00, S14)") await asyncio.gather( # List unsealed Zones self.send_command('S00'), # Arming status update self.send_command('S14'), )
[ "Force", "update", "of", "alarm", "status", "and", "zones" ]
nickw444/nessclient
python
https://github.com/nickw444/nessclient/blob/9a2e3d450448312f56e708b8c7adeaef878cc28a/nessclient/client.py#L73-L81
[ "async", "def", "update", "(", "self", ")", "->", "None", ":", "_LOGGER", ".", "debug", "(", "\"Requesting state update from server (S00, S14)\"", ")", "await", "asyncio", ".", "gather", "(", "# List unsealed Zones", "self", ".", "send_command", "(", "'S00'", ")", ",", "# Arming status update", "self", ".", "send_command", "(", "'S14'", ")", ",", ")" ]
9a2e3d450448312f56e708b8c7adeaef878cc28a
valid
Client._update_loop
Schedule a state update to keep the connection alive
nessclient/client.py
async def _update_loop(self) -> None: """Schedule a state update to keep the connection alive""" await asyncio.sleep(self._update_interval) while not self._closed: await self.update() await asyncio.sleep(self._update_interval)
async def _update_loop(self) -> None: """Schedule a state update to keep the connection alive""" await asyncio.sleep(self._update_interval) while not self._closed: await self.update() await asyncio.sleep(self._update_interval)
[ "Schedule", "a", "state", "update", "to", "keep", "the", "connection", "alive" ]
nickw444/nessclient
python
https://github.com/nickw444/nessclient/blob/9a2e3d450448312f56e708b8c7adeaef878cc28a/nessclient/client.py#L150-L155
[ "async", "def", "_update_loop", "(", "self", ")", "->", "None", ":", "await", "asyncio", ".", "sleep", "(", "self", ".", "_update_interval", ")", "while", "not", "self", ".", "_closed", ":", "await", "self", ".", "update", "(", ")", "await", "asyncio", ".", "sleep", "(", "self", ".", "_update_interval", ")" ]
9a2e3d450448312f56e708b8c7adeaef878cc28a
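The two Client records above implement a poll-to-keep-alive pattern: sleep first, then repeatedly issue a status update until the client is closed. A self-contained asyncio sketch of the same shape, with an invented fake_update standing in for the S00/S14 status commands:

import asyncio

async def poll(update, interval, stop):
    # Same shape as _update_loop above: sleep first, then poll until asked to stop.
    await asyncio.sleep(interval)
    while not stop.is_set():
        await update()
        await asyncio.sleep(interval)

async def main():
    calls = []

    async def fake_update():
        calls.append('S00+S14')  # stands in for the two status commands

    stop = asyncio.Event()
    task = asyncio.create_task(poll(fake_update, 0.01, stop))
    await asyncio.sleep(0.05)   # let a few keep-alive polls happen
    stop.set()
    await task                  # the loop notices the event on its next wake-up
    assert calls

asyncio.run(main())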
valid
add_cli_to_bel_namespace
Add a ``upload_bel_namespace`` command to main :mod:`click` function.
src/bio2bel/manager/namespace_manager.py
def add_cli_to_bel_namespace(main: click.Group) -> click.Group: # noqa: D202 """Add a ``upload_bel_namespace`` command to main :mod:`click` function.""" @main.command() @click.option('-u', '--update', is_flag=True) @click.pass_obj def upload(manager: BELNamespaceManagerMixin, update): """Upload names/identifiers to terminology store.""" namespace = manager.upload_bel_namespace(update=update) click.echo(f'uploaded [{namespace.id}] {namespace.keyword}') return main
def add_cli_to_bel_namespace(main: click.Group) -> click.Group: # noqa: D202 """Add a ``upload_bel_namespace`` command to main :mod:`click` function.""" @main.command() @click.option('-u', '--update', is_flag=True) @click.pass_obj def upload(manager: BELNamespaceManagerMixin, update): """Upload names/identifiers to terminology store.""" namespace = manager.upload_bel_namespace(update=update) click.echo(f'uploaded [{namespace.id}] {namespace.keyword}') return main
[ "Add", "a", "upload_bel_namespace", "command", "to", "main", ":", "mod", ":", "click", "function", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L493-L504
[ "def", "add_cli_to_bel_namespace", "(", "main", ":", "click", ".", "Group", ")", "->", "click", ".", "Group", ":", "# noqa: D202", "@", "main", ".", "command", "(", ")", "@", "click", ".", "option", "(", "'-u'", ",", "'--update'", ",", "is_flag", "=", "True", ")", "@", "click", ".", "pass_obj", "def", "upload", "(", "manager", ":", "BELNamespaceManagerMixin", ",", "update", ")", ":", "\"\"\"Upload names/identifiers to terminology store.\"\"\"", "namespace", "=", "manager", ".", "upload_bel_namespace", "(", "update", "=", "update", ")", "click", ".", "echo", "(", "f'uploaded [{namespace.id}] {namespace.keyword}'", ")", "return", "main" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
add_cli_clear_bel_namespace
Add a ``clear_bel_namespace`` command to main :mod:`click` function.
src/bio2bel/manager/namespace_manager.py
def add_cli_clear_bel_namespace(main: click.Group) -> click.Group: # noqa: D202 """Add a ``clear_bel_namespace`` command to main :mod:`click` function.""" @main.command() @click.pass_obj def drop(manager: BELNamespaceManagerMixin): """Clear names/identifiers to terminology store.""" namespace = manager.drop_bel_namespace() if namespace: click.echo(f'namespace {namespace} was cleared') return main
def add_cli_clear_bel_namespace(main: click.Group) -> click.Group: # noqa: D202 """Add a ``clear_bel_namespace`` command to main :mod:`click` function.""" @main.command() @click.pass_obj def drop(manager: BELNamespaceManagerMixin): """Clear names/identifiers to terminology store.""" namespace = manager.drop_bel_namespace() if namespace: click.echo(f'namespace {namespace} was cleared') return main
[ "Add", "a", "clear_bel_namespace", "command", "to", "main", ":", "mod", ":", "click", "function", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L507-L519
[ "def", "add_cli_clear_bel_namespace", "(", "main", ":", "click", ".", "Group", ")", "->", "click", ".", "Group", ":", "# noqa: D202", "@", "main", ".", "command", "(", ")", "@", "click", ".", "pass_obj", "def", "drop", "(", "manager", ":", "BELNamespaceManagerMixin", ")", ":", "\"\"\"Clear names/identifiers to terminology store.\"\"\"", "namespace", "=", "manager", ".", "drop_bel_namespace", "(", ")", "if", "namespace", ":", "click", ".", "echo", "(", "f'namespace {namespace} was cleared'", ")", "return", "main" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
add_cli_write_bel_namespace
Add a ``write_bel_namespace`` command to main :mod:`click` function.
src/bio2bel/manager/namespace_manager.py
def add_cli_write_bel_namespace(main: click.Group) -> click.Group: # noqa: D202 """Add a ``write_bel_namespace`` command to main :mod:`click` function.""" @main.command() @click.option('-d', '--directory', type=click.Path(file_okay=False, dir_okay=True), default=os.getcwd(), help='output directory') @click.pass_obj def write(manager: BELNamespaceManagerMixin, directory: str): """Write a BEL namespace names/identifiers to terminology store.""" manager.write_directory(directory) return main
def add_cli_write_bel_namespace(main: click.Group) -> click.Group: # noqa: D202 """Add a ``write_bel_namespace`` command to main :mod:`click` function.""" @main.command() @click.option('-d', '--directory', type=click.Path(file_okay=False, dir_okay=True), default=os.getcwd(), help='output directory') @click.pass_obj def write(manager: BELNamespaceManagerMixin, directory: str): """Write a BEL namespace names/identifiers to terminology store.""" manager.write_directory(directory) return main
[ "Add", "a", "write_bel_namespace", "command", "to", "main", ":", "mod", ":", "click", "function", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L522-L533
[ "def", "add_cli_write_bel_namespace", "(", "main", ":", "click", ".", "Group", ")", "->", "click", ".", "Group", ":", "# noqa: D202", "@", "main", ".", "command", "(", ")", "@", "click", ".", "option", "(", "'-d'", ",", "'--directory'", ",", "type", "=", "click", ".", "Path", "(", "file_okay", "=", "False", ",", "dir_okay", "=", "True", ")", ",", "default", "=", "os", ".", "getcwd", "(", ")", ",", "help", "=", "'output directory'", ")", "@", "click", ".", "pass_obj", "def", "write", "(", "manager", ":", "BELNamespaceManagerMixin", ",", "directory", ":", "str", ")", ":", "\"\"\"Write a BEL namespace names/identifiers to terminology store.\"\"\"", "manager", ".", "write_directory", "(", "directory", ")", "return", "main" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
add_cli_write_bel_annotation
Add a ``write_bel_annotation`` command to main :mod:`click` function.
src/bio2bel/manager/namespace_manager.py
def add_cli_write_bel_annotation(main: click.Group) -> click.Group: # noqa: D202 """Add a ``write_bel_annotation`` command to main :mod:`click` function.""" @main.command() @click.option('-d', '--directory', type=click.Path(file_okay=False, dir_okay=True), default=os.getcwd(), help='output directory') @click.pass_obj def write(manager: BELNamespaceManagerMixin, directory: str): """Write a BEL annotation.""" with open(os.path.join(directory, manager.identifiers_namespace), 'w') as file: manager.write_bel_annotation(file) return main
def add_cli_write_bel_annotation(main: click.Group) -> click.Group: # noqa: D202 """Add a ``write_bel_annotation`` command to main :mod:`click` function.""" @main.command() @click.option('-d', '--directory', type=click.Path(file_okay=False, dir_okay=True), default=os.getcwd(), help='output directory') @click.pass_obj def write(manager: BELNamespaceManagerMixin, directory: str): """Write a BEL annotation.""" with open(os.path.join(directory, manager.identifiers_namespace), 'w') as file: manager.write_bel_annotation(file) return main
[ "Add", "a", "write_bel_annotation", "command", "to", "main", ":", "mod", ":", "click", "function", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L536-L548
[ "def", "add_cli_write_bel_annotation", "(", "main", ":", "click", ".", "Group", ")", "->", "click", ".", "Group", ":", "# noqa: D202", "@", "main", ".", "command", "(", ")", "@", "click", ".", "option", "(", "'-d'", ",", "'--directory'", ",", "type", "=", "click", ".", "Path", "(", "file_okay", "=", "False", ",", "dir_okay", "=", "True", ")", ",", "default", "=", "os", ".", "getcwd", "(", ")", ",", "help", "=", "'output directory'", ")", "@", "click", ".", "pass_obj", "def", "write", "(", "manager", ":", "BELNamespaceManagerMixin", ",", "directory", ":", "str", ")", ":", "\"\"\"Write a BEL annotation.\"\"\"", "with", "open", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "manager", ".", "identifiers_namespace", ")", ",", "'w'", ")", "as", "file", ":", "manager", ".", "write_bel_annotation", "(", "file", ")", "return", "main" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
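A minimal wiring sketch for the two CLI helpers in the records above. Everything outside the two add_cli_* calls is an assumption made for illustration: the main group, the Manager import, and the sub-group names do not come from these records, and real Bio2BEL packages normally generate their CLI through the manager class itself.

import click

from bio2bel_hgnc import Manager  # assumed example; any manager mixing in BELNamespaceManagerMixin


@click.group()
@click.pass_context
def main(ctx):
    """Hypothetical CLI entry point."""
    ctx.obj = Manager()  # the registered ``write`` commands receive this via @click.pass_obj


# Both helpers register a command literally named ``write``, so this sketch gives
# each one its own sub-group instead of attaching both to ``main`` directly.
main.add_command(add_cli_write_bel_namespace(click.Group(name='belns')))
main.add_command(add_cli_write_bel_annotation(click.Group(name='belanno')))

if __name__ == '__main__':
    main()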
valid
BELNamespaceManagerMixin._iterate_namespace_models
Return an iterator over the models to be converted to the namespace.
src/bio2bel/manager/namespace_manager.py
def _iterate_namespace_models(self, **kwargs) -> Iterable:
        """Return an iterator over the models to be converted to the namespace."""
        return tqdm(
            self._get_query(self.namespace_model),
            total=self._count_model(self.namespace_model),
            **kwargs
        )
def _iterate_namespace_models(self, **kwargs) -> Iterable:
        """Return an iterator over the models to be converted to the namespace."""
        return tqdm(
            self._get_query(self.namespace_model),
            total=self._count_model(self.namespace_model),
            **kwargs
        )
[ "Return", "an", "iterator", "over", "the", "models", "to", "be", "converted", "to", "the", "namespace", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L203-L209
[ "def", "_iterate_namespace_models", "(", "self", ",", "*", "*", "kwargs", ")", "->", "Iterable", ":", "return", "tqdm", "(", "self", ".", "_get_query", "(", "self", ".", "namespace_model", ")", ",", "total", "=", "self", ".", "_count_model", "(", "self", ".", "namespace_model", ")", ",", "*", "*", "kwargs", ")" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
BELNamespaceManagerMixin._get_default_namespace
Get the reference BEL namespace if it exists.
src/bio2bel/manager/namespace_manager.py
def _get_default_namespace(self) -> Optional[Namespace]:
        """Get the reference BEL namespace if it exists."""
        return self._get_query(Namespace).filter(Namespace.url == self._get_namespace_url()).one_or_none()
def _get_default_namespace(self) -> Optional[Namespace]:
        """Get the reference BEL namespace if it exists."""
        return self._get_query(Namespace).filter(Namespace.url == self._get_namespace_url()).one_or_none()
[ "Get", "the", "reference", "BEL", "namespace", "if", "it", "exists", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L226-L228
[ "def", "_get_default_namespace", "(", "self", ")", "->", "Optional", "[", "Namespace", "]", ":", "return", "self", ".", "_get_query", "(", "Namespace", ")", ".", "filter", "(", "Namespace", ".", "url", "==", "self", ".", "_get_namespace_url", "(", ")", ")", ".", "one_or_none", "(", ")" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
BELNamespaceManagerMixin._make_namespace
Make a namespace.
src/bio2bel/manager/namespace_manager.py
def _make_namespace(self) -> Namespace:
        """Make a namespace."""
        namespace = Namespace(
            name=self._get_namespace_name(),
            keyword=self._get_namespace_keyword(),
            url=self._get_namespace_url(),
            version=str(time.asctime()),
        )
        self.session.add(namespace)

        entries = self._get_namespace_entries(namespace)
        self.session.add_all(entries)

        t = time.time()
        log.info('committing models')
        self.session.commit()
        log.info('committed models in %.2f seconds', time.time() - t)

        return namespace
def _make_namespace(self) -> Namespace:
        """Make a namespace."""
        namespace = Namespace(
            name=self._get_namespace_name(),
            keyword=self._get_namespace_keyword(),
            url=self._get_namespace_url(),
            version=str(time.asctime()),
        )
        self.session.add(namespace)

        entries = self._get_namespace_entries(namespace)
        self.session.add_all(entries)

        t = time.time()
        log.info('committing models')
        self.session.commit()
        log.info('committed models in %.2f seconds', time.time() - t)

        return namespace
[ "Make", "a", "namespace", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L240-L258
[ "def", "_make_namespace", "(", "self", ")", "->", "Namespace", ":", "namespace", "=", "Namespace", "(", "name", "=", "self", ".", "_get_namespace_name", "(", ")", ",", "keyword", "=", "self", ".", "_get_namespace_keyword", "(", ")", ",", "url", "=", "self", ".", "_get_namespace_url", "(", ")", ",", "version", "=", "str", "(", "time", ".", "asctime", "(", ")", ")", ",", ")", "self", ".", "session", ".", "add", "(", "namespace", ")", "entries", "=", "self", ".", "_get_namespace_entries", "(", "namespace", ")", "self", ".", "session", ".", "add_all", "(", "entries", ")", "t", "=", "time", ".", "time", "(", ")", "log", ".", "info", "(", "'committing models'", ")", "self", ".", "session", ".", "commit", "(", ")", "log", ".", "info", "(", "'committed models in %.2f seconds'", ",", "time", ".", "time", "(", ")", "-", "t", ")", "return", "namespace" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
BELNamespaceManagerMixin._get_old_entry_identifiers
Convert a PyBEL generalized namespace entries to a set. Default to using the identifier, but can be overridden to use the name instead. >>> {term.identifier for term in namespace.entries}
src/bio2bel/manager/namespace_manager.py
def _get_old_entry_identifiers(namespace: Namespace) -> Set[NamespaceEntry]:
        """Convert a PyBEL generalized namespace entries to a set.

        Default to using the identifier, but can be overridden to use the name instead.

        >>> {term.identifier for term in namespace.entries}
        """
        return {term.identifier for term in namespace.entries}
def _get_old_entry_identifiers(namespace: Namespace) -> Set[NamespaceEntry]:
        """Convert a PyBEL generalized namespace entries to a set.

        Default to using the identifier, but can be overridden to use the name instead.

        >>> {term.identifier for term in namespace.entries}
        """
        return {term.identifier for term in namespace.entries}
[ "Convert", "a", "PyBEL", "generalized", "namespace", "entries", "to", "a", "set", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L261-L268
[ "def", "_get_old_entry_identifiers", "(", "namespace", ":", "Namespace", ")", "->", "Set", "[", "NamespaceEntry", "]", ":", "return", "{", "term", ".", "identifier", "for", "term", "in", "namespace", ".", "entries", "}" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
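The docstring above notes that the identifier-based deduplication can be overridden to use names instead; a small sketch of such an override in a hypothetical concrete manager follows. The class name and the Set[str] return annotation are assumptions, not taken from the record.

class ExampleManager(BELNamespaceManagerMixin):  # hypothetical subclass
    @staticmethod
    def _get_old_entry_identifiers(namespace: Namespace) -> Set[str]:
        """Deduplicate already-stored entries on name rather than identifier."""
        # Note: this override only makes sense if _get_identifier on the same
        # manager also returns names, since _update_namespace compares the two.
        return {term.name for term in namespace.entries}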
valid
BELNamespaceManagerMixin._update_namespace
Update an already-created namespace. Note: Only call this if namespace won't be none!
src/bio2bel/manager/namespace_manager.py
def _update_namespace(self, namespace: Namespace) -> None:
        """Update an already-created namespace.

        Note: Only call this if namespace won't be none!
        """
        old_entry_identifiers = self._get_old_entry_identifiers(namespace)

        new_count = 0
        skip_count = 0
        for model in self._iterate_namespace_models():
            if self._get_identifier(model) in old_entry_identifiers:
                continue

            entry = self._create_namespace_entry_from_model(model, namespace=namespace)
            if entry is None or entry.name is None:
                skip_count += 1
                continue

            new_count += 1
            self.session.add(entry)

        t = time.time()
        log.info('got %d new entries. skipped %d entries missing names. committing models', new_count, skip_count)
        self.session.commit()
        log.info('committed models in %.2f seconds', time.time() - t)
def _update_namespace(self, namespace: Namespace) -> None:
        """Update an already-created namespace.

        Note: Only call this if namespace won't be none!
        """
        old_entry_identifiers = self._get_old_entry_identifiers(namespace)

        new_count = 0
        skip_count = 0
        for model in self._iterate_namespace_models():
            if self._get_identifier(model) in old_entry_identifiers:
                continue

            entry = self._create_namespace_entry_from_model(model, namespace=namespace)
            if entry is None or entry.name is None:
                skip_count += 1
                continue

            new_count += 1
            self.session.add(entry)

        t = time.time()
        log.info('got %d new entries. skipped %d entries missing names. committing models', new_count, skip_count)
        self.session.commit()
        log.info('committed models in %.2f seconds', time.time() - t)
[ "Update", "an", "already", "-", "created", "namespace", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L270-L294
[ "def", "_update_namespace", "(", "self", ",", "namespace", ":", "Namespace", ")", "->", "None", ":", "old_entry_identifiers", "=", "self", ".", "_get_old_entry_identifiers", "(", "namespace", ")", "new_count", "=", "0", "skip_count", "=", "0", "for", "model", "in", "self", ".", "_iterate_namespace_models", "(", ")", ":", "if", "self", ".", "_get_identifier", "(", "model", ")", "in", "old_entry_identifiers", ":", "continue", "entry", "=", "self", ".", "_create_namespace_entry_from_model", "(", "model", ",", "namespace", "=", "namespace", ")", "if", "entry", "is", "None", "or", "entry", ".", "name", "is", "None", ":", "skip_count", "+=", "1", "continue", "new_count", "+=", "1", "self", ".", "session", ".", "add", "(", "entry", ")", "t", "=", "time", ".", "time", "(", ")", "log", ".", "info", "(", "'got %d new entries. skipped %d entries missing names. committing models'", ",", "new_count", ",", "skip_count", ")", "self", ".", "session", ".", "commit", "(", ")", "log", ".", "info", "(", "'committed models in %.2f seconds'", ",", "time", ".", "time", "(", ")", "-", "t", ")" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
BELNamespaceManagerMixin.add_namespace_to_graph
Add this manager's namespace to the graph.
src/bio2bel/manager/namespace_manager.py
def add_namespace_to_graph(self, graph: BELGraph) -> Namespace:
        """Add this manager's namespace to the graph."""
        namespace = self.upload_bel_namespace()
        graph.namespace_url[namespace.keyword] = namespace.url

        # Add this manager as an annotation, too
        self._add_annotation_to_graph(graph)

        return namespace
def add_namespace_to_graph(self, graph: BELGraph) -> Namespace:
        """Add this manager's namespace to the graph."""
        namespace = self.upload_bel_namespace()
        graph.namespace_url[namespace.keyword] = namespace.url

        # Add this manager as an annotation, too
        self._add_annotation_to_graph(graph)

        return namespace
[ "Add", "this", "manager", "s", "namespace", "to", "the", "graph", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L296-L304
[ "def", "add_namespace_to_graph", "(", "self", ",", "graph", ":", "BELGraph", ")", "->", "Namespace", ":", "namespace", "=", "self", ".", "upload_bel_namespace", "(", ")", "graph", ".", "namespace_url", "[", "namespace", ".", "keyword", "]", "=", "namespace", ".", "url", "# Add this manager as an annotation, too", "self", ".", "_add_annotation_to_graph", "(", "graph", ")", "return", "namespace" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
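A short usage sketch for add_namespace_to_graph. The pybel BELGraph class is real; the example package providing the Manager is an assumption.

from pybel import BELGraph

from bio2bel_hgnc import Manager  # assumed example package

manager = Manager()
graph = BELGraph(name='example graph', version='0.0.1')

# Uploads (or reuses) the namespace, registers its URL under its keyword on the
# graph, and records the manager's module name under the 'bio2bel' annotation.
namespace = manager.add_namespace_to_graph(graph)
print(namespace.keyword, graph.namespace_url[namespace.keyword])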
valid
BELNamespaceManagerMixin._add_annotation_to_graph
Add this manager as an annotation to the graph.
src/bio2bel/manager/namespace_manager.py
def _add_annotation_to_graph(self, graph: BELGraph) -> None:
        """Add this manager as an annotation to the graph."""
        if 'bio2bel' not in graph.annotation_list:
            graph.annotation_list['bio2bel'] = set()

        graph.annotation_list['bio2bel'].add(self.module_name)
def _add_annotation_to_graph(self, graph: BELGraph) -> None:
        """Add this manager as an annotation to the graph."""
        if 'bio2bel' not in graph.annotation_list:
            graph.annotation_list['bio2bel'] = set()

        graph.annotation_list['bio2bel'].add(self.module_name)
[ "Add", "this", "manager", "as", "an", "annotation", "to", "the", "graph", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L306-L311
[ "def", "_add_annotation_to_graph", "(", "self", ",", "graph", ":", "BELGraph", ")", "->", "None", ":", "if", "'bio2bel'", "not", "in", "graph", ".", "annotation_list", ":", "graph", ".", "annotation_list", "[", "'bio2bel'", "]", "=", "set", "(", ")", "graph", ".", "annotation_list", "[", "'bio2bel'", "]", ".", "add", "(", "self", ".", "module_name", ")" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
valid
BELNamespaceManagerMixin.upload_bel_namespace
Upload the namespace to the PyBEL database. :param update: Should the namespace be updated first?
src/bio2bel/manager/namespace_manager.py
def upload_bel_namespace(self, update: bool = False) -> Namespace:
        """Upload the namespace to the PyBEL database.

        :param update: Should the namespace be updated first?
        """
        if not self.is_populated():
            self.populate()

        namespace = self._get_default_namespace()

        if namespace is None:
            log.info('making namespace for %s', self._get_namespace_name())
            return self._make_namespace()

        if update:
            self._update_namespace(namespace)

        return namespace
def upload_bel_namespace(self, update: bool = False) -> Namespace:
        """Upload the namespace to the PyBEL database.

        :param update: Should the namespace be updated first?
        """
        if not self.is_populated():
            self.populate()

        namespace = self._get_default_namespace()

        if namespace is None:
            log.info('making namespace for %s', self._get_namespace_name())
            return self._make_namespace()

        if update:
            self._update_namespace(namespace)

        return namespace
[ "Upload", "the", "namespace", "to", "the", "PyBEL", "database", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L313-L330
[ "def", "upload_bel_namespace", "(", "self", ",", "update", ":", "bool", "=", "False", ")", "->", "Namespace", ":", "if", "not", "self", ".", "is_populated", "(", ")", ":", "self", ".", "populate", "(", ")", "namespace", "=", "self", ".", "_get_default_namespace", "(", ")", "if", "namespace", "is", "None", ":", "log", ".", "info", "(", "'making namespace for %s'", ",", "self", ".", "_get_namespace_name", "(", ")", ")", "return", "self", ".", "_make_namespace", "(", ")", "if", "update", ":", "self", ".", "_update_namespace", "(", "namespace", ")", "return", "namespace" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
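The call pattern suggested by upload_bel_namespace, sketched with an assumed example package: the first call populates the source data if needed and builds the namespace, and a later call with update=True only appends entries that are not yet stored.

from bio2bel_hgnc import Manager  # assumed example package

manager = Manager()

# Creates the namespace on first use, populating the manager if necessary.
namespace = manager.upload_bel_namespace()
print(namespace.name, namespace.version)

# After the underlying data changes, pick up new entries without rebuilding.
namespace = manager.upload_bel_namespace(update=True)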
valid
BELNamespaceManagerMixin.drop_bel_namespace
Remove the default namespace if it exists.
src/bio2bel/manager/namespace_manager.py
def drop_bel_namespace(self) -> Optional[Namespace]:
        """Remove the default namespace if it exists."""
        namespace = self._get_default_namespace()

        if namespace is not None:
            for entry in tqdm(namespace.entries, desc=f'deleting entries in {self._get_namespace_name()}'):
                self.session.delete(entry)
            self.session.delete(namespace)

            log.info('committing deletions')
            self.session.commit()

        return namespace
def drop_bel_namespace(self) -> Optional[Namespace]:
        """Remove the default namespace if it exists."""
        namespace = self._get_default_namespace()

        if namespace is not None:
            for entry in tqdm(namespace.entries, desc=f'deleting entries in {self._get_namespace_name()}'):
                self.session.delete(entry)
            self.session.delete(namespace)

            log.info('committing deletions')
            self.session.commit()

        return namespace
[ "Remove", "the", "default", "namespace", "if", "it", "exists", "." ]
bio2bel/bio2bel
python
https://github.com/bio2bel/bio2bel/blob/d80762d891fa18b248709ff0b0f97ebb65ec64c2/src/bio2bel/manager/namespace_manager.py#L332-L343
[ "def", "drop_bel_namespace", "(", "self", ")", "->", "Optional", "[", "Namespace", "]", ":", "namespace", "=", "self", ".", "_get_default_namespace", "(", ")", "if", "namespace", "is", "not", "None", ":", "for", "entry", "in", "tqdm", "(", "namespace", ".", "entries", ",", "desc", "=", "f'deleting entries in {self._get_namespace_name()}'", ")", ":", "self", ".", "session", ".", "delete", "(", "entry", ")", "self", ".", "session", ".", "delete", "(", "namespace", ")", "log", ".", "info", "(", "'committing deletions'", ")", "self", ".", "session", ".", "commit", "(", ")", "return", "namespace" ]
d80762d891fa18b248709ff0b0f97ebb65ec64c2
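Finally, a sketch of forcing a clean rebuild by combining drop_bel_namespace with upload_bel_namespace; the example package is again an assumption.

from bio2bel_hgnc import Manager  # assumed example package

manager = Manager()

# Delete the previously generated namespace and its entries, if any exist.
if manager.drop_bel_namespace() is not None:
    print('dropped the existing namespace')

# Rebuild the namespace from the current contents of the source database.
fresh = manager.upload_bel_namespace()
print(f'rebuilt {fresh.name} at version {fresh.version}')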