diff --git a/.gitignore b/.gitignore
index 1d81be5..d332568 100644
--- a/.gitignore
+++ b/.gitignore
@@ -111,9 +111,8 @@
 simulations/
 mlda_analysis/
 References/
-dataAnalysis/*
-!dataAnalysis/chess/
-dataAnalysis/chess/*.fen
+dataAnalysis/chess/*.Rdata
+dataAnalysis/Classification of EEG/
 
 *.csv
 *.csv.log
diff --git a/tensorPredictors/NAMESPACE b/tensorPredictors/NAMESPACE
index 4a1f73d..47be8e1 100644
--- a/tensorPredictors/NAMESPACE
+++ b/tensorPredictors/NAMESPACE
@@ -58,7 +58,6 @@ export(kpir.momentum)
 export(kpir.new)
 export(kronperm)
 export(mat)
-export(matProj)
 export(matpow)
 export(matrixImage)
 export(mcov)
diff --git a/tensorPredictors/R/TSIR.R b/tensorPredictors/R/TSIR.R
index fcb78a5..cae0d07 100644
--- a/tensorPredictors/R/TSIR.R
+++ b/tensorPredictors/R/TSIR.R
@@ -109,5 +109,5 @@ TSIR <- function(X, y, d, sample.axis = 1L,
 
     # reductions matrices `Omega_k^-1 Gamma_k` where there (reverse) kronecker
     # product spans the central tensor subspace (CTS) estimate
-    Map(solve, Omegas, Gammas)
+    structure(Map(solve, Omegas, Gammas), mcov = Omegas, Gammas = Gammas)
 }
diff --git a/tensorPredictors/R/gmlm_tensor_normal.R b/tensorPredictors/R/gmlm_tensor_normal.R
index 9aec59b..e3da781 100644
--- a/tensorPredictors/R/gmlm_tensor_normal.R
+++ b/tensorPredictors/R/gmlm_tensor_normal.R
@@ -61,8 +61,14 @@ gmlm_tensor_normal <- function(X, F, sample.axis = length(dim(X)),
     # Residuals
     R <- X - mlm(F, Map(`%*%`, Sigmas, betas))
 
+    # Numerically more stable version of `sum(log(mapply(det, Omegas)) / dimX)`
+    # which is itself equivalent to `log(det(Omega)) / prod(nrow(Omega))` where
+    # `Omega <- Reduce(kronecker, rev(Omegas))`.
+    det.Omega <- sum(mapply(function(Omega) {
+        sum(log(eigen(Omega, TRUE, TRUE)$values))
+    }, Omegas) / dimX)
     # Initial value of the log-likelihood (scaled and constants dropped)
-    loss <- mean(R * mlm(R, Omegas)) - sum(log(mapply(det, Omegas)) / dimX)
+    loss <- mean(R * mlm(R, Omegas)) - det.Omega
 
     # invoke the logger
     if (is.function(logger)) do.call(logger, list(
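Note on the TSIR.R change: the list of reduction matrices is still returned as before, but the per-mode quantities now ride along as plain R attributes ("mcov" holding the Omegas, and "Gammas"). A minimal usage sketch, assuming a fitted object from TSIR; the inputs X, y, d below are placeholders, not part of the patch:

    fit <- TSIR(X, y, d)          # placeholder call; arguments as in TSIR.R
    fit[[1]]                      # reduction matrix `Omega_1^-1 Gamma_1`, unchanged
    attr(fit, "mcov")             # the `Omegas` attached by the new `structure(...)` call
    attr(fit, "Gammas")           # the per-mode `Gamma_k` matrices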
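For the gmlm_tensor_normal.R change, the new comment relies on the Kronecker determinant identity log(det(Omega)) / prod(dimX) = sum_k log(det(Omega_k)) / dimX[k] for Omega <- Reduce(kronecker, rev(Omegas)). A self-contained sketch (random positive definite matrices with illustrative sizes, not from the patch) that checks the identity and the eigenvalue-based form introduced above:

    # Illustrative check only: all three expressions agree up to floating point
    # error for symmetric positive definite Omegas.
    set.seed(1)
    dimX <- c(3, 4, 5)                          # illustrative mode dimensions
    Omegas <- Map(function(p) crossprod(matrix(rnorm(p^2), p)) + diag(p), dimX)
    Omega <- Reduce(kronecker, rev(Omegas))     # full (reverse) Kronecker product

    log(det(Omega)) / prod(dimX)                # direct form; `det()` can over-/underflow
    sum(log(mapply(det, Omegas)) / dimX)        # factor-wise determinants
    sum(mapply(function(O) sum(log(eigen(O, TRUE, TRUE)$values)), Omegas) / dimX)
                                                # log-eigenvalue form used in the patch

Summing log eigenvalues of the small factors avoids forming any determinant explicitly, which is the stability argument given in the added comment.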