library(tensorPredictors)
library(mvbernoulli)

set.seed(141421356, "Mersenne-Twister", "Inversion", "Rejection")

### simulation configuration
reps     <- 100     # number of simulation replications
max.iter <- 1000    # maximum number of iterations for GMLM
n <- 100            # sample size `n`
N <- 2000           # validation set size
p <- c(4, 4)        # predictor dimensions (ONLY 4 by 4 allowed!)
q <- c(2, 2)        # response dimensions (ONLY 2 by 2 allowed!)
r <- length(p)

# parameter configuration
rho <- -0.55
c1 <- 1
c2 <- 1

# initial consistency checks
stopifnot(exprs = {
    r == 2
    all.equal(p, c(4, 4))
    all.equal(q, c(2, 2))
})

### small helpers
# 270 degree matrix layout rotation (90 degree clockwise)
rot270 <- function(A) t(A)[, rev(seq_len(nrow(A))), drop = FALSE]
# Auto-Regression covariance matrix
AR <- function(rho, dim) rho^abs(outer(seq_len(dim), seq_len(dim), `-`))
# Inverse of the AR matrix
AR.inv <- function(rho, dim) {
    A <- diag(c(1, rep(rho^2 + 1, dim - 2), 1))
    A[abs(.row(dim(A)) - .col(dim(A))) == 1] <- -rho
    A / (1 - rho^2)
}
# projection matrix `P_A` onto the span of `A`
proj <- function(A) tcrossprod(A, A %*% solve(crossprod(A, A)))

### setup Ising parameters (to get reasonable parameters)
eta1 <- 0
# alphas <- Map(function(pj, qj) {  # qj ignored, it is 2
#     linspace <- seq(-1, 1, length.out = pj)
#     matrix(c(linspace, rev(linspace)), pj, 2)
# }, p, q)
alphas <- Map(function(pj, qj) {    # qj ignored, it is 2
    linspace <- seq(-1, 1, length.out = pj)
    matrix(c(linspace, linspace^2), pj, 2)
}, p, q)
# alphas <- Map(function(pj, qj) {
#     qr.Q(qr(matrix(rnorm(pj * qj), pj, qj)))
# }, p, q)
Omegas <- Map(AR, dim = p, MoreArgs = list(rho))

# data sampling routine
sample.data <- function(n, eta1, alphas, Omegas, sample.axis = r + 1L) {
    # generate response (sample axis is the last axis)
    y <- runif(n, -1, 1)    # Y ~ U[-1, 1]
    Fy <- rbind(cos(pi * y), sin(pi * y), -sin(pi * y), cos(pi * y))
    dim(Fy) <- c(2, 2, n)

    # natural exponential family parameters
    eta_y1 <- c1 * (mlm(Fy, alphas) + c(eta1))
    eta_y2 <- c2 * Reduce(`%x%`, rev(Omegas))

    # conditional Ising model parameters
    theta_y <- matrix(rep(vech(eta_y2), n), ncol = n)
    ltri <- which(lower.tri(eta_y2, diag = TRUE))
    diagonal <- which(diag(TRUE, nrow(eta_y2))[ltri])
    theta_y[diagonal, ] <- eta_y1

    # sample X from the conditional distribution
    X <- apply(theta_y, 2, ising_sample, n = 1)
    # convert (from compressed integer vector) to array data
    attr(X, "p") <- prod(p)
    X <- t(as.mvbmatrix(X))
    dim(X) <- c(p, n)
    storage.mode(X) <- "double"

    # permute axes to place the sample axis at the requested position
    if (sample.axis != r + 1L) {
        perm <- integer(r + 1L)
        perm[sample.axis] <- r + 1L
        perm[-sample.axis] <- seq_len(r)
        X <- aperm(X, perm)
        Fy <- aperm(Fy, perm)
    }

    list(X = X, Fy = Fy, y = y, sample.axis = sample.axis)
}

### sample (training) data
c(X, Fy, y, sample.axis) %<-% sample.data(n, eta1, alphas, Omegas)
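# Quick sanity check of the simulated data (a minimal sketch, not part of the
# original simulation): the expected shapes follow from `p`, `q` and the default
# sample axis being the last one; the 0/1 coding of the Ising draws is assumed.
stopifnot(exprs = {
    all(dim(X) == c(p, n))
    all(dim(Fy) == c(q, n))
    all(X %in% c(0, 1))     # assumption: binary 0/1 coded predictors
})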
### Fit data using GMLM with logging

# logger to track the iterative change in the estimation process of GMLM
# log <- data.frame()
log.likelihood <- tensorPredictors:::make.gmlm.family("ising")$log.likelihood
B.true <- Reduce(`%x%`, rev(alphas))

logger <- function(iter, eta1.est, alphas.est, Omegas.est) {
    B.est <- Reduce(`%x%`, rev(alphas.est))

    err.alphas <- mapply(dist.subspace, alphas, alphas.est,
        MoreArgs = list(normalize = TRUE))
    err.Omegas <- mapply(norm, Map(`-`, Omegas, Omegas.est),
        MoreArgs = list(type = "F"))

    if (iter > 0) { cat("\033[9A") }
    cat(sprintf("\n\033[2mIter: loss - dist\n\033[0m%4d: %8.3f - %8.3f",
            iter,
            log.likelihood(X, Fy, eta1.est, alphas.est, Omegas.est),
            dist.subspace(B.true, B.est, normalize = TRUE)
        ),
        "\033[2mMSE eta1\033[0m",
        mean((eta1 - eta1.est)^2),
        "\033[2msubspace distances of alphas\033[0m",
        do.call(paste, Map(sprintf, err.alphas, MoreArgs = list(fmt = "%8.3f"))),
        "\033[2mFrob. norm of Omega differences\033[0m",
        do.call(paste, Map(sprintf, err.Omegas, MoreArgs = list(fmt = "%8.3f"))),
        sep = "\n "
    )
}

# now call the GMLM fitting routine with performance profiling
tryCatch({
    system.time(
    # profvis::profvis(
        fit.gmlm <- GMLM.default(
            X, Fy, sample.axis = sample.axis,
            max.iter = max.iter, family = "ising", logger = logger
        )
    )
}, error = function(ex) {
    print(ex)
    traceback()
})
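### Validation (sketch)
# A minimal, hedged sketch of how the held-out validation set of size `N` could
# be used: draw an independent sample and evaluate the fitted parameters with
# the same `log.likelihood` and `dist.subspace` used by the logger above. The
# component names `eta1`, `alphas` and `Omegas` of `fit.gmlm` are an assumption
# about the return value of `GMLM.default`, not confirmed by this script.
if (exists("fit.gmlm") && is.list(fit.gmlm)) {
    val <- sample.data(N, eta1, alphas, Omegas)
    B.est <- Reduce(`%x%`, rev(fit.gmlm$alphas))    # assumed component name
    cat(sprintf("validation loss: %8.3f - subspace dist: %8.3f\n",
        log.likelihood(val$X, val$Fy, fit.gmlm$eta1, fit.gmlm$alphas, fit.gmlm$Omegas),
        dist.subspace(B.true, B.est, normalize = TRUE)
    ))
}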