add: custom metrics support

add: MAVE subspace distance measure
Daniel Kapla 2021-09-07 12:55:51 +02:00
parent 69a008535b
commit c3757cb055
3 changed files with 69 additions and 13 deletions

View file

@@ -4,6 +4,7 @@ S3method(coef,nnsdr)
S3method(summary,nnsdr)
export(dataset)
export(dist.grassmann)
export(dist.mave)
export(dist.subspace)
export(get.script)
export(nnsdr)
@@ -12,6 +13,7 @@ export(reinitialize_weights)
export(reset_optimizer)
exportClasses(nnsdr)
import(methods)
import(reticulate)
import(stats)
import(tensorflow)
importFrom(stats,rbinom)

27  NNSDR/R/dist_mave.R  Normal file
View file

@@ -0,0 +1,27 @@
#' Subspace distance used in [Xia et al., 2002] (the first MAVE paper).
#'
#' @param A,B Basis matrices (assumed full rank) as representations of elements
#' of the Grassmann manifold.
#' @param is.ortho Boolean to specify if `A` and `B` are semi-orthogonal. If
#' false, a QR decomposition is used to orthogonalize both `A` and `B`.
#'
#' @seealso
#' Y. Xia and H. Tong and W.K. Li and L. Zhu (2002) "An adaptive estimation of
#' dimension reduction space" <DOI:10.1111/1467-9868.03411>
#'
#' @export
dist.mave <- function(A, B, is.ortho = FALSE) {
    if (!is.matrix(A)) A <- as.matrix(A)
    if (!is.matrix(B)) B <- as.matrix(B)
    # Orthogonalize the bases unless they are already semi-orthogonal.
    if (!is.ortho) {
        A <- qr.Q(qr(A))
        B <- qr.Q(qr(B))
    }
    # Frobenius norm of the projection of the smaller basis onto the
    # orthogonal complement of the span of the larger one.
    if (ncol(A) < ncol(B)) {
        norm((diag(nrow(A)) - tcrossprod(B, B)) %*% A, 'F')
    } else {
        norm((diag(nrow(A)) - tcrossprod(A, A)) %*% B, 'F')
    }
}
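
A quick usage sketch (illustrative only, not part of the commit): the distance is zero for identical subspaces and positive once the spans differ.

# Hypothetical check of dist.mave on random bases in R^10.
A <- qr.Q(qr(matrix(rnorm(10 * 2), 10, 2)))  # basis of a 2-dim subspace
B <- qr.Q(qr(matrix(rnorm(10 * 3), 10, 3)))  # basis of a 3-dim subspace
dist.mave(A, A, is.ortho = TRUE)  # exactly 0: identical subspaces
dist.mave(A, B)                   # positive: the spans differ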

View file

@@ -10,18 +10,20 @@ Sys.setenv(TF_CPP_MIN_LOG_LEVEL = "3")
#' @param add_reduction TODO:
#' @param hidden_units TODO:
#' @param activation TODO:
#' @param output_activation TODO:
#' @param dropout TODO:
#' @param loss TODO:
#' @param optimizer TODO:
#' @param metrics TODO:
#' @param trainable_reduction TODO:
#'
#' @import tensorflow
#' @import reticulate tensorflow
#' @keywords internal
build.MLP <- function(input_shapes, d, name, add_reduction,
    output_shape = 1L,
    hidden_units = 512L,
    activation = 'relu',
    output_activation = NULL,
    dropout = 0.4,
    loss = 'MSE',
    optimizer = 'RMSProp',
@@ -63,9 +65,25 @@ build.MLP <- function(input_shapes, d, name, add_reduction,
        if (dropout > 0)
            out <- K$layers$Dropout(rate = dropout, name = paste0('dropout', i))(out)
    }
    out <- K$layers$Dense(units = output_shape, name = 'output')(out)
    out <- K$layers$Dense(units = output_shape, activation = output_activation,
        name = 'output')(out)
    mlp <- K$models$Model(inputs = inputs, outputs = out, name = name)
    if (!is.null(metrics)) {
        metrics <- as.list(metrics)
        # Iterate in reverse: dropping an element via `[[i]] <- NULL`
        # shortens the list and would shift the indices yet to be visited.
        for (i in rev(seq_along(metrics))) {
            metric <- metrics[[i]]
            if (all(c("nnsdr.metric", name) %in% class(metric))) {
                # Wrap the R metric closure as a named Python callable.
                metric_fn <- reticulate::py_func(metric(mlp))
                reticulate::py_set_attr(metric_fn, "__name__", attr(metric, "name"))
                metrics[[i]] <- metric_fn
            } else if ("nnsdr.metric" %in% class(metric)) {
                metrics[[i]] <- NULL # Drop metrics meant for other models
            }
        }
    }
    mlp$compile(loss = loss, optimizer = optimizer, metrics = metrics)
    mlp
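
The compile step above treats each custom metric as a factory: an object carrying class `nnsdr.metric` (plus, optionally, the target model's name) that, when called with the Keras model, returns an R closure `function(y_true, y_pred)`. A hypothetical sketch of such a metric (the constructor and names here are illustrative, not package API):

# Hypothetical custom metric in the shape the compile step expects.
# The factory receives the Keras model and returns the metric closure
# that Keras evaluates on each batch.
rmse <- function(model) {
    function(y_true, y_pred) {
        tensorflow::tf$sqrt(tensorflow::tf$reduce_mean((y_true - y_pred) ^ 2))
    }
}
class(rmse) <- c("nnsdr.metric", "OPG")  # tag with the target model's name
attr(rmse, "name") <- "rmse"             # used as the Python `__name__`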
@@ -95,18 +113,27 @@ nnsdr <- setRefClass('nnsdr',
if (is.null(.self$history.opg))
    return(NULL)
history <- data.frame(
    .self$history.opg,
    model = factor('OPG'),
    epoch = seq_len(nrow(.self$history.opg))
)
if (!is.null(.self$history.ref))
    history <- rbind(history, data.frame(
if (is.null(.self$history.ref)) {
    history <- data.frame(
        .self$history.opg,
        model = factor('OPG')
    )
} else {
    hist.opg <- data.frame(
        .self$history.opg,
        model = factor('OPG')
    )
    hist.ref <- data.frame(
        .self$history.ref,
        model = factor('Refinement'),
        epoch = seq_len(nrow(.self$history.ref))
    ))
        model = factor('Refinement')
    )
    # Augment mutually exclusive columns with NA so the frames align
    hist.opg[setdiff(names(hist.ref), names(hist.opg))] <- NA
    hist.ref[setdiff(names(hist.opg), names(hist.ref))] <- NA
    # Combine/Bind
    history <- rbind(hist.opg, hist.ref)
}
history$epoch <- seq_len(nrow(history))
history
}
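
The NA augmentation above is the standard base-R way to rbind data frames whose columns only partially overlap (OPG and Refinement histories track different metrics). A minimal standalone illustration on toy data, not package code:

# Toy illustration of the augment-then-rbind pattern used above.
a <- data.frame(loss = c(1, 2), val_loss = c(3, 4))
b <- data.frame(loss = c(5, 6), rmse = c(7, 8))
a[setdiff(names(b), names(a))] <- NA  # add 'rmse' column filled with NA
b[setdiff(names(a), names(b))] <- NA  # add 'val_loss' column filled with NA
rbind(a, b)                           # 4 rows; columns: loss, val_loss, rmse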