# Legacy testthat context header (deprecated in testthat 3e but harmless);
# groups the entropy / mutual-information tests under one label.
context("entropy and mutual information function")
test_that("entropy function works", {
  # Reference implementation: Shannon entropy of the normalized weights,
  # with the 0 * log(0) = 0 convention handled via ifelse().
  entropyR <- function(x) {
    y <- x / sum(x)
    -1.0 * sum(ifelse(y == 0, 0, y * log(y)))
  }
  for (i in seq.int(10)) {
    x <- runif(10)
    expect_equal(entropyR(x), .entropy(x))
    # Missing values: NA propagates unless na_rm is set.
    expect_true(is.na(.entropy(c(x, NA))))
    expect_identical(.entropy(x), .entropy(c(x, NA), na_rm = TRUE))
  }
  # Fresh input for the edge-case checks (the original relied on the
  # loop variable `x` leaking out of the for loop above).
  x <- runif(10)
  # Negative weights must be rejected; a zero weight contributes nothing.
  expect_error(.entropy(c(x, -1)))
  expect_identical(.entropy(x), .entropy(c(x, 0)))
})
test_that("mutual information function works", {
  # MI of a contingency table built from two discrete columns is positive.
  x <- matrix(sample.int(3, 500, replace = TRUE), ncol = 2)
  expect_gt(mutual_info(table(x[, 1], x[, 2])), 0.0)
  for (i in seq.int(3)) {
    x <- matrix(sample.int(1000, 500, replace = TRUE), ncol = 4)
    expect_gt(mutual_info(x), 0.0)
    # Non-integer entries must be rejected.
    x2 <- x
    x2[1, 1] <- 0.1
    expect_error(mutual_info(x2))
    # Negative entries must be rejected.
    x2 <- x
    x2[1, 1] <- -1L
    expect_error(mutual_info(x2))
    # Check dominance ordering between NMI variants: with normalizations
    # in this order each value is strictly larger than the previous one,
    # and the adjusted value never reaches the unadjusted one.
    old_m <- 0.0
    old_adj <- -100
    for (type in c("max", "arithmetic", "geometric", "harmonic",
                   "min", "none")) {
      m <- mutual_info(x, normalized = type, adjust = FALSE)
      a <- mutual_info(x, normalized = type, adjust = TRUE)
      expect_gt(m, old_m)
      expect_gt(a, old_adj)
      expect_gt(m, a)
      old_m <- m
      old_adj <- a
    }
  }
  # Cross-check against aricode::NMI, disabled because aricode is not a
  # declared dependency. NOTE(review): the original commented code used
  # `what = type2, adjusted = F`, which does not match the live API above
  # (`normalized` / `adjust`); corrected here for when it is re-enabled.
  # for (type in c("min", "max", "sqrt", "sum")) {
  #   type2 <- switch(type,
  #                   min = "min",
  #                   max = "max",
  #                   sqrt = "geometric",
  #                   sum = "arithmetic")
  #   m1 <- aricode::NMI(x[, 1], x[, 2], type)
  #   m2 <- mutual_info(table(x[, 1], x[, 2]),
  #                     normalized = type2, adjust = FALSE)
  #   expect_equal(m1, m2)
  # }
})
# NOTE(review): the two lines below are website-scraper residue (rdrr.io
# embed boilerplate), not R code; commented out so the file parses.
# Add the following code to your website.
# For more information on customizing the embed code, read Embedding Snippets.