R/overlap.R

# nolint start
#' function that generates a random matrix (as in May, Nature (1972))
#' @param num number of species
#' @param stren standard deviation of interaction strength
#' @param conne connectance of the interaction matrix
#' @return the generated random matrix
#' @importFrom stats rnorm
#' @export
interaction_matrix_random <- function(num, stren, conne) {
  Inte <- rnorm(num * num, mean = 0, sd = stren)
  zeroes <- sample(c(
    rep.int(1, floor(num * num * conne)),
    rep.int(0, num * num - floor(num * num * conne))
  ))
  Inte[which(zeroes == 0)] <- 0
  Inte <- matrix(Inte, ncol = num, nrow = num)
  diag(Inte) <- -1
  return(Inte)
}
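
# Usage sketch (illustrative only; wrapped in `if (FALSE)` so it never runs when
# the package is loaded). The parameter values are arbitrary assumptions, not
# package defaults.
if (FALSE) {
  A <- interaction_matrix_random(num = 4, stren = 0.2, conne = 0.5)
  diag(A) # all -1 by construction
}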

#' function that computes the normalized feasibility from the vertices of the feasibility domain
#' @import geometry
#' @import uniformly
#' @import mvtnorm
#' @importFrom stats runif
#' @param vertex matrix whose columns are the vertices of the feasibility domain
#' @param raw TRUE: raw omega, FALSE: normalized omega
#' @param nsamples number of sampled points
#' @param method either 'convex_hull' or 'sphere'
#' @return the normalized feasibility
#' @export
calculate_omega <- function(vertex, raw = FALSE, nsamples = 100,
                            method = "convex_hull") {
  num <- nrow(vertex)
  vertex <- generate_span_vectors(vertex)

  if (method == "convex_hull") {
    set.seed(1010)
    # map directions sampled uniformly on the nonnegative part of the unit
    # sphere through the spanning vectors to obtain points inside the cone
    vertex <- cbind(
      vertex,
      vertex %*% t(abs(runif_on_sphere(n = nsamples, d = ncol(vertex), r = 1)))
    )
    if (num < 5) {
      vertex <- generate_span_vectors(vertex) %*% diag(
        runif(
          ncol(vertex),
          (1 - .05 * (num - 2)),
          (1 + .05 * (num - 2))
        )
      )
    } else {
      vertex <- generate_span_vectors(vertex) %*% diag(
        runif(
          ncol(vertex),
          (1 - .05 * (num - 2)),
          (1 + .1 * (num - 2))
        )
      )
    }

    # include the origin so that the hull approximates the solid cone
    vertex <- cbind(vertex, rep(0, num))

    vol_ori <- (convhulln(t(vertex), output.options = TRUE)$vol)
    vol_ball <- (pi^(num / 2) / gamma(num / 2 + 1)) # volume of the unit num-ball
    # vol_ball <- calculate_omega(diag(num), nsamples = nsamples)

    omega <- ifelse(raw == FALSE,
      (vol_ori / vol_ball)^(1 / num),
      vol_ori / vol_ball
    )
  } else if (method == "sphere") {
    # orthant probability of a centered multivariate normal whose covariance
    # is the inverse Gram matrix of the spanning vectors
    d <- pmvnorm(
      lower = rep(0, num),
      upper = rep(Inf, num),
      mean = rep(0, num), sigma = solve(t(vertex) %*% vertex)
    )
    omega <- ifelse(raw == FALSE,
      d[1]^(1 / num),
      d[1]
    )
  } else {
    stop("method must be either 'convex_hull' or 'sphere'")
  }
  omega
}
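
# Usage sketch (illustrative only; the spanning vectors below are arbitrary
# assumptions chosen to give a full-rank cone). The two methods approximate the
# same quantity. Kept inside `if (FALSE)` so it is never run at load time.
if (FALSE) {
  V <- matrix(c(
    1.0, 0.2, 0.2,
    0.2, 1.0, 0.2,
    0.2, 0.2, 1.0
  ), nrow = 3)
  calculate_omega(V, method = "sphere")       # orthant-probability route (pmvnorm)
  calculate_omega(V, method = "convex_hull")  # Monte Carlo convex-hull route
}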

#' function that normalizes a vector (L2 norm by default)
#' @param a the original vector
#' @param norm the exponent p of the L^p norm used for normalization (default 2)
#' @return the normalized vector
#' @export
normalize <- function(a, norm = 2) {
  a / sum(abs(a)^norm)^(1 / norm)
}
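
# Quick check (illustrative): with the default L2 norm the vector is rescaled to
# unit length; with norm = 1 it is rescaled by the sum of absolute values.
if (FALSE) {
  normalize(c(3, 4))            # c(0.6, 0.8)
  normalize(c(3, 4), norm = 1)  # c(3, 4) / 7
}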

#' function that normalizes the spanning vectors of the feasibility domain in the L2 norm
#' @param alpha interaction matrix
#' @return the normalized spanning vectors
#' @export
generate_span_vectors <- function(alpha) {
  apply(alpha, 2, normalize)
}
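
# Minimal example (illustrative; any full-rank matrix works). Each column of
# the result has unit L2 length. Not run at load time.
if (FALSE) {
  S <- generate_span_vectors(matrix(c(2, 0, 0, 3), nrow = 2))
  colSums(S^2) # c(1, 1)
}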

#' function that computes all the extreme points that are vertices of the original cones
#' @param A one interaction matrix
#' @param B another interaction matrix
#' @return the extreme points that are vertices of one cone and lie inside the other
#' @export
inside_vertex_detection <- function(A, B) {
  SpanA <- generate_span_vectors(A)
  SpanB <- generate_span_vectors(B)
  # to determine whether a vertex of one cone is inside another cone or not.
  inside_detection <- function(Span, vector) {
    lambda <- solve(Span, vector)
    if (sum(lambda >= -1e-10) == length(lambda)) {
      return(1)
    } else {
      return(0)
    }
  }
  inside_vertex <- list()
  l <- 1
  for (i in 1:ncol(B)) {
    auxi <- inside_detection(SpanA, SpanB[, i])
    if (auxi == 1) {
      inside_vertex[[l]] <- SpanB[, i]
      l <- l + 1
    }
  }
  for (i in 1:ncol(A)) {
    auxi <- inside_detection(SpanB, SpanA[, i])
    if (auxi == 1) {
      inside_vertex[[l]] <- SpanA[, i]
      l <- l + 1
    }
  }
  return(inside_vertex)
}
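
# Usage sketch (illustrative; the random matrices are assumptions). Returns a
# (possibly empty) list of unit vectors: vertices of one cone lying inside the
# other. Guarded by `if (FALSE)` so it is never executed on load.
if (FALSE) {
  set.seed(42)
  A <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  B <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  inside_vertex_detection(A, B)
}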

#' function that computes all the extreme points that are generated by the intersections of the cones
#' @param S one interaction matrix
#' @param M another interaction matrix
#' @importFrom utils combn
#' @importFrom pracma nullspace Rank
#' @return all the extreme points that are generated by the intersections of the cones
#' @export
intersection_vertex_detection <- function(S, M) {
  num <- ncol(S)
  if (num == 2) {
    return(list())
  } else {
    combination_S <- combn(1:ncol(S), 2)
    combination_M <- combn(1:ncol(M), (num - 1))
    Span_S <- generate_span_vectors(S)
    Span_M <- generate_span_vectors(M)

    border_M <- list()
    extreme_point_M <- list()
    for (i in 1:ncol(M)) {
      coeff_matrix <- matrix(0, ncol = num, nrow = num - 1)
      for (j in 1:(num - 1)) {
        coeff_matrix[j, ] <- Span_M[, combination_M[j, i]]
      }
      if (Rank(coeff_matrix) == (num - 1)) {
        border_M[[i]] <- nullspace(coeff_matrix)
      } else {
        stop("border_M_i is denegerated. Check the dimension.")
      }
      extreme_point_M[[i]] <- t(coeff_matrix)[1:(num - 1), 1:(num - 1)]
    }

    inside_face_detection <- function(extreme_point, test_vector) {
      lambda <- solve(extreme_point, test_vector)
      if (sum(lambda >= -1e-10) == length(lambda)) {
        return(1)
      } else {
        return(0)
      }
    }

    l <- 1
    intersection_vertex <- list()
    side <- c()
    for (i in 1:ncol(combination_S)) {
      vertex_1 <- Span_S[, combination_S[1, i]]
      vertex_2 <- Span_S[, combination_S[2, i]]
      for (j in 1:length(border_M)) {
        n1 <- sum(vertex_1 * border_M[[j]])
        n2 <- sum(vertex_2 * border_M[[j]])

        auxi <- n1 * n2
        if (auxi < -1e-10) {
          lambda <- n2 / (n2 - n1)
          possible <- lambda * vertex_1 + (1 - lambda) * vertex_2

          if (det(extreme_point_M[[j]]) != 0) {
            auxi2 <- inside_face_detection(extreme_point_M[[j]], possible[1:(num - 1)])
            if (auxi2 == 1) {
              intersection_vertex[[l]] <- possible
              side[l] <- j
              l <- l + 1
            }
          }
        }
      }
    }

    if (length(intersection_vertex) > 0) {
      for (i in 1:length(intersection_vertex)) {
        intersection_vertex[[i]] <- normalize(intersection_vertex[[i]])
      }
    }
  }

  return(intersection_vertex)
}
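
# Usage sketch (illustrative; the random matrices are assumptions). Returns the
# (possibly empty) list of extreme points created where the borders of the two
# cones cross. Not executed on load.
if (FALSE) {
  set.seed(42)
  S <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  M <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  intersection_vertex_detection(S, M)
}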

#' function that computes all the extreme points
#' @import dplyr
#' @param A one interaction matrix
#' @param B another interaction matrix
#' @return all the extreme points that generate the intersection region
#' @export
vertex_detection <- function(A, B) {
  num <- ncol(A)
  inside_vertex <- inside_vertex_detection(A, B)
  intersection_vertex <- intersection_vertex_detection(A, B)

  # combine the two vertex lists
  if (length(inside_vertex) > 0) {
    vertex <- matrix(unlist(inside_vertex), nrow = num, byrow = FALSE)
  } else {
    vertex <- matrix(0, nrow = num, ncol = 2)
  }
  if (length(intersection_vertex) > 0) {
    vertex <- cbind(vertex, matrix(unlist(intersection_vertex), nrow = num, byrow = FALSE))
  }

  # delete points that are zero, or nonzero only due to numerical error
  delete_zeroes <- c()
  for (i in 1:ncol(vertex)) {
    if (near(sum(vertex[, i]^2), 0)) {
      delete_zeroes <- c(delete_zeroes, i)
    }
  }
  if (length(delete_zeroes) > 0) vertex <- vertex[, -delete_zeroes]


  # delete duplicate vertices
  if (length(vertex) > num) {
    for (test in 1:ncol(vertex)) {
      vertex[, test] <- normalize(vertex[, test])
    }
    delete_duplicates <- c()
    for (i in 1:(ncol(vertex) - 1)) {
      for (j in (i + 1):ncol(vertex)) {
        if (sum(near(vertex[, i], vertex[, j])) == nrow(vertex)) {
          delete_duplicates <- c(delete_duplicates, j)
        }
      }
    }
    if (length(delete_duplicates) > 0) vertex <- vertex[, -unique(delete_duplicates)]
  }
  return(vertex)
}
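
# Usage sketch (illustrative; the random matrices are assumptions). The columns
# of the result are the normalized extreme points spanning the intersection of
# the two feasibility domains. Not executed on load.
if (FALSE) {
  set.seed(42)
  A <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  B <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  vertex_detection(A, B)
}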

#' function that computes the overlap of two feasibility domains
#' @param A one interaction matrix
#' @param B another interaction matrix
#' @param raw TRUE: raw omega, FALSE: normalized omega
#' @param nsamples number of sampled points
#' @return the normalized feasibility of the intersection region
#' @export
calculate_omega_overlap <- function(A, B, raw = FALSE, nsamples = 100) {
  num <- nrow(A)

  overlap_vertex <- vertex_detection(A, B) %>%
    cbind(vertex_detection(B, A)) %>%
    unique(MARGIN = 2)

  if (qr(overlap_vertex)$rank < num) {
    volume_overlap <- 0
  } else {
    volume_overlap <- tryCatch(
      {
        calculate_omega(overlap_vertex, raw, nsamples)
      },
      error = function(cond) {
        0
      }
    )
  }

  volume_overlap
}
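
# Usage sketch (illustrative; parameter values are assumptions). Estimates the
# normalized feasibility of the region shared by the two feasibility domains.
# Guarded by `if (FALSE)` so it never runs at load time.
if (FALSE) {
  set.seed(42)
  A <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  B <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  calculate_omega_overlap(A, B, nsamples = 100)
}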

#' function that computes the overlap of a feasibility domain with the domain defined by a constraint matrix
#' @param A one interaction matrix
#' @param B the constraint matrix
#' @param raw TRUE: raw omega, FALSE: normalized omega
#' @param nsamples number of sampled points (defaults to 2^num * 250 when missing)
#' @import purrr
#' @return the normalized feasibility of the intersection region
#' @export
calculate_omega_constraint <- function(A, B, raw = FALSE, nsamples) {
  num <- nrow(A)
  sign <- -diag(B)

  test_feasibility <- function(A) {
    r <- rnorm(num)
    r <- r / sqrt(sum(r^2))
    r <- abs(r) * sign
    N <- -solve(A, r)
    ifelse(sum(N < 0) == 0, 1, 0)
  }

  if (missing(nsamples)) {
    nsamples <- 2^num * 250
  }

  feasibility <- 1:nsamples %>%
    map_dbl(~ test_feasibility(A))

  volume_overlap <- ifelse(raw,
    mean(feasibility),
    mean(feasibility)^(1 / num)
  )

  volume_overlap * calculate_omega(B, method = "sphere")
}
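
# Usage sketch (illustrative; both matrices are assumptions, and -diag(3) is
# used only as a simple stand-in for a constraint matrix). Not run at load time.
if (FALSE) {
  set.seed(42)
  A <- interaction_matrix_random(num = 3, stren = 0.2, conne = 1)
  B <- -diag(3)
  calculate_omega_constraint(A, B, nsamples = 1000)
}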