View source: R/functions_burninthining.R
1 | gridsearch_burninthining_single(partition, theta, nodes, effects, objects, num.steps, neighborhoods, sizes.allowed, sizes.simulated, max.thining, parallel = F, cpus = 1)
|
partition |
Observed partition. |
theta |
Initial model parameters. |
nodes |
Data frame containing the nodes. |
effects |
Effects or sufficient statistics. A list with a vector "names" and a vector "objects". |
objects |
Objects used for statistics calculation. A list with a vector "name" and a vector "object". |
num.steps |
Number of samples wanted. |
neighborhoods |
List of probability vectors (probability actors swap, probability merge/division, probability single actor move). |
sizes.allowed |
Vector of group sizes allowed in sampling (now, it only works for vectors like size_min:size_max). |
sizes.simulated |
Vector of group sizes allowed in the Markov chain but not necessarily sampled (now, it only works for vectors like size_min:size_max). |
parallel |
Whether to run the different neighborhoods in parallel. Default = FALSE |
cpus |
Default = 1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 | ##---- Should be DIRECTLY executable !! ----
##-- ==> Define data, use random,
##-- or do help(data=index) for the standard data sets.
## The function is currently defined as
## Grid search over neighborhood probability settings: for each neighborhood
## (a vector of move-type probabilities: swap, merge/division, single actor
## move), run a burn-in/thinning simulation and summarize how quickly the
## maximum smoothed autocorrelation drops below the 0.4 and 0.5 thresholds.
##
## Arguments:
##   partition        observed partition
##   theta            initial model parameters
##   nodes            data frame containing the nodes
##   effects          list with vectors "names" and "objects"
##   objects          list with vectors "name" and "object"
##   num.steps        number of samples drawn per simulation
##   neighborhoods    list of probability vectors (p_swap, p_mergediv, p_single)
##   sizes.allowed    group sizes allowed in sampling
##   sizes.simulated  group sizes allowed in the chain, not necessarily sampled
##   max.thining      maximum thinning value explored
##   parallel         run neighborhoods in parallel via snowfall (default FALSE)
##   cpus             number of cpus when parallel = TRUE (default 1)
##
## Returns a list with:
##   results.search   data frame, one row per neighborhood: probability settings,
##                    final max autocorrelation, and for each threshold the count
##                    of thinning values reaching it plus the first thinning value
##                    that does (-1 if the threshold is never reached)
##   all.simulations  raw simulation results, in the order of `neighborhoods`
function (partition, theta, nodes, effects, objects, num.steps,
    neighborhoods, sizes.allowed, sizes.simulated, max.thining,
    parallel = FALSE, cpus = 1)
{
    n.neigh <- length(neighborhoods)
    if (parallel) {
        # Split the neighborhood indexes into (roughly) equal chunks, one per cpu.
        chunk <- ceiling(n.neigh / cpus)
        subindexes <- vector("list", cpus)
        for (c in seq_len(cpus)) {
            start <- (c - 1) * chunk + 1
            end <- if (c == cpus) n.neigh else c * chunk
            subindexes[[c]] <- start:end
        }
        # NOTE(review): assumes a snowfall cluster is already initialized and
        # that simulate_burninthining_single is available on the workers
        # (e.g. via sfLibrary) -- sfExport only ships the data objects.
        sfExport("partition", "theta", "nodes", "effects", "objects",
            "num.steps", "neighborhoods", "sizes.allowed", "sizes.simulated",
            "max.thining", "subindexes")
        res <- sfLapply(seq_len(cpus), fun = function(k) {
            idx <- subindexes[[k]]
            subres <- vector("list", length(idx))
            for (i in seq_along(idx)) {
                subres[[i]] <- simulate_burninthining_single(partition,
                    theta, nodes, effects, objects, num.steps,
                    neighborhoods[[idx[i]]], sizes.allowed, sizes.simulated,
                    max.thining)
            }
            return(subres)
        })
        # Flatten the per-cpu result lists, preserving neighborhood order.
        allsimulations <- do.call(c, res)
    }
    else {
        allsimulations <- vector("list", n.neigh)
        for (i in seq_along(neighborhoods)) {
            allsimulations[[i]] <- simulate_burninthining_single(partition,
                theta, nodes, effects, objects, num.steps, neighborhoods[[i]],
                sizes.allowed, sizes.simulated, max.thining)
        }
    }
    # Preallocate the per-neighborhood summary vectors (avoids O(n^2) growth).
    p1 <- numeric(n.neigh)
    p2 <- numeric(n.neigh)
    p3 <- numeric(n.neigh)
    reach0.4 <- numeric(n.neigh)
    firstthining0.4 <- numeric(n.neigh)
    reach0.5 <- numeric(n.neigh)
    firstthining0.5 <- numeric(n.neigh)
    finalmaxautocor <- numeric(n.neigh)
    for (i in seq_len(n.neigh)) {
        p1[i] <- neighborhoods[[i]][1]
        p2[i] <- neighborhoods[[i]][2]
        p3[i] <- neighborhoods[[i]][3]
        # Max (over statistics) smoothed autocorrelation at each thinning value.
        allmaxautocors <- apply(allsimulations[[i]]$autocorrelations.smoothed,
            1, FUN = max)
        finalmaxautocor[i] <- allmaxautocors[max.thining]
        # How many thinning values reach each autocorrelation threshold ...
        reach0.4[i] <- sum(allmaxautocors <= 0.4, na.rm = TRUE)
        reach0.5[i] <- sum(allmaxautocors <= 0.5, na.rm = TRUE)
        # ... and the first thinning value that does (-1 if never reached).
        below0.4 <- which(allmaxautocors <= 0.4)
        firstthining0.4[i] <- if (length(below0.4) > 0) min(below0.4) else -1
        below0.5 <- which(allmaxautocors <= 0.5)
        firstthining0.5[i] <- if (length(below0.5) > 0) min(below0.5) else -1
    }
    results.search <- data.frame(p_swap = p1, p_mergediv = p2,
        p_single = p3, final_max_autocorr = finalmaxautocor,
        reach_autocorr0.4 = reach0.4, first_thining_autocorr0.4 = firstthining0.4,
        reach_autocorr0.5 = reach0.5, first_thining_autocorr0.5 = firstthining0.5)
    return(list(results.search = results.search, all.simulations = allsimulations))
}
|
Add the following code to your website.
For more information on customizing the embed code, read Embedding Snippets.