phd_mixture_initialisation = function(varnames,dimSS, dimObs, radar_k, varepsilon_k, init_field = NULL){
if(is.null(init_field)){
J_0 = nrow(varepsilon_k)
P_tmp = as.matrix(varepsilon_k[,varnames])
# Centre each column explicitly (sweep) before forming the diagonal scatter matrix;
# subtracting colMeans() from a matrix directly would recycle column-wise in R
P_0 = diag(diag(
crossprod(sweep(P_tmp, 2, colMeans(P_tmp)))
))
if(any(diag(P_0)==0)){
diag(P_0)[diag(P_0)==0] = 1
}
zeta_new = NULL
for(j in 1:J_0){
mj_0 = unlist(varepsilon_k[j,varnames])
Pj_0 = P_0/J_0
wj_0 = 1
lj_0 = j
zeta_new[[j]] = list(m = mj_0, P = Pj_0, w = wj_0, l = lj_0)
}
} else {
zeta_new = init_field
}
return(list(zeta = zeta_new))
}
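# Minimal usage sketch (illustration only, not part of the package): initialise the
# mixture from a toy data.frame of detections. The column names, state dimension and
# numeric values below are assumptions made for this example; it is wrapped in
# if (FALSE) so sourcing this file does not execute it.
if (FALSE) {
  toy_detections = data.frame(x = c(10, 55), y = c(-3, 20), vx = 0, vy = 0)
  init = phd_mixture_initialisation(varnames = c("x", "y", "vx", "vy"),
                                    dimSS = 4, dimObs = 2,
                                    radar_k = NULL, varepsilon_k = toy_detections)
  length(init$zeta)  # one Gaussian component per detection
  init$zeta[[1]]$m   # component mean taken from the first detection
}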
phd_mixture_birth = function(varnames,dimSS, dimObs, varepsilon_k, birth_field, P_B, tracks, position_only, maxLabel){
if(is.null(birth_field)){birthOption = "detected"} else {birthOption = "fixed"}
if(birthOption == "fixed"){
zeta_birth = birth_field
}
if(birthOption == "detected"){
Nbirth = nrow(varepsilon_k)
P_tmp = as.matrix(varepsilon_k[,varnames[1:dimObs]])
P_k = diag(diag(
crossprod(sweep(P_tmp, 2, colMeans(P_tmp))) # column-centred scatter matrix
))
if(position_only){
P_tmp = cbind(as.matrix(varepsilon_k[,varnames[1:dimObs]]),matrix(0, nrow = nrow(varepsilon_k), ncol = dimSS - dimObs))
P_k = diag(diag(
crossprod(sweep(P_tmp, 2, colMeans(P_tmp))) # column-centred scatter matrix
))
}
if(any(diag(P_k)==0)){
diag(P_k)[diag(P_k)==0] = 1
}
zeta_birth = list(NULL)
ib = 0
for(i in 1:Nbirth){
mi = unlist(varepsilon_k[i,varnames[1:dimObs]])
if(position_only){
mi = c(mi, rep(0, dimSS - dimObs))
}
ib = ib + 1
zeta_birth[[ib]] = list(m = mi,
P = P_k,
w = P_B,
lo = 0,
l = paste("new_target", maxLabel + ib),
status = "birth")
}
}
return(list(zeta = zeta_birth))
}
phd_mixture_prediction = function(varnames, dimSS, dimObs, radar_k, zeta,zeta_birth, T_s, sigma2_process, P_S, w, P_Sp, type){
J_km = length(zeta)
Fmat = Fmfun(type = type, dimSS = dimSS)(T_s, w)
if(is.vector(sigma2_process) & length(sigma2_process) > 1){
S2_process = diag(sigma2_process)
}else if(is.matrix(sigma2_process)){
S2_process = sigma2_process
}else if(is.vector(sigma2_process) & length(sigma2_process) == 1){
S2_process = diag(rep(sigma2_process,dimSS))
}else{
stop("You need to define an appropriate kinematic covariance")
}
Qmat = Qmfun(type = type, dimSS = dimSS)(T_s, w)
diag(Qmat) = diag(Qmat)*diag(S2_process)
# Existing targets:
zeta_existing = list(NULL)
if(J_km > 0){
for(j in 1:J_km){
mj_kkm = Fmat %*% zeta[[j]]$m
Pj_kkm = Qmat + Fmat %*% zeta[[j]]$P %*% t(Fmat)
wj_km = P_S*zeta[[j]]$w
lj_km = zeta[[j]]$l
zeta_existing[[j]] = list(m = mj_kkm, P = Pj_kkm, w = wj_km, l = lj_km, lo = lj_km, status = "exists")
}
}
# Spawning
zeta_spawning = list(NULL)
if(J_km > 0){
sp = 0
for(j in 1:J_km){
if(runif(1) < P_Sp){
sp = sp + 1
mj_sp = zeta[[j]]$m + matrix(mvtnorm::rmvnorm(1, rep(0, dimSS), zeta[[j]]$P*0.1), ncol = 1)
Pj_sp = zeta[[j]]$P
wj_sp = zeta[[j]]$w*P_Sp
lj_sp = paste("Spawned", zeta[[j]]$l, j)
zeta_spawning[[sp]] = list(m = mj_sp, P = Pj_sp, w = wj_sp, l = lj_sp, lo = zeta[[j]]$l, status = "spawned")
} else{next}
}
}
# Combine surviving, spawned and birth components
zeta_new = plyr::compact(c(zeta_existing, zeta_spawning, zeta_birth))
return(list(zeta = zeta_new))
}
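# The closures defined in phd_mixture_update() below (etaj_kkm, Sj_kkm, Kj_k, Pj_kk,
# mj_kk, qj_k) are the usual Kalman-update quantities. The following self-contained
# sketch spells out the same algebra for a single component with toy numbers
# (position-only observation of a 4-D state); the values are assumptions for
# illustration, and the block is wrapped in if (FALSE) so it is never run on source.
if (FALSE) {
  m = c(0, 0, 1, 1)                    # prior mean (x, y, vx, vy)
  P = diag(c(4, 4, 1, 1))              # prior covariance
  H = cbind(diag(2), matrix(0, 2, 2))  # position-only observation matrix
  R = diag(c(0.5, 0.5))                # measurement noise covariance
  z = c(1.2, -0.4)                     # one measurement
  eta = H %*% m                        # predicted measurement, as in etaj_kkm()
  S   = H %*% P %*% t(H) + R           # innovation covariance, as in Sj_kkm()
  K   = P %*% t(H) %*% solve(S)        # Kalman gain, as in Kj_k()
  m_u = m + K %*% (z - eta)            # updated mean, as in mj_kk()
  P_u = (diag(4) - K %*% H) %*% P      # updated covariance, as in Pj_kk()
  q   = mvtnorm::dmvnorm(z, c(eta), S) # measurement likelihood, as in qj_k()
}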
phd_mixture_update = function(varnames, dimSS, dimObs, varepsilon_k,zeta, zeta_h = NULL, kappa, sigma2_measure, P_D, position_only, estimate_velocity = FALSE, T_s){
H_k = diag(dimObs)
if(position_only){
H_k = cbind(H_k, matrix(0, nrow = nrow(H_k), ncol = dimSS - dimObs))
}
if(is.vector(sigma2_measure) & length(sigma2_measure) == dimObs){
R_k = diag(sigma2_measure)
}else if(is.matrix(sigma2_measure) & length(sigma2_measure) == dimObs^2){
R_k = sigma2_measure
}else if(is.vector(sigma2_measure) & length(sigma2_measure) == 1){
R_k = diag(rep(sigma2_measure,dimObs))
}else{
stop("You need to define an appropriate measurement covariance")
}
J_kkm = length(zeta)
w_kkm = unlist(lapply(zeta, function(z) z$w))
etaj_kkm = function(j) H_k %*% zeta[[j]]$m # [do x ds] x [ds x 1]
Sj_kkm = function(j) H_k %*% zeta[[j]]$P %*% t(H_k) + R_k # [do x ds] x [ds x ds] x [ds x do] + [do x do]
Kj_k = function(j) zeta[[j]]$P %*% t(H_k) %*% solve(Sj_kkm(j)) # [ds x ds] x [ds x do] x [do x do]
Pj_kk = function(j) (diag(dimSS) - Kj_k(j) %*% H_k) %*% zeta[[j]]$P # [[ds x ds] - [ds x do] x [do x ds]] x [ds x ds]
mj_kk = function(z,j) zeta[[j]]$m + Kj_k(j) %*% (z - H_k %*% zeta[[j]]$m) # [ds x 1] + [ds x do] x [[do x 1] - [do x ds] x [ds x 1]]
qj_k = function(z,j) mvtnorm::dmvnorm(c(z), etaj_kkm(j),Sj_kkm(j)) # [1]
# Update in case of missed detection
zeta_nd = zeta
for(j in 1:J_kkm){
zeta_nd[[j]]$w = zeta[[j]]$w * (1-P_D)
zeta_nd[[j]]$wt = zeta[[j]]$w * (1-P_D)
zeta_nd[[j]]$measure = 0
zeta_nd[[j]]$status = "not detected"
}
# Update when there is a detection
zeta_ell = NULL
nMeasures = nrow(varepsilon_k)
for(im in 1:nMeasures){
p_k = as.matrix(varepsilon_k[im,varnames[1:dimObs]]) # Observed positions
z = matrix(p_k, ncol = 1)
for(j in 1:J_kkm){
well_k = P_D * zeta[[j]]$w * qj_k(z,j)
mell_k = mj_kk(z,j)
Pell_k = Pj_kk(j)
# Index components contiguously per measurement so earlier measurements are not
# overwritten; the weights are normalised per measurement further below
zeta_ell[[(im-1)*J_kkm+j]] = list(m = mell_k, P = Pell_k, w = well_k, l = zeta[[j]]$l, lo = zeta[[j]]$lo, measure = im, status = zeta[[j]]$status)
}
}
zeta_new = plyr::compact(c(zeta_nd, zeta_ell))
listW= do.call(c, lapply(zeta_new, function(z) z$w))
idxm = do.call(c, lapply(zeta_new, function(z) z$measure))
for(idM in sort(unique(idxm))){
if(idM == 0){next}
sumw = sum(listW[idxm == idM]) + kappa
idxMM = which(idxm == idM)
for(zj in idxMM){
zeta_new[[zj]]$w = zeta_new[[zj]]$w / sumw
}
}
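# The loop above applies the PHD weight normalisation per measurement z:
#   w_j(z) <- P_D * w_j * q_j(z) / (kappa + sum_j P_D * w_j * q_j(z)),
# with kappa the clutter intensity; miss-detection components (measure == 0) are
# skipped and keep their (1 - P_D)-scaled weights.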
# idxl = do.call(c, lapply(zeta_new, function(z) z$l))
#
# for(idL in sort(unique(idxl))){
# if(idL == 0){next}
# sumw = sum(listW[idxl == idL]) + kappa
# idxLL = which(idxl == idL)
# for(zj in idxLL){
# zeta_new[[zj]]$w = zeta_new[[zj]]$w / sumw
# }
# }
# tree = data.frame(w= c(do.call(c, lapply(zeta_ell, function(z) z$w)),do.call(c, lapply(zeta, function(z) z$w))),
# l = c(do.call(c, lapply(zeta_ell, function(z) z$l)),do.call(c, lapply(zeta, function(z) z$l))),
# z= c(rep(1:nMeasures,each = length(zeta)), rep(0, length(zeta))))
# #
#
# ggplot(data = tree, aes(x = w, y = z)) + geom_point() + facet_wrap(~l, scales = "free")
#zeta_new = plyr::compact(zeta_ell)
return(list(zeta = zeta_new))
}
phd_component_pruning = function(varnames, dimSS, dimObs, zeta, tau, Jmax){
weight_list = do.call(c,lapply(zeta, function(z) z$w))
idx_keep <- unique(which(weight_list > tau,arr.ind = TRUE))
if(length(idx_keep)==0){
w_max = sort(weight_list, decreasing = TRUE)[ceiling(length(weight_list)/2)]
idx_keep <- unique(which(weight_list > w_max,arr.ind = TRUE))
}
if(length(idx_keep)>Jmax){
idx_keep <- sort(weight_list, decreasing = T, index.return = TRUE)$ix[1:Jmax]
}
zeta_new = zeta[idx_keep]
return(list(zeta = zeta_new))
}
phd_component_merging = function(varnames, dimSS, dimObs, zeta, U, mergingOption, tracks){
J_k = length(zeta)
wb_k = unlist(lapply(zeta, function(zz) zz$w))
w_k = unlist(lapply(zeta, function(zz) zz$w))
l_k = do.call(c, lapply(zeta, function(zz) zz$l))
if(mergingOption == "Panta2009"){
#wt_k = rep(0,J_k)
l = 0
zeta_new = NULL
used_l = NULL
I = 1:J_k
while(max(w_k) > 0 ){
l <- l + 1
j = which.max(w_k)
if(zeta[[j]]$measure == 0){
# No observation close enough, most likely missed target
observed = FALSE
L = j
}else{
observed = TRUE
L = intersect(which(unlist(lapply(zeta, function(x) t(x$m - zeta[[j]]$m) %*% solve(x$P) %*% (x$m - zeta[[j]]$m))) < U), I)
}
wL = do.call(rbind,lapply(zeta[L], function(z) z$w))
mL = do.call(cbind,lapply(zeta[L], function(z) z$m))
mt_k = mL %*% wL / sum(wL)
wt_k = sum(wL)
Pt_k = diag(rep(0,dimSS))
elabel = do.call(c, lapply(zeta[L], function(z) z$l))
label = zeta[[j]]$l
status = zeta[[j]]$status
for(ll in L){
Pt_k = Pt_k + wb_k[ll]/wt_k * (zeta[[ll]]$P + (zeta[[ll]]$m - mt_k) %*% t(zeta[[ll]]$m - mt_k))
}
w_k[L] <- -1
zeta_new[[l]] <- list(m = mt_k, P = Pt_k, w = wt_k, l = label, o = observed, measure = zeta[[j]]$measure, status = zeta[[j]]$status)
used_l = c(used_l, label)
I = setdiff(I, L)
}
zeta_new = plyr::compact(zeta_new)
}
if(mergingOption == "Vo2009"){
# Building Trees.
# For each tree, select the highest weighted branch as the confirmed track
treeList = do.call(c, lapply(zeta, function(z) z$l))
l = 0
zeta_new = NULL
for(tree in sort(unique(treeList))){
l = l + 1
zeta_tree = zeta[treeList == tree]
I = 1:length(zeta_tree)
weight_tree = do.call(c, lapply(zeta_tree, function(z) z$w))
measure_tree = do.call(c, lapply(zeta_tree, function(z) z$measure))
if(sum(weight_tree[measure_tree==0]) > sum(weight_tree[measure_tree!=0])){
# No observation close enough, most likely missed target:
# keep the highest-weighted component of this tree
observed = FALSE
j = which.max(weight_tree)
L = j
}else{
zeta_tree = zeta_tree[measure_tree !=0]
I = 1:length(zeta_tree)
weight_tree = do.call(c, lapply(zeta_tree, function(z) z$w))
measure_tree = do.call(c, lapply(zeta_tree, function(z) z$measure))
j = which.max(weight_tree)
observed = TRUE
L = intersect(which(unlist(lapply(zeta_tree, function(x) t(x$m - zeta_tree[[j]]$m) %*% solve(x$P) %*% (x$m - zeta_tree[[j]]$m))) < U), I)
}
wL = do.call(rbind,lapply(zeta_tree[L], function(z) z$w))
mL = do.call(cbind,lapply(zeta_tree[L], function(z) z$m))
mt_k = mL %*% wL / sum(wL)
wt_k = sum(wL)
Pt_k = diag(rep(0,dimSS))
label = zeta_tree[[j]]$l
for(ll in L){
Pt_k = Pt_k + wb_k[ll]/wt_k * (zeta_tree[[ll]]$P + (zeta_tree[[ll]]$m - mt_k) %*% t(zeta_tree[[ll]]$m - mt_k))
}
w_k[L] <- -1
zeta_new[[l]] <- list(m = mt_k, P = Pt_k, w = wt_k, l = label, o = observed, measure = zeta_tree[[j]]$measure, status = zeta_tree[[j]]$status)
}
zeta_new = plyr::compact(zeta_new)
}
return(zeta_new)
}
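# Both merging branches above use the same moment-matching rule on a selected set L
# of components:
#   m_merged = sum_i w_i * m_i / sum_i w_i
#   P_merged = sum_i (w_i / w_merged) * (P_i + (m_i - m_merged) %*% t(m_i - m_merged))
# Tiny standalone illustration with two 2-D components (toy numbers, assumptions only;
# wrapped in if (FALSE) so it is not run on source):
if (FALSE) {
  w = c(0.6, 0.4)
  m = cbind(c(0, 0), c(1, 2))    # component means as columns
  P = list(diag(2), 2 * diag(2)) # component covariances
  w_merged = sum(w)
  m_merged = m %*% w / w_merged
  P_merged = Reduce(`+`, lapply(1:2, function(i)
    w[i] / w_merged * (P[[i]] + (m[, i] - m_merged) %*% t(m[, i] - m_merged))))
}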
phd_mtt_state_number_estimation = function(varnames, dimSS, dimObs, zeta, wt, associationOption, maxLab, T_km){
if(associationOption == "Panta2009"){
idx_l = do.call(c, lapply(zeta, function(z) z$l))
id = 0
zeta_new = list(NULL)
for(ll in sort(unique(idx_l))){
id = id + 1
zeta_tmp = zeta[which(idx_l == ll)]
w_max = which.max(do.call(c, lapply(zeta_tmp, function(l) l$w)))
zeta_new[[id]] <- zeta_tmp[[w_max]]
}
} else if(associationOption == "Munkres"){
kt_l = do.call(c, lapply(zeta, function(z) z$l))
kt_m = do.call(c, lapply(zeta, function(z) z$measure))
kt_w = do.call(c, lapply(zeta, function(z) z$w))
Cost = matrix(0, nrow = max(kt_l), ncol = max(kt_m))
Cost[cbind(kt_l, kt_m)] = kt_w
reSM = clue::solve_LSAP(Cost, maximum = TRUE)
zeta_new = list(NULL)
for(i in 1:max(kt_l)){
idxZ = intersect(which(kt_l == i), which(kt_m == reSM[[i]]))
zeta_new[[i]] <- zeta[[idxZ]]
}
} else if(associationOption == "Pollard2011") {
# Pollard-style track-to-estimate association is only implemented for the CPHD
# filter (see cphd_mtt_state_number_estimation); fall back to the raw mixture here
zeta_new = zeta
} else {
zeta_new = zeta
}
index_k = which(unlist(lapply(zeta_new, function(zz) zz$w)) > wt)
zeta_new = zeta_new[index_k]
labels = do.call(c, lapply(zeta_new, function(z) z$l))
idxBirth = which(labels == 0)
if(length(idxBirth)){
lB = 0
for(iB in idxBirth){
lB = lB + 1
zeta_new[[iB]]$l = maxLab + lB
}
}
return(list(zeta = zeta_new))
}
cphd_mixture_prediction = function(varnames, dimSS, dimObs, radar_k, zeta, nu, zeta_birth, T_s, sigma2_process,P_S, w, P_Sp, P_B, type, nMax){
J_km = length(zeta)
Fmat = Fmfun(type = type, dimSS= dimSS)(T_s, w)
if(is.vector(sigma2_process) & length(sigma2_process) > 1){
S2_process = diag(sigma2_process)
}else if(is.matrix(sigma2_process)){
S2_process = sigma2_process
}else if(is.vector(sigma2_process) & length(sigma2_process) == 1){
S2_process = diag(rep(sigma2_process,dimSS))
}else{
stop("You need to define an appropriate kinematic covariance")
}
Qmat = S2_process %*% Qmfun(type = type, dimSS = dimSS)(T_s, w)
# Existing targets:
zeta_existing = list(NULL)
if(J_km > 0){
for(j in 1:J_km){
mj_kkm = Fmat %*% zeta[[j]]$m
Pj_kkm = Qmat + Fmat %*% zeta[[j]]$P %*% t(Fmat)
wj_km = P_S*zeta[[j]]$w
lj_km = zeta[[j]]$l
zeta_existing[[j]] = list(m = mj_kkm, P = Pj_kkm, w = wj_km, l = lj_km, lo = lj_km, status = "exists")
}
}
# Spawning
zeta_spawning = list(NULL)
if(J_km > 0){
sp = 0
for(j in 1:J_km){
if(runif(1) < P_Sp){
sp = sp + 1
mj_sp = zeta[[j]]$m + matrix(mvtnorm::rmvnorm(1, rep(0, dimSS), zeta[[j]]$P*0.1), ncol = 1)
Pj_sp = zeta[[j]]$P
wj_sp = zeta[[j]]$w*P_Sp
lj_sp = paste("Spawned", zeta[[j]]$l, j) # label spawned components as in the PHD version (labels may be character)
zeta_spawning[[sp]] = list(m = mj_sp, P = Pj_sp, w = wj_sp, l = lj_sp, lo = zeta[[j]]$l, status = "spawned")
} else{next}
}
}
# Combine surviving, spawned and birth components
zeta_new = plyr::compact(c(zeta_existing, zeta_spawning, zeta_birth))
#### Target number
nu_new_fun = function(n, nu){
p = 0
for(j in 0:n){
p = p + P_B^(n-j) * sum(sapply(j:nMax, function(k) choose(k,j))*nu[(j:nMax)+1] * P_S^(j) * (1-P_S)^(j:nMax - j))
}
return(p)
}
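# nu_new_fun(n, nu) above evaluates the predicted cardinality weight
#   nu_pred(n) = sum_{j=0}^{n} P_B^(n-j) * sum_{k=j}^{nMax} choose(k, j) * nu(k) * P_S^j * (1-P_S)^(k-j),
# i.e. j survivors drawn binomially from the k previously present targets, combined
# with n-j birth terms weighted by powers of P_B; the resulting vector is normalised
# later, in cphd_mixture_update().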
nu_new = sapply(0:nMax, nu_new_fun, nu = nu)
return(list(zeta = zeta_new, nu = nu_new))
}
cphd_mixture_update = function(varnames, dimSS, dimObs, varepsilon_k,zeta,nu, zeta_h = NULL, kappa, sigma2_measure, P_D, position_only, estimate_velocity = FALSE, T_s, radar_range, nMax, cutDist){
H_k = diag(dimObs)
if(position_only){
H_k = cbind(H_k, matrix(0, nrow = nrow(H_k), ncol = dimSS - dimObs))
}
if(is.vector(sigma2_measure) & length(sigma2_measure) == dimObs){
R_k = diag(sigma2_measure)
}else if(is.matrix(sigma2_measure) & length(sigma2_measure) == dimObs^2){
R_k = sigma2_measure
}else if(is.vector(sigma2_measure) & length(sigma2_measure) == 1){
R_k = diag(rep(sigma2_measure,dimObs))
}else{
stop("You need to define an appropriate measurement covariance")
}
J_kkm = length(zeta)
w_kkm = unlist(lapply(zeta, function(z) z$w))
etaj_kkm = function(j) H_k %*% zeta[[j]]$m # [do x ds] x [ds x 1]
Sj_kkm = function(j) H_k %*% zeta[[j]]$P %*% t(H_k) + R_k # [do x ds] x [ds x ds] x [ds x do] + [do x do]
Kj_k = function(j) zeta[[j]]$P %*% t(H_k) %*% solve(Sj_kkm(j)) # [ds x ds] x [ds x do] x [do x do]
Pj_kk = function(j) (diag(dimSS) - Kj_k(j) %*% H_k) %*% zeta[[j]]$P # [[ds x ds] - [ds x do] x [do x ds]] x [ds x ds]
mj_kk = function(z,j) zeta[[j]]$m + Kj_k(j) %*% (z - H_k %*% zeta[[j]]$m) # [ds x 1] + [ds x do] x [[do x 1] - [do x ds] x [ds x 1]]
qj_k = function(z,j) mvtnorm::dmvnorm(c(z), etaj_kkm(j),Sj_kkm(j)) # [1]
# Update in case of missed detection
zeta_nd = zeta
for(j in 1:J_kkm){
zeta_nd[[j]]$w = zeta[[j]]$w * (1-P_D)
zeta_nd[[j]]$wt = zeta[[j]]$w * (1-P_D)
zeta_nd[[j]]$measure = 0
zeta_nd[[j]]$status = "not detected"
}
# Update when there is a detection
zeta_ell = NULL
ell = 0
nMeasures = nrow(varepsilon_k)
for(im in 1:nMeasures){
ell = ell + 1
p_k = as.matrix(varepsilon_k[im,varnames[1:dimObs]]) # Observed positions
z = matrix(p_k, ncol = 1)
sumW = 0
for(j in 1:J_kkm){
well_k = P_D * zeta[[j]]$w * qj_k(z,j)
mell_k = mj_kk(z,j)
Pell_k = Pj_kk(j)
dist2m_k = sqrt((mell_k[1:length(p_k)] - p_k) %*% t(mell_k[1:length(p_k)] - p_k))
zeta_ell[[ell*J_kkm+j]] = list(m = mell_k, P = Pell_k, w = well_k, l = zeta[[j]]$l, lo = zeta[[j]]$lo, measure = im, status = zeta[[j]]$status, dist2m = dist2m_k)
sumW = sumW + well_k
}
}
zeta_new = plyr::compact(c(zeta_nd, zeta_ell))
#### Target number
theta_z = NULL
for(im in 1:nMeasures){
p_k = as.matrix(varepsilon_k[im,varnames[1:dimObs]])
z = matrix(p_k, ncol = 1)
theta_z_tmp = P_D * radar_range^2 *sum(w_kkm *sapply(1:length(w_kkm), qj_k, z= z))
theta_z = c(theta_z, theta_z_tmp)
}
gamma_k = function(n,u,theta){
nM = length(theta)
res= 0
for(j in 0:min(nM, n)){
tmp1 = factorial(nM - j) * dpois(x = (nM -j), lambda = kappa * radar_range^2)
if((j+u)<=n){
tmp2 = factorial(n) / factorial(n - (j+u))
}else{tmp2 = 0} # the falling factorial n!/(n-(j+u))! vanishes when j+u > n
tmp3 = (1-P_D)^(n-(j+u))*elf(theta = theta, j)
res = res + tmp1*tmp2*tmp3
}
res
}
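# gamma_k(n, u, theta) mirrors the CPHD cardinality-likelihood term: for each possible
# number j of target-generated measurements it multiplies a clutter-cardinality factor
# (nM-j)! * dpois(nM-j, kappa*radar_range^2), the falling factorial n!/(n-(j+u))!
# (zero when j+u > n), the miss-detection factor (1-P_D)^(n-(j+u)) and the elementary
# symmetric function elf(theta, j). With u = 0 it drives the cardinality update of nu
# below; with u = 1 and the current measurement removed from theta it rescales the
# detected-component weights.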
gammaK = sapply(0:nMax, gamma_k, u = 0, theta = theta_z)
nu_new = nu * gammaK / sum(nu*gammaK)
#### Weight normalization
listW= do.call(c, lapply(zeta_new, function(z) z$w))
idxm = do.call(c, lapply(zeta_new, function(z) z$measure))
for(idM in sort(unique(idxm))){
#sumw = sum(listW[idxm == idM]) + kappa
idxMM = which(idxm == idM)
if(idM>0){thetaZ = theta_z[-idM]}else{next}
gammaK1 = sapply(0:nMax, gamma_k, u = 1, theta = thetaZ)
for(zj in idxMM){
zeta_new[[zj]]$w = zeta_new[[zj]]$w * sum(gammaK1*nu) / sum(gammaK*nu) * radar_range^2
}
}
#idxD = which(do.call(c, lapply(zeta_new, function(z) z$dist2m)) < cutDist)
#
# zeta_new = zeta_new[idxD]
return(list(zeta = zeta_new, nu = nu_new))
}
elf = function(theta, order){
if(order == 0 | length(theta)==0){
res = 1
}else{
ntheta = length(theta)
S = combn(ntheta, order)
res = 0
for(i in 1:ncol(S)){
res = res + prod(theta[S[,i]])
}
}
return(res)
}
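# elf() above returns the elementary symmetric function of the requested order, e.g.
# for theta = (a, b, c) and order 2 it computes a*b + a*c + b*c. Quick sanity check
# (illustration only, wrapped in if (FALSE) so it is not run on source):
if (FALSE) {
  theta = c(2, 3, 5)
  elf(theta, 2)  # 2*3 + 2*5 + 3*5 = 31
  elf(theta, 0)  # defined as 1
}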
cphd_component_pruning_merging = function(varnames, dimSS, dimObs, zeta, nu, tau, U, Jmax, mergingOption){
weight_list = do.call(c,lapply(zeta, function(z) z$w))
idx_keep <- unique(which(weight_list > tau,arr.ind = TRUE))
if(length(idx_keep) == 0){
zeta_new = NULL
} else{
zeta_tmp = zeta[idx_keep]
J_k = length(zeta_tmp)
wb_k = unlist(lapply(zeta_tmp, function(zz) zz$w))
w_k = unlist(lapply(zeta_tmp, function(zz) zz$w))
l_k = do.call(c, lapply(zeta_tmp, function(zz) zz$l))
if(mergingOption == "Panta2009"){
l = 0
zeta_new = list(NULL)
used_l = NULL
I = 1:J_k
while(max(w_k) > 0 & l < Jmax){
l <- l + 1
j = which.max(w_k)
distZ = unlist(lapply(zeta_tmp, function(x) t(x$m - zeta_tmp[[j]]$m) %*% solve(x$P) %*% (x$m - zeta_tmp[[j]]$m)))
L = intersect(which(distZ < U), I)
wL = do.call(rbind,lapply(zeta_tmp[L], function(z) z$w))
mL = do.call(cbind,lapply(zeta_tmp[L], function(z) z$m))
mt_k = mL %*% wL / sum(wL)
wt_k = sum(wL)
Pt_k = diag(rep(0,dimSS))
elabel = do.call(c, lapply(zeta_tmp[L], function(z) z$l))
estatus = do.call(c, lapply(zeta_tmp[L], function(z) z$status))
emeasure = do.call(c, lapply(zeta_tmp[L], function(z) z$measure))
idxEX = which(estatus == "exists")
label = zeta_tmp[[j]]$l
if(length(idxEX)>0){
status = "exists"
measure = emeasure[idxEX[which.max(wL[idxEX])]]
} else{
status = zeta_tmp[[j]]$status
measure = zeta_tmp[[j]]$measure
}
for(ll in L){
Pt_k = Pt_k + wb_k[ll]/wt_k * (zeta_tmp[[ll]]$P + (zeta_tmp[[ll]]$m - mt_k) %*% t(zeta_tmp[[ll]]$m - mt_k))
}
w_k[L] <- -1
zeta_new[[l]] <- list(m = mt_k, P = Pt_k, w = wt_k, l = label, status = status, measure = measure)
used_l = c(used_l, label)
I = setdiff(I, L)
}
}
}
nu_new = nu
return(list(zeta = zeta_new, nu = nu_new))
}
cphd_mtt_state_number_estimation = function(varnames, dimSS, dimObs, zeta, nu, wt, associationOption, maxLab, T_km, P_D, kappa, P_B, cutDist){
if(associationOption == "Panta2009"){
idx_l = do.call(c, lapply(zeta, function(z) z$l))
id = 0
zeta_new = list(NULL)
for(ll in sort(unique(idx_l))){
id = id + 1
zeta_tmp = zeta[which(idx_l == ll)]
w_max = which.max(do.call(c, lapply(zeta_tmp, function(l) l$w)))
zeta_new[[id]] <- zeta_tmp[[w_max]]
}
index_k = which(unlist(lapply(zeta_new, function(zz) zz$w)) > wt)
zeta_new = zeta_new[index_k]
} else if(associationOption == "Munkres"){
kt_l = do.call(c, lapply(zeta, function(z) z$l))
kt_m = do.call(c, lapply(zeta, function(z) z$measure))
kt_w = do.call(c, lapply(zeta, function(z) z$w))
Cost = matrix(0, nrow = max(kt_l), ncol = max(kt_m))
Cost[cbind(kt_l, kt_m)] = kt_w
reSM = clue::solve_LSAP(Cost, maximum = TRUE)
zeta_new = list(NULL)
for(i in 1:max(kt_l)){
idxZ = intersect(which(kt_l == i), which(kt_m == reSM[[i]]))
zeta_new[[i]] <- zeta[[idxZ]]
}
} else if(associationOption == "Pollard2011") {
if(is.null(T_km)){
Nhat = which.max(nu*(0:(length(nu)-1)))-1
list_w = do.call(c, lapply(zeta, function(z) z$w))
idx_new = sort(list_w, decreasing = TRUE, index.return = TRUE)$ix[1:Nhat]
zeta_new = zeta[sort(idx_new)]
for(j in 1:length(idx_new)){
zeta_new[[j]]$l = j
zeta_new[[j]]$score <- 23025851*(7-log(P_D / kappa))
}
} else{
T_km_tmp = T_km[T_km$status != "clutter",]
T_km_tmp$status[T_km_tmp$status %in% c("birth", "init")] <- "exists"
Nhat = which.max(nu*(0:(length(nu)-1)))-1
list_w = do.call(c, lapply(zeta, function(z) z$w))
list_m = do.call(c, lapply(zeta, function(z) z$measure))
# Distance Gaussienne / Track
list_gm = t(do.call(cbind, lapply(zeta, function(z) z$m[1:dimObs])))
D = as.matrix(dist(rbind(list_gm, as.matrix(T_km_tmp[,varnames[1:dimObs]]))))[(length(list_w)+1):(length(list_w)+nrow(T_km_tmp)),1:length(list_w)]
A = matrix(D<cutDist, ncol = length(list_m))
W = matrix(list_w, nrow = nrow(A), ncol = ncol(A), byrow= TRUE)*A
C = matrix(NA, nrow = nrow(A), ncol = ncol(A))
Cg = matrix(NA, nrow = nrow(A), ncol = ncol(A))
for(i in 1:nrow(C)){
for(j in 1:ncol(C)){
g_ij = mvtnorm::dmvnorm(c(zeta[[j]]$m), matrix(unlist(T_km_tmp[i,varnames]), ncol = 1),zeta[[j]]$P)
if(A[i,j]==1){
Cg[i,j] = -log(P_D / kappa * g_ij)
} else{Cg[i,j] = 0}
C[i,j] = -log(P_D / kappa * g_ij)
}
}
if(Nhat > nrow(T_km_tmp) & P_B > 0){
W = rbind(W, matrix(1, ncol = ncol(W), nrow = Nhat - nrow(T_km_tmp)))
A = rbind(A, matrix(1, ncol = ncol(A), nrow = Nhat - nrow(T_km_tmp)))
C = rbind(C, matrix(log(P_D * P_B / kappa), ncol = ncol(C), nrow = Nhat - nrow(T_km_tmp)))
Cg = rbind(Cg, matrix(log(P_D * P_B / kappa), ncol = ncol(Cg), nrow = Nhat - nrow(T_km_tmp)))
T_km_tmp <- dplyr::bind_rows(T_km_tmp, data.frame(score = rep(0, Nhat - nrow(T_km_tmp)),
target_id = "0", status = "birth"))
}
if(nrow(W) > ncol(W)){
idxShort = sort(T_km_tmp$score,index.return = TRUE)
C = as.matrix(C[-idxShort$ix[1:(nrow(W)- ncol(W))],])
Cg = as.matrix(Cg[-idxShort$ix[1:(nrow(W)- ncol(W))],])
A = as.matrix(A[-idxShort$ix[1:(nrow(W)- ncol(W))],])
T_km_tmp = T_km_tmp[-idxShort$ix[1:(nrow(W)- ncol(W))],]
W = as.matrix(W[-idxShort$ix[1:(nrow(W)- ncol(W))],])
}
#reSM = solve_LSAP(W, maximum = TRUE)
#print(T_km_tmp$time[1])
#if(T_km_tmp$time[1]==190){browser()}
reSM = solve_assignment(W, C= C, A = A)
# reSM0 = solve_assignment_v0(W, C= C, A = A)
#if(any(reSM - reSM0>0)){browser()}
zeta_new = list(NULL)
for(i in 1:length(reSM)){
zeta_new[[i]] <- zeta[[reSM[i]]]
idxTkm = as.numeric(names(reSM)[i])
if(T_km_tmp$status[idxTkm] != "birth"){
zeta_new[[i]]$l <- T_km_tmp$target_id[idxTkm]
new_score = log(P_D / (kappa+P_B) * mvtnorm::dmvnorm(c(zeta[[reSM[i]]]$m), matrix(unlist(T_km_tmp[idxTkm ,varnames]), ncol = 1),zeta[[reSM[i]]]$P))
zeta_new[[i]]$score <- T_km_tmp$score[idxTkm]*0.5 + 0.5*new_score
zeta_new[[i]]$status <- T_km_tmp$status[idxTkm]
if(is.na(zeta_new[[i]]$l)){stop("Track association produced an NA label")}
if(is.na(zeta_new[[i]]$score)){stop("Track association produced an NA score")}
} else{
zeta_new[[i]]$l <- "0"
zeta_new[[i]]$score <- log(P_D * P_B / kappa)
zeta_new[[i]]$status <- "birth"
if(is.na(zeta_new[[i]]$l)){stop("Birth component received an NA label")}
}
}
}
} else {
zeta_new = zeta
}
labels = do.call(c, lapply(zeta_new, function(z) z$l))
idxBirth = which(labels == "0")
if(length(idxBirth)){
lB = 0
for(iB in idxBirth){
lB = lB + 1
zeta_new[[iB]]$l = paste("new target", maxLab + lB)
zeta_new[[iB]]$status = "birth"
}
}
return(list(zeta = zeta_new))
}
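# A plausible per-scan ordering of the routines above, inferred from their signatures
# (the package's actual driver may differ): build birth components from the detections
# (phd_mixture_birth), run the prediction step (phd_mixture_prediction /
# cphd_mixture_prediction), apply the measurement update (phd_mixture_update /
# cphd_mixture_update), prune and merge the mixture, and finally extract states and
# track labels with the *_mtt_state_number_estimation routines. The helpers Fmfun,
# Qmfun and solve_assignment used above are assumed to be defined elsewhere in the
# package.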