
#
# kppm.R
#
# kluster/kox point process models
#
# $Revision: 1.230 $ $Date: 2023/03/26 10:08:44 $
#

kppm <- function(X, ...) {
  UseMethod("kppm")
}

kppm.formula <-
  function(X, clusters = c("Thomas","MatClust","Cauchy","VarGamma","LGCP"),
           ..., data=NULL) {
  ## remember call
  callstring <- short.deparse(sys.call())
  cl <- match.call()

  ########### INTERPRET FORMULA ##############################
  
  if(!inherits(X, "formula"))
    stop("Argument 'X' should be a formula")
  formula <- X
  
  if(spatstat.options("expand.polynom"))
    formula <- expand.polynom(formula)

  ## check formula has LHS and RHS. Extract them
  if(length(formula) < 3)
    stop("Formula must have a left-hand side")
  Yexpr <- formula[[2L]]
  trend <- formula[c(1L,3L)]
  
  ## FIT #######################################
  thecall <- call("kppm", X=Yexpr, trend=trend,
                  data=data, clusters=clusters)
  ncall <- length(thecall)
  argh <- list(...)
  nargh <- length(argh)
  if(nargh > 0) {
    thecall[ncall + 1:nargh] <- argh
    names(thecall)[ncall + 1:nargh] <- names(argh)
  }
##  result <- eval(thecall, 
##                 envir=if(!is.null(data)) data else parent.frame(),
##                 enclos=if(!is.null(data)) parent.frame() else baseenv())
  callenv <- list2env(as.list(data), parent=parent.frame())
  result <- eval(thecall, envir=callenv, enclos=baseenv())

  result$call <- cl
  result$callframe <- parent.frame()
  if(!("callstring" %in% names(list(...))))
    result$callstring <- callstring
  
  return(result)
}
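
## Minimal usage sketch (not run; assumes the 'redwood' dataset from
## spatstat.data is available): the formula method above simply builds and
## evaluates a call to the kppm method for point patterns, so the following
## two calls are intended to be equivalent.
##   fitA <- kppm(redwood ~ 1, clusters = "Thomas")
##   fitB <- kppm(redwood, trend = ~1, clusters = "Thomas")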

kppm.ppp <- kppm.quad <-
  function(X, trend = ~1,
           clusters = c("Thomas","MatClust","Cauchy","VarGamma","LGCP"),
           data=NULL,
           ...,
           covariates = data,
           subset, 
           method = c("mincon", "clik2", "palm", "adapcl"),
           penalised = FALSE,
           improve.type = c("none", "clik1", "wclik1", "quasi"),
           improve.args = list(),
           weightfun=NULL,
           control=list(),
           stabilize=TRUE,
           algorithm,
           trajectory=FALSE,
           statistic="K",
           statargs=list(),
           rmax = NULL,
           epsilon=0.01,
           covfunargs=NULL,
           use.gam=FALSE,
           nd=NULL, eps=NULL,
           ppm.improve.type = c("none", "ho", "enet"),
           ppm.improve.args=list()) {
  cl <- match.call()
  callstring <- paste(short.deparse(sys.call()), collapse="")
  Xname <- short.deparse(substitute(X))
  clusters <- match.arg(clusters)
  improve.type <- match.arg(improve.type)
  method <- match.arg(method)
  if(method == "mincon")
    statistic <- pickoption("summary statistic", statistic,
                            c(K="K", g="pcf", pcf="pcf"))
  
  if(missing(algorithm)) {
    algorithm <- if(method == "adapcl") "Broyden" else "Nelder-Mead"
  } else check.1.string(algorithm)
  
  if(isTRUE(trajectory) && method == "adapcl")
    warning("trajectory=TRUE is not supported for method 'adapcl'", call.=FALSE)

  ClusterArgs <- list(method = method,
                      improve.type = improve.type,
                      improve.args = improve.args,
                      weightfun=weightfun,
                      control=control,
                      stabilize=stabilize,
                      algorithm=algorithm,
                      statistic=statistic,
                      statargs=statargs,
                      rmax = rmax)
  Xenv <- list2env(as.list(covariates), parent=parent.frame())
  X <- eval(substitute(X), envir=Xenv, enclos=parent.frame())
  isquad <- is.quad(X)
  if(!is.ppp(X) && !isquad)
    stop("X should be a point pattern (ppp) or quadrature scheme (quad)")
  if(is.marked(X))
    stop("Sorry, cannot handle marked point patterns")
  if(!missing(subset)) {
    W <- eval(subset, covariates, parent.frame())
    if(!is.null(W)) {
      if(is.im(W)) {
        W <- solutionset(W)
      } else if(!is.owin(W)) {
        stop("Argument 'subset' should yield a window or logical image",
             call.=FALSE)
      }
      X <- X[W]
    }
  }
  po <- ppm(Q=X, trend=trend, covariates=covariates,
            forcefit=TRUE, rename.intercept=FALSE,
            covfunargs=covfunargs, use.gam=use.gam, nd=nd, eps=eps,
            improve.type=ppm.improve.type, improve.args=ppm.improve.args)
  XX <- if(isquad) X$data else X
  
  if(is.character(weightfun)) {
    RmaxW <- (rmax %orifnull% rmax.rule("K", Window(XX), intensity(XX))) / 2
    switch(weightfun,
           threshold = {
             weightfun <- function(d) { as.integer(d <= RmaxW) }
             attr(weightfun, "selfprint") <-
               paste0("Indicator(distance <= ", RmaxW, ")")
           },
           taper = {
             weightfun <- function(d) { pmin(1, RmaxW/d)^2 }
             attr(weightfun, "selfprint") <-
               paste0("min(1, ", RmaxW, "/d)^2")
           },
           stop(paste("Unrecognised option", sQuote(weightfun), "for weightfun"))
           )
  }
           
  ## default weight function
  if(is.null(weightfun))
    switch(method,
           adapcl = {
             weightfun <- function(d) { ifelse(abs(d) < 1, exp(1/(d^2-1)), 0) }
             attr(weightfun, "selfprint") <-
               "Indicator(-1 <= distance <= 1) * exp(1/(distance^2-1))"
           },
           mincon = { },
           {
             RmaxW <- (rmax %orifnull% rmax.rule("K", Window(XX),
                                                 intensity(XX))) / 2
             weightfun <- function(d) { as.integer(d <= RmaxW) }
             attr(weightfun, "selfprint") <-
               paste0("Indicator(distance <= ", RmaxW, ")")
           })
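  ## Illustration (not executed): with rmax = 10, say, RmaxW = rmax/2 = 5, so
  ##   "threshold": w(d) = 1 for d <= 5 and 0 otherwise;
  ##   "taper":     w(d) = min(1, 5/d)^2, e.g. w(2) = 1, w(10) = 0.25;
  ##   adapcl default: smooth bump with w(0) = exp(-1), vanishing as |d| -> 1.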
  
  ## fit
  out <- switch(method,
         mincon = kppmMinCon(X=XX, Xname=Xname, po=po, clusters=clusters,
                             control=control, stabilize=stabilize,
                             statistic=statistic,
                             statargs=statargs, rmax=rmax,
                             algorithm=algorithm,
                             penalised = penalised,
                             trajectory = trajectory,
                             ...),
         clik2   = kppmComLik(X=XX, Xname=Xname, po=po, clusters=clusters,
                             control=control, stabilize=stabilize,
                             weightfun=weightfun, 
                             rmax=rmax, algorithm=algorithm,
                             penalised = penalised,
                             trajectory = trajectory,
                             ...),
         palm   = kppmPalmLik(X=XX, Xname=Xname, po=po, clusters=clusters,
                             control=control, stabilize=stabilize,
                             weightfun=weightfun, 
                             rmax=rmax, algorithm=algorithm,
                             penalised = penalised,
                             trajectory = trajectory,
                             ...),
         adapcl   = kppmCLadap(X=XX, Xname=Xname, po=po, clusters=clusters,
                             control=control, epsilon=epsilon, 
                             weightfun=weightfun, rmax=rmax,
                             algorithm=algorithm,
                             ...))
  ##
  h <- attr(out, "h")
  out <- append(out, list(ClusterArgs=ClusterArgs,
                          call=cl,
                          callframe=parent.frame(),
                          callstring=callstring))
  # Detect DPPs
  DPP <- list(...)$DPP
  class(out) <- c(ifelse(is.null(DPP), "kppm", "dppm"), class(out))

  # Update intensity estimate with improve.kppm if necessary:
  if(improve.type != "none")
    out <- do.call(improve.kppm,
                   append(list(object = out, type = improve.type),
                          improve.args))
  
  if(!is.null(h)) class(h) <- unique(c("traj", class(h)))

  attr(out, "h") <- h
  return(out)
}
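
## Minimal usage sketch (not run; assumes the 'redwood' dataset from
## spatstat.data): the same cluster model can be fitted by minimum contrast
## (the default), second-order composite likelihood, or Palm likelihood.
##   kppm(redwood ~ 1, clusters = "Thomas")                      # method = "mincon"
##   kppm(redwood ~ x, clusters = "MatClust", method = "clik2")
##   kppm(redwood ~ 1, clusters = "Cauchy",   method = "palm")
## Setting improve.type = "quasi" (say) additionally refines the intensity
## estimate via improve.kppm(), as handled at the end of the function above.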

## >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
##                M  i  n  i  m  u  m       C  o  n  t  r  a  s  t
## <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

kppmMinCon <- function(X, Xname, po, clusters, control=list(), stabilize=TRUE, statistic, statargs,
                       algorithm="Nelder-Mead", DPP=NULL, ...,
                       pspace=NULL) {
  # Minimum contrast fit
  stationary <- is.stationary(po)
  pspace <- do.call(make.pspace,
                    resolve.defaults(
                      list(fitmethod="mincon", clusters=clusters),
                      list(...), ## ellipsis arguments override pspace
                      as.list(pspace),
                      .MatchNull=FALSE))
  # compute intensity
  if(stationary) {
    lambda <- summary(po)$trend$value
  } else {
    # compute intensity at high resolution if available
    w <- as.owin(po, from="covariates")
    if(!is.mask(w)) w <- NULL
    lambda <- predict(po, locations=w)
  }
  # Detect DPP model and change clusters and intensity correspondingly
  if(!is.null(DPP)){
    tmp <- dppmFixIntensity(DPP, lambda, po)
    clusters <- tmp$clusters
    lambda <- tmp$lambda
    po <- tmp$po
  }
  mcfit <- clusterfit(X, clusters, lambda = lambda,
                      dataname = Xname, control = control,  stabilize=stabilize,
                      statistic = statistic, statargs = statargs,
                      algorithm=algorithm, pspace=pspace, ...)
  fitinfo <- attr(mcfit, "info")
  attr(mcfit, "info") <- NULL
  # all info that depends on the fitting method:
  Fit <- list(method       = "mincon",
              statistic    = statistic,
              Stat         = fitinfo$Stat,
              StatFun      = fitinfo$StatFun,
              StatName     = fitinfo$StatName,
              FitFun       = fitinfo$FitFun,
              statargs     = statargs,
              pspace.given = pspace,
              pspace.used  = fitinfo$pspace.used, 
              mcfit        = mcfit,
              maxlogcl     = NULL)
  # results
  if(!is.null(DPP)){
    clusters <- update(clusters, as.list(mcfit$par))
    out <- list(Xname      = Xname,
                X          = X,
                stationary = stationary,
                fitted     = clusters,
                po         = po,
                Fit        = Fit)
  } else{
    out <- list(Xname      = Xname,
                X          = X,
                stationary = stationary,
                clusters   = clusters,
                modelname  = fitinfo$modelname,
                isPCP      = fitinfo$isPCP,
                po         = po,
                lambda     = lambda,
                mu         = mcfit$mu,
                par        = mcfit$par,
                par.canon  = mcfit$par.canon,
                clustpar   = mcfit$clustpar,
                clustargs  = mcfit$clustargs,
                modelpar   = mcfit$modelpar,
                covmodel   = mcfit$covmodel,
                Fit        = Fit)
  }
  h <- attr(mcfit, "h")
  if(!is.null(h)) class(h) <- unique(c("traj", class(h)))
  attr(out, "h") <- h
  return(out)
}

clusterfit <- function(X, clusters, lambda = NULL, startpar = NULL,
                       ...,
                       q=1/4, p=2, rmin=NULL, rmax=NULL, 
                       ctrl=list(q=q, p=p, rmin=rmin, rmax=rmax),
                       statistic = NULL, statargs = NULL,
                       algorithm="Nelder-Mead", verbose=FALSE,
                       pspace=NULL){
  if(verbose) splat("Fitting cluster model")
  ## If possible get dataname from dots
  dataname <- list(...)$dataname
  ## Cluster info:
  info <- spatstatClusterModelInfo(clusters)
  if(verbose) splat("Retrieved cluster model information")
  ## Determine model type
  isPCP <- isTRUE(info$isPCP)
  isDPP <- inherits(clusters, "detpointprocfamily")

  ## resolve algorithm parameters
  default.ctrl <- list(q=if(isDPP) 1/2 else 1/4,
                       p=2,
                       rmin=NULL,
                       rmax=NULL)
  given.ctrl <- if(missing(ctrl)) list() else ctrl[names(default.ctrl)]
  given.args <- c(if(missing(q)) NULL else list(q=q),
                  if(missing(p)) NULL else list(p=p),
                  if(missing(rmin)) NULL else list(rmin=rmin),
                  if(missing(rmax)) NULL else list(rmax=rmax))
  ctrl <- resolve.defaults(given.args, given.ctrl, default.ctrl)
  if(verbose) {
    splat("Algorithm parameters:")
    print(ctrl)
  }
  ##
  if(inherits(X, "ppp")){
      if(verbose) 
        splat("Using point pattern data")
      if(is.null(dataname))
         dataname <- getdataname(short.deparse(substitute(X), 20), ...)
      if(is.null(statistic))
          statistic <- "K"
      # Startpar:
      if(is.null(startpar))
          startpar <- info$selfstart(X)
      stationary <- is.null(lambda) || (is.numeric(lambda) && length(lambda)==1)
      if(verbose) {
        splat("Starting parameters:")
        print(startpar)
        cat("Calculating summary function...")
      }
      # compute summary function
      if(stationary) {
          if(is.null(lambda)) lambda <- intensity(X)
          StatFun <- if(statistic == "K") "Kest" else "pcf"
          StatName <-
              if(statistic == "K") "K-function" else "pair correlation function"
          Stat <- do.call(StatFun,
                          resolve.defaults(list(X=quote(X)),
                                           statargs,
                                           list(correction="best")))
      } else {
          StatFun <- if(statistic == "K") "Kinhom" else "pcfinhom"
          StatName <- if(statistic == "K") "inhomogeneous K-function" else
          "inhomogeneous pair correlation function"
          Stat <- do.call(StatFun,
                          resolve.defaults(list(X=quote(X), lambda=lambda),
                                           statargs,
                                           list(correction="best")))
      }
      if(verbose) splat("Done.")
  } else if(inherits(X, "fv")){
      if(verbose) 
        splat("Using the given summary function")
      Stat <- X
      ## Get statistic type
      stattype <- attr(Stat, "fname")
      StatFun <- paste0(stattype)
      StatName <- NULL
      if(is.null(statistic)){
          if(is.null(stattype) || !is.element(stattype[1L], c("K", "pcf")))
              stop("Cannot infer the type of summary statistic from argument ",
                   sQuote("X"), " please specify this via argument ",
                   sQuote("statistic"))
          statistic <- stattype[1L]
      }
      if(stattype[1L]!=statistic)
          stop("Statistic inferred from ", sQuote("X"),
               " not equal to supplied argument ",
               sQuote("statistic"))
      # Startpar:
      if(is.null(startpar)){
          if(isDPP)
              stop("No rule for starting parameters in this case. Please set ",
                   sQuote("startpar"), " explicitly.")
          startpar <- info$checkpar(startpar, native=FALSE)
          startpar[["scale"]] <- mean(range(Stat[[fvnames(Stat, ".x")]]))
      }
  } else{
      stop("Unrecognised format for argument X")
  }
  
  ## avoid using g(0) as it may be infinite
  if(statistic=="pcf"){
      if(verbose) splat("Checking g(0)")
      argu <- fvnames(Stat, ".x")
      rvals <- Stat[[argu]]
      if(rvals[1L] == 0 && (is.null(ctrl$rmin) || ctrl$rmin == 0)) {
          if(verbose) splat("Ignoring g(0)")
          ## raise the lower limit actually passed to mincontrast()
          rmin <- ctrl$rmin <- rvals[2L]
      }
  }

  ## DPP resolving algorithm and checking startpar
  changealgorithm <- length(startpar)==1 && algorithm=="Nelder-Mead"
  if(isDPP){
    if(verbose) splat("Invoking dppmFixAlgorithm")
    alg <- dppmFixAlgorithm(algorithm, changealgorithm, clusters, startpar)
    algorithm <- alg$algorithm
  }

  #' determine initial values of parameters
  startpar <- info$checkpar(startpar, native=TRUE)
  #' code to compute the theoretical summary function of the model
  theoret <- info[[statistic]]
  #' explanatory text
  desc <- paste("minimum contrast fit of", info$descname)

  #' determine shape parameters if any
  dots <- info$resolveshape(...)
  margs <- dots$margs
  
  #' ............ permit negative parameter values .........................
  strict <- !isFALSE(pspace$strict)
  sargs <- if(strict) list() else list(strict=FALSE)
  
  #' ............ adjust K function or pair correlation function ...........
  do.adjust <- isTRUE(pspace$adjusted)
  if(do.adjust) {
    if(verbose) splat("Applying kppm adjustment")
    W <- Window(X)
    delta <- if(is.null(rmax)) NULL else rmax/4096
    ## pack up precomputed information needed for adjustment
    adjdata <- list(paircorr = info[["pcf"]],
                    pairWcdf = distcdf(W, delta=delta),
                    tohuman  = NULL)
    adjfun <- function(theo, par, auxdata, ..., margs=NULL) {
      with(auxdata, {
        if(!is.null(tohuman))
          par <- tohuman(par, ..., margs=margs)
        a <- as.numeric(stieltjes(paircorr, pairWcdf, par=par, ..., margs=margs))
        return(theo/a)
      })
    }
    pspace$adjustment <- list(fun=adjfun, auxdata=adjdata)
  } 

  #' parameter vector corresponding to Poisson process
  if(isDPP) {
    poispar <- NULL
  } else if(isPCP) {
    if(!("kappa" %in% names(startpar)))
      stop("Internal error: startpar does not include 'kappa'")
    poispar <- startpar
    poispar[["kappa"]] <- Inf
  } else {
    #' LGCP
    if(!("sigma2" %in% names(startpar)))
      stop("Internal error: startpar does not include 'sigma2'")
    poispar <- startpar
    poispar[["sigma2"]] <- .Machine$double.eps # i.e. 0
  }
  
  #' ............ use canonical parameters .........................
  usecanonical <- isTRUE(pspace$canonical)
  if(usecanonical) {
    if(verbose) splat("Converting to canonical parameters")
    tocanonical <- info$tocanonical
    tohuman <- info$tohuman
    if(is.null(tocanonical) || is.null(tohuman)) {
      warning("Canonical parameters are not yet supported for this model")
      usecanonical <- FALSE
    }
  }
  startpar.human <- startpar
  poispar.human <- poispar
  if(usecanonical) {
    htheo <- theoret
    startpar <- tocanonical(startpar, margs=margs)
    if(!is.null(poispar)) poispar <- tocanonical(poispar, margs=margs)
    theoret <- function(par, ...) { htheo(tohuman(par, ...), ...) }
    if(do.adjust)
      pspace$adjustment$auxdata$tohuman <- tohuman
  }
  #' ............ penalty .........................
  penalty    <- pspace$penalty
  penal.args <- pspace$penal.args
  tau        <- pspace$tau %orifnull% 1
  if(is.function(penalty)) {
    # penalised optimisation
    if(usecanonical) {
      penalty.human <- penalty
      penalty <- function(par, ...) { penalty.human(tohuman(par, ...), ...) }
    }
    ## data-dependent arguments in penalty
    if(is.function(penal.args)) 
      penal.args <- penal.args(X)
    ## exchange rate (defer evaluation if it is a function)
    if(!is.function(tau)) check.1.real(tau)
    ## reinsert in 'pspace' to pass to 'mincontrast'
    pspace$penalty <- penalty
    pspace$penal.args <- penal.args
    pspace$tau <- tau
    ## unpenalised version
    pspace.unpen <- pspace
    pspace.unpen[c("penalty", "penal.args", "tau")] <- NULL
  }
  #' ...................................................

  #'
  mcargs <- resolve.defaults(list(observed=Stat,
                                  theoretical=theoret,
                                  startpar=startpar,
                                  ctrl=ctrl,
                                  method=algorithm,
                                  fvlab=list(label="%s[fit](r)",
                                      desc=desc),
                                  explain=list(dataname=dataname,
                                      fname=statistic,
                                      modelname=info$modelname),
                                  margs=dots$margs,
                                  model=dots$model,
                                  pspace=pspace), ## As modified above
                             list(...),
                             sargs)


  if(isDPP && algorithm=="Brent" && changealgorithm)
    mcargs <- resolve.defaults(mcargs, list(lower=alg$lower, upper=alg$upper))

  if(is.function(penalty) && is.function(tau)) {
    ## data-dependent exchange rate 'tau': evaluate now
    if("poisval" %in% names(formals(tau))) {
      ## New style: requires value of (unpenalised) objective function at Poisson process
      mcargs.unpen <- mcargs
      mcargs.unpen$pspace <- pspace.unpen
      ## Evaluate using undocumented argument 'evalpar' to mincontrast
      mcargs.unpen$evalpar <- poispar
      poisval <- do.call(mincontrast, mcargs.unpen)
      tau <- tau(X, poisval=poisval)
    } else {
      ## old style
      tau <- tau(X)
    }
    check.1.real(tau)
    ## update 'tau' in argument list
    pspace$tau <- tau
    mcargs$pspace$tau <- tau
  }
  
  ## .............. FIT .......................
  if(verbose) splat("Starting minimum contrast fit")
  mcfit <- do.call(mincontrast, mcargs)
  if(verbose) splat("Returned from minimum contrast fit")
  ## ..........................................

  ## extract fitted parameters and reshape
  if(!usecanonical) {
    optpar.canon <- NULL
    optpar.human <- mcfit$par
    names(optpar.human) <- names(startpar.human)
  } else {
    optpar.canon <- mcfit$par
    names(optpar.canon) <- names(startpar)
    optpar.human <- tohuman(optpar.canon, margs=margs)
    names(optpar.human) <- names(startpar.human)
  }
  mcfit$par       <- optpar.human
  mcfit$par.canon <- optpar.canon
  ## extract fitted parameters
  optpar        <- mcfit$par
  names(optpar) <- names(startpar)
  mcfit$par     <- optpar
  
  # Return results for DPPs
  if(isDPP){
    extra <- list(Stat      = Stat,
                  StatFun   = StatFun,
                  StatName  = StatName,
                  modelname  = info$modelabbrev,
                  lambda     = lambda)
    attr(mcfit, "info") <- extra
    if(verbose) splat("Returning from clusterfit (DPP case)")
    return(mcfit)
  }
  ## Extra stuff for ordinary cluster/lgcp models
  ## imbue with meaning
  ## infer model parameters
  mcfit$modelpar <- info$interpret(optpar.human, lambda)
  mcfit$internal <- list(model=ifelse(isPCP, clusters, "lgcp"))
  mcfit$covmodel <- dots$covmodel
  
  if(isPCP) {
    # Poisson cluster process: extract parent intensity kappa
    kappa <- mcfit$par[["kappa"]]
    # mu = mean cluster size
    mu <- lambda/kappa
  } else {
    # LGCP: extract variance parameter sigma2
    sigma2 <- mcfit$par[["sigma2"]]
    # mu = mean of log intensity 
    mu <- log(lambda) - sigma2/2
  }
  ## Parameter values (new format)
  mcfit$mu <- mu
  mcfit$clustpar <- info$checkpar(mcfit$par, native=FALSE, strict=strict)
  mcfit$clustargs <- info$outputshape(dots$margs)

  ## The old fit fun that would have been used (DO WE NEED THIS?)
  FitFun <- paste0(tolower(clusters), ".est", statistic)

  extra <- list(FitFun       = FitFun,
                Stat         = Stat,
                StatFun      = StatFun,
                StatName     = StatName,
                modelname    = info$modelabbrev,
                isPCP        = isPCP,
                lambda       = lambda,
                pspace.used  = pspace) # Modified from call to 'clusterfit'
  attr(mcfit, "info") <- extra
  if(verbose) splat("Returning from clusterfit")
  return(mcfit)
}
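
## Minimal usage sketch for clusterfit (not run; assumes 'redwood' from
## spatstat.data): the first argument may be a point pattern or a summary
## function, and the result is a minimum contrast fit as returned by
## mincontrast().
##   fit1 <- clusterfit(redwood, "Thomas")
##   K    <- Kest(redwood)
##   fit2 <- clusterfit(K, "MatClust")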

## >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
##                C  o  m  p  o  s  i  t  e    L  i  k  e  l  i  h  o  o  d
## <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

kppmComLik <- function(X, Xname, po, clusters, control=list(), stabilize=TRUE,
                       weightfun, rmax, algorithm="Nelder-Mead",
                       DPP=NULL, ..., 
                       pspace=NULL) {
  pspace <- do.call(make.pspace,
                    resolve.defaults(
                      list(fitmethod="clik2", clusters=clusters),
                      list(...), ## ellipsis arguments override pspace
                      as.list(pspace),
                      .MatchNull=FALSE))
  W <- as.owin(X)
  if(is.null(rmax))
    rmax <- rmax.rule("K", W, intensity(X))
  ## identify unordered pairs of points that contribute
  cl <- closepairs(X, rmax, what="ijd", twice=FALSE, neat=FALSE)
  dIJ <- cl$d
  # compute weights for unordered pairs of points
  if(is.function(weightfun)) {
    wIJ <- weightfun(dIJ)
    sumweight <- safePositiveValue(sum(wIJ))
  } else {
    npairs <- length(dIJ)
    wIJ <- rep.int(1, npairs)
    sumweight <- npairs
  }
  # convert window to mask, saving other arguments for later
  dcm <- do.call.matched(as.mask,
                         append(list(w=W), list(...)),
                         sieve=TRUE)
  M         <- dcm$result
  otherargs <- dcm$otherargs

  ## Detect DPP usage
  isDPP <- inherits(clusters, "detpointprocfamily")

  # compute intensity at pairs of data points
  # and c.d.f. of interpoint distance in window
  if(stationary <- is.stationary(po)) {
    # stationary unmarked Poisson process
    lambda <- intensity(X)
#    lambdaIJ <- lambda^2
    # compute cdf of distance between two uniform random points in W
    g <- distcdf(W, delta=rmax/4096)
    # scaling constant is (area * intensity)^2
    gscale <- npoints(X)^2  
  } else {
    # compute fitted intensity at data points and in window
#    lambdaX <- fitted(po, dataonly=TRUE)
    lambda <- lambdaM <- predict(po, locations=M)
    # lambda(x_i) * lambda(x_j)
#    lambdaIJ <- lambdaX[I] * lambdaX[J]
    # compute cdf of distance between two random points in W
    # with density proportional to intensity function
    g <- distcdf(M, dW=lambdaM, delta=rmax/4096)
    # scaling constant is (integral of intensity)^2
    gscale <- safePositiveValue(integral.im(lambdaM)^2, default=npoints(X)^2)
  }

  # Detect DPP model and change clusters and intensity correspondingly
  isDPP <- !is.null(DPP)
  if(isDPP){
    tmp <- dppmFixIntensity(DPP, lambda, po)
    clusters <- tmp$clusters
    lambda <- tmp$lambda
    po <- tmp$po
  }

  # trim 'g' to [0, rmax] 
  g <- g[with(g, .x) <= rmax,]
  # get pair correlation function (etc) for model
  info <- spatstatClusterModelInfo(clusters)
  pcfun        <- info$pcf
  selfstart    <- info$selfstart
  isPCP        <- info$isPCP
  resolveshape <- info$resolveshape
  modelname    <- info$modelname
  # Assemble information required for computing pair correlation
  if(is.function(resolveshape)) {
    # Additional 'shape' parameters of cluster model are required.
    # These may be given as individual arguments,
    # or in a list called 'covmodel'
    clustargs <- if("covmodel" %in% names(otherargs))
                 otherargs[["covmodel"]] else otherargs
    shapemodel <- do.call(resolveshape, clustargs)$covmodel
  } else shapemodel <- NULL
  pcfunargs <- shapemodel
  margs <- pcfunargs$margs
  # determine starting parameter values
  startpar <- selfstart(X)
  
  pspace.given <- pspace
  #' ............ permit negative parameter values ...................
  strict <- !isFALSE(pspace$strict)
  if(!strict) pcfunargs <- append(pcfunargs, list(strict=FALSE))
  #' ............ parameter corresponding to Poisson process .........
  if(isDPP) {
    poispar <- NULL
  } else if(isPCP) {
    if(!("kappa" %in% names(startpar)))
      stop("Internal error: startpar does not include 'kappa'")
    poispar <- startpar
    poispar[["kappa"]] <- Inf
  } else {
    ## LGCP
    if(!("sigma2" %in% names(startpar)))
      stop("Internal error: startpar does not include 'sigma2'")
    poispar <- startpar
    poispar[["sigma2"]] <- .Machine$double.eps # i.e. 0
  }
  #' ............ use canonical parameters .........................
  usecanonical <- isTRUE(pspace$canonical)
  if(usecanonical) {
     tocanonical <- info$tocanonical
     tohuman <- info$tohuman
     if(is.null(tocanonical) || is.null(tohuman)) {
       warning("Canonical parameters are not yet supported for this model")
       usecanonical <- FALSE
     }
  }
  startpar.human <- startpar
  poispar.human <- poispar
  if(usecanonical) {
    pcftheo <- pcfun
    startpar <- tocanonical(startpar, margs=margs)
    if(!is.null(poispar)) poispar <- tocanonical(poispar, margs=margs)
    pcfun <- function(par, ...) { pcftheo(tohuman(par, ...), ...) }
  }
  #' ............ penalty ......................................
  penalty    <- pspace$penalty
  penal.args <- pspace$penal.args
  tau        <- pspace$tau %orifnull% 1
  if(is.function(penalty)) {
    ## penalised optimisation
    if(usecanonical) {
      penalty.human <- penalty
      penalty <- function(par, ...) { penalty.human(tohuman(par, ...), ...) }
    }
    ## data-dependent arguments in penalty
    if(is.function(penal.args)) 
      penal.args <- penal.args(X)
    ## exchange rate (defer evaluation if it is a function)
    if(!is.function(tau)) check.1.real(tau)
    ## reinsert in 'pspace' for insurance
    pspace$penalty <- penalty
    pspace$penal.args <- penal.args
    pspace$tau <- tau
  }
  #' ............ debugger .....................................
  TRACE <- isTRUE(pspace$trace)
  if(SAVE <- isTRUE(pspace$save)) {
    saveplace <- new.env()
    assign("h", NULL, envir=saveplace)
  } else saveplace <- NULL
  
  # .....................................................
  # create local function to evaluate pair correlation
  #  (with additional parameters 'pcfunargs' in its environment)
  paco <- function(d, par) {
    do.call(pcfun, append(list(par=par, rvals=d), pcfunargs))
  }
  #' ..........  define objective function ......................
  if(!is.function(weightfun)) {
    # pack up necessary information
    objargs <- list(dIJ=dIJ, sumweight=sumweight, g=g, gscale=gscale, 
                    envir=environment(paco),
                    ## The following variables are temporarily omitted
                    ## in order to calculate the objective function
                    ## without using them, or their side effects.
                    penalty=NULL,   # updated below
                    penal.args=NULL,   # updated below
                    tau=NULL,   # updated below
                    TRACE=FALSE, # updated below
                    saveplace=NULL, # updated below
                    BIGVALUE=1, # updated below
                    SMALLVALUE=.Machine$double.eps)
    # define objective function (with 'paco' in its environment)
    # This is the log composite likelihood minus the constant term 
    #       2 * (sum(log(lambdaIJ)) - npairs * log(gscale))
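    # In symbols (a reading of the code below), writing g(. ; par) for the
    # model pair correlation function and G for the cdf of interpoint
    # distance in the window, the quantity maximised is
    #     2 * [ sum_{d_ij <= rmax} log g(d_ij; par)
    #           - npairs * log( integral of g(. ; par) dG ) ]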
    obj <- function(par, objargs) {
      with(objargs, {
        logprod <- sum(log(safePositiveValue(paco(dIJ, par))))
        integ <- unlist(stieltjes(paco, g, par=par))
        integ <- pmax(SMALLVALUE, integ)
        logcl <- 2*(logprod - sumweight * log(integ))
        logcl <- safeFiniteValue(logcl, default=-BIGVALUE)
        ## penalty
        if(hasPenalty <- is.function(penalty)) {
          straf <- do.call(penalty, append(list(par), penal.args))
          logclPEN <- logcl - tau * straf
        }
        ## debugger
        if(isTRUE(TRACE)) {
          cat("Parameters:", fill=TRUE)
          print(par)
          splat("\tlogprod:", logprod)
          splat("\tinteg:", integ)
          splat("log composite likelihood:", logcl)
          if(hasPenalty)
            splat("penalised log composite likelihood:", logclPEN)
        }
        ## save state
        if(is.environment(saveplace)) {
          h <- get("h", envir=saveplace)
          value <- list(logcl=logcl)
          if(hasPenalty)
            value <- append(value, list(logclPEN=logclPEN))
          hplus <- as.data.frame(append(par, value))
          h <- rbind(h, hplus)
          assign("h", h, envir=saveplace)
        }
        return(if(hasPenalty) logclPEN else logcl)
      },
      enclos=objargs$envir)
    }
    ## Determine the values of some parameters
    ## (1) Determine a suitable large number to replace Inf
    objargs$BIGVALUE <- bigvaluerule(obj, objargs, startpar)
    ## (2) Evaluate exchange rate 'tau'
    if(is.function(penalty) && is.function(tau)) {
      ## data-dependent exchange rate 'tau': evaluate now
      if("poisval" %in% names(formals(tau))) {
        ## New style: requires value of (unpenalised) objective function at Poisson process
        poisval <- obj(poispar, objargs)
        tau <- tau(X, poisval=poisval)
      } else {
        tau <- tau(X)
      }
      check.1.real(tau)
    }
    ## Now insert the penalty, etc
    objargs <- resolve.defaults(list(penalty    = penalty,
                                     penal.args = penal.args,
                                     tau        = tau,
                                     saveplace  = saveplace,
                                     TRACE      = TRACE),
                                objargs)
  } else {
    # create local function to evaluate  pair correlation(d) * weight(d)
    #  (with additional parameters 'pcfunargs', 'weightfun' in its environment)
    force(weightfun)
    wpaco <- function(d, par) {
      y <- do.call(pcfun, append(list(par=par, rvals=d), pcfunargs))
      w <- weightfun(d)
      return(y * w)
    }
    # pack up necessary information
    objargs <- list(dIJ=dIJ, wIJ=wIJ, sumweight=sumweight, g=g, gscale=gscale, 
                    envir=environment(wpaco),
                    penalty=NULL,   # updated below
                    penal.args=NULL,   # updated below
                    tau=NULL,   # updated below
                    TRACE=FALSE, # updated below
                    saveplace=NULL, # updated below
                    BIGVALUE=1, # updated below
                    SMALLVALUE=.Machine$double.eps)
    # define objective function (with 'paco', 'wpaco' in its environment)
    # This is the log composite likelihood minus the constant term 
    #       2 * (sum(wIJ * log(lambdaIJ)) - sumweight * log(gscale))
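    # In symbols (a reading of the code below), with weight function w, the
    # quantity maximised is
    #     2 * [ sum_{d_ij <= rmax} w(d_ij) log g(d_ij; par)
    #           - (sum_{d_ij <= rmax} w(d_ij)) * log( integral of w * g(. ; par) dG ) ]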
    obj <- function(par, objargs) {
      with(objargs,
      {
        integ <- unlist(stieltjes(wpaco, g, par=par))
        integ <- pmax(SMALLVALUE, integ)
        logcl <- safeFiniteValue(
          2*(sum(wIJ * log(safePositiveValue(paco(dIJ, par))))
            - sumweight * log(integ)),
          default=-BIGVALUE)
        ## penalty
        if(hasPenalty <- is.function(penalty)) {
          straf <- do.call(penalty, append(list(par), penal.args))
          logclPEN <- logcl - tau * straf
        }
        ## debugger
        if(isTRUE(TRACE)) {
          cat("Parameters:", fill=TRUE)
          print(par)
          splat("\tinteg:", integ)
          splat("log composite likelihood:", logcl)
          if(hasPenalty)
            splat("penalised log composite likelihood:", logclPEN)
        }
        if(is.environment(saveplace)) {
          h <- get("h", envir=saveplace)
          value <- list(logcl=logcl)
          if(hasPenalty)
            value <- append(value, list(logclPEN=logclPEN))
          hplus <- as.data.frame(append(par, value))
          h <- rbind(h, hplus)
          assign("h", h, envir=saveplace)
        }
        return(if(hasPenalty) logclPEN else logcl)
      },
      enclos=objargs$envir)
    }
    ## Determine the values of some parameters
    ## (1) Determine a suitable large number to replace Inf
    objargs$BIGVALUE <- bigvaluerule(obj, objargs, startpar)
    ## (2) Evaluate exchange rate 'tau'
    if(is.function(penalty) && is.function(tau)) {
      ## data-dependent exchange rate 'tau': evaluate now
      if("poisval" %in% names(formals(tau))) {
        ## New style: requires value of (unpenalised) objective function at Poisson process
        poisval <- obj(poispar, objargs)
        tau <- tau(X, poisval=poisval)
      } else {
        tau <- tau(X)
      }
      check.1.real(tau)
    }
    ## Now insert the penalty, etc
    objargs <- resolve.defaults(list(penalty    = penalty,
                                     penal.args = penal.args,
                                     tau        = tau,
                                     saveplace  = saveplace,
                                     TRACE      = TRACE),
                                objargs)
  }

  ## ......................  Optimization settings  ........................

  if(stabilize) {
    ## Numerical stabilisation 
    ## evaluate objective at starting state
    startval <- obj(startpar, objargs)
    ## use to determine appropriate global scale
    smallscale <- sqrt(.Machine$double.eps)
    fnscale <- -max(abs(startval), smallscale)
    parscale <- pmax(abs(startpar), smallscale)
    scaling <- list(fnscale=fnscale, parscale=parscale)
  } else {
    scaling <- list(fnscale=-1)
  }
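  ## (A negative 'fnscale' makes optim() maximise the objective;
  ##  'parscale' puts the parameters on comparable scales.)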

  ## Update list of algorithm control arguments
  control.updated <- resolve.defaults(control, scaling, list(trace=0))

  ## Initialise list of all arguments to 'optim'
  optargs <- list(par=startpar, fn=obj, objargs=objargs,
                  control=control.updated, method=algorithm)

  ## DPP case: check startpar and modify algorithm
  changealgorithm <- length(startpar)==1 && algorithm=="Nelder-Mead"
  if(isDPP){
    alg <- dppmFixAlgorithm(algorithm, changealgorithm, clusters,
                            startpar.human)
    algorithm <- optargs$method <- alg$algorithm
    if(algorithm=="Brent" && changealgorithm){
      optargs$lower <- alg$lower
      optargs$upper <- alg$upper
    }
  }
  
  if(isTRUE(pspace$debug)) {
    splat("About to optimize... Objective function arguments:")
    print(objargs)
  }
  
  ## ..........   optimize it ..............................
  opt <- do.call(optim, optargs)

  ## raise warning/error if something went wrong
  signalStatus(optimStatus(opt), errors.only=TRUE)
  
  ## .......... extract fitted parameters .....................
  if(!usecanonical) {
    optpar.canon <- NULL
    optpar.human <- opt$par
    names(optpar.human) <- names(startpar.human)
  } else {
    optpar.canon <- opt$par
    names(optpar.canon) <- names(startpar)
    optpar.human <- tohuman(optpar.canon, margs=margs)
    names(optpar.human) <- names(startpar.human)
  }
  opt$par             <- optpar.human
  opt$par.canon       <- optpar.canon
  ## save starting values in 'opt' for consistency with mincontrast()
  opt$startpar        <- startpar.human

  ## Finish in DPP case
  if(!is.null(DPP)){
    ## all info that depends on the fitting method:
    Fit <- list(method       = "clik2",
                clfit        = opt,
                weightfun    = weightfun,
                rmax         = rmax,
                objfun       = obj,
                objargs      = objargs,
                maxlogcl     = opt$value,
                pspace.given = pspace.given,
                pspace.used  = pspace)
    # pack up
    clusters <- update(clusters, as.list(opt$par))
    result <- list(Xname      = Xname,
                   X          = X,
                   stationary = stationary,
                   fitted     = clusters,
                   modelname  = modelname,
                   po         = po,
                   lambda     = lambda,
                   Fit        = Fit)
    if(SAVE) {
      h <- get("h", envir=saveplace)
      if(!is.null(h)) class(h) <- unique(c("traj", class(h)))
      attr(result, "h") <- h
    }
    return(result)
  }
  
  ## meaningful model parameters
  modelpar <- info$interpret(optpar.human, lambda)
  # infer parameter 'mu'
  if(isPCP) {
    # Poisson cluster process: extract parent intensity kappa
    kappa <- optpar.human[["kappa"]]
    # mu = mean cluster size
    mu <- if(stationary) lambda/kappa else eval.im(lambda/kappa)
  } else {
    # LGCP: extract variance parameter sigma2
    sigma2 <- optpar.human[["sigma2"]]
    # mu = mean of log intensity 
    mu <- if(stationary) log(lambda) - sigma2/2 else
          eval.im(log(lambda) - sigma2/2)    
  }
  # all info that depends on the fitting method:
  Fit <- list(method       = "clik2",
              clfit        = opt,
              weightfun    = weightfun,
              rmax         = rmax,
              objfun       = obj,
              objargs      = objargs,
              maxlogcl     = opt$value,
              pspace.given = pspace.given,
              pspace.used  = pspace)
  # pack up
  result <- list(Xname      = Xname,
                 X          = X,
                 stationary = stationary,
                 clusters   = clusters,
                 modelname  = modelname,
                 isPCP      = isPCP,
                 po         = po,
                 lambda     = lambda,
                 mu         = mu,
                 par        = optpar.human,
                 par.canon  = optpar.canon,
                 clustpar   = info$checkpar(par=optpar.human, native=FALSE, strict=strict),
                 clustargs  = info$outputshape(shapemodel$margs),
                 modelpar   = modelpar,
                 covmodel   = shapemodel,
                 Fit        = Fit)
  
  if(SAVE) {
    h <- get("h", envir=saveplace)
    if(!is.null(h)) class(h) <- unique(c("traj", class(h)))
    attr(result, "h") <- h
  }

  return(result)
}


## >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
##                P  a  l  m     L  i  k  e  l  i  h  o  o  d              
## <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<


kppmPalmLik <- function(X, Xname, po, clusters, control=list(), stabilize=TRUE, weightfun, rmax,
                        algorithm="Nelder-Mead", DPP=NULL, ...,
                        pspace=NULL) {
  pspace <- do.call(make.pspace,
                    resolve.defaults(
                      list(fitmethod="palm", clusters=clusters),
                      list(...), ## ellipsis arguments override pspace
                      as.list(pspace),
                      .MatchNull=FALSE))
  W <- as.owin(X)
  if(is.null(rmax))
    rmax <- rmax.rule("K", W, intensity(X))
  ## identify unordered pairs of points that contribute
  cl <- closepairs(X, rmax, twice=FALSE, neat=FALSE)
  dIJ <- cl$d
  ## compute weights for unordered pairs of points. Must be symmetric.
  if(is.function(weightfun)) {
    wIJ <- weightfun(dIJ)
  } else {
    npairs <- length(dIJ)
    wIJ <- rep.int(1, npairs)
  }
  ## first point in each *ordered* pair
  J <- c(cl$i, cl$j)
  
  ## convert window to mask, saving other arguments for later
  dcm <- do.call.matched(as.mask,
                         append(list(w=W), list(...)),
                         sieve=TRUE)
  M         <- dcm$result
  otherargs <- dcm$otherargs

  ## Detect DPP usage
  isDPP <- inherits(clusters, "detpointprocfamily")

  # compute intensity at data points
  # and c.d.f. of interpoint distance in window
  if(stationary <- is.stationary(po)) {
    # stationary unmarked Poisson process
    lambda <- intensity(X)
    lambdaJ <- rep(lambda, length(J))
    # compute cdf of distance between a randomly-selected point in X 
    # and a uniform random point in W
    g <- distcdf(X, M, delta=rmax/4096)
    # scaling constant is (integral of intensity) * (number of points)
    gscale <- npoints(X)^2
  } else {
    # compute fitted intensity at data points and in window
    lambdaX <- fitted(po, dataonly=TRUE)
    lambda <- lambdaM <- predict(po, locations=M)
    lambdaJ <- lambdaX[J] 
    # compute cdf of distance between a randomly-selected point in X 
    # and a random point in W with density proportional to intensity function
    g <- distcdf(X, M, dV=lambdaM, delta=rmax/4096)
    # scaling constant is (integral of intensity) * (number of points)
    gscale <- safePositiveValue(integral.im(lambdaM) * npoints(X),
                        default=npoints(X)^2)
  }

  # Detect DPP model and change clusters and intensity correspondingly
  isDPP <- !is.null(DPP)
  if(isDPP){
    tmp <- dppmFixIntensity(DPP, lambda, po)
    clusters <- tmp$clusters
    lambda <- tmp$lambda
    po <- tmp$po
  }

  # trim 'g' to [0, rmax] 
  g <- g[with(g, .x) <= rmax,]
  # get pair correlation function (etc) for model
  info <- spatstatClusterModelInfo(clusters)
  pcfun        <- info$pcf
  selfstart    <- info$selfstart
  isPCP        <- info$isPCP
  resolveshape <- info$resolveshape
  modelname    <- info$modelname
  # Assemble information required for computing pair correlation
  if(is.function(resolveshape)) {
    # Additional 'shape' parameters of cluster model are required.
    # These may be given as individual arguments,
    # or in a list called 'covmodel'
    clustargs <- if("covmodel" %in% names(otherargs))
                 otherargs[["covmodel"]] else otherargs
    shapemodel <- do.call(resolveshape, clustargs)$covmodel
  } else shapemodel <- NULL
  pcfunargs <- shapemodel
  margs <- pcfunargs$margs
  # determine starting parameter values
  startpar <- selfstart(X)
  
  pspace.given <- pspace
  #' ............ permit negative parameter values ................
  strict <- !isFALSE(pspace$strict)
  if(!strict) pcfunargs <- append(pcfunargs, list(strict=strict))
  #' ............ parameter corresponding to Poisson process ......
  if(isDPP) {
    poispar <- NULL
  } else if(isPCP) {
    if(!("kappa" %in% names(startpar)))
      stop("Internal error: startpar does not include 'kappa'")
    poispar <- startpar
    poispar[["kappa"]] <- Inf
  } else {
    #' LGCP
    if(!("sigma2" %in% names(startpar)))
      stop("Internal error: startpar does not include 'sigma2'")
    poispar <- startpar
    poispar[["sigma2"]] <- .Machine$double.eps # i.e. 0
  }
  #' ............ use canonical parameters .........................
  usecanonical <- isTRUE(pspace$canonical)
  if(usecanonical) {
     tocanonical <- info$tocanonical
     tohuman <- info$tohuman
     if(is.null(tocanonical) || is.null(tohuman)) {
       warning("Canonical parameters are not yet supported for this model")
       usecanonical <- FALSE
     }
  }
  startpar.human <- startpar
  poispar.human <- poispar
  if(usecanonical) {
    pcftheo <- pcfun
    startpar <- tocanonical(startpar, margs=margs)
    if(!is.null(poispar)) poispar <- tocanonical(poispar, margs=margs)
    pcfun <- function(par, ...) { pcftheo(tohuman(par, ...), ...) }
  }
  #' ............ penalty .......................................
  penalty <- pspace$penalty
  penal.args <- pspace$penal.args
  tau <- pspace$tau %orifnull% 1
  if(is.function(penalty)) {
    ## penalised optimisation
    if(usecanonical) {
      penalty.human <- penalty
      penalty <- function(par, ...) { penalty.human(tohuman(par, ...), ...) }
    }
    ## data-dependent arguments in penalty
    if(is.function(penal.args)) 
      penal.args <- penal.args(X)
    ## exchange rate (defer evaluation if it is a function)
    if(!is.function(tau)) check.1.real(tau)
    ## reinsert in 'pspace' for insurance
    pspace$penalty <- penalty
    pspace$penal.args <- penal.args
    pspace$tau <- tau
  }
  #' ............ debugger ......................................
  TRACE <- isTRUE(pspace$trace)
  if(SAVE <- isTRUE(pspace$save)) {
    saveplace <- new.env()
    assign("h", NULL, envir=saveplace)
  } else saveplace <- NULL

  ## .....................................................
  # create local function to evaluate pair correlation
  #  (with additional parameters 'pcfunargs' in its environment)
  paco <- function(d, par) {
    do.call(pcfun, append(list(par=par, rvals=d), pcfunargs))
  }
  # define objective function 
  if(!is.function(weightfun)) {
    # pack up necessary information
    objargs <- list(dIJ=dIJ, g=g, gscale=gscale,
                    sumloglam=safeFiniteValue(sum(log(lambdaJ))),
                    envir=environment(paco),
                    ## The following variables are temporarily omitted
                    ## in order to calculate the objective function
                    ## without using them, or their side effects.
                    penalty=NULL,   # updated below
                    penal.args=NULL,   # updated below
                    tau=NULL,   # updated below
                    TRACE=FALSE, # updated below
                    saveplace=NULL, # updated below
                    BIGVALUE=1, # updated below
                    SMALLVALUE=.Machine$double.eps)
    # define objective function (with 'paco' in its environment)
    # This is the log Palm likelihood
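    # In symbols (a reading of the code below), writing lambda for the fitted
    # intensity, g(. ; par) for the pair correlation function, and G for the
    # cdf of the distance from a data point to a random point of the window:
    #     obj(par) = sum over ordered close pairs (i,j) of log lambda(x_j)
    #                + 2 * sum over unordered close pairs of log g(d_ij; par)
    #                - gscale * ( integral of g(. ; par) dG )
    # where gscale approximates (integral of lambda over W) * npoints(X).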
    obj <- function(par, objargs) {
      with(objargs, {
        integ <- unlist(stieltjes(paco, g, par=par))
        integ <- pmax(SMALLVALUE, integ)
        logplik <- safeFiniteValue(
                sumloglam + 2 * sum(log(safePositiveValue(paco(dIJ, par))))
                - gscale * integ,
          default=-BIGVALUE)
        ## penalty
        if(hasPenalty <- is.function(penalty)) {
          straf <- do.call(penalty, append(list(par), penal.args))
          logplikPEN <- logplik - tau * straf
        }
        ## debugger
        if(isTRUE(TRACE)) {
          cat("Parameters:", fill=TRUE)
          print(par)
          splat("integral:", integ)
          splat("log Palm likelihood:", logplik)
          if(hasPenalty)
            splat("penalised log Palm likelihood:", logplikPEN)
        }
        if(is.environment(saveplace)) {
          h <- get("h", envir=saveplace)
          value <- list(logplik=logplik)
          if(hasPenalty)
            value <- append(value, list(logplikPEN=logplikPEN))
          hplus <- as.data.frame(append(par, value))
          h <- rbind(h, hplus)
          assign("h", h, envir=saveplace)
        }
        return(if(hasPenalty) logplikPEN else logplik)
      },
      enclos=objargs$envir)
    }
    ## Determine the values of some parameters
    ## (1) Determine a suitable large number to replace Inf
    objargs$BIGVALUE <- bigvaluerule(obj, objargs, startpar)
    ## (2) Evaluate exchange rate 'tau'
    if(is.function(penalty) && is.function(tau)) {
      ## data-dependent exchange rate 'tau': evaluate now
      if("poisval" %in% names(formals(tau))) {
        ## New style: requires value of (unpenalised) objective function at Poisson process
        poisval <- obj(poispar, objargs)
        tau <- tau(X, poisval=poisval)
      } else {
        tau <- tau(X)
      }
      check.1.real(tau)
    }
    objargs <- resolve.defaults(list(penalty    = penalty,
                                     penal.args = penal.args,
                                     tau        = tau,
                                     saveplace  = saveplace,
                                     TRACE      = TRACE),
                                objargs)
  } else {
    # create local function to evaluate  pair correlation(d) * weight(d)
    #  (with additional parameters 'pcfunargs', 'weightfun' in its environment)
    force(weightfun)
    wpaco <- function(d, par) {
      y <- do.call(pcfun, append(list(par=par, rvals=d), pcfunargs))
      w <- weightfun(d)
      return(y * w)
    }
    # pack up necessary information
    objargs <- list(dIJ=dIJ, wIJ=wIJ, g=g, gscale=gscale,
                    wsumloglam=safeFiniteValue(
                      sum(c(wIJ,wIJ) * safeFiniteValue(log(lambdaJ)))
                    ),
                    envir=environment(wpaco),
                    penalty=NULL,   # updated below
                    penal.args=NULL,   # updated below
                    tau=NULL,   # updated below
                    TRACE=FALSE, # updated below
                    saveplace=NULL, # updated below
                    BIGVALUE=1, # updated below
                    SMALLVALUE=.Machine$double.eps)
    # define objective function (with 'paco', 'wpaco' in its environment)
    # This is the log Palm likelihood
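    # (Same structure as the unweighted Palm objective above, with each
    #  pairwise term multiplied by w(d_ij) and the integrand by w(r).)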
    obj <- function(par, objargs) {
      with(objargs, {
        integ <- unlist(stieltjes(wpaco, g, par=par))
        integ <- pmax(SMALLVALUE, integ)
        logplik <- safeFiniteValue(wsumloglam +
                             2 * sum(wIJ * log(safePositiveValue(paco(dIJ, par))))
                             - gscale * integ,
                             default=-BIGVALUE)
        ## penalty
        if(hasPenalty <- is.function(penalty)) {
          straf <- do.call(penalty, append(list(par), penal.args))
          logplikPEN <- logplik - tau * straf
        }
        ## debugger
        if(isTRUE(TRACE)) {
          cat("Parameters:", fill=TRUE)
          print(par)
          splat("integral:", integ)
          splat("log Palm likelihood:", logplik)
          if(hasPenalty)
            splat("penalised log Palm likelihood:", logplikPEN)
        }
        if(is.environment(saveplace)) {
          h <- get("h", envir=saveplace)
          value <- list(logplik=logplik)
          if(hasPenalty)
            value <- append(value, list(logplikPEN=logplikPEN))
          hplus <- as.data.frame(append(par, value))
          h <- rbind(h, hplus)
          assign("h", h, envir=saveplace)
        }
        return(if(hasPenalty) logplikPEN else logplik)        
      },
      enclos=objargs$envir)
    }
    ## Determine the values of some parameters
    ## (1) Determine a suitable large number to replace Inf
    objargs$BIGVALUE <- bigvaluerule(obj, objargs, startpar)
    ## (2) Evaluate exchange rate 'tau'
    if(is.function(penalty) && is.function(tau)) {
      ## data-dependent exchange rate 'tau': evaluate now
      if("poisval" %in% names(formals(tau))) {
        ## New style: requires value of (unpenalised) objective function at Poisson process
        poisval <- obj(poispar, objargs)
        tau <- tau(X, poisval=poisval)
      } else {
        tau <- tau(X)
      }
      check.1.real(tau)
    }
    ## Now insert penalty, etc.
    objargs <- resolve.defaults(list(penalty    = penalty,
                                     penal.args = penal.args,
                                     tau        = tau,
                                     saveplace  = saveplace,
                                     TRACE      = TRACE),
                                objargs)
  }

  ## ......................  Optimization settings  ........................

  if(stabilize) {
    ## Numerical stabilisation 
    ## evaluate objective at starting state
    startval <- obj(startpar, objargs)
    ## use to determine appropriate global scale
    smallscale <- sqrt(.Machine$double.eps)
    fnscale <- -max(abs(startval), smallscale)
    parscale <- pmax(abs(startpar), smallscale)
    scaling <- list(fnscale=fnscale, parscale=parscale)
  } else {
    scaling <- list(fnscale=-1)
  }

  ## Update list of algorithm control arguments
  control.updated <- resolve.defaults(control, scaling, list(trace=0))

  ## Initialise list of all arguments to 'optim'
  optargs <- list(par=startpar, fn=obj, objargs=objargs,
                  control=control.updated, method=algorithm)

  ## DPP case: check startpar and modify algorithm
  changealgorithm <- length(startpar)==1 && algorithm=="Nelder-Mead"
  if(isDPP){
    alg <- dppmFixAlgorithm(algorithm, changealgorithm, clusters,
                            startpar.human)
    algorithm <- optargs$method <- alg$algorithm
    if(algorithm=="Brent" && changealgorithm){
      optargs$lower <- alg$lower
      optargs$upper <- alg$upper
    }
  }

  ## .......................................................................
  
  if(isTRUE(pspace$debug)) {
    splat("About to optimize... Objective function arguments:")
    print(objargs)
  }

  # optimize it
  opt <- do.call(optim, optargs)
  # raise warning/error if something went wrong
  signalStatus(optimStatus(opt), errors.only=TRUE)
  
  ## Extract optimal values of parameters
  if(!usecanonical) {
    optpar.canon <- NULL
    optpar.human <- opt$par
    names(optpar.human) <- names(startpar.human)
  } else {
    optpar.canon <- opt$par
    names(optpar.canon) <- names(startpar)
    optpar.human <- tohuman(optpar.canon, margs=margs)
    names(optpar.human) <- names(startpar.human)
  }
  opt$par            <- optpar.human
  opt$par.canon      <- optpar.canon
  ## save starting values in 'opt' for consistency with minconfit()
  opt$startpar       <- startpar.human

  ## Finish in DPP case
  if(!is.null(DPP)){
    ## all info that depends on the fitting method:
    Fit <- list(method       = "palm",
                clfit        = opt,
                weightfun    = weightfun,
                rmax         = rmax,
                objfun       = obj,
                objargs      = objargs,
                maxlogcl     = opt$value,
                pspace.given = pspace.given,
                pspace.used  = pspace)
    # pack up
    clusters <- update(clusters, as.list(optpar.human))
    result <- list(Xname      = Xname,
                   X          = X,
                   stationary = stationary,
                   fitted     = clusters,
                   modelname  = modelname,
                   po         = po,
                   lambda     = lambda,
                   Fit        = Fit)
    if(SAVE) {
      h <- get("h", envir=saveplace)
      if(!is.null(h)) class(h) <- unique(c("traj", class(h)))
      attr(result, "h") <- h
    }
    return(result)
  }
  # meaningful model parameters
  modelpar <- info$interpret(optpar.human, lambda)
  # infer parameter 'mu'
  if(isPCP) {
    # Poisson cluster process: extract parent intensity kappa
    kappa <- optpar.human[["kappa"]]
    # mu = mean cluster size
    mu <- if(stationary) lambda/kappa else eval.im(lambda/kappa)
  } else {
    # LGCP: extract variance parameter sigma2
    sigma2 <- optpar.human[["sigma2"]]
    # mu = mean of log intensity 
    mu <- if(stationary) log(lambda) - sigma2/2 else
          eval.im(log(lambda) - sigma2/2)    
  }
  # all info that depends on the fitting method:
  Fit <- list(method       = "palm",
              clfit        = opt,
              weightfun    = weightfun,
              rmax         = rmax,
              objfun       = obj,
              objargs      = objargs,
              maxlogcl     = opt$value,
              pspace.given = pspace.given,
              pspace.used  = pspace)
  # pack up
  result <- list(Xname      = Xname,
                 X          = X,
                 stationary = stationary,
                 clusters   = clusters,
                 modelname  = modelname,
                 isPCP      = isPCP,
                 po         = po,
                 lambda     = lambda,
                 mu         = mu,
                 par        = optpar.human,
                 par.canon  = optpar.canon,
                 clustpar   = info$checkpar(par=optpar.human, native=FALSE, strict=strict),
                 clustargs  = info$outputshape(shapemodel$margs),
                 modelpar   = modelpar,
                 covmodel   = shapemodel,
                 Fit        = Fit)
  if(SAVE) {
    h <- get("h", envir=saveplace)
    if(!is.null(h)) class(h) <- unique(c("traj", class(h)))
    attr(result, "h") <- h
  }
  return(result)
}
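
## Illustrative sketch (not part of the package code): the Palm likelihood
## fitting above is normally reached through the user-level kppm() call with
## method="palm". Assumes the spatstat packages are attached and the
## 'redwood' dataset is available.
if(FALSE) {
  fitP <- kppm(redwood ~ 1, clusters="Thomas", method="palm")
  fitP$Fit$maxlogcl   # maximised log Palm likelihood, as stored in 'Fit' above
  ## penalised=TRUE would route the fit through the penalty/'tau' machinery above
}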

## >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
##       A  d  a  p  t  i  v  e     C  o  m  p  o  s  i  t  e    L  i  k  e  l  i  h  o  o  d
## <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<


## ........... contributed by Chiara Fend ...................
## needs nonlinear equation solver nleqslv

kppmCLadap <- function(X, Xname, po, clusters, control, weightfun, 
                       rmax=NULL, epsilon=0.01, DPP=NULL,
                       algorithm="Broyden", ..., 
                       startpar=NULL, globStrat="dbldog") {

  if(!requireNamespace("nleqslv", quietly=TRUE))
    stop(paste("The package", sQuote("nleqslv"), "is required"),
           call.=FALSE)
  
  W <- as.owin(X)

  if(is.null(rmax)) # default chosen for numerical stability
    rmax <- shortside(Frame(W))

  # identify pairs of points that might contribute
  cl <- closepairs(X, rmax)
  dIJ <- cl$d #pairwise distances
  Rmin <- min(dIJ)
  indexmin <- which(dIJ==Rmin) #for later use
  
  # convert window to mask, saving other arguments for later
  dcm <- do.call.matched(as.mask,
                         append(list(w=W), list(...)),
                         sieve=TRUE)
  M         <- dcm$result
  otherargs <- dcm$otherargs

  #' penalised estimation is not supported
  if(any(m <- (c("penalised", "pspace") %in% names(otherargs)))) 
    warning(paste(ngettext(sum(m), "Argument", "Arguments"),
                  commasep(sQuote(c("penalised", "pspace")[m])),
                  ngettext(sum(m), "is", "are"),
                  "not supported for adaptive composite likelihood"),
            call.=FALSE)
    
  # compute intensity at pairs of data points
  # and c.d.f. of interpoint distance in window
  if(stationary <- is.stationary(po)) {
    # stationary unmarked Poisson process
    lambda <- intensity(X)
    # compute cdf of distance between two uniform random points in W
    g <- distcdf(W, delta=rmax/4096)
    # scaling constant is (area * intensity)^2
    gscale <- npoints(X)^2  
  } else {
    # compute fitted intensity at data points and in window
    #    lambdaX <- fitted(po, dataonly=TRUE)
    lambda <- lambdaM <- predict(po, locations=M)
    # compute cdf of distance between two random points in W
    # with density proportional to intensity function
    g <- distcdf(M, dW=lambdaM, delta=rmax/4096)
    # scaling constant is (integral of intensity)^2
    gscale <- safePositiveValue(integral.im(lambdaM)^2, default=npoints(X)^2)
  }
  
  isDPP <- !is.null(DPP)
  if(isDPP){
    tmp <- dppmFixIntensity(DPP, lambda, po)
    clusters <- tmp$clusters
    lambda <- tmp$lambda
    po <- tmp$po
  }
  
  # get pair correlation function (etc) for model
  info <- spatstatClusterModelInfo(clusters)
  pcfun        <- info$pcf
  dpcfun       <- info$Dpcf
  selfstart    <- info$selfstart
  isPCP        <- info$isPCP
  resolveshape <- info$resolveshape
  modelname    <- info$modelname
  # Assemble information required for computing pair correlation
  if(is.function(resolveshape)) {
    # Additional parameters of cluster model are required.
    # These may be given as individual arguments,
    # or in a list called 'covmodel'
    clustargs <- if("covmodel" %in% names(otherargs))
      otherargs[["covmodel"]] else otherargs
    shapemodel <- do.call(resolveshape, clustargs)$covmodel
  } else shapemodel <- NULL
  pcfunargs <- shapemodel
  
  ## determine starting parameter values
  if(is.null(startpar)) {
    startpar <- selfstart(X)
  } else if(!isDPP){
    startpar <- info$checkpar(startpar, native=TRUE)
  }
  ## optimization will be over the logarithms of the parameters
  startparLog <- log(startpar)
  pcfunLog <- function(par, ...) { pcfun(exp(par), ...) }
  dpcfunLog <- function(par, ...) { dpcfun(exp(par), ...) }
  
  # create local functions to evaluate pair correlation and its gradient
  #  (with additional parameters 'pcfunargs' in its environment)
  paco <- function(d, par) {
    do.call(pcfunLog, append(list(par=par, rvals=d), pcfunargs))
  }
  
  dpaco <- function(d, par) {
    do.call(dpcfunLog, append(list(par=par, rvals=d), pcfunargs))
  }
  
  # trim 'g' to [0, rmax] 
  g <- g[with(g, .x) <= rmax,]
  
  
  #' ..........  define objective function ......................
  # create local function to evaluate  weight(epsilon*M/(pcf(d)-1))
  weight <- function(d, par) {
    y <- paco(d=d, par=par)
    # calculate M (only needs to be calculated for cluster models)
    M <- 1
    if(!isDPP){
      M <- abs(paco(d=0, par=par)-1)
    }
    return(weightfun(epsilon*M/(y-1)))
  }

  wlogcl2score <- function(par, paco, dpaco, dIJ, gscale, epsilon, cdf=g){
    p <- length(par)
    temp <- rep(0, p)
    
    # check whether the current parameter is valid; if not, return Inf
    if(isDPP){
      if(length(par)==1 && is.null(names(par)))
        names(par) <- clusters$freepar
      mod <- update(clusters, as.list(exp(par)))
      if(!valid(mod)){
        return(rep(Inf, p))
      }
    }
    
    # everything can be computed
    wdIJ <- weight(d=dIJ, par=par)
    index <- unique(c(which(wdIJ!=0), indexmin))
    dIJcurrent <- dIJ[index]
    
    for(i in 1:p){
      parname <- names(par)[i]
      # weighted derivatives wrt log of parname
      dpcfweighted <- function(d, par){ 
        y <- dpaco(d = d, par = par)[parname,]*exp(par[i])
        return(y*weight(d = d, par = par))
      } 
      temp[i] <- sum(dpcfweighted(d = dIJcurrent, par=par)/paco(d = dIJcurrent, par = par)) - gscale * stieltjes(dpcfweighted,cdf, par=par)$f
    }
    return(temp)
  }
  
  ## .................   optimize it ..............................
  opt <- nleqslv::nleqslv(x = startparLog, fn = wlogcl2score, 
                          method = algorithm,
                          global = globStrat, control = control, 
                          paco=paco, dpaco=dpaco, 
                          dIJ=dIJ, gscale=gscale, epsilon=epsilon)
    
  ## .......... extract fitted parameters on original scale ...............
  optpar        <- exp(opt$x)
  names(optpar) <- names(startpar)
  ## insert entries expected in 'opt'
  opt$par      <- optpar
  opt$startpar <- startpar

  ## Finish in DPP case
  if(isDPP){
    # all info that depends on the fitting method:
    Fit <- list(method    = "adapcl",
                cladapfit = opt,
                weightfun = weightfun,
                rmax      = rmax,
                epsilon   = epsilon,
                objfun    = wlogcl2score,
                objargs   = control,
                estfunc   = opt$fvec)
    # pack up
    clusters <- update(clusters, as.list(exp(opt$x)))
    result <- list(Xname      = Xname,
                   X          = X,
                   stationary = stationary,
                   fitted     = clusters,
                   modelname  = modelname,
                   po         = po,
                   lambda     = lambda,
                   Fit        = Fit)
    return(result)
  }
  
  ## meaningful model parameters
  modelpar <- info$interpret(optpar, lambda)
  # infer parameter 'mu'
  if(isPCP) {
    # Poisson cluster process: extract parent intensity kappa
    kappa <- optpar[["kappa"]]
    # mu = mean cluster size
    mu <- if(stationary) lambda/kappa else eval.im(lambda/kappa)
  } else {
    # LGCP: extract variance parameter sigma2
    sigma2 <- optpar[["sigma2"]]
    # mu = mean of log intensity 
    mu <- if(stationary) log(lambda) - sigma2/2 else
      eval.im(log(lambda) - sigma2/2)    
  }
  # all info that depends on the fitting method:
  Fit <- list(method    = "adapcl",
              cladapfit = opt,
              weightfun = weightfun,
              rmax      = rmax,
              epsilon   = epsilon,
              objfun    = wlogcl2score,
              objargs   = control,
              estfunc   = opt$fvec)
  # pack up
  result <- list(Xname      = Xname,
                 X          = X,
                 stationary = stationary,
                 clusters   = clusters,
                 modelname  = modelname,
                 isPCP      = isPCP,
                 po         = po,
                 lambda     = lambda,
                 mu         = mu,
                 par        = optpar,
                 clustpar   = info$checkpar(par=optpar, native=FALSE),
                 clustargs  = info$outputshape(shapemodel$margs),
                 modelpar   = modelpar,
                 covmodel   = shapemodel,
                 Fit        = Fit)
  return(result)
}
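
## Illustrative sketch (not part of the package code): adaptive composite
## likelihood is selected from the user-level kppm() call via method="adapcl"
## and requires the 'nleqslv' package. Assumes spatstat is attached and the
## 'redwood' dataset is available.
if(FALSE) {
  fitA <- kppm(redwood ~ 1, clusters="Thomas", method="adapcl", epsilon=0.01)
  fitA$Fit$method     # "adapcl"
  fitA$Fit$estfunc    # estimating function value at the solution (opt$fvec above)
}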


improve.kppm <- local({

  fnc <- function(r, eps, g){ (g(r) - 1)/(g(0) - 1) - eps}

  improve.kppm <- function(object, type=c("quasi", "wclik1", "clik1"),
                           rmax = NULL, eps.rmax = 0.01,
                           dimyx = 50, maxIter = 100, tolerance = 1e-06,
                           fast = TRUE, vcov = FALSE, fast.vcov = FALSE,
                           verbose = FALSE,
                           save.internals = FALSE) {
    verifyclass(object, "kppm")
    type <- match.arg(type)
    gfun <- pcfmodel(object)
    X <- object$X
    win <- as.owin(X)
    ## simple (rectangular) grid quadrature scheme
    ## (using pixels with centers inside owin only)
    mask <- as.mask(win, dimyx = dimyx)
    wt <- pixellate(win, W = mask)
    wt <- wt[mask]
    Uxy <- rasterxy.mask(mask)
    U <- ppp(Uxy$x, Uxy$y, window = win, check=FALSE)
    U <- U[mask]
#    nU <- npoints(U)
    Yu <- pixellate(X, W = mask)
    Yu <- Yu[mask]
    
    ## covariates at quadrature points
    po <- object$po
    Z <- model.images(po, mask)
    Z <- sapply(Z, "[", i=U)

    ## obtain initial beta estimate using composite likelihood
    beta0 <- coef(po)
    
    ## determining the dependence range
    if (type != "clik1" && is.null(rmax))
      {
        diamwin <- diameter(win)
        rmax <- if(fnc(diamwin, eps.rmax, gfun) >= 0) diamwin else
                uniroot(fnc, lower = 0, upper = diameter(win),
                        eps=eps.rmax, g=gfun)$root
        if(verbose)
          splat(paste0("type: ", type, ", ",
                       "dependence range: ", rmax, ", ",
                       "dimyx: ", dimyx, ", g(0) - 1:", gfun(0) -1))
      }
    ## preparing the WCL case
    if (type == "wclik1")
      Kmax <- 2*pi * integrate(function(r){r * (gfun(r) - 1)},
                               lower=0, upper=rmax)$value * exp(c(Z %*% beta0))
    ## the g()-1 matrix without tapering
    if (!fast || (vcov && !fast.vcov)){
      if (verbose)
        cat("computing the g(u_i,u_j)-1 matrix ...")
      gminus1 <- matrix(gfun(c(pairdist(U))) - 1, U$n, U$n)
      if (verbose)
        cat("..Done.\n")
    }
    
    if ( (fast && type == "quasi") || fast.vcov ){
      if (verbose)
        cat("computing the sparse G-1 matrix ...\n")
      ## Non-zero gminus1 entries (when using tapering)
      cp <- crosspairs(U,U,rmax,what="ijd")
      if (verbose)
        cat("crosspairs done\n")
      Gtap <- (gfun(cp$d) - 1)
      if(vcov){
        if(fast.vcov){
          gminus1 <- Matrix::sparseMatrix(i=cp$i, j=cp$j,
                                          x=Gtap, dims=c(U$n, U$n))
        } else{
          if(fast)
            gminus1 <- matrix(gfun(c(pairdist(U))) - 1, U$n, U$n)
        }
      }
      if (verbose && type != "quasi")
        cat("..Done.\n")
    }
       
    if (type == "quasi" && fast){
      mu0 <- exp(c(Z %*% beta0)) * wt
      mu0root <- sqrt(mu0)
      sparseG <- Matrix::sparseMatrix(i=cp$i, j=cp$j,
                                      x=mu0root[cp$i] * mu0root[cp$j] * Gtap,
                                      dims=c(U$n, U$n))
      Rroot <- Matrix::Cholesky(sparseG, perm = TRUE, Imult = 1)
      ## Imult=1 means that we add 1*I
      if (verbose)
        cat("..Done.\n")
    }
    
    ## iterative weighted least squares/Fisher scoring
    bt <- beta0
    noItr <- 1
    repeat {
      mu <- exp(c(Z %*% bt)) * wt
      mu.root <- sqrt(mu)
      ## the core of estimating equation: ff=phi
      ## in case of ql, \phi=V^{-1}D=V_\mu^{-1/2}x where (G+I)x=V_\mu^{1/2} Z
      ff <- switch(type,
                   clik1 = Z,
                   wclik1= Z/(1 + Kmax),
                   quasi = if(fast){
                     Matrix::solve(Rroot, mu.root * Z)/mu.root
                   } else{
                     solve(diag(U$n) + t(gminus1 * mu), Z)
                   }
                   )
      ##alternative
      ##R=chol(sparseG+sparseMatrix(i=c(1:U$n),j=c(1:U$n),
      ##                            x=rep(1,U$n),dims=c(U$n,U$n)))
      ##ff2 <- switch(type,
      ##              clik1 = Z,
      ##              wclik1= Z/(1 + Kmax),
      ##              quasi = if (fast)
      ##                         solve(R,solve(t(R), mu.root * Z))/mu.root
      ##                      else solve(diag(U$n) + t(gminus1 * mu), Z))
      ## print(summary(as.numeric(ff)-as.numeric(ff2)))
      ## the estimating equation: u_f(\beta)
      uf <- (Yu - mu) %*% ff
      ## inverse of minus expectation of Jacobian matrix: I_f
      Jinv <- solve(t(Z * mu) %*% ff)
      if(maxIter==0){
        ## This is a built-in early exit for vcov internal calculations
        break
      }
      deltabt <- as.numeric(uf %*% Jinv)
      if (any(!is.finite(deltabt))) {
        warning(paste("Infinite value, NA or NaN appeared",
                      "in the iterative weighted least squares algorithm.",
                      "Returning the initial intensity estimate unchanged."),
                call.=FALSE)
        return(object)
      }
      ## updating the present estimate of \beta
      bt <- bt + deltabt
      if (verbose)
        splat(paste0("itr: ", noItr, ",\nu_f: ", as.numeric(uf),
                     "\nbeta:", bt, "\ndeltabeta:", deltabt))
      if (max(abs(deltabt/bt)) <= tolerance || max(abs(uf)) <= tolerance)
        break
      if (noItr > maxIter)
        stop("Maximum number of iterations reached without convergence.")
      noItr <- noItr + 1
    }
    out <- object
    out$po$coef.orig <- beta0
    out$po$coef <- bt
    loc <- if(is.sob(out$lambda)) as.mask(out$lambda) else mask
    out$lambda <- predict(out$po, locations = loc)
    out$improve <- list(type = type,
                        rmax = rmax,
                        dimyx = dimyx,
                        fast = fast,
                        fast.vcov = fast.vcov)
    if(save.internals){
      out$improve <- append(out$improve, list(ff=ff, uf=uf, J.inv=Jinv))
    }
    if(vcov){
      if (verbose)
        cat("computing the asymptotic variance ...\n")
      ## variance of the estimation equation: Sigma_f = Var(u_f(bt))
      trans <- if(fast) Matrix::t else t
      Sig <- trans(ff) %*% (ff * mu) + trans(ff * mu) %*% gminus1 %*% (ff * mu)
      ## note Abdollah's G does not have mu.root inside...
      ## the asymptotic variance of \beta:
      ##         inverse of the Godambe information matrix
      out$vcov <- as.matrix(Jinv %*% Sig %*% Jinv)
    }
    return(out)
  }
  improve.kppm
})
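
## Illustrative sketch (not part of the package code): improve.kppm is
## normally invoked through the 'improve.type' argument of kppm(), e.g.
## quasi-likelihood re-estimation of the trend coefficients.
## Assumes spatstat is attached and the 'redwood' dataset is available.
if(FALSE) {
  fitQ <- kppm(redwood ~ x, clusters="Thomas", improve.type="quasi")
  coef(fitQ)   # trend coefficients after the improvement step
  ## settings of the improvement step are recorded in the 'improve' component above
}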


is.kppm <- function(x) { inherits(x, "kppm")}

print.kppm <- print.dppm <- function(x, ...) {

  isPCP <- x$isPCP
  # detect DPP
  isDPP <- inherits(x, "dppm")
  # handle outdated objects - which were all cluster processes
  if(!isDPP && is.null(isPCP)) isPCP <- TRUE

  terselevel <- spatstat.options('terse')
  digits <- getOption('digits')
  
  splat(if(x$stationary) "Stationary" else "Inhomogeneous",
        if(isDPP) "determinantal" else if(isPCP) "cluster" else "Cox",
        "point process model")

  Xname <- x$Xname
  if(waxlyrical('extras', terselevel) && nchar(Xname) < 20) {
    has.subset <- ("subset" %in% names(x$call))
    splat("Fitted to",
          if(has.subset) "(a subset of)" else NULL,
          "point pattern dataset", sQuote(Xname))
  }

  if(waxlyrical('gory', terselevel)) {
    fittedby <- "Fitted by"
    #' detect whether fit used a penalty
    if(!is.null(x$Fit$pspace$penalty))
      fittedby <- "Fitted by penalised"
    switch(x$Fit$method,
           mincon = {
             splat(fittedby, "minimum contrast")
             splat("\tSummary statistic:", x$Fit$StatName)
           },
           clik  =,
           clik2 = {
             splat(fittedby, "maximum second order composite likelihood")
             splat("\trmax =", x$Fit$rmax)
             if(!is.null(wtf <- x$Fit$weightfun)) {
               a <- attr(wtf, "selfprint") %orifnull% pasteFormula(wtf)
               splat("\tweight function:", a)
             }
           },
           palm = {
             splat(fittedby, "maximum Palm likelihood")
             splat("\trmax =", x$Fit$rmax)
             if(!is.null(wtf <- x$Fit$weightfun)) {
               a <- attr(wtf, "selfprint") %orifnull% pasteFormula(wtf)
               splat("\tweight function:", a)
             }
           },
           adapcl = {
             splat("Fitted by adaptive second order composite likelihood")
             splat("\tepsilon =", x$Fit$epsilon)
             if(!is.null(wtf <- x$Fit$weightfun)) {
               a <- attr(wtf, "selfprint") %orifnull% pasteFormula(wtf)
               splat("\tweight function:", a)
             }
           },
           warning(paste("Unrecognised fitting method", sQuote(x$Fit$method)))
           )
  }

  #' optimization trace
  if(!is.null(attr(x, "h")))
    splat("[Includes history of evaluations of objective function]")
  
  parbreak(terselevel)
  
  # ............... trend .........................

  if(!(isDPP && is.null(x$fitted$intensity)))
    print(x$po, what="trend")

  # ..................... clusters ................

  # DPP case
  if(isDPP){
      splat("Fitted DPP model:")
      print(x$fitted)
      return(invisible(NULL))
  }

  tableentry <- spatstatClusterModelInfo(x$clusters)
  
  splat(if(isPCP) "Cluster" else "Cox",
        "model:", tableentry$printmodelname(x))
  cm <- x$covmodel
  if(!isPCP) {
    # Covariance model - LGCP only
    splat("\tCovariance model:", cm$model)
    margs <- cm$margs
    if(!is.null(margs)) {
      nama <- names(margs)
      tags <- ifelse(nzchar(nama), paste(nama, "="), "")
      tagvalue <- paste(tags, margs)
      splat("\tCovariance parameters:",
            paste(tagvalue, collapse=", "))
    }
  }
  pc <- x$par.canon
  if(!is.null(pc)) {
    splat("Fitted canonical parameters:")
    print(pc, digits=digits)
  }
  pa <- x$clustpar
  if (!is.null(pa)) {
    splat("Fitted",
          if(isPCP) "cluster" else "covariance",
          "parameters:")
    print(pa, digits=digits)
  }

  if(!is.null(mu <- x$mu)) {
    if(isPCP) {
      splat("Mean cluster size: ",
            if(!is.im(mu)) paste(signif(mu, digits), "points") else "[pixel image]")
    } else {
      splat("Fitted mean of log of random intensity:",
            if(!is.im(mu)) signif(mu, digits) else "[pixel image]")
    }
  }
  if(isDPP) {
    rx <- repul(x)
    splat(if(is.im(rx)) "(Average) strength" else "Strength",
          "of repulsion:", signif(mean(rx), 4))
  }
  if(isPCP) {
    parbreak(terselevel)
    g <- pcfmodel(x)
    phi <- g(0) - 1
    splat("Cluster strength: phi = ", signif(phi, 4))
    psib <- phi/(1+phi)
    splat("Sibling probability: psib = ", signif(psib, 4))
    
  }
  invisible(NULL)
}

plot.kppm <- local({

  plotem <- function(x, ..., main=dmain, dmain) { plot(x, ..., main=main) }
  
  plot.kppm <- function(x, ...,
                        what=c("intensity", "statistic", "cluster"),
                        pause=interactive(),
                        xname) {
    ## catch objectname from dots if present otherwise deparse x:
    if(missing(xname)) xname <- short.deparse(substitute(x))
    nochoice <- missing(what)
    what <- pickoption("plot type", what,
                       c(statistic="statistic",
                         intensity="intensity",
                         cluster="cluster"),
                       multi=TRUE)
    ## handle older objects
    Fit <- x$Fit
    if(is.null(Fit)) {
      warning("kppm object is in outdated format")
      Fit <- x
      Fit$method <- "mincon"
    }
    ## Catch locations for clusters if given
    loc <- list(...)$locations
    inappropriate <- (nochoice & ((what == "intensity") & (x$stationary))) |
             ((what == "statistic") & (Fit$method != "mincon")) |
             ((what == "cluster") & (identical(x$isPCP, FALSE))) | 
             ((what == "cluster") & (!x$stationary) & is.null(loc))

    if(!nochoice && !x$stationary && "cluster" %in% what && is.null(loc))
      stop("Please specify additional argument ", sQuote("locations"),
           " which will be passed to the function ",
           sQuote("clusterfield"), ".")

    if(any(inappropriate)) {
      what <- what[!inappropriate]
      if(length(what) == 0){
        message("Nothing meaningful to plot. Exiting...")
        return(invisible(NULL))
      }
    }
    pause <- pause && (length(what) > 1)
    if(pause) {
       opa <- par(ask=TRUE)
       on.exit(par(opa))
    }
    for(style in what)
      switch(style,
             intensity={
               plotem(x$po, ...,
                      dmain=c(xname, "Intensity"),
                      how="image", se=FALSE)
             },
             statistic={
               plotem(Fit$mcfit, ...,
                      dmain=c(xname, Fit$StatName))
             },
             cluster={
               plotem(clusterfield(x, locations = loc, verbose=FALSE), ...,
                      dmain=c(xname, "Fitted cluster"))
             })
    return(invisible(NULL))
  }

  plot.kppm
})
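
## Illustrative sketch (not part of the package code): typical use of the
## plot method. Assumes spatstat is attached and 'redwood' is available.
if(FALSE) {
  fit <- kppm(redwood ~ x, clusters="Thomas", method="mincon")
  plot(fit, what="intensity")   # fitted intensity surface
  plot(fit, what="statistic")   # summary statistic; only for method="mincon"
}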


predict.kppm <- predict.dppm <- function(object, ...) {
  se <- resolve.1.default(list(se=FALSE), list(...))
  interval <- resolve.1.default(list(interval="none"), list(...))
  if(se) warning("Standard error calculation assumes a Poisson process")
  if(interval != "none")
    warning(paste(interval, "interval calculation assumes a Poisson process"))
  predict(as.ppm(object), ...)
}

fitted.kppm <- fitted.dppm <- function(object, ...) {
  fitted(as.ppm(object), ...)
}

residuals.kppm <- residuals.dppm <- function(object, ...) {
  type <- resolve.1.default(list(type="raw"), list(...))
  if(type != "raw")
    warning(paste("calculation of", type, "residuals",
                  "assumes a Poisson process"))
  residuals(as.ppm(object), ...)
}


formula.kppm <- formula.dppm <- function(x, ...) {
  formula(x$po, ...)
}

terms.kppm <- terms.dppm <- function(x, ...) {
  terms(x$po, ...)
}

labels.kppm <- labels.dppm <- function(object, ...) {
  labels(object$po, ...)
}

update.kppm <- update.dppm <- function(object, ..., evaluate=TRUE, envir=environment(terms(object))) {
  argh <- list(...)
  nama <- names(argh)
  callframe <- object$callframe
  #' look for a formula argument
  fmla <- formula(object)
  jf <- integer(0)
  if(!is.null(trend <- argh$trend)) {
    if(!can.be.formula(trend))
      stop("Argument \"trend\" should be a formula")
    fmla <- newformula(formula(object), trend, callframe, envir)
    jf <- which(nama == "trend")
  } else if(any(isfo <- sapply(argh, can.be.formula))) {
    if(sum(isfo) > 1) {
      if(!is.null(nama)) isfo <- isfo & nzchar(nama)
      if(sum(isfo) > 1)
        stop(paste("Arguments not understood:",
                   "there are two unnamed formula arguments"))
    }
    jf <- which(isfo)
    fmla <- argh[[jf]]
    fmla <- newformula(formula(object), fmla, callframe, envir)
  }

  #' look for a point pattern or quadscheme
  if(!is.null(X <- argh$X)) {
    if(!inherits(X, c("ppp", "quad")))
      stop(paste("Argument X should be a formula,",
                 "a point pattern or a quadrature scheme"))
    jX <- which(nama == "X")
  } else if(any(ispp <- sapply(argh, inherits, what=c("ppp", "quad")))) {
    if(sum(ispp) > 1) {
      if(!is.null(nama)) ispp <- ispp & nzchar(nama)
      if(sum(ispp) > 1)
        stop(paste("Arguments not understood:",
                   "there are two unnamed point pattern/quadscheme arguments"))
    }
    jX <- which(ispp)
    X <- argh[[jX]]
  } else {
    X <- object$X
    jX <- integer(0)
  }
  Xexpr <- if(length(jX) > 0) sys.call()[[2L + jX]] else NULL
  #' remove arguments just recognised, if any
  jused <- c(jf, jX)
  if(length(jused) > 0) {
    argh <- argh[-jused]
    nama <- names(argh)
  }
  #' update the matched call
  thecall <- getCall(object)
  methodname <- as.character(thecall[[1L]])
  switch(methodname,
         kppm.formula = {
	   ## original call has X = [formula with lhs]
	   if(!is.null(Xexpr)) {
	     lhs.of.formula(fmla) <- Xexpr
	   } else if(is.null(lhs.of.formula(fmla))) {
	     lhs.of.formula(fmla) <- as.name('.')
	   }
           oldformula <- getCall(object)$X
           oldformula <- eval(oldformula, callframe)
           thecall$X <- newformula(oldformula, fmla, callframe, envir)
         },
         {
	   ## original call has X = ppp and trend = [formula without lhs]
           oldformula <- getCall(object)$trend %orifnull% (~1)
           oldformula <- eval(oldformula, callframe)
	   fom <-  newformula(oldformula, fmla, callframe, envir)
	   if(!is.null(Xexpr))
	      lhs.of.formula(fom) <- Xexpr
	   if(is.null(lhs.of.formula(fom))) {
	      # new call has same format
	      thecall$trend <- fom
  	      if(length(jX) > 0)
  	        thecall$X <- X
	   } else {
	      # new call has formula with lhs
	      thecall$trend <- NULL
	      thecall$X <- fom
	   }
         })
  knownnames <- unique(c(names(formals(kppm.ppp)),
                         names(formals(mincontrast)),
                         names(formals(optim))))
  knownnames <- setdiff(knownnames,
                        c("X", "trend",
                          "observed", "theoretical",
                          "fn", "gr", "..."))
  ok <- nama %in% knownnames
  thecall <- replace(thecall, nama[ok], argh[ok])
  thecall$formula <- NULL # artefact of 'step', etc
  thecall[[1L]] <- as.name("kppm")
  if(!evaluate)
    return(thecall)
  out <- eval(thecall, envir=parent.frame(), enclos=envir)
  #' update name of data
  if(length(jX) == 1) {
    mc <- match.call()
    Xlang <- mc[[2L+jX]]
    out$Xname <- short.deparse(Xlang)
  }
  #'
  return(out)
}
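
## Illustrative sketch (not part of the package code): update() accepts a new
## trend formula, a new point pattern or quadrature scheme, or other kppm()
## arguments. Assumes spatstat is attached and 'redwood' is available.
if(FALSE) {
  fit  <- kppm(redwood ~ 1, clusters="Thomas")
  fit2 <- update(fit, ~ x)                   # change the trend formula
  fit3 <- update(fit, clusters="MatClust")   # refit with a different cluster model
}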

updateData.kppm <- function(model, X, ...) {
  update(model, X=X)
}

unitname.kppm <- unitname.dppm <- function(x) {
  return(unitname(x$X))
}

"unitname<-.kppm" <- "unitname<-.dppm" <- function(x, value) {
  unitname(x$X) <- value
  if(!is.null(x$Fit$mcfit)) {
    unitname(x$Fit$mcfit) <- value
  } else if(is.null(x$Fit)) {
    warning("kppm object in outdated format")
    if(!is.null(x$mcfit))
      unitname(x$mcfit) <- value
  }
  return(x)
}

as.fv.kppm <- as.fv.dppm <- function(x) {
  if(x$Fit$method == "mincon") 
    return(as.fv(x$Fit$mcfit))
  gobs <- if(is.stationary(x)) {
            pcf(x$X, correction="good")
          } else {
            pcfinhom(x$X, lambda=x, correction="good", update=FALSE)
          }
  gname <- attr(gobs, "fname")
  gfit <- (pcfmodel(x))(gobs$r)
  g <- bind.fv(gobs,
               data.frame(fit=gfit),
               labl = makefvlabel(fname=gname, sub="fit"),
               desc = "predicted %s for fitted model")
  return(g)
}
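
## Illustrative sketch (not part of the package code): as.fv() returns a
## summary-function object suitable for plotting, either the minimum contrast
## fit or the empirical and fitted pair correlation assembled above.
## Assumes spatstat is attached and 'redwood' is available.
if(FALSE) {
  fit <- kppm(redwood ~ 1, clusters="Thomas")
  plot(as.fv(fit))
}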

coef.kppm <- coef.dppm <- function(object, ...) {
  return(coef(object$po))
}


Kmodel.kppm <- function(model, ...) {
  Kpcf.kppm(model, what="K")
}

pcfmodel.kppm <- function(model, ...) {
  Kpcf.kppm(model, what="pcf")
}

Kpcf.kppm <- function(model, what=c("K", "pcf", "kernel")) {
  what <- match.arg(what)
  # Extract function definition from internal table
  clusters <- model$clusters
  tableentry <- spatstatClusterModelInfo(clusters)
  if(is.null(tableentry))
    stop(paste("No information available for",
               sQuote(clusters), "cluster model"))
  fun <- tableentry[[what]]
  if(is.null(fun))
    stop(paste("No expression available for", what, "for",
               sQuote(clusters), "cluster model"))
  # Extract model parameters
  par <- model$par
  # Extract covariance model (if applicable)
  cm <- model$covmodel
  model <- cm$model
  margs <- cm$margs
  #
  f <- function(r) as.numeric(fun(par=par, rvals=r,
                                  model=model, margs=margs))
  return(f)
}
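
## Illustrative sketch (not part of the package code): Kmodel() and pcfmodel()
## return ordinary R functions of distance r for the fitted model.
## Assumes spatstat is attached and 'redwood' is available.
if(FALSE) {
  fit <- kppm(redwood ~ 1, clusters="Thomas")
  K <- Kmodel(fit)
  g <- pcfmodel(fit)
  K(0.1)     # fitted K-function at r = 0.1
  g(0.05)    # fitted pair correlation at r = 0.05
}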

is.stationary.kppm <- is.stationary.dppm <- function(x) {
  return(x$stationary)
}

is.poisson.kppm <- function(x) {
  switch(x$clusters,
         Cauchy=,
         VarGamma=,
         Thomas=,
         MatClust={
           # Poisson cluster process
           mu <- x$mu
           return(!is.null(mu) && (max(mu) == 0))
         },
         LGCP = {
           # log-Gaussian Cox process
           sigma2 <- x$par[["sigma2"]]
           return(sigma2 == 0)
         },
         return(FALSE))
}

# extract ppm component

as.ppm.kppm <- as.ppm.dppm <- function(object) {
  object$po
}

# other methods that pass through to 'ppm'

as.owin.kppm <- as.owin.dppm <- function(W, ..., from=c("points", "covariates"), fatal=TRUE) {
  from <- match.arg(from)
  as.owin(as.ppm(W), ..., from=from, fatal=fatal)
}

domain.kppm <- Window.kppm <- domain.dppm <-
  Window.dppm <- function(X, ..., from=c("points", "covariates")) {
  from <- match.arg(from)
  as.owin(X, from=from)
}

model.images.kppm <-
  model.images.dppm <- function(object, W=as.owin(object), ...) {
  model.images(as.ppm(object), W=W, ...)
}

model.matrix.kppm <-
  model.matrix.dppm <- function(object,
                                data=model.frame(object, na.action=NULL), ...,
                                Q=NULL, 
                                keepNA=TRUE) {
  if(missing(data)) data <- NULL
  model.matrix(as.ppm(object), data=data, ..., Q=Q, keepNA=keepNA)
}

model.frame.kppm <- model.frame.dppm <- function(formula, ...) {
  model.frame(as.ppm(formula), ...)
}

logLik.kppm <- logLik.dppm <- function(object, ...) {
  cl <- object$Fit$maxlogcl
  if(is.null(cl))
    stop(paste("logLik is only available for kppm objects fitted with",
               "method='palm' or method='clik2'"),
         call.=FALSE)
  ll <- logLik(as.ppm(object)) # to inherit class and d.f.
  ll[] <- cl
  return(ll)
}
 
AIC.kppm <- AIC.dppm <- function(object, ..., k=2) {
  cl <- logLik(object)
  df <- attr(cl, "df")
  return(- 2 * as.numeric(cl) + k * df)
}

extractAIC.kppm <- extractAIC.dppm <- function (fit, scale = 0, k = 2, ...) {
  cl <- logLik(fit)
  edf <- attr(cl, "df")
  aic <- - 2 * as.numeric(cl) + k * edf
  return(c(edf, aic))
}
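
## Illustrative sketch (not part of the package code): logLik/AIC use the
## maximised composite likelihood, so they are available only for models
## fitted with method="palm" or method="clik2".
## Assumes spatstat is attached and 'redwood' is available.
if(FALSE) {
  fit1 <- kppm(redwood ~ 1, clusters="Thomas", method="palm")
  fit2 <- kppm(redwood ~ x, clusters="Thomas", method="palm")
  AIC(fit1)
  AIC(fit2)   # composite-likelihood AIC; interpret comparisons with caution
}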

nobs.kppm <- nobs.dppm <- function(object, ...) { nobs(as.ppm(object)) }

psib <- function(object) UseMethod("psib")

psib.kppm <- function(object) {
  clus <- object$clusters
  info <- spatstatClusterModelInfo(clus)
  if(!info$isPCP) {
    warning("The model is not a cluster process")
    return(NA)
  }
  g <- pcfmodel(object)
  p <- 1 - 1/g(0)
  return(p)
}
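
## Illustrative sketch (not part of the package code): the sibling probability
## computed above is psib = 1 - 1/g(0), where g is the fitted pair correlation.
## Assumes spatstat is attached and 'redwood' is available.
if(FALSE) {
  fit <- kppm(redwood ~ 1, clusters="Thomas")
  psib(fit)                  # sibling probability
  1 - 1/pcfmodel(fit)(0)     # the same quantity computed directly
}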

reach.kppm <- function(x, ..., epsilon) {
  thresh <- if(missing(epsilon)) NULL else epsilon
  if(x$isPCP) 
    return(2 * clusterradius.kppm(x, ..., thresh=thresh))
  ## use pair correlation
  g <- pcfmodel(x)
  ## find upper bound
  if(is.null(thresh)) thresh <- 0.01
  f <- function(r) { g(r) - 1 - thresh }
  scal <- as.list(x$par)$scale %orifnull% 1
  for(a in scal * 2^(0:10)) { if(f(a) < 0) break; }
  if(f(a) > 0) return(Inf)
  ## solve g(r) = 1 + epsilon
  b <- uniroot(f, c(0, a))$root
  return(b)
}
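
## Illustrative sketch (not part of the package code): reach() gives a
## practical range of dependence; for Poisson cluster models it is twice the
## cluster radius, otherwise the distance at which g(r) - 1 drops below the
## threshold 'epsilon'. Assumes spatstat is attached and 'redwood' is available.
if(FALSE) {
  fit <- kppm(redwood ~ 1, clusters="Thomas")
  reach(fit)
  reach(fit, epsilon=0.001)   # stricter threshold gives a larger reach
}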

is.poissonclusterprocess <- function(model) {
  UseMethod("is.poissonclusterprocess")
}

is.poissonclusterprocess.default <- function(model) { FALSE }
  
is.poissonclusterprocess.kppm <- function(model) { isTRUE(model$isPCP) }
