# This is based on AIC in package stats
# There are AICc functions in other packages, which don't all work with
#   'wiqid' objects.
# stats::AIC.default does work with 'wiqid' objects

AICc <- function(object, ..., nobs, df) UseMethod("AICc")

AICc.default <- function (object, ..., nobs, df) {
  if(!missing(nobs) && length(nobs) > 1)
    stop("'nobs' must have a single value.")
  if (length(list(...))) {
    lls <- lapply(list(object, ...), logLik)
    vals <- sapply(lls, function(el) {
      no <- attr(el, "nobs")
      c(as.numeric(el), attr(el, "df"), if (is.null(no)) NA_integer_ else no)
    })
    # val <- data.frame(df = vals[2L, ], ll = vals[1L, ])
    nos <- na.omit(vals[3L, ])
    if (length(nos) && any(nos != nos[1L]))
      warning("models are not all fitted to the same number of observations")
    # val <- data.frame(df = val$df, AIC = -2 * val$ll + k * val$df)
    if(missing(df)) {
      df <- vals[2L, ]
    } else {
      if(length(df) == 1)
        df <- rep(df, ncol(vals))
      if(any(is.na(df)))
        df[is.na(df)] <- vals[2L, is.na(df)]
    }
    if(missing(nobs))
      nobs <- vals[3L, ]
    val <- data.frame(df = df,
        AICc = -2 * vals[1L, ] + 2 * df * nobs / pmax(0, nobs - df - 1))
    Call <- match.call()
    Call$nobs <- NULL
    Call$df <- NULL
    row.names(val) <- as.character(Call[-1L])
  } else {
    lls <- logLik(object)
    if(missing(df))
      df <- attr(lls, "df")
    if(missing(nobs))
      nobs <- attr(lls, "nobs")
    if(is.null(nobs) || is.na(nobs)) {
      val <- NA_real_
    } else {
      val <- -2 * as.numeric(lls) + 2 * df * nobs / max(0, nobs - df - 1)
    }
  }
  return(val)
}
# ..........................................................................

# Creates a table from output of AIC or AICc
# Exported
AICtable <- function(x, digits=3, sort) {
  if(is.vector(x)) {
    name <- deparse(substitute(x))
    IC <- x
    x <- data.frame(x)
    colnames(x) <- name
  } else if (is.data.frame(x) && ncol(x) > 1 ) {
    IC <- x[, 2]
  } else {
    stop("x must be a vector or a data frame with > 1 column")
  }
  Delta <- IC - min(IC, na.rm=TRUE)
  ModelLik <- exp( - Delta / 2)
  ModelWt <- ModelLik / sum(ModelLik, na.rm=TRUE)
  out <- round(cbind(x, Delta, ModelLik, ModelWt), digits)
  if(missing(sort))
    sort <- !is.null(rownames(out))  # sort if rows are named
  if(sort) {
    ord <- order(IC)
    out <- out[ord, ]
  }
  return(out)
}
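
# A minimal usage sketch (not part of the package source), assuming only
#   base R: fit a few 'lm' models, collect AICc values, then tabulate them
#   with AICtable.
if(FALSE) {
  fits <- list(
    f1 = lm(Fertility ~ Education, data=swiss),
    f2 = lm(Fertility ~ Education + Agriculture, data=swiss),
    f3 = lm(Fertility ~ ., data=swiss))
  ( aicc <- AICc(fits$f1, fits$f2, fits$f3) )  # data frame with df and AICc
  AICtable(aicc)  # adds Delta, model likelihoods and AICc weights, sorted
}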
/scratch/gouwar.j/cran-all/cranData/wiqid/R/AICc.R
# Function to generate formulae for all combinations of covariates.

allCombinations <- function(response="", covars, formulae=TRUE) {
  # response : a character vector of length 1 specifying the response variable
  # covars : a character vector specifying the covariates/predictors
  # formulae : if TRUE, only the formulae are returned; otherwise a TRUE/FALSE matrix
  #   is returned, with the formulae as row names.
  if(!is.character(response) || !is.character(covars))
    stop("'response' and 'covars' must be character vectors")
  if(length(response) > 1)
    stop("Only one response variable possible.")
  if(any(response == covars))
    stop("The response cannot also be a covariate.")
  covars <- unique(covars)
  ncovs <- length(covars)
  tfmat <- matrix(FALSE, 2^ncovs, ncovs)
  for(i in 1:ncovs)
    tfmat[, i] <- rep(c(FALSE, TRUE), each=2^(i-1))
  # Sort (should this be optional?)
  if(ncovs > 1)
    tfmat <- tfmat[order(rowSums(tfmat)), ]
  RHS <- apply(tfmat, 1, function(x) paste(covars[x], collapse=" + "))
  RHS[1] <- "1"
  forms <- paste(response, RHS, sep=" ~ ")
  if(formulae) {
    return(forms)
  } else {
    colnames(tfmat) <- covars
    rownames(tfmat) <- forms
    return(tfmat)
  }
}

if(FALSE) {
  longNames <- colnames(swiss)
  longNames  # Eeek! All too long.
  names(swiss) <- abbreviate(longNames)
  vars <- colnames(swiss)
  vars  # ok
  # Get the formulae for all combinations of covars:
  formulae <- allCombinations(vars[1], vars[-1])
  formulae
  class(formulae)  # actually character, but coerced to formula as needed.
  # Run all the models with 'lm', put results into a list:
  # lms <- lapply(formulae, lm, data=swiss)  # This works, but the call is a mess!
  lms <- vector('list', 32)
  for(i in 1:32)
    lms[[i]] <- lm(formulae[i], data=swiss)
  names(lms) <- formulae  # uses fact that 'formulae' is actually character.
  # Extract AICs and look at top model:
  AICs <- sapply(lms, AIC)
  sort(AICs)
  lms[[which.min(AICs)]]
  # Do a nice table of results:
  DeltaAIC <- AICs - min(AICs)
  AICllh <- exp(-DeltaAIC/2)
  AICwt <- AICllh / sum(AICllh)
  order <- order(AICs)
  round(cbind(AIC=AICs, DeltaAIC, AICllh, AICwt)[order, ], 3)
  # Get AIC weights for each of the covars:
  is.in <- allCombinations(vars[1], vars[-1], form=FALSE)
  is.in  # shows which covars are in each model
  covarWts <- AICwt %*% is.in
  round(sort(covarWts[1, ], dec=TRUE), 3)
  # the [1, ] is needed because %*% returns a 1-row matrix; 'sort' will coerce
  #   that to a vector but strips out the names in the process.
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/AllCombinations.R
# Simulated draws from posterior of a binomial likelihood with beta prior
# =======================================================================
# The beta prior is specified by mode and concentration.
# Bbinom is retained for backward compatibility but is deprecated.

Bbinomial <- function(y, n, priors=NULL, draws=100000, ...) {
  if(!is.null(list(...)$sample)) {
    message("*The 'sample' argument is deprecated, please use 'draws'.*")
    draws <- list(...)$sample
  }
  if(!is.null(priors$conc) && priors$conc < 2)
    stop("priors$conc must not be less than 2.")
  if(!is.null(priors$mode) && (priors$mode < 0 || priors$mode > 1 ))
    stop("priors$mode must be between 0 and 1.")
  if(y > n)
    stop("Number of successes (y) cannot be greater than the number of trials (n).")

  if(!is.null(priors$conc) && !is.null(priors$mode)) {
    pr1 <- priors$mode * (priors$conc - 2) + 1
    pr2 <- (1 - priors$mode) * (priors$conc - 2) + 1
  } else {
    pr1 <- pr2 <- 1
  }
  po1 <- pr1 + y
  po2 <- pr2 + n - y
  post <- rbeta(draws, po1, po2)

  out <- mcmcOutput(data.frame(p = post),
      header = "Values drawn from beta posterior distribution")
  attr(out, "call") <- match.call()
  return(out)
}
# ..............................................................

Bbinom <- function(...) {
  warning("[Bbinom] is deprecated; please use [Bbinomial] instead.", call. = FALSE)
  Bbinomial(...)
}
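
# A minimal usage sketch (not part of the package source): 7 successes in
#   20 trials with a Beta(mode=0.4, concentration=10) prior. Assumes the
#   'mcmcOutput' package used above is available.
if(FALSE) {
  out <- Bbinomial(y=7, n=20, priors=list(mode=0.4, conc=10), draws=1e5)
  summary(out)  # posterior mean, median, HDI for p
  # The posterior here is Beta(4.2+7, 5.8+13) = Beta(11.2, 18.8); check:
  mean(as.matrix(out)[, "p"])   # should be close to 11.2 / 30 = 0.373
}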
/scratch/gouwar.j/cran-all/cranData/wiqid/R/Bbinomial.R
# Wrappers for dbeta, pbeta, etc which use mean and sd OR mode and concentration.

# Using mean and sd
# =================
getBeta2Par <- function(mean, sd) {
  if(any(mean <= 0 | mean >= 1))
    stop("'mean' must be between 0 and 1.")
  if(any(sd <= 0))
    stop("'sd' must be greater than 0.")
  nu <- mean * (1-mean) / sd^2 - 1
  if(any(nu <= 0)) {
    warning("sd is too large; some shape parameters will be NA.", call.=FALSE)
    nu[nu <= 0] <- NA
  }
  alpha <- mean * nu
  beta <- (1-mean) * nu
  cbind(shape1=alpha, shape2=beta)
}

dbeta2 <- function(x, mean, sd) {
  shapes <- getBeta2Par(mean, sd)
  return(dbeta(x, shapes[,1], shapes[,2]))
}

pbeta2 <- function(q, mean, sd, lower.tail=TRUE, log.p=FALSE) {
  shapes <- getBeta2Par(mean, sd)
  return(pbeta(q, shapes[,1], shapes[,2], lower.tail=lower.tail, log.p=log.p))
}

qbeta2 <- function(p, mean, sd, lower.tail=TRUE, log.p=FALSE) {
  shapes <- getBeta2Par(mean, sd)
  return(qbeta(p, shapes[,1], shapes[,2], lower.tail=lower.tail, log.p=log.p))
}

rbeta2 <- function(n, mean, sd) {
  shapes <- getBeta2Par(mean, sd)
  return(rbeta(n, shapes[,1], shapes[,2]))
}

# Using mode and concentration
# ============================
getBeta3Par <- function(mode, concentration) {
  if(any(mode < 0 | mode > 1))
    stop("'mode' must be between 0 and 1.")
  if(any(concentration < 2))
    stop("'concentration' must be 2 or more.")
  alpha <- mode * (concentration - 2) + 1
  beta <- (1 - mode) * (concentration - 2) + 1
  cbind(shape1=alpha, shape2=beta)
}

dbeta3 <- function(x, mode, concentration) {
  shapes <- getBeta3Par(mode, concentration)
  return(dbeta(x, shapes[,1], shapes[,2]))
}

pbeta3 <- function(q, mode, concentration, lower.tail=TRUE, log.p=FALSE) {
  shapes <- getBeta3Par(mode, concentration)
  return(pbeta(q, shapes[,1], shapes[,2], lower.tail=lower.tail, log.p=log.p))
}

qbeta3 <- function(p, mode, concentration, lower.tail=TRUE, log.p=FALSE) {
  shapes <- getBeta3Par(mode, concentration)
  return(qbeta(p, shapes[,1], shapes[,2], lower.tail=lower.tail, log.p=log.p))
}

rbeta3 <- function(n, mode, concentration) {
  shapes <- getBeta3Par(mode, concentration)
  return(rbeta(n, shapes[,1], shapes[,2]))
}
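
# A minimal usage sketch (not part of the package source): both
#   parameterisations reduce to the usual shape1/shape2 parameters.
if(FALSE) {
  getBeta2Par(mean=0.4, sd=0.1)            # shape1 = 9.2, shape2 = 13.8
  getBeta3Par(mode=0.4, concentration=25)  # shape1 = 10.2, shape2 = 14.8
  curve(dbeta2(x, mean=0.4, sd=0.1), 0, 1)
  curve(dbeta3(x, mode=0.4, concentration=25), 0, 1, add=TRUE, lty=2)
}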
/scratch/gouwar.j/cran-all/cranData/wiqid/R/BetaDist.R
# Bayesian modelling of normal distribution with Gibbs sampler
# ============================================================
# This version allows for a gamma prior for the precision, tau,
#   though the default is an improper flat prior.
# The gamma prior is specified by shape and rate.

Bnormal <- function(y, priors=NULL, chains=3, draws=10000, burnin=100, ...) {
  if(!is.null(list(...)$sample)) {
    message("* The 'sample' argument is deprecated, please use 'draws'. *")
    draws <- list(...)$sample
  }
  startTime <- Sys.time()
  # Data summaries
  if(!is.numeric(y))
    stop("'y' must be a numeric vector.")
  n <- length(y)
  if(n < 2)
    stop("'y' must contain at least 2 values")
  y.bar <- mean(y)

  m0 <- t0 <- a <- b <- 0
  if(!is.null(priors$muMean) && !is.null(priors$muSD)) {
    m0 <- priors$muMean
    t0 <- 1/(priors$muSD)^2
    if(y.bar > priors$muMean + priors$muSD || y.bar < priors$muMean - priors$muSD)
      warning("Sample mean is outside the prior range muMean \u00B1 muSD.")
  }
  if(!is.null(priors$tauShape) && !is.null(priors$tauRate)) {
    a <- priors$tauShape
    b <- priors$tauRate
  }
  aBit <- a + n / 2  # This doesn't change

  # Objects to hold results
  n.iter <- draws + burnin
  chainList <- vector('list', chains)
  chain <- matrix(nrow=n.iter, ncol=2)  # will hold output
  colnames(chain) <- c("mu", "sigma")

  for(ch in 1:chains) {
    tau <- 1  # starting values
    for (t in 1:n.iter){
      # Draw mu from conjugate posterior with known sigma
      v <- 1 / (tau * n + t0)
      m <- v * (tau * sum(y) + t0 * m0)
      mu <- rnorm(1, m, sqrt(v))
      # Draw tau from conjugate posterior with known mean
      tau <- rgamma(1, aBit, b + sum((y - mu)^2)/2)
      chain[t, ] <- c(mu, sqrt(1/tau))
    }
    chainList[[ch]] <- chain[(burnin+1):n.iter, ]
  }
  MCMC <- do.call(rbind, chainList)

  out <- mcmcOutput(MCMC, header = "Model fitted in R with a Gibbs sampler")
  attr(out, "call") <- match.call()
  attr(out, "nChains") <- chains
  attr(out, "timeTaken") <- unclass(difftime(Sys.time(), startTime, units="secs"))
  return(out)
}
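
# A minimal usage sketch (not part of the package source): draw from the
#   posterior for mu and sigma of a small sample with the default flat
#   priors; summary/plot methods come from the 'mcmcOutput' package.
if(FALSE) {
  y <- rnorm(25, mean=10, sd=2)
  out <- Bnormal(y, chains=3, draws=10000)
  summary(out)  # posterior summaries for mu and sigma
}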
/scratch/gouwar.j/cran-all/cranData/wiqid/R/Bnormal.R
# Bayesian modelling of normal distribution with JAGS
# ===================================================
# This version allows for a gamma prior for the scale, sigma,
#   though the default is a uniform prior over a wide range.
# The gamma prior is specified by mode and SD.
# Similar to the one-sample version of BEST::BESTmcmc
#   but with normal instead of t-distribution.

Bnormal2 <- function(y, priors=NULL,
                    chains=3, draws=3e4, burnin=0, thin=1, adapt=1000,
                    doPriorsOnly=FALSE, parallel=NULL, seed=NULL, ...) {
  if(!is.null(list(...)$sample)) {
    message("* The 'sample' argument is deprecated, please use 'draws'. *")
    draws <- list(...)$sample
  }
  startTime <- Sys.time()
  if(doPriorsOnly)
    warning("The output shows the prior distributions, NOT the posterior distributions for your data.")
  # Data checks
  if(!all(is.finite(y)))
    stop("The input data include NA or Inf.")
  if(length(unique(y)) < 2 &&  # sd(y) will be 0 or NA; ok if priors specified.
      (is.null(priors) || is.null(priors$muSD) ||
        is.null(priors$sigmaMode) || is.null(priors$sigmaSD)))
    stop("If priors are not specified, data must include at least 2 (non-equal) values.")
  # Prior checks:
  if(!is.null(priors)) {
    if(!is.list(priors)) {
      stop("'priors' must be a list (or NULL).")
    }
    nameOK <- names(priors) %in% c("muMean", "muSD", "sigmaMode", "sigmaSD")
    if(!all(nameOK))
      stop("Invalid items in prior specification: ",
          paste(sQuote(names(priors)[!nameOK]), collapse=", "))
    if(!all(sapply(priors, is.numeric)))
      stop("All items in 'priors' must be numeric.")
    if(!is.null(priors$muSD) && priors$muSD <= 0)
      stop("muSD must be > 0")
  }

  # THE PRIORS
  if(is.null(priors$muMean))
    priors$muMean <- mean(y)
  if(is.null(priors$muSD))
    priors$muSD <- sd(y) * 1000
  if(mean(y) > priors$muMean + priors$muSD || mean(y) < priors$muMean - priors$muSD)
    warning("Sample mean is outside the prior range muMean \u00B1 muSD.")
  priors$muP <- 1/priors$muSD^2  # convert to precision
  priors$muSD <- NULL            # and remove SD
  useUniformPrior <- is.null(priors$sigmaMode) || is.null(priors$sigmaSD)
  if(useUniformPrior) {
    priors$sigmaLo <- sd(y) / 1000
    priors$sigmaHi <- sd(y) * 1000
  } else {
    # Convert to Shape/Rate
    rate <- (priors$sigmaMode + sqrt(priors$sigmaMode^2 + 4 * priors$sigmaSD^2)) /
        (2 * priors$sigmaSD^2)
    shape <- 1 + priors$sigmaMode * rate
    priors$Sh <- shape
    priors$Ra <- rate
  }
  priors$sigmaMode <- NULL
  priors$sigmaSD <- NULL

  # THE MODEL.
  modelFile <- file.path(tempdir(), "BESTmodel.txt")
  if(useUniformPrior) {
    modelString = "
    model {
      for ( i in 1:Ntotal ) {
        y[i] ~ dnorm(mu, tau)
      }
      mu ~ dnorm(muMean, muP)
      tau <- pow(sigma, -2)
      sigma ~ dunif(sigmaLo, sigmaHi)
    }
    " # close quote for modelString
  } else {  # use gamma priors
    modelString = "
    model {
      for ( i in 1:Ntotal ) {
        y[i] ~ dnorm(mu, tau)
      }
      mu ~ dnorm(muMean, muP)
      tau <- pow(sigma, -2)
      sigma ~ dgamma(Sh, Ra)
    }
    " # close quote for modelString
  }
  # Write out modelString to a text file
  writeLines( modelString , con=modelFile )

  # THE DATA.
  # add priors and data to dataForJAGS:
  dataForJAGS <- priors
  if(!doPriorsOnly)
    dataForJAGS$y <- y
  dataForJAGS$Ntotal <- length(y)

  # INITIALIZE THE CHAINS.
  # Initial values of MCMC chains based on data:
  inits <- function() list(mu=mean(y), sigma=sd(y))

  # RUN THE CHAINS
  codaSamples <- justRunJags(dataForJAGS, inits, c("mu", "sigma"), modelFile,
      chains, draws, burnin, thin, adapt,
      modules = c("glm"), parallel = parallel, seed=seed)

  out <- mcmcOutput(codaSamples,
      header = "Model fitted in JAGS with 'rjags' functions")
  attr(out, "call") <- match.call()
  attr(out, "doPriorsOnly") <- doPriorsOnly
  if(!is.null(priors))
    attr(out, "priors") <- priors
  attr(out, "nChains") <- chains
  attr(out, "timeTaken") <- unclass(difftime(Sys.time(), startTime, units="secs"))
  return(out)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/Bnormal_2.R
# Single season occupancy with site and survey covariates.
# Bayesian version, using Dorazio & Rodriguez (2011) algorithm

BoccSS <- function(DH, model=NULL, data=NULL, priors=list(),
                    chains=3, draws=30000, burnin=1000, thin=1, parallel,
                    seed=NULL, doWAIC=FALSE, ...) {
  # single-season occupancy models with site and survey covariates
  # ** DH is detection data in a 1/0/NA matrix or data frame, sites in rows,
  #    detection occasions in columns.
  # ** model is a list of 2-sided formulae for psi and p; can also be a single
  #    2-sided formula, eg, model = psi ~ habitat.
  # ** data is a DATA FRAME with single columns for site covariates and a column
  #    for each survey occasion for each survey covariate.
  # ** priors is a list with elements for prior mean and variance for coefficients.

  startTime <- Sys.time()
  if(!is.null(list(...)$sample)) {
    message("* The 'sample' argument is deprecated, please use 'draws'. *")
    draws <- list(...)$sample
  }
  # Check DH:
  tst <- try(range(DH, na.rm=TRUE), silent=TRUE)
  if(inherits(tst, "try-error") || tst[1] < 0 || tst[2] > 1)
    stop("DH is not a valid detection history matrix (or data frame).")
  # Deal with parallel (order of the if statements is important!)
  if(chains == 1)
    parallel <- FALSE
  if(missing(parallel))
    parallel <- chains < detectCores()
  if(parallel) {
    coresToUse <- min(chains, detectCores() - 1)
    if(coresToUse < 2) {
      warning("Multiple cores not available; running chains sequentially.")
      parallel <- FALSE
    }
  }
  if(parallel) {
    if(chains > coresToUse)
      warning(paste("Running", chains, "chains on", coresToUse, "cores."))
    cl <- makeCluster(coresToUse)
    on.exit(stopCluster(cl))
  }

  # Standardise the model:
  model <- stdModel(model, list(psi=~1, p=~1))

  # Summarize detection history
  site.names <- rownames(DH)
  DH <- as.matrix(DH)
  nSites <- nrow(DH)
  nSurv <- ncol(DH)
  if (nSurv < 2)
    stop("More than one survey occasion is needed")
  if(is.null(site.names))
    site.names <- 1:nSites
  # Convert the covariate data frame into a list
  dataList <- stddata(data, nSurv, scaleBy = 1)
  time <- rep(1:nSurv, each=nSites)
  dataList$.Time <- standardize(time)
  dataList$.Time2 <- dataList$.Time^2
  dataList$.Time3 <- dataList$.Time^3
  dataList$.time <- as.factor(time)
  before <- cbind(0L, DH[, 1:(nSurv - 1)])  # 1 if species seen on previous occasion
  dataList$.b <- as.vector(before)

  survey.done <- !is.na(as.vector(DH))
  DHvec <- as.vector(DH)[survey.done]
  siteID <- row(DH)[survey.done]   # need both numeric vector
  siteIDfac <- as.factor(siteID)   #   and factor (for tapply)
  # survID <- col(DH)[survey.done]

  psiDf <- selectCovars(model$psi, dataList, nSites)
  if (nrow(psiDf) != nSites)
    stop("Number of site covars doesn't match sites.\nAre you using survey covars?")
  psiModMat <- modelMatrix(model$psi, psiDf)
  if(nrow(psiModMat) != nrow(psiDf))
    stop("Missing site covariates are not allowed.")
  psiK <- ncol(psiModMat)
  pDf0 <- selectCovars(model$p, dataList, nSites*nSurv)
  pDf <- pDf0[survey.done, , drop=FALSE]
  pModMat <- modelMatrix(model$p, pDf)
  if(nrow(pModMat) != nrow(pDf))
    stop("Missing survey covariates are not allowed when a survey was done.")
  pK <- ncol(pModMat)
  K <- psiK + pK

  # Organise and check priors
  if(!is.null(priors)) {
    priorErrorFlag <- FALSE
    priorsDefault <- list(muPsi=0, sigmaPsi=1, muP=0, sigmaP=1)
    priors <- replace (priorsDefault, names(priors), priors)
    ### TODO ### check for NAs and sigma <= 0
    muPsi <- priors$muPsi
    if(length(muPsi) == 1)
      muPsi <- rep(muPsi, psiK)
    if(length(muPsi) != psiK) {
      message("Wrong length for priors$muPsi, should have values for:")
      message(paste(colnames(psiModMat), collapse=" "))
      priorErrorFlag <- TRUE
    }
    sigmaPsi <- priors$sigmaPsi
    if(!is.matrix(sigmaPsi)) {
      if(length(sigmaPsi) == 1)
        sigmaPsi <- rep(sigmaPsi, psiK)
      sigmaPsi <- diag(sigmaPsi, nrow=psiK)
    }
    if(ncol(sigmaPsi) != psiK || nrow(sigmaPsi) != psiK) {
      message("Wrong dimensions for priors$sigmaPsi, should have values for:")
      message(paste(colnames(psiModMat), collapse=" "))
      priorErrorFlag <- TRUE
    }
    muP <- priors$muP
    if(length(muP) == 1)
      muP <- rep(muP, pK)
    if(length(muP) != pK) {
      message("Wrong length for priors$muP, should have values for:")
      message(paste(colnames(pModMat), collapse=" "))
      priorErrorFlag <- TRUE
    }
    sigmaP <- priors$sigmaP
    if(!is.matrix(sigmaP)) {
      if(length(sigmaP) == 1)
        sigmaP <- rep(sigmaP, pK)
      sigmaP <- diag(sigmaP, nrow=pK)
    }
    if(ncol(sigmaP) != pK || nrow(sigmaP) != pK) {
      message("Wrong dimensions for priors$sigmaP, should have values for:")
      message(paste(colnames(pModMat), collapse=" "))
      priorErrorFlag <- TRUE
    }
    if(priorErrorFlag)
      stop("Invalid prior specification")
  }

  # Run MLE to get starting values
  # Negative log likelihood function
  nll <- function(param){
    psiBeta <- param[1:psiK]
    pBeta <- param[(psiK+1):K]
    psiProb <- as.vector(pnorm(psiModMat %*% psiBeta))
    pProb <- pnorm(pModMat %*% pBeta)
    Lik1 <- DHvec*pProb + (1-DHvec) * (1-pProb)
    Lik2 <- tapply(Lik1, siteIDfac, prod)
    llh <- sum(log(psiProb * Lik2 +
        (1 - psiProb) * (rowSums(DH, na.rm=TRUE) == 0)))
    return(min(-llh, .Machine$double.xmax))
  }

  # Run mle estimation with nlm:
  param <- rep(0, K)
  mle <- nlm(nll, param)$estimate

  # Gibbs sampler variables
  XprimeX <- t(psiModMat) %*% psiModMat
  if(is.null(priors)) {
    V.beta <- chol2inv(chol(XprimeX))
    ScaledMuPsi <- 0
    SigmaInvP <- 0
    ScaledMuP <- 0
  } else {
    SigmaInvPsi = chol2inv(chol(sigmaPsi))
    V.beta = chol2inv(chol(SigmaInvPsi + XprimeX))  # prior here
    ScaledMuPsi = SigmaInvPsi %*% muPsi
    SigmaInvP = chol2inv(chol(sigmaP))
    ScaledMuP = SigmaInvP %*% muP
  }

  # Starting values - use MLEs
  set.seed(seed)
  starters <- vector('list', chains)
  for(i in 1:chains)
    starters[[i]] <- mle * runif(K, 0.95, 1.05)
  #####
  y <- rowSums(DH, na.rm=TRUE)
  z <- as.integer(y > 0)  ## (Starting value for) occupancy state
  n.iter <- ceiling(draws / chains) * thin + burnin
  message("Starting MCMC run for ", chains, " chains with ", n.iter, " iterations.")
  flush.console()

  # Function to do 1 chain
  run1chain <- function(start) {
    beta <- matrix(start[1:psiK], ncol=1)  # why matrix?
    alpha <- matrix(start[-(1:psiK)], ncol=1)
    if(doWAIC) {
      chain <- matrix(nrow=n.iter, ncol=K+nSites)  # to hold MCMC chains
      colnames(chain) <- c(paste("psi", colnames(psiModMat), sep="_"),
          paste("p", colnames(pModMat), sep="_"), site.names)
    } else {
      chain <- matrix(nrow=n.iter, ncol=K)  # to hold MCMC chains
      colnames(chain) <- c(paste("psi", colnames(psiModMat), sep="_"),
          paste("p", colnames(pModMat), sep="_"))
    }
    v <- rep(NA, psiK)

    for (draw in 1:n.iter) {
      # draw z for sites with y = 0, using conditional (on data) prob of occupancy
      psi <- as.vector(pnorm(psiModMat %*% beta))
      q <- 1 - as.vector(pnorm(pModMat %*% alpha))
      pMissed <- tapply(q, siteIDfac, prod)
      z.prob <- pmin(psi * pMissed / (psi * pMissed + 1 - psi), 1)
        # if psi close to 1, z.prob comes out to > 1.
      z <- ifelse(y, 1, rbinom(length(y), size=1, prob=z.prob))
      present <- z == 1

      # draw v and beta ## coefficients for psi
      vmean <- as.vector(psiModMat %*% beta)  # same as 'psi' above
      v[present] <- truncnorm::rtruncnorm(sum(present), a=0,
          mean=vmean[present], sd=1)
      if(!all(present))
        v[!present] <- truncnorm::rtruncnorm(sum(!present), b=0,
            mean=vmean[!present], sd=1)
      betaMean = V.beta %*% (ScaledMuPsi + (t(psiModMat) %*% v) )  # prior here
      beta <- matrix(MASS::mvrnorm(1, betaMean, V.beta), ncol=1)

      # draw u and alpha ## coefficients for p
      ## This section ONLY needs data from occupied sites, ie, z == 1, present == TRUE
      needed <- present[siteID]
      DHneeded <- DHvec[needed]
      Wmat <- pModMat[needed, , drop=FALSE]
      umean <- as.vector(Wmat %*% alpha)
      u <- rep(NA, nrow(Wmat))
      ind.y <- DHneeded == 1
      u[ind.y] <- truncnorm::rtruncnorm(sum(ind.y), a=0, mean=umean[ind.y], sd=1)
      u[!ind.y] <- truncnorm::rtruncnorm(sum(!ind.y), b=0, mean=umean[!ind.y], sd=1)
      WprimeW <- t(Wmat) %*% Wmat
      V.alpha = chol2inv(chol(SigmaInvP + WprimeW))          # prior here
      alphaMean = V.alpha %*% (ScaledMuP + (t(Wmat) %*% u) )  # prior here
      alpha <- matrix(MASS::mvrnorm(1, alphaMean, V.alpha), ncol=1)

      # Calculate ppd for each site
      if(doWAIC) {
        p.vec <- as.vector(pnorm(pModMat %*% alpha))
        lik1 <- dbinom(DHvec, 1, p.vec)
        lik2 <- tapply(lik1, siteIDfac, prod)
        ppd <- psi * lik2 + (1 - psi) * (y == 0)
        chain[draw, ] <- c(beta, alpha, ppd)
      } else {
        chain[draw, ] <- c(beta, alpha)
      }
    }
    return(coda::mcmc(chain[(burnin+1):n.iter, ], thin=thin))
  }

  if(parallel) {
    clusterExport(cl, c("K", "psiK", "n.iter", "burnin", "thin",
        "psiModMat", "pModMat", "siteID", "siteIDfac", "y", "DHvec",
        "ScaledMuPsi", "V.beta", "SigmaInvP", "ScaledMuP", "doWAIC"),
        envir=environment())
    clusterSetRNGStream(cl, seed)
    chainList <- parLapply(cl, starters, run1chain)
  } else {
    chainList <- lapply(starters, run1chain)
  }
  message("MCMC run complete.")
  MCMC <- mcmc.list(chainList)
  if(doWAIC) {
    ppd <- as.matrix(MCMC[, -(1:K)])
    MCMC <- MCMC[, 1:K]
    lppd <- log(ppd)
    tmp.sum <- -2 * sum(log(colMeans(ppd)))  # first term of eqn 45
    pD <- sum(apply(lppd, 2, var))           # eqn 44
    WAIC <- tmp.sum + 2 * pD
  }
  out <- mcmcOutput(MCMC, header = "Model fitted in R with a Gibbs sampler")
  attr(out, "call") <- match.call()
  if(doWAIC)
    attr(out, "WAIC") <- c(WAIC = WAIC, pD = pD)
  attr(out, "timeTaken") <- unclass(difftime(Sys.time(), startTime, units="secs"))
  return(out)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/BoccSS.R
# Bayes version of single-season occupancy model with no covariates
# This version uses a Gibbs sampler coded in R

BoccSS0 <- function(y, n, psiPrior=c(1,1), pPrior=c(1,1),
                    chains=3, draws=30000, burnin=100, ...) {
  startTime <- Sys.time()
  if(!is.null(list(...)$sample)) {
    message("* The 'sample' argument is deprecated, please use 'draws'. *")
    draws <- list(...)$sample
  }
  if(!is.null(dim(y)) && dim(y)[2] > 1) {  # detection history
    n <- rowSums(!is.na(y))
    y <- rowSums(y, na.rm=TRUE)
  }
  nSites <- length(y)
  if(length(n) == 1)
    n <- rep(n, nSites)
  if(length(n) != length(y))
    stop("Lengths of 'y' and 'n' must be equal.")
  if(any(y > n))
    stop("No value of 'y' can be greater than the corresponding 'n'.")
  known <- y > 0  # sites known to be occupied
  detected <- sum(y)

  n.iter <- ceiling(draws / chains) + burnin
  chain <- matrix(nrow=n.iter, ncol=2)
  colnames(chain) <- c("psi", "p")
  chain[1, ] <- rep(0.5, 2)
  chainList <- vector('list', chains)

  for(ch in 1:chains) {
    for(i in 2:n.iter) {
      # 1. Calculate prob(occupied | y = 0), draw new z vector
      psi.y0 <- (chain[i-1,1] * (1 - chain[i-1,2])^n) /
          (chain[i-1,1] * (1 - chain[i-1,2])^n + (1 - chain[i-1,1]))
      z <- ifelse(known, 1, rbinom(nSites, 1, psi.y0))
      # 2. Update psi from beta(occupied + prior, unoccupied + prior)
      chain[i,1] <- rbeta(1, sum(z) + psiPrior[1], sum(z == 0) + psiPrior[2])
      # 3. Update p from beta(detected + prior, undetected + prior) for occupied sites only.
      chain[i,2] <- rbeta(1, detected + pPrior[1],
          sum(n[z == 1]) - detected + pPrior[2])
    }
    chainList[[ch]] <- mcmc(chain[(burnin+1):n.iter, ])
  }
  # Diagnostics
  MCMC <- mcmc.list(chainList)
  out <- mcmcOutput(MCMC, header = "Model fitted in R with a Gibbs sampler")
  attr(out, "call") <- match.call()
  attr(out, "timeTaken") <- unclass(difftime(Sys.time(), startTime, units="secs"))
  return(out)
}
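
# A minimal usage sketch (not part of the package source): simulate
#   detection histories for 100 sites x 3 occasions with psi = 0.6 and
#   p = 0.4, then fit. Assumes the package (with its 'coda' and
#   'mcmcOutput' dependencies) is loaded.
if(FALSE) {
  set.seed(42)
  z <- rbinom(100, 1, 0.6)                       # true occupancy states
  DH <- matrix(rbinom(300, 1, 0.4 * z), 100, 3)  # detections at occupied sites
  out <- BoccSS0(DH, chains=3, draws=30000)
  summary(out)  # posterior summaries for psi and p
}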
/scratch/gouwar.j/cran-all/cranData/wiqid/R/BoccSS0.R
# Simulated draws from posterior of a Poisson likelihood with gamma prior
# =======================================================================
# The gamma prior is specified by mode and SD.
# y is total count, n is the sample size, eg, y=10 ticks on n=6 rats.

Bpoisson <- function(y, n, priors=NULL, draws=10000, ...) {
  if(!is.null(list(...)$sample)) {
    message("*The 'sample' argument is deprecated, please use 'draws'.*")
    draws <- list(...)$sample
  }
  if(!is.null(priors$mode) && priors$mode <= 0)
    stop("priors$mode must be greater than 0.")
  if(!is.null(priors$sd) && (priors$sd <= 0 ))
    stop("priors$sd must be greater than 0.")

  if(is.null(priors$mode) || is.null(priors$sd)) {
    pr_shape <- 1
    pr_rate <- 0
  } else {
    # Convert mode/SD to shape/rate (same conversion as in Bnormal2):
    pr_rate <- with(priors, (mode + sqrt(mode^2 + 4 * sd^2)) / (2 * sd^2) )
    pr_shape <- 1 + priors$mode * pr_rate
  }
  po_shape <- pr_shape + y
  po_rate <- pr_rate + n
  post <- rgamma(draws, po_shape, po_rate)

  out <- mcmcOutput(data.frame(lambda = post),
      header = "Values drawn from gamma posterior distribution")
  attr(out, "call") <- match.call()
  return(out)
}
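
# A minimal usage sketch (not part of the package source): 10 ticks counted
#   on 6 rats, with a Gamma(mode=1, sd=2) prior for the rate lambda, plus a
#   check of the mode/SD -> shape/rate conversion used above.
if(FALSE) {
  out <- Bpoisson(y=10, n=6, priors=list(mode=1, sd=2), draws=1e4)
  summary(out)
  # For Gamma(shape, rate): mode = (shape-1)/rate, sd = sqrt(shape)/rate.
  rate <- (1 + sqrt(1 + 4*4)) / (2*4)  # ~0.64
  shape <- 1 + 1 * rate                # ~1.64
  c(mode=(shape-1)/rate, sd=sqrt(shape)/rate)  # recovers 1 and 2
}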
/scratch/gouwar.j/cran-all/cranData/wiqid/R/Bpoisson.R
# Bayesian version of secr to work with stoats data

Bsecr0 <- function(capthist, buffer = 100, start=NULL, nAug = NA, maxSig = 2*buffer,
                    chains=3, draws=1e4, burnin=0, thin=1, adapt=1000,
                    priorOnly=FALSE, parallel=NULL, seed=NULL, ...) {
  if(!is.null(list(...)$sample)) {
    message("*The 'sample' argument is deprecated, please use 'draws'.*")
    draws <- list(...)$sample
  }
  if(!inherits(capthist, "capthist"))
    stop("'capthist' is not a valid capthist object.")
  if(length(dim(capthist)) < 3)
    stop("'capthist' object not compatible with secr v.3, please upgrade.")
  startTime <- Sys.time()
  if (priorOnly)
    warning("The prior distributions will be produced, not the posterior distributions!")

  traps <- secr::traps(capthist)
  J <- nrow(traps)
  xl <- min(traps$x) - buffer
  xu <- max(traps$x) + buffer
  yl <- min(traps$y) - buffer
  yu <- max(traps$y) + buffer
  A <- (xu-xl)*(yu-yl) / 1e4  # ha

  # Get starting values, etc from secr.fit
  if(!is.null(start) && inherits(start, "secr")) {
    mle.res <- predict(start)[, 2]
  } else {
    mask <- secr::make.mask(traps, buffer)
    mle.res <- unlist(secr::autoini(capthist, mask))
  }
  if(is.na(nAug))
    nAug <- ceiling(3 * mle.res[1] * A)
  psistart <- (mle.res[1] * A) / nAug

  # Convert capture histories into an Animals x Traps matrix
  nInd <- dim(capthist)[1]
  nOcc <- dim(capthist)[2]
  yMat <- matrix(0, nAug, J)
  yMat[1:nInd,] <- apply(capthist, c(1,3), sum)
  # Get initial locations of animals
  SX <- SY <- numeric(nInd)
  for(i in 1:nInd) {
    where <- colMeans(traps[which(yMat[i, ] > 0), , drop=FALSE])
    SX[i] <- where[1]
    SY[i] <- where[2]
  }

  # Define the model
  modelFile <- file.path(tempdir(), "JAGSmodel.txt")
  modeltext <- "
  model {
    sigma ~ dunif(0, maxSig)  # need to set good max
    sigma2 <- 2*sigma^2
    g0 ~ dunif(0, 1)
    psi ~ dunif(0, 1)
    for (i in 1:M){          # loop through the augmented population
      z[i] ~ dbern(psi)      # state of individual i (real or imaginary)
      SX[i] ~ dunif(xl, xu)  # priors for the activity centre for each individual
      SY[i] ~ dunif(yl, yu)  # xl, yl = lower coordinate; xu, yu = upper value
      for(j in 1:J) {        # loop through the J detector locations
        Dsq[i,j] <- pow(SX[i]-trapmat[j,1], 2) + pow(SY[i]-trapmat[j,2],2)
        g[i,j] <- z[i] * g0 * exp(-Dsq[i,j]/sigma2)
        y[i,j] ~ dbin(g[i,j], K)
      }
    }
    N <- sum(z[1:M])  # derive number (check against M)
    D <- N / A        # derive density
  }
  "
  writeLines(modeltext, con=modelFile)

  # organise the data:
  jagsData <- list(M = nAug, xl=xl, xu=xu, yl=yl, yu=yu, J=J,
      trapmat=as.matrix(traps), K=nOcc, A = A, maxSig = maxSig)
  if (!priorOnly)
    jagsData$y <- yMat

  inits <- function() {list(z=rep(1, nAug),
      SX=c(SX, runif(nAug-nInd, xl, xu)),
      SY=c(SY, runif(nAug-nInd, yl, yu)),
      sigma=mle.res[3], g0=mle.res[2], psi=psistart)}

  wanted <- c("D", "g0", "sigma", "SX", "SY", "z")

  # Run the model:
  resB <- justRunJags(jagsData, inits, wanted, modelFile,
      chains, draws, burnin, thin, adapt,
      modules = c("glm"), parallel = parallel, seed=seed)

  resMat <- as.matrix(resB)
  forB <- colnames(resMat) == "D" | colnames(resMat) == "g0" |
      colnames(resMat) == "sigma"
  AC0 <- resMat[, !forB][, 1:(nAug*2)]
  dim(AC0) <- c(dim(AC0)[1], nAug, 2)
  w <- resMat[, !forB][, nAug*2 + 1:nAug]
  w[w==0] <- NA
  AC <- sweep(AC0, 1:2, w, "*")
  animalIDs <- sprintf("id%03d", 1:nAug)
  aid <- dimnames(capthist)[[1]]
  if(!is.null(aid))
    animalIDs[1:length(aid)] <- aid
  dimnames(AC) <- list(NULL, animalIDs, c("x", "y"))

  out <- mcmcOutput(resMat[, forB], nChains=chains,
      header = "Model fitted in JAGS with 'rjags' functions")
  attr(out, "ACs") <- AC
  attr(out, "traps") <- secr::traps(capthist)
  attr(out, "timeTaken") <- unclass(difftime(Sys.time(), startTime, units="secs"))
  attr(out, "call") <- match.call()
  # check augmentation
  if(ceiling(max(out$D) * A) >= nAug)
    warning(paste("Augmentation may not be adequate; rerun with nAug >>", nAug))
  return(out)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/Bsecr.R
# Bayesian version of CJS models
# This uses rjags via the local justRunJags function

BsurvCJS <- function(DH, model=list(phi~1, p~1), data=NULL, freq=1, priors=NULL,
    chains=3, draws=1e4, burnin=1000, thin=1, adapt=1000,
    parallel = NULL, seed=NULL, priorOnly=FALSE, ...) {
  # phi(t) p(t) model or models with time covariates for Cormack-Jolly-Seber
  #   estimation of apparent survival.
  # ** DH is detection history matrix/data frame, animals x occasions.
  # ** freq is vector of frequencies for each detection history
  # ** model is a list of 2-sided formulae for phi and p; can also be a single
  #    2-sided formula, eg, model = phi ~ .Time.
  # ** data a data frame with the covariates.

  startTime <- Sys.time()
  if(!is.null(list(...)$sample)) {
    message("*The 'sample' argument is deprecated, please use 'draws'.*")
    draws <- list(...)$sample
  }
  # Sanity checks:
  if (priorOnly)
    warning("The prior distributions will be produced, not the posterior distributions!")
  ni <- ncol(DH) - 1  # number of survival intervals and REcapture occasions
  if(!is.null(data) && nrow(data) != ni)
    stop("'data' must have one row per interval, ie, ncol(DH) - 1 rows.")

  # Convert detection history to m-array to facilitate use of multinomial likelihood
  mArray <- ch2mArray(CH=DH, freq=freq)

  # Standardise the model:
  model <- stdModel(model, defaultModel=list(phi=~1, p=~1))

  # Standardize the data
  dataList <- stddata(data, NULL)
  dataList$.Time <- standardize(1:ni)
  dataList$.Time2 <- dataList$.Time^2
  dataList$.Time3 <- dataList$.Time^3
  dataList$.time <- as.factor(1:ni)

  # Set up model matrices
  phiDf <- selectCovars(model$phi, dataList, ni)
  phiMat <- modelMatrix(model$phi, phiDf)
  phiK <- ncol(phiMat)
  pDf <- selectCovars(model$p, dataList, ni)
  pMat <- modelMatrix(model$p, pDf)
  pK <- ncol(pMat)
  K <- phiK + pK
  if(nrow(phiMat) != ni || nrow(pMat) != ni)
    stop("Missing values not allowed in covariates.")

  # Deal with priors:
  defaultPriors <- list(muPhi = rep(0, phiK),
                        sigmaPhi = rep(1, phiK),
                        muP = rep(0, pK),
                        sigmaP = rep(1, pK))
  priors <- checkPriors(priors, defaultPriors)

  # Run MLE version to get starting values
  nll <- function(param){
    phiBeta <- param[1:phiK]
    pBeta <- param[(phiK+1):K]
    log_phi <- plogis(phiMat %*% phiBeta, log.p=TRUE)
    link_p <- pMat %*% pBeta
    log_p <- plogis(link_p, log.p=TRUE)
    log_1mp <- plogis( -link_p, log.p=TRUE)
    # Output the negative log(likelihood) value:
    nll <- -sum(mArray * log_qArray(log_phi, log_p, log_1mp))
    return(min(nll, .Machine$double.xmax))
  }
  # Run mle estimation with nlm:
  param <- rep(0, K)
  res <- nlm(nll, param)
  # if(res$code > 2)  # exit code 1 or 2 is ok.
  #   stop("MLE estimation failed.")
  start <- res$estimate

  # Do the model:
  modelFile <- file.path(tempdir(), "JAGSmodel.txt")
  modeltext <- "
  model{
    # priors for beta parameters
    for(i in 1:phiK) {
      phiBeta[i] ~ dnorm(muPhi[i], tauPhi[i])
    }
    for(i in 1:pK) {
      pBeta[i] ~ dnorm(muP[i], tauP[i])
    }
    # Calculate p and phi
    for(t in 1:(nocc-1)) {
      probit(phi[t]) <- sum(phiBeta[] * phiMat[t, ])
      probit(p[t]) <- sum(pBeta[] * pMat[t, ])
    }
    # Multinomial likelihood
    for(t in 1:(nocc-1)) {
      marr[t, 1:nocc] ~ dmulti(pr[t, ], rel[t])
    }
    # Cell probs of the m-array
    for(t in 1:(nocc-1)) {
      q[t] <- 1 - p[t]  # prob of non-recapture
      # main diagonal
      pr[t, t] <- phi[t] * p[t]
      # above main diagonal
      for(j in (t+1):(nocc-1)) {
        pr[t, j] <- prod(phi[t:j]) * prod(q[t:(j-1)]) * p[j]
      }
      # below main diagonal
      for(j in 1:(t-1)) {
        pr[t, j] <- 0
      }
    }
    # last column, prob of never recaptured
    for(t in 1:(nocc-1)) {
      pr[t, nocc] <- 1 - sum(pr[t, 1:(nocc-1)])
    }
  }
  "
  writeLines(modeltext, con=modelFile)

  # organise the data:
  jagsData <- list(nocc = ncol(mArray), rel=rowSums(mArray),
      pK = pK, phiK = phiK, pMat=pMat, phiMat=phiMat,
      muPhi = priors$muPhi, tauPhi = 1/(priors$sigmaPhi)^2,
      muP = priors$muP, tauP = 1/(priors$sigmaP)^2)
  if(!priorOnly)
    jagsData$marr <- mArray

  inits <- function() {
    start1 <- start * runif(K, 0.9, 1.1)
    list(phiBeta = start1[1:phiK], pBeta = start1[(phiK+1):K])
  }

  wanted <- c("phi", "p")

  # Run the model:
  resB <- justRunJags(jagsData, inits, wanted, modelFile,
      chains, draws, burnin, thin, adapt,
      modules = c("glm"), parallel = parallel, seed=seed)

  out <- mcmcOutput(resB, header = "Model fitted in JAGS with 'rjags' functions")
  attr(out, "call") <- match.call()
  attr(out, "timeTaken") <- unclass(difftime(Sys.time(), startTime, units="secs"))
  return(out)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/BsurvCJS.R
# Print, summary, plot, head and tail methods for class Bwiqid, ie, MCMC output

print.Bwiqid <- function(x, digits=3, ...) {
  if(!inherits(x, "data.frame"))
    stop("x is not a valid Bwiqid object")
  # call <- attr(x, "call")
  header <- attr(x, "header")
  n.chains <- attr(x, "n.chains")
  if(is.null(n.chains))
    n.chains <- 1
  Rhat <- attr(x, "Rhat")
  if(is.null(Rhat) && n.chains > 1)
    Rhat <- simpleRhat(x, n.chains)
  MCerror <- attr(x, "MCerror")
  if(is.null(MCerror))
    MCerror <- getMCerror(x, n.chains)
  # n.eff <- attr(x, "n.eff")
  timetaken <- attr(x, "timetaken")

  toPrint <- cbind(
    mean = colMeans(x),
    sd = apply(x, 2, sd),
    median = apply(x, 2, median),
    t(hdi(x)))
  colnames(toPrint)[4:5] <- c("HDIlo", "HDIup")
  if(!is.null(Rhat))
    toPrint <- cbind(toPrint, Rhat = Rhat)
  if(!is.null(MCerror))
    toPrint <- cbind(toPrint, 'MCE%' = round(100 * MCerror/toPrint[, 'sd'], 1))
  # if(!is.null(n.eff))
  #   toPrint <- cbind(toPrint, n.eff = round(n.eff))
  toPrint0 <- unique(toPrint)

  # if(!is.null(call))
  #   cat("Call:", call, "\n")
  if(is.null(header))
    header <- "MCMC fit results:"
  cat(header, "\n")
  cat(nrow(x), "draws saved.\n")
  if(nrow(toPrint0) < nrow(toPrint))
    cat("(Duplicate rows removed.)\n")
  print(toPrint0, digits = digits)
  cat("\n'HDIlo' and 'HDIup' are the limits of a 95% HDI credible interval.\n")
  if(!is.null(Rhat))
    cat("'Rhat' is the potential scale reduction factor (at convergence, Rhat=1).\n")
  if(!is.null(MCerror))
    cat("'MCE%' is the Monte Carlo error as a percentage of the SD (should be less than 5%).\n")
  # if(!is.null(n.eff))
  #   cat("'n.eff' is a crude measure of effective sample size.\n")
  if(!is.null(timetaken)) {
    took <- format(round(timetaken, 1))
    cat("MCMC chain generation:", took, "\n")
  }
}
# .........................................................

summary.Bwiqid <- function(object, digits=3, ...) {
  if(!inherits(object, "data.frame"))
    stop("object is not a valid Bwiqid object")
  call <- attr(object, "call")
  header <- attr(object, "header")
  n.chains <- attr(object, "n.chains")
  if(is.null(n.chains))
    n.chains <- 1
  MCerror <- attr(object, "MCerror")
  if(is.null(MCerror))
    MCerror <- getMCerror(object, n.chains)
  Rhat <- attr(object, "Rhat")
  if(is.null(Rhat) && n.chains > 1)
    Rhat <- simpleRhat(object, n.chains)
  # n.eff <- attr(object, "n.eff")
  timetaken <- attr(object, "timetaken")

  toPrint <- cbind(
    mean = colMeans(object),
    sd = apply(object, 2, sd),
    median = apply(object, 2, median),
    t(hdi(object)))
  colnames(toPrint)[4:5] <- c("HDIlo", "HDIup")
  if(!is.null(Rhat))
    toPrint <- cbind(toPrint, Rhat = Rhat)
  if(!is.null(MCerror))
    toPrint <- cbind(toPrint, 'MCE%' = round(100 * MCerror/toPrint[, 'sd'], 1))
  # if(!is.null(n.eff))
  #   toPrint <- cbind(toPrint, n.eff = round(n.eff))

  if(is.null(header))
    header <- "MCMC fit results:"
  cat(header, "\n")
  if(is.null(n.chains)) {
    cat(nrow(object), "draws saved.\n")
  } else {
    cat(sprintf("%.0f chains x %.0f draws = %.0f total.\n",
        n.chains, nrow(object)/n.chains, nrow(object)))
  }
  if(!is.null(timetaken)) {
    took <- format(round(timetaken, 1))
    cat("MCMC chain generation:", took, "\n")
  }
  if(!is.null(Rhat)) {
    t1 <- sum(Rhat > 1.1, na.rm=TRUE)
    t2 <- sum(is.na(Rhat))
    txt <- sprintf("\nRhat: largest is %.2f", max(Rhat, na.rm=TRUE))
    if(t1) {
      txt <- c(txt, sprintf("; %.0f (%.0f%%) are greater than 1.10",
          t1, 100*t1/length(Rhat)))
    } else {
      txt <- c(txt, "; NONE are greater than 1.10")
    }
    if(t2 > 0)
      txt <- c(txt, sprintf("; %.0f (%.0f%%) are NA", t2, 100*t2/length(Rhat)))
    txt <- c(txt, ".\n")
    cat(paste0(txt, collapse=""))
  }
  if(!is.null(MCerror)) {
    MCEpc <- round(100 * MCerror/apply(object, 2, sd), 1)
    t1 <- sum(MCEpc > 5, na.rm=TRUE)
    t2 <- sum(is.na(MCEpc))
    txt <- sprintf("\nMCerror (%% of SD): largest is %.1f%%", max(MCEpc, na.rm=TRUE))
    if(t1) {
      txt <- c(txt, sprintf("; %.0f (%.0f%%) are greater than 5",
          t1, 100*t1/length(MCEpc)))
    } else {
      txt <- c(txt, "; NONE are greater than 5%")
    }
    if(t2 > 0)
      txt <- c(txt, sprintf("; %.0f (%.0f%%) are NA", t2, 100*t2/length(MCEpc)))
    txt <- c(txt, ".\n")
    cat(paste0(txt, collapse=""))
  }
  # if(!is.null(n.eff)) {
  #   n.eff[n.eff == 1] <- NA
  #   t1 <- sum(n.eff < 1000, na.rm=TRUE)
  #   t2 <- sum(is.na(n.eff))
  #   txt <- sprintf("\nn.eff: smallest is %.0f", min(n.eff, na.rm=TRUE))
  #   if(t1) {
  #     txt <- c(txt, sprintf("; %.0f (%.0f%%) are smaller than 1000", t1, 100*t1/length(n.eff)))
  #   } else {
  #     txt <- c(txt, "; NONE are smaller than 1000")
  #   }
  #   if(t2 > 0)
  #     txt <- c(txt, sprintf("; %.0f (%.0f%%) are 1 or NA", t2, 100*t2/length(Rhat)))
  #   txt <- c(txt, ".\n")
  #   cat(paste0(txt, collapse=""))
  # }
  cat("\n")

  return(invisible(round(toPrint, digits=digits)))
}
# .........................................................

plot.Bwiqid <- function(x, which=NULL, credMass=0.95,
                        ROPE=NULL, compVal=NULL, showCurve=FALSE,
                        showMode=FALSE, shadeHDI=NULL, ...) {
  # This function plots the posterior distribution for one selected item.
  # Description of arguments:
  # x is a Bwiqid object of the type returned by B* functions in 'wiqid'.
  # which indicates which item should be displayed; if NULL, looks for a
  #   'defaultPlot' attribute in x; if missing does first column.
  # ROPE is a two element vector, such as c(-1,1), specifying the limit
  #   of the ROPE.
  # compVal is a scalar specifying the value for comparison.
  # showCurve if TRUE the posterior should be displayed as a fitted density curve
  #   instead of a histogram (default).
  # TODO additional sanity checks.

  # Sanity checks:
  if(!inherits(x, "data.frame"))
    stop("x is not a valid Bwiqid object")

  # Deal with ... argument
  dots <- list(...)
  # if(length(dots) == 1 && class(dots[[1]]) == "list")
  if(length(dots) == 1 && inherits(dots[[1]], "list"))  # Fixed 2022-06-06
    dots <- dots[[1]]

  if(is.null(which))  # && !is.null(attr(x, "defaultPlot")))
    which <- attr(x, "defaultPlot")
  if(is.null(which))
    which <- colnames(x)[1]
  if(!is.character(which))
    stop("'which' must be an object of class 'character'.")  # Added 2017-09-27
  if(is.na(match(which, colnames(x))))
    stop(paste("Could not find", which, "in the output"))
  if(is.null(dots$xlab))
    dots$xlab <- which
  # Plot posterior distribution of selected item:
  out <- plotPost(x[[which]], credMass=credMass, ROPE=ROPE, compVal=compVal,
      showCurve=showCurve, showMode=showMode, shadeHDI=shadeHDI,
      graphicPars=dots)

  return(invisible(out))
}
# .........................................................

head.Bwiqid <- function(x, n=6L, ...) {
  head(as.data.frame(x), n=n, ...)
}

tail.Bwiqid <- function(x, n=6L, ...) {
  tail(as.data.frame(x), n=n, ...)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/Bwiqid_methods.R
# Wrappers for dgamma, pgamma, etc which use mean and sd as parameters.

getGammaPar <- function(mean, sd) {
  if(any(mean <= 0))
    stop("'mean' must be greater than zero.")
  if(any(sd <= 0))
    stop("'sd' must be greater than zero.")
  rate <- mean / sd^2
  shape <- rate * mean
  cbind(shape=shape, rate=rate)
}

dgamma2 <- function(x, mean, sd) {
  sr <- getGammaPar(mean, sd)
  return(dgamma(x, sr[,1], sr[,2]))
}

pgamma2 <- function(q, mean, sd, lower.tail=TRUE, log.p=FALSE) {
  sr <- getGammaPar(mean, sd)
  return(pgamma(q, sr[,1], sr[,2], lower.tail=lower.tail, log.p=log.p))
}

qgamma2 <- function(p, mean, sd, lower.tail=TRUE, log.p=FALSE) {
  sr <- getGammaPar(mean, sd)
  return(qgamma(p, sr[,1], sr[,2], lower.tail=lower.tail, log.p=log.p))
}

rgamma2 <- function(n, mean, sd) {
  sr <- getGammaPar(mean, sd)
  return(rgamma(n, sr[,1], sr[,2]))
}
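
# A minimal usage sketch (not part of the package source): mean/sd in,
#   shape/rate out, and the wrappers agree with the base functions.
if(FALSE) {
  getGammaPar(mean=5, sd=2)  # shape = 6.25, rate = 1.25
  all.equal(dgamma2(3, mean=5, sd=2), dgamma(3, shape=6.25, rate=1.25))
  qgamma2(0.975, mean=5, sd=2)  # upper 97.5% point
}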
/scratch/gouwar.j/cran-all/cranData/wiqid/R/GammaDist.R
# Update 2019-10-27: for t-distribution, sd != scale. The 'scale' parameter
#   was incorrectly named 'sd'. Also mean is undefined for df <= 1.
# *t3 group added, which does correctly implement sd.

# The functions 'dt', 'pt', etc in R calculate the (cumulative) probability
#   density for a t-distribution given the t-statistic and the number of
#   degrees of freedom (df). That means you have to calculate the t-stat first,
#   unlike 'dnorm'/'pnorm', where you put in the mean and sd.
# The functions 'dt2', 'pt2', etc calculate these values with given location,
#   scale, and df, just like 'dnorm'/'pnorm' with the addition of 'df'.

dt2 <- function(x, location, scale, df) {
  checkScaleDf(scale=scale, df=df)
  tstat <- (x - location)/scale
  return(dt(tstat, df)/scale)
}

pt2 <- function(x, location, scale, df, lower.tail=TRUE, log.p=FALSE) {
  checkScaleDf(scale=scale, df=df)
  tstat <- (x - location)/scale
  return(pt(tstat, df, lower.tail=lower.tail, log.p=log.p))
}

qt2 <- function(p, location, scale, df, lower.tail=TRUE, log.p=FALSE) {
  checkScaleDf(scale=scale, df=df)
  tstat <- qt(p, df, lower.tail=lower.tail, log.p=log.p)
  return(tstat * scale + location)
}

rt2 <- function(n, location, scale, df) {
  checkScaleDf(scale=scale, df=df)
  tstat <- rt(n, df)
  return(tstat * scale + location)
}

checkScaleDf <- function(scale, df) {
  if(scale < 0)
    stop("'scale' must be non-negative.", call.=FALSE)
  if(df <= 0)
    stop("'df' must be greater than zero.", call.=FALSE)
  if(df <= 2) {
    warning("sd is only defined for df > 2", call.=FALSE)
  } else {
    sd <- scale * sqrt(df / (df-2))
    warning("sd is ", sd, call.=FALSE)
  }
}
# .............................................................................

# The functions 'dt3', etc, calculate these values with given mean, sd,
#   and df, just like 'dnorm'/'pnorm' with the addition of 'df'.

dt3 <- function(x, mean, sd, df) {
  scale <- checkSdDf(sd=sd, df=df)
  tstat <- (x - mean)/scale
  return(dt(tstat, df)/scale)
}

pt3 <- function(x, mean, sd, df, lower.tail=TRUE, log.p=FALSE) {
  scale <- checkSdDf(sd=sd, df=df)
  tstat <- (x - mean)/scale
  return(pt(tstat, df, lower.tail=lower.tail, log.p=log.p))
}

qt3 <- function(p, mean, sd, df, lower.tail=TRUE, log.p=FALSE) {
  scale <- checkSdDf(sd=sd, df=df)
  tstat <- qt(p, df, lower.tail=lower.tail, log.p=log.p)
  return(tstat * scale + mean)
}

rt3 <- function(n, mean, sd, df) {
  scale <- checkSdDf(sd=sd, df=df)
  tstat <- rt(n, df)
  return(tstat * scale + mean)
}

checkSdDf <- function(sd, df) {
  if(sd < 0)
    stop("'sd' must be non-negative.", call.=FALSE)
  if(df <= 2)
    stop("sd is only defined for df > 2", call.=FALSE)
  sd / sqrt(df / (df-2))  # calculate and return the 'scale' parameter
}
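
# A minimal usage sketch (not part of the package source): with df = 5,
#   sd = scale * sqrt(df/(df-2)), so the *t2 (location/scale) and *t3
#   (mean/sd) groups describe the same distribution when matched.
if(FALSE) {
  df <- 5
  scale <- 2
  sd <- scale * sqrt(df / (df - 2))  # ~2.58
  suppressWarnings(dt2(1, location=0, scale=scale, df=df))
  dt3(1, mean=0, sd=sd, df=df)       # same density
}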
/scratch/gouwar.j/cran-all/cranData/wiqid/R/TDist.R
# Utilities to handle calculations on log(probabilities) avoiding under/overflow
#   and NOT exported.
# See http://www.mikemeredith.net/blog/2017/UnderOverflow.htm
# and http://www.mikemeredith.net/blog/2017/UnderOverflow_2.htm

# logSumExp: sum probabilities without over/underflow
# log_p is a vector with log(p); return value is log(sum(p)), scalar
# Note the check for all-zero vectors of p, which would otherwise produce NaN
logSumExp <- function(log_p) {
  if(max(log_p) == -Inf)
    return(-Inf)
  p_max <- which.max(log_p)
  log1p(sum(exp(log_p[-p_max] - max(log_p)))) + max(log_p)
}
# .......................................................................

# log1minusExp: get 1 - p without over/underflow
# log_p is a vector with log(p); return value is vector with log(1 - p)
log1minusExp <- function(log_p)
  ifelse(log_p > log(0.5), log(-expm1(log_p)), log1p(-exp(log_p)))
# .........................................................................

# logAddExp: add together 2 vectors of probabilities without under/overflow
# logp1 and logp2 are vectors with log(p1) and log(p2); returns log(p1 + p2)
# avoids calculations if either p1 or p2 or both are zero.
logAddExp <- function(logp1, logp2) {
  bigger <- pmax(logp1, logp2)
  smaller <- pmin(logp1, logp2)
  fix <- smaller > -Inf
  bigger[fix] <- log1p(exp(smaller[fix] - bigger[fix])) + bigger[fix]
  return(bigger)
}
# .........................................................................

# logMatMultExp: matrix multiplication using matrices of logs
# logA and logB are matrices with log(A) and log(B); returns log(A %*% B)
logMatMultExp <- function(logA, logB) {
  if(ncol(logA) != nrow(logB))
    stop("non-conformable matrices")
  logX <- matrix(NA_real_, nrow(logA), ncol(logB))
  for(i in 1:nrow(logX))
    for(j in 1:ncol(logX))
      logX[i,j] <- logSumExp(logA[i, ] + logB[, j])
  return(logX)
}
# .........................................................................
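
# A minimal usage sketch (not part of the package source): summing very
#   small probabilities on the log scale avoids underflow.
if(FALSE) {
  log_p <- c(-1000, -1001, -1002)  # these p's underflow to 0 on the raw scale
  log(sum(exp(log_p)))             # -Inf : underflow
  logSumExp(log_p)                 # ~ -999.59 : correct
  logAddExp(log(0.5), log(0.25))   # log(0.75)
}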
/scratch/gouwar.j/cran-all/cranData/wiqid/R/UnderOverflow.R
# Extractor function to get WAIC if objects have a WAIC attribute

WAIC <- function (object, ...) {
  waic1 <- function(x) {
    wc <- attr(x, "WAIC")
    if(is.null(wc))
      wc <- c(NA, NA)
    return(rev(wc))
  }
  if (length(list(...))) {
    lls <- t(sapply(list(object, ...), waic1))
    Call <- match.call()
    Call$nobs <- NULL
    row.names(lls) <- as.character(Call[-1L])
    return(as.data.frame(lls))
  } else {
    return(waic1(object)[2])
  }
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/WAIC.R
# This file has the S3 generic 'as.Bwiqid' function and a series of methods.

as.Bwiqid <- function(object, ...) UseMethod("as.Bwiqid")

as.Bwiqid.default <- function(object, ...) {
  name <- deparse(substitute(object))
  header <- paste("MCMC values from object", sQuote(name))
  warning("Class 'Bwiqid' is deprecated, please use class 'mcmcOutput'.", call.=FALSE)
  message("Returning a class 'mcmcOutput' object.")
  mcmcOutput(object, header, ...)
}
# ...................................................................
/scratch/gouwar.j/cran-all/cranData/wiqid/R/as_Bwiqid.R
# Biodiversity indices
# Where possible, these give output in units of species,
#   conceptually the number of common species in the community.

# Inverse Simpson's index aka Hill's N2
# =====================================
biodSimpson <- function(abVec, correct=TRUE) {
  # abVec = vector of counts, one element per species.
  #   Other measures of abundance, eg biomass, can be used if correct=FALSE.
  # correct: if TRUE, small sample correction is applied, cf Hurlbert,
  #   rather than Hill's N2.

  # Sanity check:
  if(any(abVec < 0))
    stop("Negative abundances are not recognised.")
  # Convert a matrix into a vector and round:
  if(correct)
    abVec <- round(abVec)
  if(is.matrix(abVec) || is.data.frame(abVec))
    abVec <- rowSums(abVec)
  N <- sum(abVec)
  if(correct) {
    1 / sum((abVec * (abVec - 1)) / (N * (N - 1)))
  } else {
    1 / sum(abVec^2 / N^2)
  }
}

# Exponential Shannon's index aka Hill's N1
# =========================================
biodShannon <- function(abVec) {
  # abVec = vector of measures of abundance,
  #   eg biomass or counts, one element per species.

  # Sanity check:
  if(any(abVec < 0))
    stop("Negative abundances are not recognised.")
  # Convert a matrix into a vector:
  if(is.matrix(abVec) || is.data.frame(abVec))
    abVec <- rowSums(abVec)
  p <- abVec[abVec > 0] / sum(abVec)
  exp(-sum(p * log(p)))
}

# Inverse Berger Parker index aka Hill's N[Inf]
# =============================================
biodBerger <- function(abVec) {
  # abVec = vector of measures of abundance,
  #   eg biomass or counts, one element per species.

  # Sanity check:
  if(any(abVec < 0))
    stop("Negative abundances are not recognised.")
  # Convert a matrix into a vector:
  if(is.matrix(abVec) || is.data.frame(abVec))
    abVec <- rowSums(abVec)
  sum(abVec) / max(abVec)
}

# Exponential Brillouin index
# ===========================
biodBrillouin <- function(cntVec) {
  # cntVec = vector of counts, one element per species.

  # Sanity check:
  if(any(cntVec < 0))
    stop("Negative abundances are not recognised.")
  # Convert a matrix into a vector:
  if(is.matrix(cntVec) || is.data.frame(cntVec))
    cntVec <- rowSums(cntVec)
  cntVec <- round(cntVec)
  N <- sum(cntVec)
  exp((lfactorial(N) - sum(lfactorial(cntVec))) / N)
}
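
# A minimal usage sketch (not part of the package source): a community of
#   5 species with counts 50, 25, 15, 7, 3 -- each index is an 'effective
#   number of common species'.
if(FALSE) {
  cnt <- c(50, 25, 15, 7, 3)
  biodSimpson(cnt)    # inverse Simpson, with Hurlbert's correction
  biodShannon(cnt)    # exponential Shannon, Hill's N1
  biodBerger(cnt)     # inverse Berger-Parker = 100/50 = 2
  biodBrillouin(cnt)  # exponential Brillouin
}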
/scratch/gouwar.j/cran-all/cranData/wiqid/R/biodIndices.R
# Adapted from Kery & Schaub (2012), written by Michael Schaub.
# Not exported

# Function to create an m-array based on capture-histories (CH) plus vector of frequencies.
ch2mArray <- function(CH, freq=1){
  CH <- as.matrix(CH)  # might be a data frame
  if(length(freq) == 1)
    freq <- rep(freq, nrow(CH))
  if(length(freq) != nrow(CH))
    stop("'freq' must have one value for each row of the capture history matrix.")
  nocc <- ncol(CH)
  if(any(freq < 0)) {
    # When did the loss occur?
    getLast <- function(x) max(which(x > 0))
    last <- apply(CH[freq < 0, ], 1, getLast)
    lost <- tabulate(last, nbins=nocc)
    freq <- abs(freq)
  } else {
    lost <- rep(0, nocc)
  }
  ma <- matrix(0, nocc, nocc+1)
    # First column and last row will be removed later
    # Last col is for number never-seen-again
  for(i in 1:nrow(CH)) {
    cht <- which(CH[i, ] != 0)  # When was animal caught?
    # Fill in release/recapture data
    for(z in seq_along(cht[-1]))  # Does nothing if length(cht) = 1
      ma[cht[z], cht[z+1]] <- ma[cht[z], cht[z+1]] + freq[i]
  }
  # Marked animals never seen again:
  totCH <- sweep(CH, 1, freq, "*")
  ma[, nocc+1] <- colSums(totCH) - rowSums(ma) - lost
  # Remove 1st col (REcaptures on 1st occasion = all zeros)
  #   and last row (releases on last occasion will never be recaptured).
  return(ma[-nocc, -1])
}
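
# A minimal usage sketch (not part of the package source): a tiny capture
#   history (3 animals x 3 occasions) and its m-array.
if(FALSE) {
  CH <- rbind(c(1,1,1),
              c(1,0,1),
              c(0,1,0))
  ch2mArray(CH)
  # Rows = release occasions 1-2; columns = first-recapture occasions 2-3,
  #   plus a final 'never recaptured' column: rbind(c(1,1,0), c(0,1,1))
}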
/scratch/gouwar.j/cran-all/cranData/wiqid/R/ch2mArray.R
# Combine the list of priors and the default list, check the result and
#   expand scalar values to vector when necessary.
# Not exported.
# Current version can only deal with vector-valued priors, not varcovar matrices.
# TODO ### check for NAs and sigma <= 0

# priors0 : list of priors specified by the user
# defaultPriors : list of default priors, used where the user does not specify
#   a value; each element of defaultPriors must be the correct length.

checkPriors <- function(priors0, defaultPriors) {
  priorErrorFlag <- FALSE
  priors <- replace (defaultPriors, names(priors0), priors0)
  parNames <- names(defaultPriors)
  cruft <- !(names(priors) %in% parNames)
  if(any(cruft)) {
    message("The following invalid elements in 'priors' will be ignored:")
    message(paste(names(priors)[cruft], collapse=" "))
  }
  priors <- priors[parNames]  # removes cruft in priors

  for(i in seq_along(parNames)) {
    this <- parNames[i]  # just to improve readability
    nPars <- length(defaultPriors[[this]])
    if(length(priors[[this]]) == 1)
      priors[[this]] <- rep(priors[[this]], nPars)
    if(length(priors[[this]]) != nPars) {
      message("Wrong length for priors for ", this,
          " which should have ", nPars, " value(s).")
      priorErrorFlag <- TRUE
    }
  }
  if(priorErrorFlag)
    stop("Invalid prior specification.")
  return(priors)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/checkPriors.R
closedCapM0 <- function(CH, ci = 0.95, ciType=c("normal", "MARK"), ...) {
  # CH is a 1/0 capture history matrix, animals x occasions, OR
  #   a vector of capture frequencies of length equal to the number
  #   of occasions - trailing zeros are required.
  # ci is the required confidence interval
  # ciType is the method of calculation

  if (is.matrix(CH) || is.data.frame(CH)) {
    n.occ <- ncol(CH)
    freq <- tabulate(rowSums(CH), nbins=n.occ)
  } else {
    freq <- round(CH)
    n.occ <- length(freq)
  }
  crit <- fixCI(ci)
  ciType <- match.arg(ciType)

  N.cap <- sum(freq)  # Number of individual animals captured
  n.snap <- sum(freq * (1:length(freq)))  # Total number of capture events

  beta.mat <- matrix(NA_real_, 2, 4)  # objects to hold output
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- c("Nhat", "phat")
  varcov <- NULL
  npar <- NA_real_
  logLik <- NA_real_

  if(sum(freq[-1]) > 0) {  # Need recaptures
    nll <- function(params) {
      N <- min(exp(params[1]) + N.cap, 1e+300, .Machine$double.xmax)
      logp <- plogis(params[2], log.p=TRUE)
      log1mp <- plogis( -params[2], log.p=TRUE)  # log(1-p)
      tmp <- lgamma(N + 1) - lgamma(N - N.cap + 1) +
          n.snap*logp + (N*n.occ - n.snap)*log1mp
      return(min(-tmp, .Machine$double.xmax))
    }
    nlmArgs <- list(...)
    nlmArgs$f <- nll
    nlmArgs$p <- c(log(5), 0)
    nlmArgs$hessian <- TRUE
    if(is.null(nlmArgs$iterlim))
      nlmArgs$iterlim <- 1000
    res <- do.call(nlm, nlmArgs)
    if(res$code > 2)  # exit code 1 or 2 is ok.
      warning(paste("Convergence may not have been reached (code", res$code, ")"))
    beta.mat[,1] <- res$estimate
    logLik <- -res$minimum
    varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE)
    if (!inherits(varcov0, "try-error")) {
      varcov <- varcov0
      beta.mat[, 2] <- suppressWarnings(sqrt(diag(varcov)))
      beta.mat[, 3:4] <- sweep(outer(beta.mat[, 2], crit), 1, res$estimate, "+")
      npar <- 2
    }
  }
  if(ciType == "normal") {
    Nhat <- exp(beta.mat[1, -2]) + N.cap
  } else {
    Nhat <- getMARKci(beta.mat[1, 1], beta.mat[1, 2], ci) + N.cap
  }
  out <- list(call = match.call(),
              beta = beta.mat,
              beta.vcv = varcov,
              real = rbind(Nhat, plogis(beta.mat[2, -2, drop=FALSE])),
              logLik = c(logLik=logLik, df=npar, nobs=N.cap * n.occ))
  class(out) <- c("wiqid", "list")
  return(out)
}
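
# A minimal usage sketch (not part of the package source): capture
#   frequencies for 5 occasions -- 20 animals caught once, 8 twice, 3 three
#   times, none more often (trailing zeros required).
if(FALSE) {
  freq <- c(20, 8, 3, 0, 0)
  fit <- closedCapM0(freq)
  fit$real  # Nhat and phat with 95% CI
  AICc(fit) # relies on the logLik method for 'wiqid' objects defined elsewhere
}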
/scratch/gouwar.j/cran-all/cranData/wiqid/R/closedCapM0.R
# Mb model, capture probability dependent on a permanent behavioural response

closedCapMb <- function(CH, ci = 0.95, ciType=c("normal", "MARK"), ...) {
  # CH is a 1/0 capture history matrix, animals x occasions
  # ci is the required confidence interval

  CH <- round(as.matrix(CH))
  if(length(dim(CH)) != 2)
    stop("CH should be a matrix, animals x occasions, or a data frame.")
  CH <- CH[rowSums(CH) > 0, ]  # remove any all-zero capture histories
  crit <- fixCI(ci)
  ciType <- match.arg(ciType)

  nOcc <- ncol(CH)   # number of capture occasions
  N.cap <- nrow(CH)  # total number of individual animals captured
  # n <- colSums(CH) # vector of number captures on each occasion
  getFirst <- function(x) min(which(x > 0))
  firstCap <- apply(CH, 1, getFirst)
  n0 <- firstCap - 1          # number of misses before first capture, prob = 1 - p
  ns <- rowSums(CH) - 1       # number of hits after first capture, prob = c
  nf <- nOcc - firstCap - ns  # number of misses after first capture, prob = 1 - c

  beta.mat <- matrix(NA_real_, 3, 4)  # objects to hold output
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- c("Nhat", "phat", "chat")
  npar <- NA_real_
  logLik <- NA_real_
  varcov <- NULL

  if(N.cap > 0) {
    nll <- function(params) {
      f0 <- min(exp(params[1]), 1e+300, .Machine$double.xmax)
      N <- N.cap + f0
      logp <- plogis(params[2], log.p=TRUE)
      log1mp <- plogis( -params[2], log.p=TRUE)
      logc <- plogis(params[3], log.p=TRUE)
      log1mc <- plogis( -params[3], log.p=TRUE)
      tmp <- lgamma(N + 1) - lgamma(N - N.cap + 1) +
          sum(n0 * log1mp + ns * logc + nf * log1mc) +
          N.cap * logp +      # Captured animals
          f0 * nOcc * log1mp  # Uncaptured animals
      return(min(-tmp, .Machine$double.xmax))
    }
    nlmArgs <- list(...)
    nlmArgs$f <- nll
    nlmArgs$p <- c(log(5), 0, 0)
    nlmArgs$hessian <- TRUE
    if(is.null(nlmArgs$iterlim))
      nlmArgs$iterlim <- 1000
    res <- do.call(nlm, nlmArgs)
    if(res$code > 2)  # exit code 1 or 2 is ok.
      warning(paste("Convergence may not have been reached (code", res$code, ")"))
    beta.mat[,1] <- res$estimate
    logLik <- -res$minimum
    varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE)
    # if (!inherits(varcov, "try-error") && all(diag(varcov) > 0)) {
    if (!inherits(varcov0, "try-error")) {
      varcov <- varcov0
      beta.mat[, 2] <- suppressWarnings(sqrt(diag(varcov)))
      beta.mat[, 3:4] <- sweep(outer(beta.mat[, 2], crit), 1, res$estimate, "+")
      npar <- 3
    }
  }
  if(ciType == "normal") {
    Nhat <- exp(beta.mat[1, -2]) + N.cap
  } else {
    Nhat <- getMARKci(beta.mat[1, 1], beta.mat[1, 2], ci) + N.cap
  }
  out <- list(call = match.call(),
              beta = beta.mat,
              beta.vcv = varcov,
              real = rbind(Nhat, plogis(beta.mat[-1, -2])),
              logLik = c(logLik=logLik, df=npar, nobs=length(CH)))
  class(out) <- c("wiqid", "list")
  return(out)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/closedCapMb.R
closedCapMh2 <- function(CH, ci = 0.95, ciType=c("normal", "MARK"), ...) { # CH is a 1/0 capture history matrix, animals x occasions, OR # a vector of capture frequencies of length equal to the number # of occasions - trailing zeros are required. # ci is the required confidence interval if (is.matrix(CH) || is.data.frame(CH)) { n.occ <- ncol(CH) freq <- tabulate(rowSums(CH), nbins=n.occ) } else { freq <- round(CH) n.occ <- length(freq) } crit <- fixCI(ci) ciType <- match.arg(ciType) N.cap <- sum(freq) # Number of individual animals captured beta.mat <- matrix(NA_real_, 4, 4) # objects to hold output colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c("Nhat", "pmixHat", "p1hat", "p2hat") npar <- NA_real_ logLik <- NA_real_ varcov <- NULL if(sum(freq[-1]) > 1) { # Do checks here # nll1 ensures p2 <= p1, but useless for Hessian # nll2 starts with output from nll1 (so p2/p1 not an issue) nll1 <- function(params) { f0 <- min(exp(params[1]), 1e+300, .Machine$double.xmax) f.vect <- c(f0, freq) pi <- plogis(params[2]) p1 <- plogis(params[3]) p2 <- p1 * plogis(params[4]) # ensures that p2 <= p1 bin.co <- lgamma(N.cap + f0 + 1) - lgamma(f0 + 1) tmp <- numeric(length(f.vect)) for(i in seq_along(f.vect)) tmp[i] <- pi * p1^(i-1) * (1-p1)^(n.occ-i+1) + (1-pi) * p2^(i-1) * (1-p2)^(n.occ-i+1) llh <- bin.co + sum(f.vect * log(tmp)) return(min(-llh, .Machine$double.xmax)) } nll2 <- function(params) { f0 <- min(exp(params[1]), 1e+300, .Machine$double.xmax) f.vect <- c(f0, freq) pi <- plogis(params[2]) p1 <- plogis(params[3]) p2 <- plogis(params[4]) bin.co <- lgamma(N.cap + f0 + 1) - lgamma(f0 + 1) tmp <- numeric(length(f.vect)) for(i in seq_along(f.vect)) tmp[i] <- pi * p1^(i-1) * (1-p1)^(n.occ-i+1) + (1-pi) * p2^(i-1) * (1-p2)^(n.occ-i+1) llh <- bin.co + sum(f.vect * log(tmp)) return(min(-llh, .Machine$double.xmax)) } res1 <- suppressWarnings(nlm(nll1, c(log(5), 0,0,0))) params <- res1$estimate p2 <- plogis(params[3]) * plogis(params[4]) params[4] <- qlogis(p2) nlmArgs <- list(...) nlmArgs$f <- nll2 nlmArgs$p <- params nlmArgs$hessian <- TRUE res2 <- do.call(nlm, nlmArgs) if(res2$code > 2) # exit code 1 or 2 is ok. warning(paste("Convergence may not have been reached (nlm code", res2$code, ")")) beta.mat[, 1] <- res2$estimate logLik <- -res2$minimum varcov0 <- try(chol2inv(chol(res2$hessian)), silent=TRUE) # if (!inherits(varcov, "try-error") && all(diag(varcov) > 0)) { if (!inherits(varcov0, "try-error")) { varcov <- varcov0 beta.mat[, 2] <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 3:4] <- sweep(outer(beta.mat[, 2], crit), 1, res2$estimate, "+") npar <- 4 } } if(ciType == "normal") { Nhat <- exp(beta.mat[1, -2]) + N.cap } else { Nhat <- getMARKci(beta.mat[1, 1], beta.mat[1, 2], ci) + N.cap } out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = rbind(Nhat, plogis(beta.mat[-1, -2])), logLik = c(logLik=logLik, df=npar, nobs=N.cap * n.occ)) class(out) <- c("wiqid", "list") return(out) }
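# A minimal usage sketch for closedCapMh2, simulating two-point heterogeneity
#   in capture probability; the mixture settings are illustrative only.
if(FALSE) {
  set.seed(1)
  N <- 150 ; nOcc <- 6
  grp <- rbinom(N, 1, 0.4)            # mixture membership
  p <- ifelse(grp == 1, 0.45, 0.1)    # two capture probabilities
  CH <- matrix(rbinom(N * nOcc, 1, p), N, nOcc)  # p recycles down the rows
  fit <- closedCapMh2(CH[rowSums(CH) > 0, ])
  fit$real                            # Nhat, pmixHat, p1hat, p2hat
}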
/scratch/gouwar.j/cran-all/cranData/wiqid/R/closedCapMh2.R
closedCapMhJK <- function(CH, ci=0.95) { # CH is a 1/0 capture history matrix, animals x occasions, OR # a vector of capture frequencies of length equal to the number # of occasions - trailing zeros are required. # ci is the required confidence interval # B&O = Burnham, K.P. & Overton, W.S. (1979) Robust estimation of population size # when capture probabilities vary among animals. Ecology, 60, 927-936. # R&B = Rexstad, E; K Burnham 1992. User's guide for interactive program CAPTURE. # USGS Patuxent. if (is.matrix(CH) || is.data.frame(CH)) { n.occ <- ncol(CH) freq <- tabulate(rowSums(CH), nbins=n.occ) } else { freq <- round(CH) n.occ <- length(freq) } crit <- fixCI(ci)[2] N.cap <- sum(freq) # Number of animals captured n.snap <- sum(freq * seq_along(freq)) # Total number of capture events n.jack <- min(5, n.occ) # Number of jackknife estimations out.mat <- matrix(NA_real_, 2, 3) colnames(out.mat) <- c("est", "lowCI", "uppCI") rownames(out.mat) <- c("Nhat", "pHat") # if(sum(freq[-1]) > 1) { # Do checks here if(sum(freq[-1]) > 0) { # Do checks here # Remove trailing zeros: while(freq[length(freq)] == 0) freq <- freq[-length(freq)] ### Create matrix of jackknife coefficients following B&O Table 1 p928 # CAPTURE uses a simplified form for n.occ > 30. if(n.occ > 30) { A <- matrix(c( 2, 1, 1, 1, 1, 3, 0, 1, 1, 1, 4,-2, 2, 1, 1, 5,-5, 5, 0, 1, 6,-9,11,-4, 2), nrow=5, ncol=5, byrow=TRUE) } else { # We do a 5x5 matrix, which will contain NaNs if n.occ < 5, then adjust it # afterwards. Equations are from B&O. tmp <- suppressWarnings(factorial(n.occ)/factorial(n.occ-(1:5))) A2 <- rbind( c(n.occ-1, 0,0,0,0) / tmp, c(2*n.occ-3, -(n.occ-2)^2, 0, 0, 0) / tmp, c(3*n.occ-6, -(3*n.occ^2-15*n.occ+19), (n.occ-3)^3, 0,0) /tmp, c(4*n.occ-10, -(6*n.occ^2-36*n.occ+55), 4*n.occ^3-42*n.occ^2+148*n.occ-175, -(n.occ-4)^4, 0)/tmp, c(5*n.occ-15, -(10*n.occ^2-70*n.occ+125), 10*n.occ^3-120*n.occ^2+485*n.occ-660, -((n.occ-4)^5-(n.occ-5)^5), (n.occ-5)^5) / tmp) A <- A2[1:n.jack, 1:n.jack] + 1 } # Adjust length(freq) or ncol(A) so that they match: if(length(freq) < n.jack) freq <- c(freq, rep(0, n.jack - length(freq))) if(length(freq) > n.jack) A <- cbind(A, matrix(1, nrow=n.jack, ncol=length(freq)-n.jack)) # Calculate jackknife estimates and SE (but see revised SE below) Nj <- freq %*% t(A) var.Nj <- freq %*% t(A^2) - Nj # could be negative #SEj <- sqrt(pmax(0, var.Nj)) # Calculate test statistic for differences in Nj's diffs <- diff(Nj[1,]) # The coefficients labelled b[i] by B&O p929: b <- apply(A, 2, diff) b2 <- b^2 # Variance of the differences var.diff <- (freq %*% t(b2) - diffs^2/N.cap) * N.cap / (N.cap-1) Chi2 <- diffs^2 / var.diff # For some values of n.jack < 5, diffs = var.diff = 0, causing NaNs. Chi2[is.nan(Chi2)] <- 0 # Last Chi2 = 0 Chi2 <- c(Chi2, 0) ### Revised calculation of SE # See R&B for theory. CAPTURE source code uses Chi2 - 1 for the power # calculation, or power = 0.5 if Chi2 < 1: lambda <- pmax(0, Chi2 - 1)[-length(Chi2)] powr <- pchisq(3.8415, 1, lambda, lower.tail=FALSE) powr[lambda == 0] <- 0.5 pi <- c(1-powr, 1) * c(1,cumprod(powr)) E.Nj <- sum(pi * Nj) var.rev <- sum( pi * var.Nj + pi * (Nj - E.Nj)^2) # Sometimes negative. SE.rev <- sqrt(max(0,var.rev)) ### Interpolation # See B&O bottom of p933. CAPTURE uses Chi2 to calculate # weights, not the p-values as in B&O (they are NOT equivalent). 
# Select jackknife which is not significantly different from next higher # order jackknife: m <- which(Chi2 < 3.8415)[1] if(m > 1) { c <- (Chi2[m-1]-3.84)/(Chi2[m-1]-Chi2[m]) d <- c*A[m,] + (1-c)*A[m-1,] N.hat <- sum(d * freq) if(N.hat < Nj[1]) { N.hat <- Nj[1] } } else { # CAPTURE interpolates between N(1) and N.cap if appropriate. xtest <- Nj[1]*freq[1] / (Nj[1] - freq[1]) if (xtest > 3.84) { # interpolation is okay c <- (xtest-3.84)/(xtest-Chi2[1]) d <- c*A[1,] + (1-c) N.hat <- sum(d * freq) } else { N.hat <- Nj[1] # SE.rev <- NA } } ### Calculate 95% CI based on log-normal distribution. See R&B. f0 <- N.hat - N.cap if(f0 < .Machine$double.eps) { CI <- c(N.cap, N.cap) } else { C <- exp(crit*sqrt(log(1 + SE.rev^2/f0^2))) CI <- c(N.cap + f0/C, N.cap + f0*C) } # Prepare output out.mat[1, ] <- c(N.hat, CI) out.mat[2, ] <- n.snap / (out.mat[1, c(1,3,2)] * n.occ) } out <- list(call = match.call(), # beta = beta.mat, real = out.mat, logLik = c(logLik=NA, df=NA, nobs=N.cap * n.occ)) class(out) <- c("wiqid", "list") return(out) }
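# A minimal usage sketch for the jackknife estimator; the capture frequencies
#   (number of animals caught exactly 1, 2, ..., 7 times) are made up for
#   illustration. A 1/0 capture-history matrix works equally well.
if(FALSE) {
  freq <- c(43, 16, 8, 6, 0, 2, 1)   # 7 occasions; trailing zeros count too
  closedCapMhJK(freq)$real
}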
/scratch/gouwar.j/cran-all/cranData/wiqid/R/closedCapMhJK.R
# Mt model, capture probability time dependent closedCapMt <- function(CH, ci = 0.95, ciType=c("normal", "MARK"), ...) { # CH is a 1/0 capture history matrix, animals x occasions # ci is the required confidence interval crit <- fixCI(ci) ciType <- match.arg(ciType) CH <- round(as.matrix(CH)) nocc <- ncol(CH) # number of capture occasions N.cap <- nrow(CH) # total number of individual animals captured n <- colSums(CH) # vector of number captures on each occasion beta.mat <- matrix(NA_real_, nocc+1, 4) # objects to hold output colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c("Nhat", paste0("p", 1:nocc)) npar <- NA_real_ logLik <- NA_real_ varcov <- NULL if(N.cap > 0) { nll <- function(params) { N <- min(exp(params[1]) + N.cap, 1e+300, .Machine$double.xmax) logp <- plogis(params[-1], log.p=TRUE) log1mp <- plogis( -params[-1], log.p=TRUE) tmp <- lgamma(N + 1) - lgamma(N - N.cap + 1) + sum(n * logp + (N - n) * log1mp) return(min(-tmp, .Machine$double.xmax)) } nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- c(log(5), rep(0, nocc)) nlmArgs$hessian <- TRUE if(is.null(nlmArgs$iterlim)) nlmArgs$iterlim <- 1000 res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. warning(paste("Convergence may not have been reached (nlm code", res$code, ")")) beta.mat[,1] <- res$estimate logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { varcov <- varcov0 beta.mat[, 2] <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 3:4] <- sweep(outer(beta.mat[, 2], crit), 1, res$estimate, "+") npar <- nocc+1 } } if(ciType == "normal") { Nhat <- exp(beta.mat[1, -2]) + N.cap } else { Nhat <- getMARKci(beta.mat[1, 1], beta.mat[1, 2], ci) + N.cap } out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = rbind(Nhat, plogis(beta.mat[-1, -2])), logLik = c(logLik=logLik, df=npar, nobs=length(CH))) class(out) <- c("wiqid", "list") return(out) }
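# A minimal usage sketch for closedCapMt with time-varying capture
#   probabilities; the per-occasion values are illustrative only.
if(FALSE) {
  set.seed(1)
  N <- 120
  pt <- c(0.2, 0.35, 0.25, 0.4, 0.3)           # one p per occasion
  CH <- sapply(pt, function(p) rbinom(N, 1, p))
  fit <- closedCapMt(CH[rowSums(CH) > 0, ])
  fit$real                                     # Nhat and p1...p5
}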
/scratch/gouwar.j/cran-all/cranData/wiqid/R/closedCapMt.R
# Mtcov model, capture probability a function of time dependent covariates closedCapMtcov <- function(CH, model=list(p~1), data=NULL, ci = 0.95, ciType=c("normal", "MARK"), ...) { # CH is a 1/0 capture history matrix, animals x occasions # ci is the required confidence interval crit <- fixCI(ci) ciType <- match.arg(ciType) # Standardise the model: model <- stdModel(model, list(p=~1)) CH <- round(as.matrix(CH)) nocc <- ncol(CH) # number of capture occasions N.cap <- nrow(CH) # total number of individual animals captured n <- colSums(CH) # vector of number of captures on each occasion # Convert the covariate data frame into a list dataList <- stddata(data, nocc) dataList$.Time <- standardize(1:nocc) dataList$.Time2 <- dataList$.Time^2 dataList$.Time3 <- dataList$.Time^3 dataList$.time <- as.factor(1:nocc) ddf <- as.data.frame(dataList) pModMat <- modelMatrix(model$p, ddf) K <- ncol(pModMat) beta.mat <- matrix(NA_real_, K+1, 4) # objects to hold output colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c("Nhat", colnames(pModMat)) lp.mat <- matrix(NA_real_, nocc, 3) colnames(lp.mat) <- c("est", "lowCI", "uppCI") rownames(lp.mat) <- paste0("p", 1:nocc) npar <- NA_real_ logLik <- NA_real_ varcov <- NULL if(N.cap > 0) { nll <- function(params) { N <- min(exp(params[1]) + N.cap, 1e+300, .Machine$double.xmax) logitp <- pModMat %*% params[-1] logp <- as.vector(plogis(logitp, log.p = TRUE)) log1mp <- as.vector(plogis( -logitp, log.p = TRUE)) tmp <- lgamma(N + 1) - lgamma(N - N.cap + 1) + sum(n * logp + (N - n) * log1mp) return(min(-tmp, .Machine$double.xmax)) } # res <- nlm(nll, params, hessian=TRUE, iterlim=1000) nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- c(log(5), rep(0, K)) nlmArgs$hessian <- TRUE if(is.null(nlmArgs$iterlim)) nlmArgs$iterlim <- 1000 res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. warning(paste("Convergence may not have been reached (code", res$code, ")")) beta.mat[,1] <- res$estimate lp.mat[, 1] <- pModMat %*% beta.mat[-1, 1] logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { varcov <- varcov0 beta.mat[, 2] <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 3:4] <- sweep(outer(beta.mat[, 2], crit), 1, res$estimate, "+") temp <- getFittedVar(pModMat, varcov[-1, -1]) if(all(temp >= 0)) { SElp <- sqrt(temp) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") npar <- K+1 } } } if(ciType == "normal") { Nhat <- exp(beta.mat[1, -2]) + N.cap } else { Nhat <- getMARKci(beta.mat[1, 1], beta.mat[1, 2], ci) + N.cap } out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = rbind(Nhat, plogis(lp.mat)), logLik = c(logLik=logLik, df=npar, nobs=length(CH))) class(out) <- c("wiqid", "list") return(out) }
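# A minimal usage sketch for closedCapMtcov using only the built-in time
#   covariates: .time is a factor for occasion (equivalent to model Mt) and
#   .Time is a standardised linear trend. Data are simulated and the settings
#   illustrative; the AICc comparison assumes the package's logLik method for
#   'wiqid' objects.
if(FALSE) {
  set.seed(1)
  N <- 120 ; nOcc <- 6
  pt <- plogis(seq(-1.5, 0, length=nOcc))      # p increasing over time
  CH <- sapply(pt, function(p) rbinom(N, 1, p))
  CH <- CH[rowSums(CH) > 0, ]
  fitTrend <- closedCapMtcov(CH, model = list(p ~ .Time))
  fitNull <- closedCapMtcov(CH)                # default p ~ 1
  fitTrend$real
  AICc(fitTrend, fitNull)                      # compare via AICc (see AICc.R)
}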
/scratch/gouwar.j/cran-all/cranData/wiqid/R/closedCapMtcov.R
# Function for shape and rate parameters of gamma. From DBDA2E-utilities.R; see # p. 238 of "Doing Bayesian Data Analysis" Second Edition, # https://sites.google.com/site/doingbayesiandataanalysis/ # Modified by MM to accept mode/mean and sd as vectors # Not exported. gammaShRaFromModeSD = function( mode , sd ) { # if ( mode <=0 ) stop("mode must be > 0") # if ( sd <=0 ) stop("sd must be > 0") if ( any(mode <= 0) ) stop("mode of gamma prior must be > 0") if ( any(sd <= 0) ) stop("sd of gamma prior must be > 0") rate = ( mode + sqrt( mode^2 + 4 * sd^2 ) ) / ( 2 * sd^2 ) shape = 1 + mode * rate return( list( shape=shape , rate=rate ) ) } gammaShRaFromMeanSD = function( mean , sd ) { # if ( mean <=0 ) stop("mean must be > 0") # if ( sd <=0 ) stop("sd must be > 0") if ( any(mean <= 0) ) stop("mean of gamma prior must be > 0") if ( any(sd <= 0) ) stop("sd of gamma prior must be > 0") shape = mean^2/sd^2 rate = mean/sd^2 return( list( shape=shape , rate=rate ) ) }
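# A quick check of the conversions: for a gamma with shape k and rate r,
#   mode = (k - 1)/r, mean = k/r and sd = sqrt(k)/r, so round-tripping should
#   recover the inputs. (These helpers are internal, not exported.)
if(FALSE) {
  sr <- gammaShRaFromModeSD(mode = 2, sd = 1)
  (sr$shape - 1) / sr$rate    # 2, the mode
  sqrt(sr$shape) / sr$rate    # 1, the sd
  ms <- gammaShRaFromMeanSD(mean = 2, sd = 1)
  ms$shape / ms$rate          # 2, the mean
}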
/scratch/gouwar.j/cran-all/cranData/wiqid/R/converters.R
# Functions for a range of distance measures distBrayCurtis <- function(d1, d2) 1 - 2*sum(pmin(d1, d2)) / (sum(d1)+sum(d2)) distChaoJaccCorr <- function(d1, d2) { n <- sum(d1) ; m <- sum(d2) f1. <- sum(d1==1 & d2>0) f2. <- max(1, sum(d1==2 & d2>0)) f.1 <- sum(d2==1 & d1>0) f.2 <- max(1, sum(d2==2 & d1>0)) U <- min(1, sum(d1[d2>0])/n + ((m-1)/m)*(f.1/(2*f.2))*sum(d1[d2==1]/n)) V <- min(1, sum(d2[d1>0])/m + ((n-1)/n)*(f1./(2*f2.))*sum(d2[d1==1]/m)) 1 - U*V / (U+V-U*V) } distChaoJaccNaive <- function(d1, d2) { shared <- d1 & d2 # which sps are shared U <- sum(d1[shared])/sum(d1) ; V <- sum(d2[shared])/sum(d2) 1 - U*V / (U+V-U*V) } distChaoSorCorr <- function(d1, d2) { n <- sum(d1) ; m <- sum(d2) f1. <- sum(d1==1 & d2>0) f2. <- max(1, sum(d1==2 & d2>0)) f.1 <- sum(d2==1 & d1>0) f.2 <- max(1, sum(d2==2 & d1>0)) U <- min(1, sum(d1[d2>0])/n + ((m-1)/m)*(f.1/(2*f.2))*sum(d1[d2==1]/n)) V <- min(1, sum(d2[d1>0])/m + ((n-1)/n)*(f1./(2*f2.))*sum(d2[d1==1]/m)) 1 - 2*U*V / (U+V) } distChaoSorNaive <- function(d1, d2) { shared <- d1 & d2 # which sps are shared U <- sum(d1[shared])/sum(d1) ; V <- sum(d2[shared])/sum(d2) 1 - 2*U*V / (U+V) } distChord <- function(d1, d2) { s1 <- d1/sqrt(sum(d1^2)) s2 <- d2/sqrt(sum(d2^2)) sqrt(sum((s1 - s2)^2)) } distJaccard <- function(d1, d2) { shared <- sum(d1 & d2) 1 - shared / (sum(d1>0)+sum(d2>0)-shared) } distMatching <- function(d1, d2) { same <- sum((d1 > 0) == (d2 > 0)) 1 - same / length(d1) } distMorisitaHorn <- function(d1, d2) { p1 <- d1 / sum(d1) p2 <- d2 / sum(d2) 1 - 2 * sum(p1*p2) / ( sum(p1^2)+sum(p2^2) ) } distOchiai <- function(d1, d2) { shared <- sum(d1 & d2) 1 - shared / sqrt(sum(d1 > 0) * sum(d2 > 0)) } distPreston <- function(d1, d2) { total <- sum(d1 | d2) f <- function(n, x, y) abs(x^n+y^n-1) 1/nlm(f, 2, x=sum(d1 > 0) / total, y=sum(d2 > 0) / total)$estimate } distRogersTanimoto <- function(d1, d2) { same <- sum((d1 > 0) == (d2 > 0)) different <- sum((d1 > 0) != (d2 > 0)) 1 - same / (length(d1) + different) } distSimRatio <- function(d1, d2) { 1 - sum(d1 * d2) / (sum(d1^2) + sum(d2^2) - sum(d1 * d2)) } distSorensen <- function(d1, d2) { shared <- sum(d1 & d2) 1 - 2*shared / (sum(d1>0)+sum(d2>0)) } distWhittaker <- function(d1, d2) { p1 <- d1 / sum(d1) p2 <- d2 / sum(d2) sum(abs(p1 - p2))/2 }
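# Each measure takes two abundance (or incidence) vectors of equal length and
#   returns a single dissimilarity value; the counts below are made up for
#   illustration.
if(FALSE) {
  d1 <- c(10, 0, 3, 1, 0, 6)
  d2 <- c( 8, 2, 0, 1, 1, 5)
  distBrayCurtis(d1, d2)
  distJaccard(d1, d2)        # uses presence/absence only
  distMorisitaHorn(d1, d2)
}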
/scratch/gouwar.j/cran-all/cranData/wiqid/R/distMeasures.R
distShell <- function(DATA, FUNC, diag=FALSE, upper=FALSE, ...) {
  # Distance matrix computation using a user-defined distance measure.
  #
  # Args:
  #   DATA: a matrix-like object with variables in COLUMNS, cases in ROWS.
  #   FUNC(x1, x2, ...): the distance function; takes two vector arguments,
  #     returns a single scalar distance measure.
  #   diag, upper: logical values indicating whether the diagonal or upper
  #     triangle of the distance matrix should be printed by print.dist.
  #   ...: additional arguments passed to FUNC.
  #
  # Returns:
  #   An object of class 'dist', ie. the lower triangle of the distance
  #   matrix stored by columns in a vector.
  call <- match.call()
  # Convert to a matrix, do sanity checks.
  DAT <- as.matrix(DATA)
  if(!is.numeric(DAT))
    stop("Argument DATA must be numeric.")
  if(any(dim(DAT) < 2))
    stop("DATA must have at least 2 columns and 2 rows.")
  cases <- dim(DAT)[1]  # number of cases, ie. sites, samples, quadrats,...
  # Set up the output matrix:
  OP <- matrix(NA, nrow=cases, ncol=cases)  # the output matrix
  rownames(OP) <- colnames(OP) <- rownames(DAT)
  # Calculate the index for each pair of cases; the i loop does rows, the
  #   j loop cols.
  for(i in 1:(cases-1))
    for(j in (i+1):cases)
      OP[i,j] <- FUNC(DAT[i,], DAT[j,], ...)
  # Return the result as an object of type 'dist' with an additional
  #   attribute giving the call (important as a record of the FUNC used).
  tmp <- as.dist(t(OP), diag=diag, upper=upper)
  attr(tmp, "call") <- call
  return(tmp)
}
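# A minimal usage sketch for distShell, pairing it with distBrayCurtis from
#   distMeasures.R; the site x species count matrix is simulated.
if(FALSE) {
  set.seed(1)
  counts <- matrix(rpois(5 * 8, 3), nrow=5,
                   dimnames=list(paste0("site", 1:5), paste0("sp", 1:8)))
  distShell(counts, distBrayCurtis)
  # Extra arguments after FUNC are passed through to FUNC via '...'
}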
/scratch/gouwar.j/cran-all/cranData/wiqid/R/distShell.R
# This file contains methods for classes defined in OTHER packages. # for class 'runjags' from the runjags package as.data.frame.runjags <- function(x, ...) { df <- as.data.frame(as.matrix(as.mcmc.list(x))) names(df) <- sub(",", "\\.", sub("\\]", "", sub("\\[", "", names(df)))) return(invisible(df)) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/extraMethods.R
# Calculation of MCMC error
# A wrapper for mcmcOutput::getMCE

getMCerror <- function(object, n.chains, SDpc=FALSE) {
  # NB: 'n.chains' is retained in the signature but is not used here;
  #   mcmcOutput::getMCE takes the chain structure from 'object'.
  mcmcOutput::getMCE(x=object, pc=SDpc, bad=NA, sort=FALSE)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/getMCerror.R
# Functions to run JAGS via 'rjags' with no extra features/annoyances # loads/unloads more than one JAGS module # rjags::load.module and unload.module will only load/unload one module! loadJagsModules <- function(modules) { for(i in seq_along(modules)) rjags::load.module(modules[i]) } unloadJagsModules <- function(modules) { for(i in seq_along(modules)) rjags::unload.module(modules[i]) } #'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''' # Run JAGS in serial mode. # This function is also called (with chains=1) to run JAGS in each worker. # Note that initList MUST be the first argument to work with parLapply. justRunJagsSerial <- function(initList, data, params, modelFile, chains, draws, burnin, adapt=1000, thin=1) { jm <- rjags::jags.model(modelFile, data, initList, n.chains=chains, n.adapt=adapt) if(burnin > 0) update(jm, burnin) rjags::coda.samples(jm, params, n.iter=ceiling(draws / chains) * thin, thin=thin) } # --------------------------------------------------------------- # The main function to run JAGS justRunJags <- function(data, inits, params, modelFile, chains, draws, burnin, thin=1, adapt = 1000, modules = c("glm"), parallel = NULL, seed=NULL) { # Check that `rjags` is installed if(!requireNamespace("rjags", quietly=TRUE)) { stop("The 'rjags' package and the JAGS program are needed to run this function.", call.=FALSE) } # Deal with parallelism: if(chains == 1) parallel <- FALSE if(is.null(parallel)) parallel <- chains < detectCores() if(parallel) { coresToUse <- min(chains, detectCores() - 1) if(coresToUse < 2) { warning("Multiple cores not available; running chains sequentially.") parallel <- FALSE } } if(parallel) { if(chains > coresToUse) warning(paste("Running", chains, "chains on", coresToUse, "cores.")) } # Deal with seeds and RNGs set.seed(seed, kind='default') chainSeeds <- sample.int(1e6, chains) rng0 <- paste("base", c("Wichmann-Hill", "Marsaglia-Multicarry", "Super-Duper", "Mersenne-Twister"), sep="::") rng <- rep(rng0, length=chains) # Fix inits if(is.function(inits)) { initList <- lapply(1:chains, function(x) inits()) } else if (is.list(inits) && length(inits) == chains) { initList <- inits } else stop("inits must be a function or a list of length = chains") for(i in 1:chains) { initList[[i]]$.RNG.name <- rng[i] initList[[i]]$.RNG.seed <- chainSeeds[i] } if(parallel) { ##### Do the parallel stuff ##### message("Waiting for parallel processing to complete...", appendLF=FALSE) ; flush.console() cl <- makeCluster(coresToUse) ; on.exit(stopCluster(cl)) clusterEvalQ(cl, library(rjags)) if(!is.null(modules)) { clusterExport(cl, c("modules", "loadJagsModules"), envir=environment()) clusterEvalQ(cl, loadJagsModules(modules)) # No need to unload as we stopCluster } chainList <- parLapply(cl, initList, justRunJagsSerial, data=data, params=params, modelFile=modelFile, chains=1, draws=ceiling(draws / chains), burnin=burnin, adapt=adapt, thin=thin) mcmcList <- mcmc.list(lapply(chainList, function(x) x[[1]])) message("done.") } else { ##### Do the serial stuff ##### if(!is.null(modules)) loadJagsModules(modules) mcmcList <- justRunJagsSerial(initList, data=data, params=params, modelFile=modelFile, chains=chains, draws=draws, burnin=burnin, adapt=adapt, thin=thin) if(!is.null(modules)) unloadJagsModules(modules) } invisible(mcmcList) }
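# A minimal usage sketch for justRunJags with a toy normal-mean model; it
#   needs JAGS and the 'rjags' package installed, and the model text, data
#   and settings here are illustrative only.
if(FALSE) {
  modFile <- tempfile(fileext=".txt")
  writeLines("
  model {
    for(i in 1:N) {
      y[i] ~ dnorm(mu, tau)
    }
    mu ~ dnorm(0, 1.0E-4)
    tau ~ dgamma(0.001, 0.001)
  }", modFile)
  jdata <- list(y = rnorm(20, 5, 2), N = 20)
  inits <- function() list(mu = rnorm(1))
  mc <- justRunJags(jdata, inits, params=c("mu", "tau"), modelFile=modFile,
      chains=3, draws=3000, burnin=1000, parallel=FALSE, seed=42)
  summary(mc)
}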
/scratch/gouwar.j/cran-all/cranData/wiqid/R/justRunJags.R
# Single-season 2-species occupancy function, based on Richmond et al 2010. # Richmond et al (2010) interpret "species A detected" to mean # "species A detected at the site on the occasion in question." # See the example in Eqn2 p2038. # DHA is detection history of the dominant species # DHB is detection history of the subordinate species # model is now a list of two-sided formulae (new 2015-09-19) # The default is list(psiA~1, psiBa~1, pA~1, pB~1). If not included in the model # list, the remaining parameters are assigned the following values: # psiBA <- psiBa, rA <- pA, rBa <- pB, rBA <- rBa. occ2sps <- function(DHA, DHB, model=NULL, data=NULL, ci=0.95, verify=TRUE) { crit <- fixCI(ci) DHA <- as.matrix(DHA) DHB <- as.matrix(DHB) if(verify) { if(!all(dim(DHA) == dim(DHB))) stop("DHA and DHB do not have the same number of rows and columns.") DHA <- verifyDH(DHA, allowNA = TRUE) DHB <- verifyDH(DHB, allowNA = TRUE) # Check that the NAs match up if(!all(is.na(DHA) == is.na(DHB))) stop("Missing values in DHA and DHB do not match up.") } # Standardise the model: model <- stdModel(model, list(psiA=~1, psiBa=~1, pA=~1, pB=~1)) # Check for invalid submodels in 'model': parNames <- c("psiA", "psiBa", "psiBA", "pA", "pB", "rA", "rBa", "rBA") ok <- names(model) %in% parNames if(any(!ok)) stop("Invalid submodels for: ", paste(names(model)[!ok], collapse=", ")) # modPars is a vector of length 8 which maps the submodels needed to the # elements of 'model': modPars <- pmatch(parNames, names(model)) names(modPars) <- parNames if(is.null(model$psiBA)) modPars[3] <- modPars[2] # psiBA <- psiBa if(is.null(model$rA)) modPars[6] <- modPars[4] # rA <- pA if(is.null(model$rBa)) modPars[7] <- modPars[5] # rBa <- pB if(is.null(model$rBA)) modPars[8] <- modPars[7] # rBA <- rBa if(is.null(data)) { out <- occ2sps0(DHA, DHB, modPars, ci=ci) out$formulae <- model out$index <- list("psiA"=1, "psiBa"=2, "psiBA"=3, "pA"=4, "pB"=5, "rA"=6, "rBa"=7, "rBA"=8) return(out) } M <- length(model) # Number of elements in the model nSites <- nrow(DHA) site.names <- rownames(data) if(is.null(site.names)) site.names <- 1:nSites data <- as.data.frame(stddata(data, nocc=NULL, scaleBy=NULL)) # Get factor levels and scaling values (needed for prediction) xlev <- lapply(data[sapply(data, is.factor)], levels) scaling <- lapply(data[sapply(data, is.numeric)], getScaling, scaleBy = 1) data <- as.data.frame(lapply(data, doScaling, scaleBy = 1)) # Build model matrices modMatList <- vector('list', M) for(i in 1:M) modMatList[[i]] <- modelMatrix(model[[i]], data) parK <- sapply(modMatList, ncol) # Number of parameters for each model matrix K <- sum(parK) # total number of parameters idK <- rep(1:M, parK) # specifies which of the K parameters belongs to each model matrix index <- vector('list', length(model)) # needed for 'predict' names(index) <- names(model) for(i in seq_along(model)) index[[i]] <- (1:K)[idK == i] # Get coefficient names coefNames <- paste(rep(names(model), parK), unlist(lapply(modMatList, colnames)), sep=":") # Functions getlogPHI and getlogP moved to file occ2sps_utils.R 2017-10-30 # objects to hold output beta.mat <- matrix(NA_real_, K, 4) colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- coefNames logLik <- NA_real_ npar <- NA_real_ varcov <- NULL lp.mat <- matrix(NA_real_, nSites * 8, 3) colnames(lp.mat) <- c("est", "lowCI", "uppCI") rownames(lp.mat) <- as.vector(t(outer(parNames, site.names, paste, sep=":"))) # Do the neg log lik function: logitreal0 <- matrix(NA, nSites, M) nll <- 
function(params) { for(i in 1:M) { betas <- params[idK == i] logitreal0[, i] <- modMatList[[i]] %*% betas } logitreal <- logitreal0[, modPars] logPHI <- getlogPHI(logitreal[, 1:3]) ### pass logit to getlogPHI logP <- getlogP(DHA, DHB, logitreal[, 4:8]) ### pass logit to getlogP loglik <- apply(logPHI + logP, 1, logSumExp) return(min(-sum(loglik), .Machine$double.xmax)) } # Run mle estimation with optim: params <- rep(0, K) res <- optim(params, nll, method="L-BFGS-B", lower=-10, upper=10, hessian=TRUE) if(res$convergence > 0) { warning(paste("Convergence may not have been reached.", res$message)) } else { logLik <- -res$value } beta.mat[,1] <- res$par lp.mat0 <- matrix(NA, nSites, M) for(i in 1:M) { betas <- res$par[idK == i] lp.mat0[, i] <- modMatList[[i]] %*% betas } lp.mat[, 1] <- as.vector(lp.mat0[, modPars]) varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { npar <- K varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, beta.mat[, 1], "+") SElp0 <- matrix(NA, nSites, M) for(i in 1:M) { SElp0[, i] <- sqrt(getFittedVar(modMatList[[i]], varcov[idK == i, idK == i])) } SElp <- as.vector(SElp0[, modPars]) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") } out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = plogis(lp.mat), logLik = c(logLik=logLik, df=npar, nobs=nSites), ci = ci, formulae = model, index = index, xlev = xlev, scaling = scaling) class(out) <- c("wiqid", "list") return(out) }
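# A minimal usage sketch for occ2sps; the two detection-history matrices are
#   simulated, with species B tending to avoid sites occupied by the dominant
#   species A (all settings illustrative).
if(FALSE) {
  set.seed(1)
  nSites <- 60 ; nOcc <- 5
  zA <- rbinom(nSites, 1, 0.6)                        # presence of dominant A
  zB <- rbinom(nSites, 1, ifelse(zA == 1, 0.2, 0.5))  # B avoids A
  DHA <- matrix(rbinom(nSites * nOcc, 1, 0.5), nSites, nOcc) * zA
  DHB <- matrix(rbinom(nSites * nOcc, 1, 0.5), nSites, nOcc) * zB
  fit0 <- occ2sps(DHA, DHB)   # default: psiBA = psiBa, rA = pA, rBa = pB, rBA = rBa
  fit1 <- occ2sps(DHA, DHB, model = list(psiBA ~ 1))  # B's occupancy depends on A
  fit0$real
  fit1$real
}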
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occ2sps.R
# Single-season 2-species occupancy function, based on Richmond et al 2010.
# Simplified analysis with no covariates.
# Called by occ2sps, not exported

# modPars is a vector of length 8 which maps the coefficients estimated to the
#   model parameters; order is
#   psiA, psiBa, psiBA, pA, pB, rA, rBa, rBA.

occ2sps0 <- function(DHA, DHB, modPars, ci=0.95) {
  nSites <- nrow(DHA)
  crit <- fixCI(ci)

  # get the occupancy vector
  # psiX is a vector with elements psiA, psiBa, psiBA
  # getlogPHI <- function(psiX) {
    # log(c(psiX[1] * psiX[3],          # both
      # psiX[1] * (1 - psiX[3]),        # A only
      # (1 - psiX[1]) * psiX[2],        # B only
      # (1 - psiX[1]) * (1 - psiX[2]))) # neither
  # }
  # Functions getlogPHI and getlogP moved to file occ2sps_utils.R 2017-10-30

  # objects to hold output
  beta.mat <- matrix(NA_real_, 8, 4)
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- names(modPars)
  logLik <- NA_real_
  npar <- NA_real_
  varcov <- NULL

  # Do the neg log lik function:
  nll <- function(params) {
    logitreal <- params[modPars]  # this is a vector
    logPHI <- getlogPHI(t(logitreal[1:3]))  # this is a vector
    logPHImat <- matrix(logPHI, nSites, 4, byrow=TRUE)
    logP <- getlogP(DHA, DHB, t(logitreal[4:8]))
    loglik <- apply(logPHImat + logP, 1, logSumExp)
    return(min(-sum(loglik), .Machine$double.xmax))
  }

  # Run mle estimation with optim:
  params <- rep(0, length(unique(modPars)))
  res <- optim(params, nll, method="L-BFGS-B", lower=-10, upper=10, hessian=TRUE)
  if(res$convergence > 0) {
    warning(paste("Convergence may not have been reached.", res$message))
  } else {
    logLik <- -res$value
  }
  beta.mat[,1] <- res$par[modPars]

  varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE)
  if (!inherits(varcov0, "try-error")) {
    npar <- length(unique(modPars))
    varcov <- varcov0
    SE <- suppressWarnings(sqrt(diag(varcov))[modPars])
    beta.mat[, 2] <- SE
    beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, beta.mat[, 1], "+")
  }

  out <- list(call = match.call(),
              beta = beta.mat,
              beta.vcv = varcov,
              real = plogis(beta.mat[, -2]),
              logLik = c(logLik=logLik, df=npar, nobs=nSites),
              ci = ci)
  class(out) <- c("wiqid", "list")
  return(out)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occ2sps0.R
# Utilities for Single-season 2-species occupancy function, based on Richmond et al 2010. # These functions are used by occ2sps and occ2sps0 # function to get the occupancy matrix # logitpsiX is a matrix with LOGITS of psiA, psiBa, psiBA in columns # and a row for each site # output is a matrix with columns for probabilities of # both sps present, A only, B only, neither, and a row for each site getlogPHI <- function(logitpsiX) { logpsiX <- plogis(logitpsiX, log.p=TRUE) log1mpsiX <- plogis( -logitpsiX, log.p=TRUE) cbind(logpsiX[, 1] + logpsiX[, 3], # both logpsiX[, 1] + log1mpsiX[, 3], # A only log1mpsiX[, 1] + logpsiX[, 2], # B only log1mpsiX[, 1] + log1mpsiX[, 2]) # neither } # Do the detection matrix for all sites # logitpX is a MATRIX with COLUMNS for LOGITS of pA, pB, rA, rBa, rBA # output is a matrix with columns for likelihoods given # both sps present, A only, B only, neither, and a row for each site getlogP <- function(DHA, DHB, logitpX) { logpX <- plogis(logitpX, log.p = TRUE) log1mpX <- plogis( -logitpX, log.p = TRUE) if(nrow(logitpX) == 1) { logpX <- matrix(logpX, nrow(DHA), 5, byrow=TRUE) log1mpX <- matrix(log1mpX, nrow(DHA), 5, byrow=TRUE) } logP <- matrix(NA, nrow(DHA), 4) for(i in 1:nrow(logP)) { dhA <- DHA[i, ] dhB <- DHB[i, ] # prob of detecting B if both present conditional on detection of A logprobCapB <- dhA * logpX[i, 5] + (1 - dhA) * logpX[i, 4] log1mprobCapB <- dhA * log1mpX[i, 5] + (1 - dhA) * log1mpX[i, 4] logP[i, ] <- c( # Both sps present, use the r's sum(dhA * logpX[i, 3] + (1 - dhA) * log1mpX[i, 3], # A dhB * logprobCapB + (1 - dhB) * log1mprobCapB, na.rm=TRUE), # B # Sps A present, B absent, use pA if(sum(dhB, na.rm=TRUE) > 0) { -Inf } else { sum(dhA * logpX[i, 1] + (1 - dhA) * log1mpX[i, 1], na.rm=TRUE) # A }, # Sps A absent, B present, use pB if(sum(dhA, na.rm=TRUE) > 0) { -Inf } else { sum(dhB * logpX[i, 2] + (1 - dhB) * log1mpX[i, 2], na.rm=TRUE) # B }, # Neither present if(sum(dhA, dhB, na.rm=TRUE) > 0) { -Inf } else { 0 } ) } return(logP) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occ2sps_utils.R
# Multiseason occupancy - version cov2 # This version allows for # 1. site covars for psi1 # 2. site and site x interval covars (including a fixed interval effect) for gamma and epsilon # 3. site and site x survey occasion covars for probability of detection. # See MacKenzie et al (2006) "Occupancy..." p194ff occMS <- function(DH, occsPerSeason, model=NULL, data=NULL, ci=0.95, verify=TRUE, ...) { # ** DH is detection data in a 1/0/NA matrix or data frame, sites in rows, # detection occasions in columns.. # ** occsPerSeason is a scalar or vector with the number of occasions per season # ci is the required confidence interval. if(verify) { DH <- verifyDH(DH, allowNA=TRUE) } else { DH <- as.matrix(DH) } # Check for simple models: if(is.null(model)) return(occMS0(DH=DH, occsPerSeason=occsPerSeason, ci=ci, verify=FALSE, ...)) if(is.null(data)) return(occMStime(DH=DH, occsPerSeason=occsPerSeason, model=model, data=NULL, ci=ci, verify=FALSE, ...)) crit <- fixCI(ci) # Check for all-NA rows (eg, Grand Skinks data set!) allNA <- rowSums(!is.na(DH)) == 0 if(any(allNA)) { DH <- DH[!allNA, ] data <- data[!allNA, ] } # Deal with occsPerSeason nOcc <- ncol(DH) if(length(occsPerSeason) == 1) occsPerSeason <- rep(occsPerSeason, nOcc/occsPerSeason) if(sum(occsPerSeason) != nOcc) stop("Detection data do not match occasions per season.") nseas <- length(occsPerSeason) seasonID <- rep(1:nseas, occsPerSeason) # find last season with data getLast <- function(dh, grp) { if(all(dh==0)) { # Check for all-NA rows (eg, Grand Skinks data set!) return(NA) } else { return(max(which(rowsum(dh, grp) > 0))) } } last <- as.vector(apply((!is.na(DH))*1, 1, getLast, grp=factor(seasonID))) # DHplus <- as.matrix(cbind(last, DH)) # Extract info on surveys done, ie, 1 or 0 in the DH survey.done <- !is.na(as.vector(DH)) DHvec <- as.vector(DH)[survey.done] siteID <- row(DH)[survey.done] survID <- col(DH)[survey.done] # Standardise the model: model <- stdModel(model, defaultModel=list(psi1=~1, gamma=~1, epsilon=~1, p=~1)) # Check data file nSites <- nrow(DH) siteNames <- rownames(DH) if (is.null(siteNames)) siteNames <- rownames(data) if (is.null(siteNames)) siteNames <- 1:nSites if(!is.null(data)) { if(nrow(data) != nSites) stop("data must have a row for each site") rownames(data) <- NULL # rownames cause problems when the data frame is recast } dataList <- stddata(data, c(nOcc, nseas - 1), scaleBy=NULL) # add built-in covars interval <- rep(1:(nseas-1), each=nSites) dataList$.interval <- as.factor(interval) occasion <- rep(1:nOcc, each=nSites) dataList$.occasion <- as.factor(occasion) season <- rep.int(1:nseas, nSites*occsPerSeason) dataList$.season <- as.factor(season) # Get factor levels and scaling values (needed for prediction) xlev <- lapply(dataList[sapply(dataList, is.factor)], levels) scaling <- lapply(dataList[sapply(dataList, is.numeric)], getScaling, scaleBy = 1) dataList <- lapply(dataList, doScaling, scaleBy = 1) psi1df <- selectCovars(model$psi1, dataList, nSites) psi1Mat <- modelMatrix(model$psi1, psi1df) psi1K <- ncol(psi1Mat) gamDf <- selectCovars(model$gamma, dataList, nSites*(nseas-1)) gamMat <- modelMatrix(model$gamma, gamDf) gamK <- ncol(gamMat) epsDf <- selectCovars(model$epsilon, dataList, nSites*(nseas-1)) epsMat <- modelMatrix(model$epsilon, epsDf) epsK <- ncol(epsMat) pDfNA <- selectCovars(model$p, dataList, nSites*nOcc) pDf <- pDfNA[survey.done, , drop=FALSE] pMat <- modelMatrix(model$p, pDf) # modelMatrix removes any NAs if (nrow(pMat) != sum(survey.done)) stop("Missing survey covars not allowed when a 
survey was done.") pK <- ncol(pMat) K <- psi1K + gamK + epsK + pK parID <- rep(1:4, c(psi1K, gamK, epsK, pK)) index <- vector('list', length(model)) # needed for 'predict' names(index) <- names(model) for(i in seq_along(model)) index[[i]] <- (1:K)[parID == i] # objects to hold the output beta.mat <- matrix(NA_real_, K, 4) colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c( paste("psi1:", colnames(psi1Mat)), paste("gam:", colnames(gamMat)), paste("eps:", colnames(epsMat)), paste("p:", colnames(pMat))) lp.mat <- matrix(NA_real_, nSites*(nseas*2 - 1) + sum(survey.done), 3) colnames(lp.mat) <- c("est", "lowCI", "uppCI") rownames(lp.mat) <- c( paste0("psi:", siteNames), paste0("gamma:", siteNames, ",", interval), paste0("epsilon:", siteNames, ",", interval), paste0("p:", siteNames[siteID], ",", survID)) logLik <- NA_real_ npar <- NA_integer_ varcov <- NULL # negative log likelihood function nll <- function(param){ psi1Beta <- param[parID==1] gamBeta <- param[parID==2] epsBeta <- param[parID==3] pBeta <- param[parID==4] psi1Prob <- plogis(psi1Mat %*% psi1Beta) gamProb <- matrix(plogis(gamMat %*% gamBeta), nrow=nSites) epsProb <- matrix(plogis(epsMat %*% epsBeta), nrow=nSites) pProb <- plogis(pMat %*% pBeta) Prh <- rep(1, nSites) for(i in 1:nSites) { if (is.na(last[i])) next res <- c(psi1Prob[i], 1-psi1Prob[i]) # aka PHIO PHIt <- array(0, c(2, 2, nseas-1)) PHIt[1, 1, ] <- 1 - epsProb[i, ] PHIt[1, 2, ] <- epsProb[i, ] PHIt[2, 1, ] <- gamProb[i, ] PHIt[2, 2, ] <- 1 - gamProb[i, ] p <- pProb[siteID == i] dh <- DH[i, survID[siteID == i]] pvec <- p * dh + (1-p)*(1-dh) whichSeas <- seasonID[survID[siteID == i]] if(last[i] > 1) for(t in 1:(last[i]-1)) { if(any(whichSeas == t)) { D <- diag(c(prod(pvec[whichSeas==t]), 1-max(dh[whichSeas==t]))) res <- res %*% D } res <- res %*% PHIt[, , t] } PT <- c(prod(pvec[whichSeas==last[i]]), 1-max(dh[whichSeas==last[i]])) Prh[i] <- res %*% PT } return(min(-sum(log(Prh)), .Machine$double.xmax)) } # res <- nlm(nll, start, hessian=TRUE) nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- rep(0, K) nlmArgs$hessian <- TRUE res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. warning(paste("Convergence may not have been reached (code", res$code, ")")) beta.mat[,1] <- res$estimate lp.mat[, 1] <- c(psi1Mat %*% beta.mat[parID==1, 1], gamMat %*% beta.mat[parID==2, 1], epsMat %*% beta.mat[parID==3, 1], pMat %*% beta.mat[parID==4, 1]) logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) # if (!inherits(varcov0, "try-error") && all(diag(varcov0) > 0)) { if (!inherits(varcov0, "try-error")) { npar <- K varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE # tidy later beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") temp <- c( # diag(psi1Mat %*% varcov[parID==1, parID==1] %*% t(psi1Mat)), getFittedVar(psi1Mat, varcov[parID==1, parID==1]), getFittedVar(gamMat, varcov[parID==2, parID==2]), getFittedVar(epsMat, varcov[parID==3, parID==3]), getFittedVar(pMat, varcov[parID==4, parID==4])) if(all(temp >= 0)) { SElp <- sqrt(temp) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") } } out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = plogis(lp.mat), logLik = c(logLik=logLik, df=npar, nobs=nrow(DH)), ci = ci, formulae = model, index = index, xlev = xlev, scaling = scaling) class(out) <- c("wiqid", "list") return(out) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occMS.R
# Multiseason occupancy # See MacKenzie et al (2006) "Occupancy..." p194ff occMS0 <- function(DH, occsPerSeason, ci=0.95, verify=TRUE, ...) { # ** DH is detection data in a 1/0/NA matrix or data frame, sites in rows, # detection occasions in columns.. # ** occsPerSeason is a scalar or vector with the number of occasions per season # ci is the required confidence interval. if(verify) DH <- verifyDH(DH, allowNA=TRUE) crit <- fixCI(ci) # Check for all-NA rows (eg, Grand Skinks data set!) allNA <- rowSums(!is.na(DH)) == 0 if(any(allNA)) DH <- DH[!allNA, ] # Deal with occsPerSeason nOcc <- ncol(DH) if(length(occsPerSeason) == 1) occsPerSeason <- rep(occsPerSeason, nOcc/occsPerSeason) if(sum(occsPerSeason) != nOcc) stop("Detection data do not match occasions per season.") nseas <- length(occsPerSeason) seasonID <- rep(1:nseas, occsPerSeason) getLast <- function(dh, grp) max(which(rowsum(dh, grp) > 0)) last <- as.vector(apply((!is.na(DH))*1, 1, getLast, grp=factor(seasonID))) DHplus <- as.matrix(cbind(last, DH)) # This speeds things up by a factor of 60 beta.mat <- matrix(NA_real_, 4, 4) colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c("psi1", "gamma", "epsilon", "p") logLik <- NA_real_ npar <- NA_integer_ varcov <- NULL nll <- function(param) { psi1 <- plogis(param[1]) gam <- plogis(param[2]) eps <- plogis(param[3]) p <- plogis(param[4]) PHI0 <- c(psi1, 1-psi1) PHIt <- matrix(c(1-eps, gam, eps, 1-gam), 2) Prh <- apply(DHplus, 1, Prh1, p=p, PHI0=PHI0, PHIt=PHIt, seasonID) return(min(-sum(log(Prh)), .Machine$double.xmax)) } # res <- nlm(nll, start, hessian=TRUE) nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- rep(0, 4) nlmArgs$hessian <- TRUE res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. warning(paste("Convergence may not have been reached (nlm code", res$code, ")")) beta.mat[,1] <- res$estimate logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) # if (!inherits(varcov0, "try-error") && all(diag(varcov0) > 0)) { if (!inherits(varcov0, "try-error") ) { npar <- 4 varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE # tidy later beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") } out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = plogis(beta.mat[, -2]), logLik = c(logLik=logLik, df=npar, nobs=nrow(DH)), ci = ci, link="logit", index = list(psi1 = 1, gamma = 2, epsilon = 3, p = 4) ) class(out) <- c("wiqid", "list") return(out) } # .................................................... # A function to get Pr(dh) for a single detection history, # ie, one row of DH. # Not exported Prh1 <- function(dhp, p, PHI0, PHIt, seasonID) { last <- dhp[1] dh <- dhp[-1] if(all(is.na(dh))) return(1) pvec <- p * dh + (1-p)*(1-dh) res <- PHI0 if(last > 1) for(j in 1:(last-1)) { if(!all(is.na(pvec[seasonID==j]))) { D <- diag(c(prod(pvec[seasonID==j], na.rm=TRUE), 1-max(dh[seasonID==j], na.rm=TRUE))) res <- res %*% D } res <- res %*% PHIt } PT <- c(prod(pvec[seasonID==last], na.rm=TRUE), 1-max(dh[seasonID==last], na.rm=TRUE)) res <- res %*% PT }
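# A minimal usage sketch for the intercept-only dynamic model; the occupancy
#   matrix below ignores the Markov dynamics and is purely illustrative.
if(FALSE) {
  set.seed(2)
  nSites <- 100 ; nSeas <- 3 ; nOccPer <- 4
  z <- matrix(rbinom(nSites * nSeas, 1, 0.5), nSites, nSeas)
  DH <- matrix(rbinom(nSites * nSeas * nOccPer, 1, 0.35),
      nSites, nSeas * nOccPer) * z[, rep(1:nSeas, each=nOccPer)]
  occMS0(DH, occsPerSeason = nOccPer)$real   # psi1, gamma, epsilon, p
}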
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occMS0.R
# Multiseason occupancy
# This version allows for site covars and season differences,
#   but not (yet) seasonal covariates
#   eg psi(hab) gamma(hab+.season) ...
# See MacKenzie et al (2006) "Occupancy..." p194ff
# function Prh1A is defined in the file occMStime.R

occMScovSite <- function(DH, occsPerSeason, model=NULL, data=NULL, ci=0.95, verify=TRUE, ...) {
  # ** DH is detection data in a 1/0/NA matrix or data frame, sites in rows,
  #    detection occasions in columns.
  # ** occsPerSeason is a scalar or vector with the number of occasions per season
  # ci is the required confidence interval.

  if(verify) {
    DH <- verifyDH(DH, allowNA=TRUE)
  } else {
    DH <- as.matrix(DH)
  }
  crit <- fixCI(ci)

  # Deal with occsPerSeason
  nOcc <- ncol(DH)
  if(length(occsPerSeason) == 1)
    occsPerSeason <- rep(occsPerSeason, nOcc/occsPerSeason)
  if(sum(occsPerSeason) != nOcc)
    stop("Detection data do not match occasions per season.")
  nseas <- length(occsPerSeason)
  seasonID <- rep(1:nseas, occsPerSeason)
  # find last season with data
  getLast <- function(dh, grp) {
    if(all(dh==0)) {  # Check for all-NA rows (eg, Grand Skinks data set!)
      return(NA)
    } else {
      return(max(which(rowsum(dh, grp) > 0)))
    }
  }
  last <- as.vector(apply((!is.na(DH))*1, 1, getLast, grp=factor(seasonID)))
  DHplus <- as.matrix(cbind(last, DH))

  # Standardise the model:
  model <- stdModel(model, defaultModel=list(psi1=~1, gamma=~1, epsilon=~1, p=~1))

  # Check data file
  nSites <- nrow(DH)
  siteNames <- rownames(DH)
  if (is.null(siteNames))
    siteNames <- rownames(data)
  if (is.null(siteNames))
    siteNames <- 1:nSites
  if(!is.null(data)) {
    if(nrow(data) != nSites)
      stop("data must have a row for each site")
    rownames(data) <- NULL
  }
  # else {
  #   data <- data.frame(.dummy = rep(NA, nSites))
  # }
  # cat("Preparing design matrices...") ; flush.console()
  dataList <- stddata(data, NULL, 0.5)
  dataList$.interval <- as.factor(rep(1:(nseas-1), each=nSites))
  dataList$.season <- as.factor(rep(1:nseas, each=nSites))

  # Build model matrices
  psi1df <- selectCovars(model$psi1, dataList, nSites)
  if (nrow(psi1df) != nSites)
    stop("Covariate for psi1 is wrong length.")
  psi1Mat <- modelMatrix(model$psi1, psi1df)
  if (nrow(psi1Mat) != nSites)
    stop("Missing values not allowed in site covariates.")
  psi1K <- ncol(psi1Mat)
  gamDf <- selectCovars(model$gamma, dataList, nSites*(nseas-1))
  gamMat <- modelMatrix(model$gamma, gamDf)
  if (nrow(gamMat) != nSites*(nseas-1))
    stop("Missing values not allowed in site covariates.")
  gamK <- ncol(gamMat)
  epsDf <- selectCovars(model$epsilon, dataList, nSites*(nseas-1))
  epsMat <- modelMatrix(model$epsilon, epsDf)
  if (nrow(epsMat) != nSites*(nseas-1))
    stop("Missing values not allowed in site covariates.")
  epsK <- ncol(epsMat)
  pDf <- selectCovars(model$p, dataList, nSites*nseas)
  pMat <- modelMatrix(model$p, pDf)
  pK <- ncol(pMat)
  K <- psi1K + gamK + epsK + pK
  parID <- rep(1:4, c(psi1K, gamK, epsK, pK))

  beta.mat <- matrix(NA_real_, K, 4)
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- c(
    paste("psi1:", colnames(psi1Mat)),
    paste("gam:", colnames(gamMat)),
    paste("eps:", colnames(epsMat)),
    paste("p:", colnames(pMat)))
  lp.mat <- matrix(NA_real_, nSites*(3*nseas-1), 3)
  colnames(lp.mat) <- c("est", "lowCI", "uppCI")
  rownames(lp.mat) <- c(
    paste0("psi:", siteNames),
    paste0("gamma:", siteNames, ",", dataList$.interval),
    paste0("epsilon:", siteNames, ",", dataList$.interval),
    paste0("p:", siteNames, ",", dataList$.season))
  logLik <- NA_real_
  npar <- NA_integer_
  varcov <- NULL

  nll <- function(param){
    psi1Beta <- param[parID==1]
    gamBeta <- param[parID==2]
    epsBeta <- param[parID==3]
    pBeta <-
param[parID==4] psi1Prob <- plogis(psi1Mat %*% psi1Beta) gamProb <- matrix(plogis(gamMat %*% gamBeta), nrow=nSites) epsProb <- matrix(plogis(epsMat %*% epsBeta), nrow=nSites) pProb <- matrix(plogis(pMat %*% pBeta), nrow=nSites) Prh <- numeric(nSites) for(i in 1:nSites) { PHI0 <- c(psi1Prob[i], 1-psi1Prob[i]) PHIt <- array(0, c(2, 2, nseas-1)) PHIt[1, 1, ] <- 1 - epsProb[i, ] PHIt[1, 2, ] <- epsProb[i, ] PHIt[2, 1, ] <- gamProb[i, ] PHIt[2, 2, ] <- 1 - gamProb[i, ] p <- pProb[i, seasonID] Prh[i] <- Prh1A(DHplus[i, ], p=p, PHI0=PHI0, PHIt=PHIt, seasonID) } return(min(-sum(log(Prh)), .Machine$double.xmax)) } nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- rep(0, K) nlmArgs$hessian <- TRUE res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. warning(paste("Convergence may not have been reached (code", res$code, ")")) beta.mat[,1] <- res$estimate lp.mat[, 1] <- c(psi1Mat %*% beta.mat[parID==1, 1], gamMat %*% beta.mat[parID==2, 1], epsMat %*% beta.mat[parID==3, 1], pMat %*% beta.mat[parID==4, 1]) logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) # if (!inherits(varcov0, "try-error") && all(diag(varcov0) > 0)) { if (!inherits(varcov0, "try-error")) { npar <- K varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE # tidy later beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") temp <- c( getFittedVar(psi1Mat, varcov[parID==1, parID==1]), getFittedVar(gamMat, varcov[parID==2, parID==2]), getFittedVar(epsMat, varcov[parID==3, parID==3]), getFittedVar(pMat, varcov[parID==4, parID==4])) if(all(temp >= 0)) { SElp <- sqrt(temp) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") } } # cat("done\n") ; flush.console() out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = plogis(lp.mat), logLik = c(logLik=logLik, df=npar, nobs=nrow(DH)), ci = ci) class(out) <- c("wiqid", "list") return(out) }
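# A minimal usage sketch for occMScovSite with a site covariate on initial
#   occupancy and a season effect on detection, using the built-in .season
#   factor; data simulated, settings illustrative.
if(FALSE) {
  set.seed(3)
  nSites <- 80 ; nSeas <- 3 ; nOccPer <- 3
  elev <- rnorm(nSites)
  z <- matrix(rbinom(nSites * nSeas, 1, plogis(elev)), nSites, nSeas)
  DH <- matrix(rbinom(nSites * nSeas * nOccPer, 1, 0.4),
      nSites, nSeas * nOccPer) * z[, rep(1:nSeas, each=nOccPer)]
  fit <- occMScovSite(DH, nOccPer,
      model = list(psi1 ~ elev, p ~ .season),
      data = data.frame(elev))
  head(fit$real)
}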
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occMScovSite.R
# Multiseason occupancy # See MacKenzie et al (2006) "Occupancy..." p194ff occMStime <- function(DH, occsPerSeason, model=NULL, data=NULL, ci=0.95, verify=TRUE, ...) { # ** DH is detection data in a 1/0/NA matrix or data frame, sites in rows, # detection occasions in columns.. # ** occsPerSeason is a scalar or vector with the number of occasions per season # ci is the required confidence interval. if(verify) { DH <- verifyDH(DH, allowNA=TRUE) } else { DH <- as.matrix(DH) } crit <- fixCI(ci) # Check for all-NA rows (eg, Grand Skinks data set!) allNA <- rowSums(!is.na(DH)) == 0 if(any(allNA)) { DH <- DH[!allNA, ] data <- data[!allNA, ] } # Deal with occsPerSeason nOcc <- ncol(DH) if(length(occsPerSeason) == 1) occsPerSeason <- rep(occsPerSeason, nOcc/occsPerSeason) if(sum(occsPerSeason) != nOcc) stop("Detection data do not match occasions per season.") nseas <- length(occsPerSeason) seasonID <- rep(1:nseas, occsPerSeason) # find last season with data getLast <- function(dh, grp) { if(all(dh==0)) { # Check for all-NA rows (eg, Grand Skinks data set!) return(NA) } else { return(max(which(rowsum(dh, grp) > 0))) } } last <- as.vector(apply((!is.na(DH))*1, 1, getLast, grp=factor(seasonID))) DHplus <- as.matrix(cbind(last, DH)) # Standardise the model: model <- stdModel(model, defaultModel=list(gamma=~1, epsilon=~1, p=~1)) # Check data file dataList <- stddata(data, NULL, 0.5) dataList$.interval <- as.factor(c(rep(1:(nseas-1)), NA)) dataList$.season <- as.factor(rep(1:nseas)) gamDf <- selectCovars(model$gamma, dataList, nseas)[-nseas, , drop=FALSE] gamMat <- modelMatrix(model$gamma, gamDf) gamK <- ncol(gamMat) epsDf <- selectCovars(model$epsilon, dataList, nseas)[-nseas, , drop=FALSE] epsMat <- modelMatrix(model$epsilon, epsDf) epsK <- ncol(epsMat) pDf <- selectCovars(model$p, dataList, nseas) pMat <- modelMatrix(model$p, pDf) pK <- ncol(pMat) K <- 1 + gamK + epsK + pK parID <- rep(1:4, c(1, gamK, epsK, pK)) beta.mat <- matrix(NA_real_, K, 4) colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c("psi1", paste("gam:", colnames(gamMat)), paste("eps:", colnames(epsMat)), paste("p:", colnames(pMat))) lp.mat <- matrix(NA_real_, 3*nseas-1, 3) colnames(lp.mat) <- c("est", "lowCI", "uppCI") rownames(lp.mat) <- c("psi1", paste0("gam", 1:(nseas-1)), paste0("eps", 1:(nseas-1)), paste0("p", 1:nseas)) logLik <- NA_real_ npar <- NA_integer_ varcov <- NULL nll <- function(param){ psi1 <- plogis(param[1]) gamBeta <- param[parID==2] epsBeta <- param[parID==3] pBeta <- param[parID==4] gamProb <- plogis(gamMat %*% gamBeta) epsProb <- plogis(epsMat %*% epsBeta) pProb <- plogis(pMat %*% pBeta)[seasonID] PHI0 <- c(psi1, 1-psi1) PHIt <- array(0, c(2, 2, nseas-1)) PHIt[1, 1, ] <- 1 - epsProb PHIt[1, 2, ] <- epsProb PHIt[2, 1, ] <- gamProb PHIt[2, 2, ] <- 1 - gamProb Prh <- apply(DHplus, 1, Prh1A, p=pProb, PHI0=PHI0, PHIt=PHIt, seasonID) return(min(-sum(log(Prh)), .Machine$double.xmax)) } nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- rep(0, K) nlmArgs$hessian <- TRUE res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. 
warning(paste("Convergence may not have been reached (code", res$code, ")")) beta.mat[,1] <- res$estimate lp.mat[, 1] <- c(beta.mat[1, 1], gamMat %*% beta.mat[parID==2, 1], epsMat %*% beta.mat[parID==3, 1], pMat %*% beta.mat[parID==4, 1]) logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { npar <- K varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE # tidy later beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") temp <- c(varcov[1, 1], getFittedVar(gamMat, varcov[parID==2, parID==2]), getFittedVar(epsMat, varcov[parID==3, parID==3]), getFittedVar(pMat, varcov[parID==4, parID==4])) if(all(temp >= 0)) { SElp <- sqrt(temp) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") } } out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = plogis(lp.mat), logLik = c(logLik=logLik, df=npar, nobs=nrow(DH))) class(out) <- c("wiqid", "list") return(out) } # .................................................... # A function to get Pr(dh) for a single detection history, # ie, one row of DH. This version has a 3-D array for PHIt # Not exported # dh is a 0/1/NA of length equal total no. of surveys # p is a scalar, or vector of detection probs of length equal to # dh # PHI0 is the vector c(psi1, 1-psi1) # PHIt is a 2 x 2 x (nseas-1) array, where # PHIt[,,t] = matrix(c(1-eps[t], gam[t], eps[t], 1-gam[t]), 2) # seasonID is a vector of length equal to dh, identifying the season. Prh1A <- function(dhp, p, PHI0, PHIt, seasonID) { last <- dhp[1] if (is.na(last)) # occurs if all observations are NA return(1) dh <- dhp[-1] pvec <- p * dh + (1-p)*(1-dh) res <- PHI0 if(last > 1) for(t in 1:(last-1)) { if(!all(is.na(pvec[seasonID==t]))) { D <- diag(c(prod(pvec[seasonID==t], na.rm=TRUE), 1-max(dh[seasonID==t], na.rm=TRUE))) res <- res %*% D } res <- res %*% PHIt[, , t] } PT <- c(prod(pvec[seasonID==last], na.rm=TRUE), 1-max(dh[seasonID==last], na.rm=TRUE)) res <- res %*% PT return(res) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occMStime.R
# Single season occupancy with site and survey covariates.
# 'link' argument added 2015-02-20
# modifications to allow 'predict' 2017-02-09

occSS <- function(DH, model=NULL, data=NULL, ci=0.95, link=c("logit", "probit"),
    verify=TRUE, ...) {
  # single-season occupancy models with site and survey covariates
  # ** DH is detection data in a 1/0/NA matrix or data frame, sites in rows,
  #    detection occasions in columns.
  # ** model is a list of 2-sided formulae for psi and p; can also be a single
  #    2-sided formula, eg, model = psi ~ habitat.
  # ** data is a DATA FRAME with single columns for site covariates and, for
  #    each survey covariate, a column for each survey occasion.
  # ci is the required confidence interval.

  if(verify)
    DH <- verifyDH(DH, allowNA=TRUE)

  if(is.null(model)) {
    y <- rowSums(DH, na.rm=TRUE)
    n <- rowSums(!is.na(DH))
    return(occSS0(y, n, ci=ci, link=link, ...))
  }

  crit <- fixCI(ci)

  if(match.arg(link) == "logit") {
    plink <- plogis
  } else {
    plink <- pnorm
  }

  # Standardise the model:
  model <- stdModel(model, list(psi=~1, p=~1))

  # Summarize detection history
  site.names <- rownames(DH)
  DH <- as.matrix(DH)
  nSites <- nrow(DH)
  nSurv <- ncol(DH)
  notDetected <- rowSums(DH, na.rm=TRUE) == 0  # TRUE if species NOT detected at the site
  if (nSurv < 2)
    stop("More than one survey occasion is needed")
  if(is.null(site.names))
    site.names <- 1:nSites

  # Convert the covariate data frame into a list
  dataList <- stddata(data, nSurv, scaleBy=NULL)
  time <- rep(1:nSurv, each=nSites)
  # dataList$.Time <- as.vector(scale(time)) /2
  dataList$.Time <- time
  dataList$.Time2 <- time^2
  dataList$.Time3 <- time^3
  dataList$.time <- as.factor(time)
  before <- cbind(FALSE, DH[, 1:(nSurv - 1)] > 0)  # 1 if animal seen on previous occasion
  dataList$.b <- as.vector(before)

  # Get factor levels and scaling values (needed for prediction)
  xlev <- lapply(dataList[sapply(dataList, is.factor)], levels)
  scaling <- lapply(dataList[sapply(dataList, is.numeric)],
    getScaling, scaleBy = 1)
  dataList <- lapply(dataList, doScaling, scaleBy = 1)

  survey.done <- !is.na(as.vector(DH))
  DHvec <- as.vector(DH)[survey.done]
  siteID <- as.factor(row(DH))[survey.done]
  survID <- as.factor(col(DH))[survey.done]

  psiDf <- selectCovars(model$psi, dataList, nSites)
  if (nrow(psiDf) != nSites)
    stop("Number of site covars doesn't match sites.\nAre you using survey covars?")
  psiModMat <- modelMatrix(model$psi, psiDf)
  if(nrow(psiModMat) != nrow(psiDf))
    stop("Missing site covariates are not allowed.")
  psiK <- ncol(psiModMat)
  pDf0 <- selectCovars(model$p, dataList, nSites*nSurv)
  pDf <- pDf0[survey.done, , drop=FALSE]
  pModMat <- modelMatrix(model$p, pDf)
  if(nrow(pModMat) != nrow(pDf))
    stop("Missing survey covariates are not allowed when a survey was done.")
  pK <- ncol(pModMat)
  K <- psiK + pK

  # objects to hold output
  beta.mat <- matrix(NA_real_, K, 4)
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- c(
    paste("psi:", colnames(psiModMat)),
    paste("p:", colnames(pModMat)))
  lp.mat <- matrix(NA_real_, nSites + sum(survey.done), 3)
  colnames(lp.mat) <- c("est", "lowCI", "uppCI")
  rownames(lp.mat) <- c(
    paste("psi:", site.names, sep=""),
    paste("p:", siteID, ",", survID, sep=""))
  logLik <- NA_real_
  npar <- NA_integer_
  varcov <- NULL

  # Negative log likelihood function
  nll <- function(param){
    psiBeta <- param[1:psiK]
    pBeta <- param[(psiK+1):K]
    # psiProb <- as.vector(plink(psiModMat %*% psiBeta))
    linkpsi <- as.vector(psiModMat %*% psiBeta)
    logpsi <- plink(linkpsi, log.p=TRUE)
    log1mpsi <- plink( -linkpsi, log.p=TRUE)
    linkp <- pModMat %*% pBeta
    logp <- plink(linkp,
log.p=TRUE) log1mp <- plink( -linkp, log.p=TRUE) logLik1 <- DHvec * logp + (1-DHvec) * log1mp logLik2 <- tapply(logLik1, siteID, sum) llh <- sum(logAddExp(logpsi + logLik2, log1mpsi + log(notDetected))) return(min(-llh, .Machine$double.xmax)) } # Run mle estimation with nlm: # res <- nlm(nll, param, hessian=TRUE) nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- rep(0, K) nlmArgs$hessian <- TRUE res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. warning(paste("Convergence may not have been reached (code", res$code, ")")) beta.mat[,1] <- res$estimate lp.mat[, 1] <- c(psiModMat %*% beta.mat[1:psiK, 1], pModMat %*% beta.mat[(psiK+1):K, 1]) if(res$code < 3) # Keep NA if in doubt logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { npar <- K varcov <- varcov0 rownames(varcov) <- rownames(beta.mat) SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") # SElp <- c(sqrt(diag(psiModMat %*% varcov[1:psiK, 1:psiK] %*% t(psiModMat))), # sqrt(diag(pModMat %*% varcov[(psiK+1):K, (psiK+1):K] %*% t(pModMat)))) SElp <- sqrt(c(getFittedVar(psiModMat, varcov[1:psiK, 1:psiK]), getFittedVar(pModMat, varcov[(psiK+1):K, (psiK+1):K]))) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") } out <- list(call = match.call(), link = match.arg(link), beta = beta.mat, beta.vcv = varcov, real = plink(lp.mat), logLik = c(logLik=logLik, df=npar, nobs=nrow(DH)), ci = ci, formulae = model, index = list(psi=1:psiK, p=(psiK+1):K), xlev = xlev, scaling = scaling) class(out) <- c("wiqid", "list") return(out) }
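# A minimal usage sketch for occSS with a site covariate on occupancy and the
#   built-in .time factor on detection; data simulated, settings illustrative.
if(FALSE) {
  set.seed(5)
  nSites <- 100 ; nOcc <- 4
  elev <- rnorm(nSites)
  z <- rbinom(nSites, 1, plogis(elev))
  DH <- matrix(rbinom(nSites * nOcc, 1, 0.4), nSites, nOcc) * z
  fit <- occSS(DH, model = list(psi ~ elev, p ~ .time),
      data = data.frame(elev))
  head(fit$real)
  # Other built-in survey covariates: .Time (trend), .Time2, .Time3 and
  #   .b (detection on the previous occasion, for a behavioural response).
}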
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occSS.R
# Single season occupancy with no covariates (psi(.) p(.) model) # 'link' argument added 2015-02-20 occSS0 <- function(y, n, ci=0.95, link=c("logit", "probit"), ...) { # n is a vector with the number of occasions at each site. # y is a vector with the number of detections at each site. # ci is the required confidence interval. if(length(n) == 1) n <- rep(n, length(y)) if(length(y) != length(n)) stop("y and n must have the same length") if(any(y > n)) stop("y cannot be greater than n") crit <- fixCI(ci) if(match.arg(link) == "logit") { plink <- plogis } else { plink <- pnorm } beta.mat <- matrix(NA_real_, 2, 4) colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c("psiHat", "pHat") logLik <- NA_real_ npar <- NA_integer_ varcov <- NULL if(sum(n) > 0) { # If all n's are 0, no data available. nll <- function(params) { logpsi <- plink(params[1], log.p=TRUE) log1mpsi <- plink( -params[1], log.p=TRUE) logp <- plink(params[2], log.p=TRUE) log1mp <- plink( -params[2], log.p=TRUE) logprob <- logAddExp(logpsi + logp * y + log1mp * (n - y), log1mpsi + log(y==0)) return(min(-sum(logprob), .Machine$double.xmax)) } # res <- nlm(nll, params, hessian=TRUE) nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- rep(0, 2) nlmArgs$hessian <- TRUE res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. warning(paste("Convergence may not have been reached (code", res$code, ")")) beta.mat[,1] <- res$estimate logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { npar <- 2L varcov <- varcov0 beta.mat[, 2] <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 3:4] <- sweep(outer(beta.mat[, 2], crit), 1, res$estimate, "+") } } out <- list(call = match.call(), link = match.arg(link), beta = beta.mat, beta.vcv = varcov, real = plink(beta.mat[, -2]), logLik = c(logLik=logLik, df=npar, nobs=length(y)), ci = ci, formulae = list(psi= ~ 1, p = ~1), index = list(psi= 1, p = 2)) class(out) <- c("wiqid", "list") return(out) }
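# A minimal usage sketch for occSS0 with detections y out of n occasions per
#   site; the counts are made up for illustration.
if(FALSE) {
  y <- c(0, 2, 0, 1, 3, 0, 0, 2, 1, 0)
  occSS0(y, n = 3)$real
  occSS0(y, n = 3, link = "probit")$real   # same model on the probit scale
}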
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occSS0.R
# Single season occupancy with site covariates (not survey covariates)

# 'model' argument added 2013-12-02
# 'link' argument added 2015-02-20

occSScovSite <- function(y, n, model=NULL, data=NULL, ci=0.95,
    link=c("logit", "probit"), ...) {
  # single-season occupancy models with site-specific covariates
  # new version with y/n input; much faster!
  # y is a vector with the number of detections at each site.
  # n is a vector with the number of occasions at each site.
  # model is a list of 2-sided formulae for psi and p; can also be a single
  #   2-sided formula, eg, model = psi ~ habitat.
  # ci is the required confidence interval.

  if(length(n) == 1)
    n <- rep(n, length(y))
  if(length(y) != length(n))
    stop("y and n must have the same length")
  if(any(y > n))
    stop("y cannot be greater than n")
  crit <- fixCI(ci)

  if(match.arg(link) == "logit") {
    plink <- plogis
  } else {
    plink <- pnorm
  }

  # Standardise the model:
  model <- stdModel(model, list(psi=~1, p=~1))

  # Convert the covariate data frame into a list
  nSites <- length(y)
  dataList <- stddata(data, nocc=NULL)

  psiDf <- selectCovars(model$psi, dataList, nSites)
  if (nrow(psiDf) != nSites)
    stop("Number of site covars doesn't match sites.")
  psiModMat <- modelMatrix(model$psi, psiDf)
  psiK <- ncol(psiModMat)
  pDf <- selectCovars(model$p, dataList, nSites)
  if (nrow(pDf) != nSites)
    stop("Number of site covars doesn't match sites.")
  pModMat <- modelMatrix(model$p, pDf)
  pK <- ncol(pModMat)
  K <- psiK + pK
  # modelMatrix removes rows with NAs:
  if(nrow(psiModMat) != nSites || nrow(pModMat) != nSites)
    stop("Missing site covariates are not allowed.")

  beta.mat <- matrix(NA_real_, K, 4)
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- c(
    paste("psi:", colnames(psiModMat)),
    paste("p:", colnames(pModMat)))
  lp.mat <- matrix(NA_real_, nSites*2, 3)
  colnames(lp.mat) <- c("est", "lowCI", "uppCI")
  rownames(lp.mat) <- c(
    paste("psi:", 1:nSites, sep=""),
    paste("p:", 1:nSites, sep=""))
  logLik <- NA_real_
  npar <- NA_integer_
  varcov <- NULL

  nll <- function(param){
    psiBeta <- param[1:psiK]
    pBeta <- param[(psiK+1):K]
    logitpsi <- as.vector(psiModMat %*% psiBeta)
    logpsi <- plink(logitpsi, log.p=TRUE)
    log1mpsi <- plink( -logitpsi, log.p=TRUE)
    logitp <- as.vector(pModMat %*% pBeta)
    logp <- plink(logitp, log.p=TRUE)
    log1mp <- plink( -logitp, log.p=TRUE)
    logprob <- logAddExp(logpsi + logp * y + log1mp * (n - y),
                         log1mpsi + log(y==0))
    return(min(-sum(logprob), .Machine$double.xmax))
  }

  # Run mle estimation with nlm:
  # res <- nlm(nll, param, hessian=TRUE)
  nlmArgs <- list(...)
  nlmArgs$f <- nll
  nlmArgs$p <- rep(0, K)
  nlmArgs$hessian <- TRUE
  res <- do.call(nlm, nlmArgs)
  if(res$code > 2)   # exit code 1 or 2 is ok.
warning(paste("Convergence may not have been reached (nlm code", res$code, ")")) # Process output beta.mat[,1] <- res$estimate lp.mat[, 1] <- c(psiModMat %*% beta.mat[1:psiK, 1], pModMat %*% beta.mat[(psiK+1):K, 1]) logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { npar <- K varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") temp <- c(getFittedVar(psiModMat, varcov[1:psiK, 1:psiK]), getFittedVar(pModMat, varcov[(psiK+1):K, (psiK+1):K])) if(all(temp >= 0)) { SElp <- sqrt(temp) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") } } out <- list(call = match.call(), link = match.arg(link), beta = beta.mat, beta.vcv = varcov, real = plink(lp.mat), logLik = c(logLik=logLik, df=npar, nobs=length(y))) class(out) <- c("wiqid", "list") return(out) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occSScovSite.R
# Royle-Nichols occupancy models, with abundance-induced heterogeneity in detection probability. # 'link' argument added 2015-02-20 # 'verify' argument added 2016-09-20 ## WORK IN PROGRESS ## can't yet deal with survey covariates. occSSrn <- function(DH, model=NULL, data=NULL, ci=0.95, link=c("logit", "probit"), verify=TRUE, ...) { # single-season Royle-Nichols model with site and survey covariates # ** DH is detection data in a 1/0/NA matrix or data frame, sites in rows, # detection occasions in columns. # ** model is a list of 2-sided formulae for lambda and r; can also be a single # 2-sided formula, eg, model = lambda ~ habitat. # NOTE: survey covariates not yet implemented! # ** data is a DATA FRAME with single columns for site covariates and a column for each survey occasion for each survey covariate. # ci is the required confidence interval. if (verify) DH <- verifyDH(DH, allowNA=TRUE) if(TRUE) { # TODO check that survey covars aren't included in the model y <- rowSums(DH, na.rm=TRUE) n <- rowSums(!is.na(DH)) if(is.null(model)) { return(occSSrn0(y, n, ci=ci, link=link, ...)) } else { return(occSSrnSite(y, n, model=model, data=data, ci=ci, link=link, ...)) } } } # ------------------------------------------------------------------ occSSrn0 <- function(y, n, ci=0.95, link=c("logit", "probit"), ...) { # Fast version without covariates. # y is a vector with the number of detections at each site. # n is a vector with the number of occasions at each site. # ci is the required confidence interval. if(length(n) == 1) n <- rep(n, length(y)) if(length(y) != length(n)) stop("y and n must have the same length") if(any(y > n)) stop("y cannot be greater than n") crit <- fixCI(ci) if(match.arg(link) == "logit") { plink <- plogis } else { plink <- pnorm } # Starting values: beta.mat <- matrix(NA_real_, 2, 4) colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c("lambda", "r") logLik <- NA_real_ npar <- NA_integer_ varcov <- NULL if(sum(n) > 0 && sum(y) > 0 && any(y < n)) { # If all n's are 0, no data available. params <- c(0, 0) Nmax <- 100 # See later if this is sensible # Negative log-likelihood function: # nll <- function(params) { # lambda <- exp(params[1]) # r <- plink(params[2]) # rpart <- (1-r)^(0:Nmax) # Npart <- dpois(0:Nmax, lambda) # llh <- 0 # for(i in seq_along(n)) { # llh <- llh + log(sum((1-rpart)^y[i] * rpart^(n[i]-y[i]) * Npart)) # } # return(min(-llh, .Machine$double.xmax)) # min(..) stops Inf being returned # } nll <- function(params) { lambda <- exp(params[1]) log1mr <- plink( -params[2], log.p=TRUE) # log(1-r) logrpart <- log1mr * (0:Nmax) # log( (1-r)^N ), vector length 101 log1mrpart <- log1minusExp(logrpart) # log( 1-(1-r)^N ), vector length 101 logNpart <- dpois(0:Nmax, lambda, log=TRUE) # log(poisson), vector length 101 llh <- 0 for(i in seq_along(n)) { tmp <- if(y[i]==0) 0 else log1mrpart*y[i] # because log(0) * 0 -> NaN llh <- llh + logSumExp(tmp + logrpart*(n[i]-y[i]) + logNpart) } return(min(-llh, .Machine$double.xmax)) # min(..) stops Inf being returned } nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- params nlmArgs$hessian <- TRUE res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. 
warning(paste("Convergence may not have been reached (code", res$code, ")")) beta.mat[,1] <- res$estimate logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { npar <- 2L varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") } } lambda <- exp(beta.mat[1, -2]) real <- rbind(1-dpois(0, lambda), lambda, plink(beta.mat[2, -2])) colnames(real) <- c("est", "lowCI", "uppCI") rownames(real) <- c("psiHat", "lambdaHat", "rHat") out <- list(call = match.call(), link = c(lambda = "log", r = match.arg(link)), beta = beta.mat, beta.vcv = varcov, real = real, logLik = c(logLik=logLik, df=npar, nobs=length(y)), ci = ci, formulae = list(lambda = ~ 1, r = ~ 1), index = list(lambda = 1, r = 2)) class(out) <- c("wiqid", "list") return(out) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occSSrn.R
# Single-season Royle-Nichols occupancy models, with abundance-induced
#   heterogeneity in detection probability, with site covariates
#  (not survey covariates)

# 'link' argument added 2015-02-20
# support for 'predict' added 2017-02-15

occSSrnSite <- function(y, n, model=NULL, data=NULL, ci=0.95,
    link=c("logit", "probit"), ...) {
  # single-season occupancy models with site-specific covariates
  # y is a vector with the number of detections at each site.
  # n is a vector with the number of occasions at each site.
  # model is a list of 2-sided formulae for lambda and r; can also be a single
  #   2-sided formula, eg, model = lambda ~ habitat.
  # ci is the required confidence interval.

  if(length(n) == 1)
    n <- rep(n, length(y))
  if(length(y) != length(n))
    stop("y and n must have the same length")
  if(any(y > n))
    stop("y cannot be greater than n")
  crit <- fixCI(ci)

  if(match.arg(link) == "logit") {
    plink <- plogis
  } else {
    plink <- pnorm
  }

  # Standardise the model:
  model <- stdModel(model, list(lambda=~1, r=~1))

  # Convert the covariate data frame into a list
  nSites <- length(y)
  dataList <- stddata(data, nocc=NULL, scaleBy=NULL)
  # Get factor levels and scaling values (needed for prediction)
  xlev <- lapply(dataList[sapply(dataList, is.factor)], levels)
  scaling <- lapply(dataList[sapply(dataList, is.numeric)],
    getScaling, scaleBy = 1)
  dataList <- lapply(dataList, doScaling, scaleBy = 1)

  lamDf <- selectCovars(model$lambda, dataList, nSites)
  if (nrow(lamDf) != nSites)
    stop("Number of site covars doesn't match sites.")
  lamModMat <- modelMatrix(model$lambda, lamDf)
  lamK <- ncol(lamModMat)
  rDf <- selectCovars(model$r, dataList, nSites)
  if (nrow(rDf) != nSites)
    stop("Number of site covars doesn't match sites.")
  rModMat <- modelMatrix(model$r, rDf)
  rK <- ncol(rModMat)
  K <- lamK + rK
  # modelMatrix removes rows with NAs:
  if(nrow(lamModMat) != nSites || nrow(rModMat) != nSites)
    stop("Missing site covariates are not allowed.")

  beta.mat <- matrix(NA_real_, K, 4)
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- c(
    paste("lambda:", colnames(lamModMat)),
    paste("r:", colnames(rModMat)))
  lp.mat <- matrix(NA_real_, nSites*2, 3)
  colnames(lp.mat) <- c("est", "lowCI", "uppCI")
  rownames(lp.mat) <- c(
    paste("lambda:", 1:nSites, sep=""),
    paste("r:", 1:nSites, sep=""))
  logLik <- NA_real_
  npar <- NA_integer_
  varcov <- NULL

  nll <- function(param){
    lamBeta <- param[1:lamK]
    rBeta <- param[(lamK+1):K]
    lambda <- as.vector(exp(lamModMat %*% lamBeta))
    logs <- as.vector(plink( -rModMat %*% rBeta, log.p=TRUE))  # s = 1 - r
    llh <- numeric(nSites)
    for(i in 1:nSites) {
      logrpart <- logs[i] * (0:Nmax)
      log1mrpart <- log1minusExp(logrpart)
      logNpart <- dpois(0:Nmax, lambda[i], log=TRUE)
      tmp <- if(y[i]==0) 0 else log1mrpart * y[i]
      llh[i] <- logSumExp(tmp + logrpart * (n[i]-y[i]) + logNpart)
    }
    return(min(-sum(llh), .Machine$double.xmax))
  }

  # Run mle estimation with nlm:
  Nmax <- 100
  nlmArgs <- list(...)
  nlmArgs$f <- nll
  nlmArgs$p <- rep(0, K)
  nlmArgs$hessian <- TRUE
  res <- do.call(nlm, nlmArgs)
  if(res$code > 2)   # exit code 1 or 2 is ok.
warning(paste("Convergence may not have been reached (nlm code", res$code, ")")) # Process output beta.mat[,1] <- res$estimate lp.mat[, 1] <- c(lamModMat %*% beta.mat[1:lamK, 1], rModMat %*% beta.mat[(lamK+1):K, 1]) logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) if (!inherits(varcov0, "try-error")) { npar <- K varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") temp <- c(getFittedVar(lamModMat, varcov[1:lamK, 1:lamK]), getFittedVar(rModMat, varcov[(lamK+1):K, (lamK+1):K])) if(all(temp >= 0)) { SElp <- sqrt(temp) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") logLik <- -res$minimum } } realLam <- exp(lp.mat[1:nSites, ]) realR <- plink(lp.mat[(nSites+1):(nSites*2), ]) realPsi <- 1-dpois(0, realLam) rownames(realPsi) <- paste("psi:", 1:nSites, sep="") out <- list(call = match.call(), link = c(lambda = "log", r = match.arg(link)), beta = beta.mat, beta.vcv = varcov, real = rbind(realPsi, realLam, realR), logLik = c(logLik=logLik, df=npar, nobs=length(y)), ci = ci, formulae = model, index = list(lambda=1:lamK, r=(lamK+1):K), xlev = xlev, scaling = scaling) class(out) <- c("wiqid", "list") return(out) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occSSrnSite.R
# New version 2013-02-27 without the time argument (no time=FALSE option)
# 'link' argument added 2015-02-20
# 'verify' argument added 2016-09-20

occSStime <- function(DH, model=p~1, data=NULL, ci=0.95, plot=TRUE,
    link=c("logit", "probit"), verify=TRUE, ...)  {
  # DH is a 1/0 matrix of detection histories, sites x occasions
  # model is a 2-sided formula for probability of detection, eg, model = p ~ habitat.
  # data is a DATA FRAME with a row for each capture occasion and columns for time covariates.
  # ci is the required confidence interval.

  # Sanity checks and such:
  DH <- as.matrix(DH)  # in case it's a data frame
  if(verify)
    DH <- verifyDH(DH, allowNA=TRUE)
  nocc <- ncol(DH)
  if (nocc < 2)
    stop("More than one survey occasion is needed")
  notDetected <- rowSums(DH, na.rm=TRUE) == 0  # TRUE if species NOT detected at the site
  if(!is.null(data) && nrow(data) != nocc)
    stop("'data' must have one row for each survey occasion.")
  crit <- fixCI(ci)

  if(match.arg(link) == "logit") {
    plink <- plogis
  } else {
    plink <- pnorm
  }

  # Standardise the model:
  model <- stdModel(model, defaultModel=list(p=~1))

  # Add built-in covars to the data frame
  dataList <- stddata(data, NULL, 0.5)
  dataList$.time <- as.factor(1:nocc)
  dataList$.Time <- standardize(1:nocc)
  dataList$.Time2 <- dataList$.Time^2
  dataList$.Time3 <- dataList$.Time^3
  pDf <- as.data.frame(dataList)

  # Do the model matrix for p:
  pModMat <- modelMatrix(model$p, pDf)
  pK <- ncol(pModMat)
  K <- pK + 1

  beta.mat <- matrix(NA_real_, K, 4)
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- c("psi", paste("p:", colnames(pModMat)))
  lp.mat <- matrix(NA_real_, nocc + 1, 3)
  colnames(lp.mat) <- c("est", "lowCI", "uppCI")
  rownames(lp.mat) <- c("psi", paste0("p", 1:nocc))
  logLik <- NA_real_
  npar <- NA_integer_
  varcov <- NULL
  # ????

  if(ncol(DH) > 1 && sum(DH, na.rm=TRUE) > 0)  {
    # Negative log-likelihood function:
    nll <- function(params) {
      logpsi <- plink(params[1], log.p=TRUE)
      log1mpsi <- plink( -params[1], log.p=TRUE)
      pBeta <- params[-1]
      linkp <- pModMat %*% pBeta
      logp <- plink(linkp, log.p=TRUE)
      log1mp <- plink( -linkp, log.p=TRUE)
      logLik1 <- sweep(DH, 2, logp, "*") + sweep((1-DH), 2, log1mp, "*")
      logLik2 <- rowSums(logLik1, na.rm=TRUE)
      llh <- sum(logAddExp(logpsi + logLik2,
                           log1mpsi + log(notDetected)))
      return(min(-llh, .Machine$double.xmax))  # min(..) stops Inf being returned
    }

    nlmArgs <- list(...)
    nlmArgs$f <- nll
    nlmArgs$p <- rep(0, K)
    nlmArgs$hessian <- TRUE
    res <- do.call(nlm, nlmArgs)
    if(res$code > 2)   # exit code 1 or 2 is ok.
warning(paste("Convergence may not have been reached (code", res$code, ")")) beta.mat[,1] <- res$estimate lp.mat[, 1] <- c(beta.mat[1], pModMat %*% beta.mat[-1,1]) logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) # if (!inherits(varcov0, "try-error") && all(diag(varcov0) > 0)) { if (!inherits(varcov0, "try-error")) { npar <- K varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") SElp <- c(sqrt(varcov[1,1]), sqrt(getFittedVar(pModMat, varcov[-1,-1] ))) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") } # Do the plot if(plot) { real.p <- plink(lp.mat[-1, ]) ylim <- range(0, real.p, na.rm=TRUE) plot(1:nocc, real.p[, 1], type='l', ylim=ylim, xlab="Time", ylab="Probability of detection") lines(1:nocc, real.p[, 2], lty=3) lines(1:nocc, real.p[, 3], lty=3) } } out <- list(call = match.call(), link=match.arg(link), beta = beta.mat, beta.vcv = varcov, real = plink(lp.mat), logLik=c(logLik=logLik, df=npar, nobs=nrow(DH))) class(out) <- c("wiqid", "list") return(out) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/occSStime.R
# Function to plot activity centres

plotACs <- function(
  object,           # mcmcOutput object with ACs attribute
  which=NA,         # which ACs to plot (don't usually want to do all in one plot)
  howMany=3000,     # number of points to plot for each animal
  showLabels=TRUE)  # whether to label plot with animal IDs
{
  # Check input
  if(!inherits(object, "mcmcOutput"))
    stop("Input object is not class 'mcmcOutput'.")
  ACs <- attr(object, "ACs")
  if(is.null(ACs))
    stop("Can't find Activity Centre information.")

  # Reduce number of iterations
  if(dim(ACs)[1] > howMany) {
    keep <- seq(1, dim(ACs)[1], length = howMany)
    ACs <- ACs[keep,,]
  }
  # Get posterior means for locations
  x <- colMeans(ACs[, ,1], na.rm=TRUE)
  y <- colMeans(ACs[, ,2], na.rm=TRUE)
  # Get number of animals captured
  ncaps <- sum(!is.na(colMeans(ACs[, , 1])))
  # Recover animal IDs
  M <- dim(ACs)[2]  # total, incl. uncaptured animals
  animalIDs <- rep(NA, M)
  animalIDs[1:ncaps] <- dimnames(ACs)[[2]][1:ncaps]
  if(any(is.na(which)))
    which <- 1:M

  # do the plot
  MASS::eqscplot(x, y, type='n',
      xlim=range(ACs[, , 1], na.rm=TRUE),
      ylim=range(ACs[, , 2], na.rm=TRUE),
      ann=FALSE, axes=FALSE)
  traps <- attr(object, "traps")
  if(!is.null(traps))
    points(traps, pch=3, col='red')
  colors <- palette()[-1]
  colno <- 1
  for(i in which) {
    col <- colors[colno]
    points(ACs[, i, ], cex=0.1, col=adjustcolor(col, 0.3))
    colno <- colno+1
    if(colno > length(colors))
      colno <- 1
  }
  if(showLabels)
    plotrix::boxed.labels(x[which], y[which], labels=animalIDs[which])
  invisible(0)
}
/scratch/gouwar.j/cran-all/cranData/wiqid/R/plotACs.R
# Function to plot the Bayesian posterior resulting from the comb method # described in Kruschke (2015) DBDA. plotComb <- function(x, y, credMass=0.95, plot=TRUE, showMode=FALSE, shadeHDI=NULL, ...) { if(length(credMass) != 1 || credMass <= 0 || credMass >= 1) stop("credMass must be in 0 < credMass < 1") # Calculate HDI: sorted = sort(y, decreasing=TRUE ) heightIdx = min( which( cumsum( sorted) >= sum(y) * credMass ) ) height = sorted[heightIdx] indices = which( y >= height ) gaps <- which(diff(indices) > 1) begs <- indices[c(1, gaps + 1)] ends <- indices[c(gaps, length(indices))] HDI <- cbind(lower = x[begs], upper = x[ends]) attr(HDI, "credMass") <- credMass attr(HDI, "height") <- height # Do the plot: if(plot) { dots <- list(...) # if(length(dots) == 1 && class(dots[[1]]) == "list") if(length(dots) == 1 && inherits(dots[[1]], "list")) # Fixed 2022-06-06 dots <- dots[[1]] defaultArgs <- list(xlab=deparse(substitute(x)), yaxt="n", ylab="", main="", cex.lab=1.5, cex=1.4, col="skyblue", bty="n", lwd=5, xlim=range(x)) useArgs <- modifyList(defaultArgs, dots) selPlot <- names(useArgs) %in% c(names(as.list(args(plot.default))), names(par(no.readonly=TRUE))) plotArgs <- useArgs[selPlot] plotArgs$x <- x plotArgs$y <- y plotArgs$type <- "l" do.call("plot", plotArgs) abline(h=0, col='grey') # Display the HDI. if(!is.null(credMass)) { ht <- attr(HDI, "height") if(!is.null(shadeHDI)) { for (i in 1:nrow(HDI)) { inHDI <- which(x >= HDI[i, 1] & x <= HDI[i, 2]) polyx <- c(HDI[i, 1], HDI[i, 1], x[inHDI], HDI[i, 2], HDI[i, 2]) polyy <- c(0, ht, y[inHDI], ht, 0) polygon(polyx, polyy, border=NA, col=shadeHDI) } } else { segments(HDI, 0, HDI, ht, lty=2) } do.call(lines, plotArgs) segments(HDI[, 1], ht, HDI[, 2], ht, lwd=4, lend='butt') text( mean(HDI), ht, bquote(.(100*credMass) * "% HDI" ), adj=c(.5,-1.7), cex=useArgs$cex, xpd=TRUE ) # text( HDI, ht, bquote(.(signif(HDI, 3))), text( HDI, ht, signifish(HDI, 3), pos=3, cex=useArgs$cex, xpd=TRUE ) } # Display mean or mode: cenTendHt <- 0.9 * max(y) if ( showMode==FALSE ) { meanParam <- sum(x * y) text( meanParam, cenTendHt, bquote(mean==.(signifish(meanParam,3))), adj=c(.5,0), cex=useArgs$cex, xpd=TRUE ) } else { modeParam <- x[which.max(y)] text( modeParam, cenTendHt, bquote(mode==.(signifish(modeParam,3))), adj=c(.5,0), cex=useArgs$cex, xpd=TRUE ) } } # end if(plot) return(HDI) }
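
# A minimal sketch of plotComb with a Beta posterior evaluated on a grid:
if(FALSE) {
  x <- seq(0, 1, length = 1001)   # the comb of parameter values
  y <- dbeta(x, 8, 4)
  y <- y / sum(y)                 # normalise to a probability comb
  ( hdi <- plotComb(x, y, credMass = 0.9, shadeHDI = "skyblue") )
}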
/scratch/gouwar.j/cran-all/cranData/wiqid/R/plotComb.R
# Now a wrapper for mcmcOutput::postPlot plotPost <- function( ... ) { warning("'plotPost' is deprecated, please use 'mcmcOutput::postPlot'", call.=FALSE) mcmcOutput::postPlot(...) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/plotPost.R
# This is the 'predict' method for 'wiqid' objects # Essential info to be included in the 'wiqid' fit: # = link function used, at present "logit", "probit" and "log" are implemented. # = index : a named list giving the rows of the 'beta' and 'varcov' matrices # for each submodel # For models involving covariates (ie, not intercept-only), these are also needed: # = model formulae # = preprocessing information: # - for numeric covars, scaling info (mean, SD/2) # - for factors, original levels predict.wiqid <- function(object, newdata, parameter, ci, type=c("link", "response"), ...) { if(missing(newdata) || !is.data.frame(newdata)) stop("Please supply a data frame for newdata.") if(missing(parameter)) stop("Please specify the parameter.") if(missing(ci)) ci <- object$ci if(is.null(ci)) ci <- 0.95 crit <- fixCI(ci) # fix link link <- object$link if(is.null(link)) link <- "logit" if(length(link) > 1) link <- link[[parameter]] # get the index, if length(index) == 1 it's an intercept-only model index <- object$index[[parameter]] if(is.null(index)) stop("No coefficients found for parameter ", parameter) if(length(index) == 1) { # INTERCEPT ONLY MODEL message("This is an intercept-only model, all values identical.") intercept <- object$beta[index, 1:2] # est and SE intercept <- c(intercept, intercept[1] + intercept[2] * crit) lp.mat <- matrix(rep(intercept, each=nrow(newdata)), nrow(newdata)) rownames(lp.mat) <- rownames(newdata) colnames(lp.mat) <- c("est", "SE", "lowCI", "uppCI") } else { # get the model formula forms <- object$formulae if(is.null(forms)) stop("No information on models in object.") formula <- forms[[parameter]] if(is.null(formula)) stop("No submodel found for parameter ", parameter) varsNeeded <- all.vars(formula) # Get coefficients and varcovar matrix coeffs <- coef(object)[index] vcv <- object$beta.vcv[index, index] # wrangle newdata varsMissing <- !varsNeeded %in% names(newdata) if(any(varsMissing)) { missingText <- paste(varsNeeded[varsMissing], collapse=", ") stop("Needed variable(s) missing from new data: ", missingText) } newdata <- newdata[, names(newdata) %in% varsNeeded, drop=FALSE] # scale numeric covars newdata <- scaleToMatch(newdata, object$scaling) xlev <- object$xlev xlev <- xlev[names(xlev)%in% varsNeeded] # can be empty mf <- model.frame(formula, newdata, xlev=xlev) modMat <- modelMatrix(formula, mf) # Get point estimates and SEs lp.mat <- matrix(NA_real_, nrow(modMat), 4) rownames(lp.mat) <- rownames(modMat) colnames(lp.mat) <- c("est", "SE", "lowCI", "uppCI") lp.mat[, 1] <- modMat %*% coeffs # lp.mat[, 2] <- sqrt(diag(modMat %*% vcv %*% t(modMat))) lp.mat[, 2] <- sqrt(getFittedVar(modMat, vcv)) lp.mat[, 3:4] <- sweep(outer(lp.mat[, 2], crit), 1, lp.mat[, 1], "+") } type <- match.arg(type) if(type == "response") { SE <- lp.mat[, 2] if(link == "logit") { lp.mat <- plogis(lp.mat) lp.mat[, 2] <- SE * lp.mat[, 1] * (1 - lp.mat[, 1]) } else if(link == "probit"){ SE <- SE * dnorm(lp.mat[, 1]) lp.mat <- pnorm(lp.mat) lp.mat[, 2] <- SE } else if(link == "log") { lp.mat <- exp(lp.mat) lp.mat[, 2] <- SE * lp.mat[, 1] } else { stop("Link type ", link, " not recognised.") } } attr(lp.mat, "ci") <- ci attr(lp.mat, "link") <- link return(lp.mat) }
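
# A hedged sketch of 'predict' for a wiqid fit that stores scaling and factor
# levels (here occSSrnSite, with simulated Royle-Nichols data):
if(FALSE) {
  set.seed(2)
  habitat <- rnorm(40)
  N <- rpois(40, exp(0.3 + 0.8 * habitat))
  y <- rbinom(40, 5, 1 - (1 - 0.3)^N)   # detections out of 5 occasions
  fit <- occSSrnSite(y, n = 5, lambda ~ habitat,
                     data = data.frame(habitat = habitat))
  nd <- data.frame(habitat = seq(-2, 2, 0.5))
  predict(fit, nd, parameter = "lambda", type = "response")
}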
/scratch/gouwar.j/cran-all/cranData/wiqid/R/predict.R
# Function to calculate model averaged predictions from multiple models predictAvg <- function(modList, newdata, parameter, ci=0.95, type = c("link", "response"), IC=AICc) { nModels <- length(modList) # Get model weights ic <- sapply(modList, IC) delta <- ic - min(ic) modLLH <- exp(-delta/2) modWt <- modLLH / sum(modLLH) # Get model predictions ests <- SEs <- matrix(NA, nrow(newdata), nModels) # colnames(ests) <- colnames(SEs) <- names(modList) links <- character(nModels) for(i in 1:nModels) { pred <- predict(modList[[i]], newdata=newdata, parameter=parameter, ci=ci, type='link') ests[,i] <- pred[,1] SEs[,i] <- pred[,2] links[i] <- attr(pred, "link") } link <- links[1] stopifnot(all(links == link)) out <- matrix(NA_real_, nrow(newdata), 4) rownames(out) <- rownames(newdata) colnames(out) <- c("est", "SE", "lowCI", "uppCI") # Get MA point estimate tmp <- sweep(ests, 2, modWt, "*") out[, 'est'] <- rowSums(tmp) # Get unconditional SE correctn <- sweep(ests, 1, out[, 'est'], "-")^2 var.u <- SEs^2 + correctn # unconditional variances tmp <- sweep(var.u, 2, modWt, "*") out[, 'SE'] <- sqrt(rowSums(tmp)) # Get CI crit <- fixCI(ci) out[, 3:4] <- sweep(outer(out[, 2], crit), 1, out[, 1], "+") # Convert to 'response' scale if requested type <- match.arg(type) if(type == "response") { SE <- out[, 2] if(link == "logit") { out <- plogis(out) out[, 2] <- SE * out[, 1] * (1 - out[, 1]) } else if(link == "probit"){ SE <- SE * dnorm(out[, 1]) out <- pnorm(out) out[, 2] <- SE } else if(link == "log") { out <- exp(out) out[, 2] <- SE * out[, 1] } else { stop("Link type ", link, " not recognised.") } } attr(out, "ci") <- ci attr(out, "link") <- link return(out) }
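
# A hedged sketch of model-averaged predictions from two occSS fits
# (simulated data; AICc weights are used by default):
if(FALSE) {
  set.seed(3)
  habitat <- rnorm(60)
  z <- rbinom(60, 1, plogis(0.2 + habitat))
  DH <- matrix(rbinom(60 * 4, 1, 0.35 * z), 60, 4)
  mods <- list(
    m0   = occSS(DH),
    mHab = occSS(DH, psi ~ habitat, data = data.frame(habitat = habitat)))
  nd <- data.frame(habitat = c(-1, 0, 1))
  predictAvg(mods, nd, parameter = "psi", type = "response")
}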
/scratch/gouwar.j/cran-all/cranData/wiqid/R/predictAvg.R
# Calculation of species accumulation curves and cumulative richness estimates
# as produced by EstimateS.
# See the EstimateS Users Guide:
# http://viceroy.eeb.uconn.edu/estimates/EstimateSPages/EstSUsersGuide/EstimateSUsersGuide.htm
# downloaded 2013-03-14.

# Helper functions, pretty trivial but useful to plug into richCurve
richSobs <- function(incVec)  {
  # Convert a matrix/dataframe into a vector and round:
  incVec <- round(incVec)
  if(is.matrix(incVec) || is.data.frame(incVec))
    incVec <- rowSums(incVec)
  sum(incVec > 0)
}

richSingle <- function(cntVec)  {
  # Convert a matrix/dataframe into a vector and round:
  cntVec <- round(cntVec)
  if(is.matrix(cntVec) || is.data.frame(cntVec))
    cntVec <- rowSums(cntVec)
  sum(cntVec == 1)
}

richDouble <- function(cntVec)  {
  # Convert a matrix/dataframe into a vector and round:
  cntVec <- round(cntVec)
  if(is.matrix(cntVec) || is.data.frame(cntVec))
    cntVec <- rowSums(cntVec)
  sum(cntVec == 2)
}

richUnique <- function(incMat)
  sum(rowSums(round(incMat) > 0) == 1)

richDuplicate <- function(incMat)
  sum(rowSums(round(incMat) > 0) == 2)

richCurve <- function(obsMat, FUNC, runs=10, ...) {
  m <- ncol(obsMat)
  K <- length(FUNC(obsMat))
  out <- array(NA, c(runs, m, K))
  if(K > 1)
    dimnames(out) <- list(NULL, NULL, names(FUNC(obsMat)))
  shuff <- obsMat
  for(i in 1:runs) {
    for(j in 1:m) {
      out[i, j, ] <- FUNC(shuff[, 1:j, drop=FALSE], ...)
    }
    shuff <- shuff[, sample(m)]
  }
  list(mean = apply(out, 2:3, mean, na.rm=TRUE),
       SD = apply(out, 2:3, sd, na.rm=TRUE))
}
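
# A minimal sketch of richCurve with a toy species x samples count matrix:
if(FALSE) {
  set.seed(11)
  cntMat <- matrix(rpois(30 * 10, 0.4), 30, 10)   # 30 species, 10 samples
  richSobs(cntMat)                  # species observed in the pooled samples
  rc <- richCurve(cntMat, richSobs, runs = 20)
  plot(rc$mean[, 1], type = "l", xlab = "Samples", ylab = "Species observed")
}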
/scratch/gouwar.j/cran-all/cranData/wiqid/R/richCurve.R
# Calculation of richness estimators used in EstimateS.
# See the EstimateS Users Guide:
# http://viceroy.eeb.uconn.edu/estimates/EstimateSPages/EstSUsersGuide/EstimateSUsersGuide.htm
# downloaded 2013-03-14.

# The output corresponds to the last row of the EstimateS output.

# --------------------------------------------------------------------

# ACE : Anne Chao's "ACE" ABUNDANCE-BASED richness estimator
# ==========================================================
# This corresponds to the last row of EstimateS output column 16, "ACE"

richACE <- function(cntVec, threshold = 10) {
  # 'cntVec' should be a vector of species counts (abundances)
  # threshold is the max. abundance for rare species
  # Convert a matrix/dataframe into a vector and round:
  cntVec <- round(cntVec)
  if(is.matrix(cntVec) || is.data.frame(cntVec))
    cntVec <- rowSums(cntVec)
  cntVec <- cntVec[cntVec > 0]

  Srare <- sum(cntVec <= threshold)          # Number of rare species
  Nrare <- sum(cntVec[cntVec <= threshold])  # Number of indivs in rare species
  F1 <- sum(cntVec == 1)                     # Number of singletons
  Sobs <- length(cntVec)
  if(F1 == Nrare)
    return(Sobs + F1 * (F1 - 1) / 2)  # Chao1 (F2=0 in this case)
  Cace <- 1 - F1 / Nrare
  Fi <- table(cntVec[cntVec <= threshold])
  Fn <- as.numeric(names(Fi))
  gamma2 <- max(Srare * sum(Fn * (Fn - 1) * Fi) /
      (Cace * Nrare * (Nrare - 1)) - 1, 0)
  return(Sobs - Srare + Srare / Cace + F1 * gamma2 / Cace)
}

# ICE : Anne Chao's "ICE" INCIDENCE-BASED richness estimator
# ==========================================================
# This corresponds to the last row of EstimateS output column 18, "ICE"

richICE <- function(incMat, threshold = 10) {
  # 'incMat' should be a matrix of species incidences
  # threshold is the max. incidences for infrequent species
  # Convert abundances to incidences:
  incMat <- round(incMat) > 0

  incVec <- rowSums(incMat)
  m <- ncol(incMat)
  infr <- incVec <= threshold & incVec > 0  # Which species are infrequent
  Sinfr <- sum(infr)          # Number of infrequent species
  Ninfr <- sum(incVec[infr])  # Number of incidences of infrequent species
  minfr <- sum(colSums(incMat[infr, , drop=FALSE]) > 0)
                              # Number of samples with >=1 infrequent sps
  Q1 <- sum(incVec == 1)      # Number of uniques
  Sobs <- sum(incVec > 0)
  if(Q1 == Ninfr)
    return(Sobs + ((m - 1) / m) * (Q1 * (Q1 - 1) / 2))  # Chao2 Eqn 4 (Q2=0 in this case)
  Cice <- 1 - Q1 / Ninfr
  Qj <- table(incVec[infr])
  Qn <- as.numeric(names(Qj))
  gamma2 <- max(Sinfr * minfr * sum(Qn * (Qn - 1) * Qj) /
      (Cice * (minfr - 1) * Ninfr^2) - 1, 0)
  return(Sobs - Sinfr + Sinfr / Cice + Q1 * gamma2 / Cice)
}

# Chao1 : Anne Chao's "Chao1" ABUNDANCE-BASED richness estimator
# ==============================================================
# This corresponds to the last row of EstimateS output columns 20-23
#   Chao 1 Mean
#   Chao 1 95% CI Lower Bound
#   Chao 1 95% CI Upper Bound
#   Chao 1 SD (analytical)
# Equation numbers refer to Appendix B of EstimateS 8.2 Users Guide at the URL above.

richChao1 <- function(cntVec, correct=FALSE, ci = 0.95) {
  # cntVec = vector of counts, one element per species
  # correct : if TRUE, bias-corrected Chao1 is calculated
  # ci : the desired confidence interval.
  if(ci > 1 | ci < 0.5)
    stop("ci must be between 0.5 and 1")
  crit <- qnorm(1 - (1 - ci[1]) / 2)
  # Convert a matrix into a vector and round:
  cntVec <- round(cntVec)
  if(is.matrix(cntVec) || is.data.frame(cntVec))
    cntVec <- rowSums(cntVec)
  # Get number of individuals and of species observed:
  N <- sum(cntVec)
  Sobs <- sum(cntVec > 0)
  # Get number of singletons and doubletons:
  F1 <- sum(cntVec == 1)
  F2 <- sum(cntVec == 2)

  # Calculate Chao1
  # T = number of undetected species = Chao1 - Sobs
  if(correct | F2 == 0) {
    T <- F1 * (F1 - 1) / (2 * (F2 + 1))  # Eqn 2
  } else {
    T <- F1^2 / (2 * F2)                 # Eqn 1
  }
  # If F1 == 0, these evaluate correctly to T = 0.
  Chao1 <- Sobs + T

  # Calculate variance
  if(F1 == 0) {        # Eqn 8
    var <- Sobs * exp(-N / Sobs) * (1 - exp(-N / Sobs))
  } else if(F2 == 0) { # Eqn 7
    var <- (F1*(F1-1) / 2) + (F1*(2*F1 - 1)^2 / 4) - (F1^4 / (4 * Chao1))
  } else if(correct) { # Eqn 6
    var <- (F1*(F1-1) / (2 * (F2+1))) + (F1*(2*F1 - 1)^2 / (4*(F2+1)^2)) +
              (F1^2 * F2 * (F1-1)^2 / (4 * (F2+1)^4))
  } else {             # Eqn 5
    var <- F2 * ((F1 / F2)^2 / 2 + (F1 / F2)^3 + (F1 / F2)^4 / 4)
  }

  # Calculate CI
  if(F1 == 0) {  # Eqn 14
    P <- exp(-N / Sobs)
    low <- max(Sobs, Sobs / (1-P) - crit * sqrt(Sobs * P / (1-P)))
    upp <- Sobs / (1-P) + crit * sqrt(Sobs * P / (1-P))
  } else {       # Eqn 13
    if(T == 0) {  # in this case K is undefined
      low <- upp <- Sobs
    } else {
      K <- exp(crit * sqrt(log(1 + var / T^2)))
      low <- Sobs + T / K
      upp <- Sobs + T * K
    }
  }
  return(c(Chao1 = Chao1, Chao1Low = low, Chao1Upp = upp, Chao1SD = sqrt(var)))
}

# Chao2 : Anne Chao's "Chao2" INCIDENCE-BASED richness estimator
# ==============================================================
# This corresponds to the last row of EstimateS output columns 24-27
#   Chao 2 Mean
#   Chao 2 95% CI Lower Bound
#   Chao 2 95% CI Upper Bound
#   Chao 2 SD (analytical)

richChao2 <- function(incMat, correct=FALSE, ci = 0.95) {
  # incMat = a 0/1 matrix of incidence data, species x sites
  # correct : if TRUE, bias-corrected Chao2 is calculated
  # ci : the desired confidence interval.
  if(ci > 1 | ci < 0.5)
    stop("ci must be between 0.5 and 1")
  crit <- qnorm(1 - (1 - ci[1]) / 2)
  # Convert a count matrix into an incidence matrix:
  incMat <- round(incMat) > 0
  # Get number of sites, incidences, and species observed:
  m <- ncol(incMat)
  M <- sum(incMat)
  incVec <- rowSums(incMat)
  Sobs <- sum(incVec > 0)
  # Get number of uniques and duplicates:
  Q1 <- sum(incVec == 1)
  Q2 <- sum(incVec == 2)

  # Calculate Chao2
  # T = number of undetected species = Chao2 - Sobs
  if(correct | Q2 == 0) {
    T <- ((m - 1) / m) * (Q1 * (Q1 - 1) / (2 * (Q2 + 1)))  # Eqn 4
  } else {
    T <- Q1^2 / (2 * Q2)                                   # Eqn 3
  }
  # If Q1 == 0, both these evaluate correctly to T = 0.
Chao2 <- Sobs + T # Calculate variance if(Q1 == 0) { # Eqn 12 var <- Sobs * exp(-M / Sobs) * (1 - exp(-M / Sobs)) } else if(Q2 == 0) { # Eqn 11 var <- ((m - 1) / m) * (Q1*(Q1-1) / 2) + ((m - 1) / m)^2 * (Q1*(2*Q1 - 1)^2 / 4) - ((m - 1) / m)^2 * (Q1^4 / (4 * Chao2)) } else if(correct) { # Eqn 10 var <- ((m - 1) / m) * (Q1*(Q1-1) / (2 * (Q2+1))) + ((m - 1) / m)^2 * (Q1*(2*Q1 - 1)^2 / (4*(Q2+1)^2)) + ((m - 1) / m)^2 * (Q1^2 * Q2 * (Q1-1)^2 / (4 * (Q2+1)^4)) } else { # Eqn 9 var <- Q2 * ((Q1 / Q2)^2 / 2 + (Q1 / Q2)^3 + (Q1 / Q2)^4 / 4) } # Calculate CI if(Q1 == 0) { # Eqn 14 P <- exp(-M / Sobs) low <- max(Sobs, Sobs / (1-P) - crit * sqrt(Sobs * P / (1-P))) upp <- Sobs / (1-P) + crit * sqrt(Sobs * P / (1-P)) } else { # Eqn 13 if(T == 0) { # in this case K is undefined low <- upp <- Sobs } else { K <- exp(crit * sqrt(log(1 + var / T^2))) low <- Sobs + T / K upp <- Sobs + T * K } } return(c(Chao2 = Chao2, Chao2Low = low, Chao2Upp = upp, Chao2SD = sqrt(var))) } # Jack1 : First order jackknife INCIDENCE-BASED richness estimator # ================================================================ richJack1 <- function(incMat) { # incMat = a 0/1 matrix of incidence data, species x sites # Convert a count matrix into an incidence matrix: incMat <- round(incMat) > 0 # Get number of sites, species observed, and uniques: m <- ncol(incMat) incVec <- rowSums(incMat) Sobs <- sum(incVec > 0) Q1 <- sum(incVec == 1) return(Sobs + Q1 * (m - 1) / m) } # Jack2 : Second order jackknife INCIDENCE-BASED richness estimator # ================================================================= richJack2 <- function(incMat) { # incMat = a 0/1 matrix of incidence data, species x sites # Convert a count matrix into an incidence matrix: incMat <- round(incMat) > 0 # Get number of sites, species observed, uniques and duplicates: m <- ncol(incMat) incVec <- rowSums(incMat) Sobs <- sum(incVec > 0) Q1 <- sum(incVec == 1) Q2 <- sum(incVec == 2) return(Sobs + Q1 * (2*m - 3) / m - Q2 * (m - 2)^2 / m / (m-1)) } # Bootstrap : Bootstrap INCIDENCE-BASED richness estimator # ======================================================== richBoot <- function(incMat) { # incMat = a 0/1 matrix of incidence data, species x sites # Convert a count matrix into an incidence matrix: incMat <- round(incMat) > 0 # Get number of sites, species observed, and proportion of samples that contain species k: m <- ncol(incMat) incVec <- rowSums(incMat) Sobs <- sum(incVec > 0) pk <- incVec / m return(Sobs + sum((1-pk) ^ m)) } # MMMeans : Michaelis-Menton INCIDENCE-BASED richness estimator # This uses Mao's tau, producing what EstS calls MMMeans. MMRuns is not calculated as it # is inferior to MMMeans as described in the Users Guide # This function uses Raaijmakers' MLEs as given in # Colwell, R K; J A Coddington. 1994. Estimating terrestrial biodiversity through # extrapolation. Philosophical Transactions of the Royal Society of London B 345:101-118. richMM <- function(incMat) { # incMat = a 0/1 matrix of incidence data, species x sites if(ncol(incMat) == 1) # Can't fit curve to 1 data point! return(0) # get estimates of species for subsets: tau <- richRarefy(incMat)[, 1] Xi <- tau / seq(tau) Xbar <- mean(Xi) Yi <- tau Ybar <- mean(Yi) Syy <- sum((Yi - Ybar)^2) Sxx <- sum((Xi - Xbar)^2) Sxy <- sum((Xi - Xbar) * (Yi - Ybar)) Bhat <- (Xbar * Syy - Ybar * Sxy) / (Ybar* Sxx - Xbar * Sxy) Shat <- Ybar + Bhat * Xbar if(Shat < 0 || !is.finite(Shat)) return(NA_real_) return(Shat) }
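
# A minimal sketch of the EstimateS-style estimators with toy data:
if(FALSE) {
  set.seed(5)
  cnt <- rpois(100, 1.2)       # abundances, one entry per species
  richACE(cnt)
  richChao1(cnt)               # estimate, CI limits and SD
  incMat <- matrix(rbinom(50 * 8, 1, 0.2), 50, 8)  # species x samples
  richICE(incMat)
  richChao2(incMat)
  richJack1(incMat) ; richJack2(incMat) ; richBoot(incMat) ; richMM(incMat)
}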
/scratch/gouwar.j/cran-all/cranData/wiqid/R/richEstimators.R
# Calculation of richness estimators which are not included in EstimateS.
# (or at least not in version 8.2)

# JackA1, JackA2 : First and second order jackknife ABUNDANCE-BASED
#    richness estimators
# ================================================================
# See Gotelli & Colwell 2011 p41

richJackA1 <- function(cntVec) {
  # cntVec = vector of counts, one element per species
  # Convert a matrix into a vector and round:
  cntVec <- round(cntVec)
  if(is.matrix(cntVec) || is.data.frame(cntVec))
    cntVec <- rowSums(cntVec)
  # Get number of species observed and singletons:
  Sobs <- sum(cntVec > 0)
  f1 <- sum(cntVec == 1)
  return(Sobs + f1)
}

# JackA2 : Second order jackknife ABUNDANCE-BASED richness estimator
# ==================================================================
richJackA2 <- function(cntVec) {
  # cntVec = vector of counts, one element per species
  # Convert a matrix into a vector and round:
  cntVec <- round(cntVec)
  if(is.matrix(cntVec) || is.data.frame(cntVec))
    cntVec <- rowSums(cntVec)
  # Get number of species observed, singletons and doubletons:
  Sobs <- sum(cntVec > 0)
  f1 <- sum(cntVec == 1)
  f2 <- sum(cntVec == 2)
  return(Sobs + 2 * f1 - f2)
}

# Rennolls & Laumonier (2006) 'shadow species' ABUNDANCE-BASED estimator of richness
# ==================================================================================
richRenLau <- function(cntVec) {
  # cntVec = vector of counts, one element per species
  # Convert a matrix into a vector and round:
  cntVec <- round(cntVec)
  if(is.matrix(cntVec) || is.data.frame(cntVec))
    cntVec <- rowSums(cntVec)
  fk <- table(cntVec[cntVec > 0])
  k <- as.numeric(names(fk))
  n <- sum(cntVec)
  # Good's (1953) coverage estimator: C = 1 - f1/n, or 1 if there are no singletons
  C <- if(is.na(fk["1"])) 1 else 1 - fk["1"] / n
  pik <- 1 - (1 - (C * k / n))^n  # (corrected) inclusion probability
  nuk <- 1 / pik - 1              # shadow sps / observed sps
  shadows <- round(fk * nuk)      # est. number of shadow species
  return(sum(fk, shadows))
}
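
# A minimal sketch of the abundance-based estimators with toy counts:
if(FALSE) {
  set.seed(8)
  cnt <- rpois(80, 1.5)   # abundance vector, one entry per species
  richJackA1(cnt)
  richJackA2(cnt)
  richRenLau(cnt)
}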
/scratch/gouwar.j/cran-all/cranData/wiqid/R/richEstimatorsMore.R
# Calculation of rarefaction estimator (Mao Tau) used in EstimateS. # See the EstimateS Users Guide: # http://viceroy.eeb.uconn.edu/estimates/EstimateSPages/EstSUsersGuide/EstimateSUsersGuide.htm # downloaded 2013-03-14. # This corresponds to EstimateS output columns 3 & 6: # Sobs (Mao Tau) # Sobs SD (Mao Tau) # For the confidence interval, just use +/- 1.96 * SD richRarefy <- function(incmat) { # Calculates a sample-based rarefaction curve using Mao's tau estimator # incmat = matrix of incidences or counts, species x occasions # Returns: a matrix with columns for # Mao's tau estimator # approximate standard deviation incmat <- round(incmat) > 0 # turn counts into incidences H <- dim(incmat)[2] # number of samples Sobs <- sum(rowSums(incmat) > 0) # number of sps observed; rows with all zeros don't count # see Eqn 5 in Colwell+ 2004 sj <- table(rowSums(incmat)) # Number of species encountered on 1, 2, 3,.. occasions s1 <- sj["1"] ; if(is.na(s1)) s1 <- 0 # Number of uniques s2 <- sj["2"] ; if(is.na(s2)) s2 <- 0 # Number of duplicates jay <- as.numeric(names(sj)) # To estimate variance, we need an estimate of species richness; Colwell+ 2004 (Eqn 7) # use a form of "Chao2": Stilde <- if(s2 > 0) Sobs + (H-1)*s1^2/(2*H*s2) else Sobs tau <- sigma <- numeric(H) for(h in 1:H) { smalljay <- jay[jay <= (H - h)] # Values of j for which alfjh != 0 alfjh <- c( exp(lfactorial(H - h) + lfactorial(H - smalljay) - lfactorial(H - h - smalljay) - lfactorial(H)), rep(0, length(jay)-length(smalljay))) # ifelse won't work here tau[h] <- Sobs - sum(alfjh*sj) sigma[h] <- sqrt(sum(sj*(1-alfjh)^2) - tau[h]^2/Stilde) } return(cbind(MaoTau = tau, SD = sigma)) }
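
# A minimal sketch of richRarefy; the CI uses +/- 1.96 * SD as noted above:
if(FALSE) {
  set.seed(9)
  incMat <- matrix(rbinom(40 * 12, 1, 0.25), 40, 12)  # species x samples
  mt <- richRarefy(incMat)
  plot(mt[, "MaoTau"], type = "l", xlab = "Samples", ylab = "Species")
  lines(mt[, "MaoTau"] + 1.96 * mt[, "SD"], lty = 3)
  lines(mt[, "MaoTau"] - 1.96 * mt[, "SD"], lty = 3)
}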
/scratch/gouwar.j/cran-all/cranData/wiqid/R/richRarefy.R
# A wrapper for secr::secr.fit to fix the damage done by the 'upgrade' to v. 4. # fastproximity is new in v.4, and reconfigures some models (but not others) so that # they run faster. The default is fastproximity=TRUE. This means that you can have # a set of models where some have been reconfigured, others not. Unfortunately, # AICs are not comparable across these models. Even more unfortunately, secr::AIC # compares them. # This function just facilitates the consistent use of secr.fit with fastproximity=FALSE. # In versions prior to v.4.0.0, fastproximity is silently ignored. secrFit <- function(capthist, model = list(D~1, g0~1, sigma~1), mask = NULL, buffer = NULL, CL = FALSE, detectfn = NULL, ...) { secr::secr.fit(capthist=capthist, model=model, mask=mask, buffer=buffer, CL=CL, detectfn=detectfn, ..., details=list(fastproximity=FALSE)) }
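
# A hedged sketch of secrFit; 'captdata' is assumed to be the demo capthist
# object shipped with the secr package:
if(FALSE) {
  library(secr)
  fit <- secrFit(captdata, buffer = 100)  # same call as secr.fit, but
  fit                                     #  always with fastproximity=FALSE
}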
/scratch/gouwar.j/cran-all/cranData/wiqid/R/secrFit.R
# Run a Shiny App from a Package
# Adapted from code by Jason Bryer (jason@bryer.org) on Github at
#  https://github.com/jbryer/IS606

# This version will only run apps in the wiqid package.

showShinyApp <- function(topic) {
  shinyPath <- file.path(path.package("wiqid"), 'shiny')
  apps <- list.dirs(shinyPath, recursive=FALSE, full.names=FALSE)
  if(missing(topic)) {
    cat("The following topics are available:\n")
    return(apps)
  }
  if(!requireNamespace("shiny", quietly=TRUE))
    stop("You need to install the 'shiny' package to run this function.")
  which <- pmatch(topic, apps)
  if(is.na(which))
    stop("No app for this topic available.")
  appPath <- file.path(path.package("wiqid"), 'shiny', apps[which])
  cat("--- R will freeze while the app is running; press Esc when done. ---\n") ; flush.console()
  try(shiny::runApp(appPath))
}
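
# A minimal sketch; calling with no argument lists the available topics, and
# a topic name (the one below is hypothetical) is partially matched against them:
if(FALSE) {
  showShinyApp()          # lists the topics available
  showShinyApp("Beta")    # assumption: "Beta" is one of the listed topics
}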
/scratch/gouwar.j/cran-all/cranData/wiqid/R/showShinyApp.R
# simplified Gelman convergence diagnostic using Brooks & Gelman's "interval" method. # See Brooks & Gelman (1998) General methods for monitoring convergence of iterative simulations. J Computational and Graphical Statistics, 7, 434-455. p. 441 # This follows WinBUGS in using the central 80% interval as the measure of width (WinBUGS manual p.27). simpleRhat <- function(object, n.chains, burnin=0) { mcmcOutput::getRhat(x=object, bad=NA, sort=FALSE) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/simpleRhat.R
# Functions 'standardize' and 'standardize2match'

# Centre and scale a vector or array and return an object of the same class.
# For an array, the mean and SD of the whole array is used.
# Code modified from the function base::scale.

standardize <- function (x, center = TRUE, scale = TRUE) {
  if (!is.numeric(x))
    stop("'x' must be a numeric vector or array.", call. = FALSE)
  if (length(center) != 1)
    stop("'center' must be logical or numeric of length 1.", call. = FALSE)
  if (length(scale) != 1)
    stop("'scale' must be logical or numeric of length 1.", call. = FALSE)

  if (is.logical(center)) {
    if (center) {
      center <- mean(x, na.rm = TRUE)
      x <- x - center
    }
  } else {
    if (!is.numeric(center))
      stop("'center' must be numeric or logical.", call. = FALSE)
    x <- x - center
  }
  if (is.logical(scale)) {
    if (scale) {
      scale <- sd(x, na.rm=TRUE)
      x <- x / scale
    }
  } else {
    if (!is.numeric(scale))
      stop("'scale' must be numeric or logical.", call. = FALSE)
    x <- x / scale
  }
  return(x)
}
#........................................................................

# Standardize a new numeric object to the same mean and sd as
#   existing output from 'standardize'

standardize2match <- function (x, y) {
  if (!is.numeric(x) || !is.numeric(y))
    stop("'x' and 'y' must be numeric vectors or arrays.", call. = FALSE)
  return((x - mean(y, na.rm=TRUE)) / sd(y, na.rm=TRUE))
}
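
# A minimal sketch of standardize and standardize2match:
if(FALSE) {
  x <- rnorm(10, 50, 10)
  xs <- standardize(x)          # mean 0, SD 1
  mean(xs) ; sd(xs)
  xnew <- c(40, 60)
  standardize2match(xnew, x)    # uses the mean and SD of x
}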
/scratch/gouwar.j/cran-all/cranData/wiqid/R/standardize.R
# Maximum likelihood estimation for Cormack-Jolly-Seber apparent-survival models # of type phi(t) p(t) or with time covariates # Added grouping factor # Uses detection history matrix as input, not m-array. # Uses multinomial likelihood. # Grouping factor strategy: # Do m-array for each group, combine into a 3d array # Do design matrices with a set of occasions for each group # Calculate q-array and likelihood for each group and combine log_qArray <- function(log_phi, log_p, log_1mp) { # Calculates the matrix of multinomial cell log(probabilities) # corresponding to an m-array. # log_phi = vector of log(apparent survival probabilities) # log_p = vector of log(recapture probabilities) # log_1mp = vector of log(1 - recapture probabilities) # NO SANITY CHECKS, calling function must take care of this. n <- length(log_phi) # Create n x n+1 matrix and fill diagonal q <- diag(as.vector(log_p + log_phi), n, n+1) # Fill the upper triangle, and get the row sums sum_probs <- numeric(n) for (i in 1:(n-1)){ for (j in (i+1):n) { q[i,j] <- sum(log_phi[i:j]) + sum(log_1mp[i:(j-1)]) + log_p[j] } sum_probs[i] <- logSumExp(q[i, i:n]) } sum_probs[n] <- q[n, n] # Add the last column and return q[, n+1] <- log1minusExp(sum_probs) return(q) } # .......................................................................... survCJS <- function(DH, model=list(phi~1, p~1), data=NULL, freq=1, group, interval=1, ci = 0.95, link=c("logit", "probit"), ...) { # ** DH is detection history matrix/data frame, animals x occasions. # ** freq is vector of frequencies for each detection history # ** model is a list of 2-sided formulae for psi and p; can also be a single # 2-sided formula, eg, model = psi ~ habitat. # ** data a data frame with the covariates. # ** group is a factor specifying which group each row of DH belongs to. # ** ci is required confidence interval. if(match.arg(link) == "logit") { plink <- plogis } else { plink <- pnorm } # Sanity checks: for DH?? 
ni <- ncol(DH) - 1 # number of survival intervals and REcapture occasions if(!is.null(data) && nrow(data) != ni) stop("The 'data' argument is not a valid data frame.") if(length(freq) == 1) freq <- rep(freq, nrow(DH)) if (length(freq) != nrow(DH)) stop("freq must have a value for each row of the detection history matrix.") if(!missing(group)) { if(length(group) != nrow(DH)) stop("Group must have a value for each row of the detection history matrix.") group <- as.factor(group) nGroup <- nlevels(group) groupNames <- levels(group) data <- as.data.frame(cbind(data, group=rep(groupNames, each=ni))) } else { group <- NULL nGroup <- 1 } if(length(interval) == 1) interval <- rep(interval, ni) if(length(interval) != ni) stop("'interval' must be scalar or length equal to number of intervals between captures") crit <- fixCI(ci) # Convert detection history to 3d array of m-arrays to facilitate use of multinomial likelihood mARRAY <- array(0, c(ni, ni+1, nGroup)) if(nGroup == 1) { mARRAY[, , 1] <- ch2mArray(CH=DH, freq=freq) } else { for(i in 1:nGroup) { DHgrp <- subset(DH, group==groupNames[i]) freqgrp <- subset(freq, group==groupNames[i]) mARRAY[, , i] <- ch2mArray(CH=DHgrp, freq=freqgrp) } } # Standardise the model: model <- stdModel(model, defaultModel=list(phi=~1, p=~1)) # Standardize the data dataList <- stddata(data, NULL) dataList$.Time <- standardize(1:ni) dataList$.Time2 <- dataList$.Time^2 dataList$.Time3 <- dataList$.Time^3 dataList$.time <- as.factor(1:ni) # Set up model matrices phiDf <- selectCovars(model$phi, dataList, ni*nGroup) phiMat <- modelMatrix(model$phi, phiDf) phiK <- ncol(phiMat) pDf <- selectCovars(model$p, dataList, ni*nGroup) pMat <- modelMatrix(model$p, pDf) pK <- ncol(pMat) K <- phiK + pK if(nrow(phiMat) != ni*nGroup || nrow(pMat) != ni*nGroup) stop("Missing values not allowed in covariates.") # Set up objects to hold output beta.mat <- matrix(NA_real_, K, 4) colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI") rownames(beta.mat) <- c( paste("phi:", colnames(phiMat)), paste("p:", colnames(pMat))) lp.mat <- matrix(NA_real_, ni*nGroup*2, 3) colnames(lp.mat) <- c("est", "lowCI", "uppCI") if(nGroup == 1) { rownames(lp.mat) <- c( paste0("phi", 1:ni), paste0("p", 1:ni)) } else { rownames(lp.mat) <- c( paste0(data$group, ":phi", 1:ni), paste0(data$group, ":p", 1:ni)) } npar <- NA_real_ varcov <- NULL # Log likelihood function nll <- function(param){ phiBeta <- param[1:phiK] pBeta <- param[(phiK+1):K] log_phi <- plink(phiMat %*% phiBeta, log.p=TRUE) link_p <- pMat %*% pBeta log_p <- plink(link_p, log.p=TRUE) log_1mp <- plink( -link_p, log.p=TRUE) if(nGroup == 1) { nll <- -sum(mARRAY[, , 1] * log_qArray(log_phi*interval, log_p, log_1mp)) } else { nll <- numeric(nGroup) for(i in 1:nGroup) { log_phi0 <- log_phi[data$group == groupNames[i]] log_p0 <- log_p[data$group == groupNames[i]] log_1mp0 <- log_1mp[data$group == groupNames[i]] nll[i] <- -sum(mARRAY[, , i] * log_qArray(log_phi0*interval, log_p0, log_1mp0)) } } return(min(sum(nll), .Machine$double.xmax)) } # Run mle estimation with nlm: # res <- nlm(nll, param, hessian=TRUE, stepmax=10) # 2015-03-01 nlmArgs <- list(...) nlmArgs$f <- nll nlmArgs$p <- rep(0, K) nlmArgs$hessian <- TRUE if(is.null(nlmArgs$stepmax)) nlmArgs$stepmax <- 10 res <- do.call(nlm, nlmArgs) if(res$code > 2) # exit code 1 or 2 is ok. 
warning(paste("Convergence may not have been reached (nlm code", res$code, ")")) # Process the output beta.mat[,1] <- res$estimate lp.mat[, 1] <- c(phiMat %*% beta.mat[1:phiK, 1], pMat %*% beta.mat[(phiK+1):K, 1]) logLik <- -res$minimum varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE) # if (!inherits(varcov0, "try-error") && all(diag(varcov0) > 0)) { if (!inherits(varcov0, "try-error")) { varcov <- varcov0 SE <- suppressWarnings(sqrt(diag(varcov))) beta.mat[, 2] <- SE beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+") SElp <- c(sqrt(getFittedVar(phiMat, varcov[1:phiK, 1:phiK])), sqrt(getFittedVar(pMat, varcov[(phiK+1):K, (phiK+1):K]))) lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+") npar <- K } # Put it all together and return out <- list(call = match.call(), beta = beta.mat, beta.vcv = varcov, real = plink(lp.mat), logLik = c(logLik=logLik, df=npar, nobs=sum(mARRAY)) ) class(out) <- c("wiqid", "list") return(out) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/survCJS.R
# Estimation of apparent survival - CJS models

# These functions allow for Adult and Juvenile survival to differ,
#  eg. when birds are ringed as nestlings.
# Data should be organised as 2 detection history matrices, one for juveniles and one for
#  adults (when juveniles are recaptured they are already adults).

# qArrayAJ <- function(phi, p, phiJ=phi) {
log_qArrayAJ <- function(log_phi, log_p, log_1mp, log_phiJ=log_phi)  {
  # Calculates the matrix of multinomial cell log(probabilities)
  #  corresponding to an m-array.
  # log_phi = vector of log(apparent survival probabilities)
  # log_phiJ = vector of log(apparent survival probabilities) for juveniles
  # log_p = vector of log(recapture probabilities)
  # log_1mp = vector of log(1 - recapture probabilities)
  # NO SANITY CHECKS, calling function must take care of this.

  n <- length(log_phi)
  # Create n x n+1 matrix and fill diagonal
  q <- diag(as.vector(log_p + log_phiJ), n, n+1)
  # Fill the upper triangle, and get the row sums
  sum_probs <- numeric(n)
  for (i in 1:(n-1)){
    for (j in (i+1):n) {
      q[i,j] <- log_phiJ[i] + sum(log_phi[(i+1):j]) +
                  sum(log_1mp[i:(j-1)]) + log_p[j]
    }
    sum_probs[i] <- logSumExp(q[i, i:n])
  }
  sum_probs[n] <- q[n, n]
  # Add the last column and return
  q[, n+1] <- log1minusExp(sum_probs)
  return(q)
}
# ..........................................................................

survCJSaj <- function(DHj, DHa=NULL, model=list(phiJ~1, phiA~1, p~1), data=NULL,
    freqj=1, freqa=1, ci = 0.95, link=c("logit", "probit"), ...)  {
  # phi(t) p(t) model or models with time covariates for Cormack-Jolly-Seber
  # estimation of apparent survival.
  # ** DHj is detection history matrix/data frame, animals x occasions, for animals
  #    marked as juveniles; DHa (optional) has detection histories for animals marked as adults.
  # ** freqj and freqa are vectors of frequencies for each detection history
  # ** model is a list of 2-sided formulae for phiJ, phiA and p; can also be a single
  #    2-sided formula, eg, model = phiJ ~ habitat.
  # ** data a data frame with the covariates.
  # ** ci is required confidence interval.

  if(match.arg(link) == "logit") {
    plink <- plogis
  } else {
    plink <- pnorm
  }

  # Sanity checks:
  # Check DHj and DHa have same no. of columns ...
  nocc <- ncol(DHj)
  ni <- nocc - 1  # number of survival intervals and REcapture occasions
  if(!is.null(DHa) && ncol(DHa) != nocc)
    stop("'DHa' and 'DHj' must have the same number of columns.")
  if (length(freqj) == 1)
    freqj <- rep(freqj, nrow(DHj))
  # if (length(freqa) == 1)
    # freqa <- rep(freqa, nrow(DHa))  # Not needed
  if(ci > 1 | ci < 0.5)
    stop("ci must be between 0.5 and 1")
  alf <- (1 - ci[1]) / 2
  crit <- qnorm(c(alf, 1 - alf))

  # Deal with grownup juveniles, do m-array for these:
  grown <- DHj
  # Remove first capture
  getFirst <- function(x) min(which(x == 1))
  first <- apply(DHj, 1, getFirst)
  for(i in 1:nrow(grown))
    grown[i, first[i]] <- 0
  marrayA <- ch2mArray(grown, freqj)

  # Do m-array for juvenile juveniles
  ma <- matrix(0, nocc, nocc+1)
  for(i in 1:nrow(DHj)) {
    cht <- which(DHj[i, ] != 0)  # When was animal caught?
    # Fill in release/recapture data
    # we are only interested in the first recapture
    if(length(cht) > 1)
      ma[cht[1], cht[2]] <- ma[cht[1], cht[2]] + freqj[i]
  }
  # Juveniles never seen again:
  ringed <- tapply(freqj, first, sum)
  ma[, nocc+1] <- c(ringed, 0) - rowSums(ma)
  marrayJ <- ma[-nocc, -1]

  # Add data for adults
  if(!is.null(DHa))
    marrayA <- marrayA + ch2mArray(DHa, freqa)

  # Standardise the model:
  model <- stdModel(model, defaultModel=list(phiJ=~1, phiA=~1, p=~1))

  # Standardize the data
  dataList <- stddata(data, NULL)
  dataList$.Time <- as.vector(scale(1:ni)) #/2
  dataList$.Time2 <- dataList$.Time^2
  dataList$.Time3 <- dataList$.Time^3
  dataList$.time <- as.factor(1:ni)

  # Set up model matrices
  phiADf <- selectCovars(model$phiA, dataList, ni)
  phiAMat <- modelMatrix(model$phiA, phiADf)
  phiAK <- ncol(phiAMat)
  phiJDf <- selectCovars(model$phiJ, dataList, ni)
  phiJMat <- modelMatrix(model$phiJ, phiJDf)
  phiJK <- ncol(phiJMat)
  pDf <- selectCovars(model$p, dataList, ni)
  pMat <- modelMatrix(model$p, pDf)
  pK <- ncol(pMat)
  K <- phiAK + phiJK + pK
  parID <- rep(1:3, c(phiAK, phiJK, pK))
  if(nrow(phiAMat) != ni || nrow(phiJMat) != ni || nrow(pMat) != ni)
    stop("Missing values not allowed in covariates.")

  # Objects to hold results
  beta.mat <- matrix(NA_real_, K, 4)
  colnames(beta.mat) <- c("est", "SE", "lowCI", "uppCI")
  rownames(beta.mat) <- c(
    paste("phiA:", colnames(phiAMat)),
    paste("phiJ:", colnames(phiJMat)),
    paste("p:", colnames(pMat)))
  lp.mat <- matrix(NA_real_, ni*3, 3)
  colnames(lp.mat) <- c("est", "lowCI", "uppCI")
  rownames(lp.mat) <- c(
    paste("phiA", 1:ni, sep=""),
    paste("phiJ", 1:ni, sep=""),
    paste("p", 1:ni, sep=""))
  npar <- NA_real_
  varcov <- NULL

  nll <- function(param){
    phiABeta <- param[parID==1]
    phiJBeta <- param[parID==2]
    pBeta <- param[parID==3]
    log_phiA <- plink(phiAMat %*% phiABeta, log.p=TRUE)
    log_phiJ <- plink(phiJMat %*% phiJBeta, log.p=TRUE)
    link_p <- pMat %*% pBeta
    log_p <- plink(link_p, log.p=TRUE)
    log_1mp <- plink( -link_p, log.p=TRUE)
    # Calculate the negative log(likelihood) value:
    return(min(-sum(marrayA * log_qArrayAJ(log_phiA, log_p, log_1mp, log_phiA),   # adults
                    marrayJ * log_qArrayAJ(log_phiA, log_p, log_1mp, log_phiJ)),  # juveniles
               .Machine$double.xmax))
  }

  # Run mle estimation with nlm:
  # res <- nlm(nll, param, hessian=TRUE, stepmax=10)  # 2015-03-01
  nlmArgs <- list(...)
  nlmArgs$f <- nll
  nlmArgs$p <- rep(0, K)
  nlmArgs$hessian <- TRUE
  if(is.null(nlmArgs$stepmax))
    nlmArgs$stepmax <- 10
  res <- do.call(nlm, nlmArgs)
  if(res$code > 2)   # exit code 1 or 2 is ok.
    warning(paste("Convergence may not have been reached (nlm code", res$code, ")"))

  # Organise the output
  beta.mat[,1] <- res$estimate
  lp.mat[, 1] <- c(phiAMat %*% beta.mat[parID==1, 1],
                   phiJMat %*% beta.mat[parID==2, 1],
                   pMat %*% beta.mat[parID==3, 1])
  logLik <- -res$minimum
  varcov0 <- try(chol2inv(chol(res$hessian)), silent=TRUE)
  if (!inherits(varcov0, "try-error")) {
    varcov <- varcov0
    SE <- suppressWarnings(sqrt(diag(varcov)))
    beta.mat[, 2] <- SE
    beta.mat[, 3:4] <- sweep(outer(SE, crit), 1, res$estimate, "+")
    temp <- c(getFittedVar(phiAMat, varcov[parID==1, parID==1]),
              getFittedVar(phiJMat, varcov[parID==2, parID==2]),
              getFittedVar(pMat, varcov[parID==3, parID==3]))
    if(all(temp >= 0)) {
      SElp <- sqrt(temp)
      lp.mat[, 2:3] <- sweep(outer(SElp, crit), 1, lp.mat[, 1], "+")
    }
    npar <- K
  }
  out <- list(call = match.call(),
              beta = beta.mat,
              beta.vcv = varcov,
              real = plink(lp.mat),
              logLik = c(logLik=logLik, df=npar, nobs=sum(marrayJ, marrayA)))
  class(out) <- c("wiqid", "list")
  return(out)
}
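
# A hedged sketch with birds ringed as nestlings (juvenile survival lower
# than adult survival); all parameter values here are toy assumptions:
if(FALSE) {
  set.seed(6)
  n <- 150 ; nocc <- 5
  first <- sample(1:(nocc - 1), n, replace = TRUE)  # occasion of ringing
  DHj <- matrix(0, n, nocc)
  for(i in 1:n) {
    DHj[i, first[i]] <- 1
    alive <- TRUE ; juv <- TRUE
    for(t in (first[i] + 1):nocc) {
      alive <- alive && rbinom(1, 1, ifelse(juv, 0.3, 0.6)) == 1
      juv <- FALSE
      if(alive)
        DHj[i, t] <- rbinom(1, 1, 0.5)
    }
  }
  fit <- survCJSaj(DHj)
  fit$real
}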
/scratch/gouwar.j/cran-all/cranData/wiqid/R/survCJSaj.R
# Function to estimate parameters of Robust Design model
# Robust sensu Kendall et al 1997 p566

# normalising constants ignored, so AIC will not match other software

# With different p and different phi across seasons
# Same number of occasions in each season, model M[0]^t
# Returns estimates of p[i], pStar[i], phi[i], N[i], b[i]

# Precision estimation requires bootstrap: NOT YET DONE.

survRD <- function(DH, freq=1, occsPerSeason) {

  # Do sanity checks here
  if(length(occsPerSeason) > 1)  # For the moment!
    stop("Different occasions per season are not supported: 'occsPerSeason' must be scalar.")
  # if(length(occsPerSeason) == 1) {
    K <- ncol(DH) / occsPerSeason  # Number of seasons
    # occsPerSeason <- rep(occsPerSeason, K)
  # } else {
    # K <- length(occsPerSeason)
  # }
  seasonID <- rep(1:K, each=occsPerSeason)
  if(length(seasonID) != ncol(DH))
    stop("The number of columns of 'DH' does not match the season data.")

  # turn the DH into a season-wise DH and do m-array
  getDHseason <- function(dh)
    tapply(dh, as.factor(seasonID), max)
  DHseason <- t(apply(DH, 1, getDHseason))
  mMat <- ch2mArray(DHseason, freq=freq)

  # For L2, we need only the count of each within-season capture history.
  getDHchar <- function(dh)
    tapply(dh, as.factor(seasonID), paste, collapse="")
  DHchar <- t(apply(DH, 1, getDHchar))
  # What capture histories are possible?
  # This version assumes nOcc same for all seasons ######
  nOcc <- occsPerSeason[1]
  Omega01 <- matrix(0, 2^nOcc, nOcc)
  for(i in 1:nOcc)
    Omega01[, i] <- rep(0:1, each=2^(nOcc-i))
  OmegaCh <- apply(Omega01, 1, paste0, collapse="")
  # Get the count of each capture history for each season
  getX.i <- function(x)  {
    tmp <- factor(x, levels=OmegaCh)
    table(tmp)[-1]
  }
  X.i <- apply(DHchar, 2, getX.i)

  # Function to calculate a vector of probabilities for capture histories
  #   (with constant p)
  getpCond <- function(Omega01, p) {
    tmp <- Omega01 * p + (1 - Omega01) * (1 - p)
    pOmega <- apply(tmp, 1, prod)[-1]
    return(pOmega / sum(pOmega))
  }

  param <- rep(0, K*2 - 1)
  nll <- function(param) {
    p <- plogis(param[1:K])
    pStar <- 1 - (1 - p)^nOcc
    phi <- plogis(param[(K+1):(K*2-1)])
    # logL1 <- sum(mMat * log(qArray(phi, pStar[-1])), na.rm=TRUE)
    logL1 <- sum(mMat * log_qArray(log(phi), log(pStar[-1]), log(1 - pStar[-1])))
    logL2i <- numeric(K)
    for(i in 1:K) {
      pCond <- getpCond(Omega01, p[i])
      logL2i[i] <- sum(X.i[, i] * log(pCond))
    }
    return(min(-logL1 - sum(logL2i), .Machine$double.xmax))
  }

  res <- nlm(nll, param)  ## , hessian=TRUE)

  pHat <- plogis(res$estimate[1:K])
  phiHat <- plogis(res$estimate[(K+1):(2*K-1)])
  pStarHat <- 1 - (1 - pHat)^nOcc       # return this
  n <- as.vector(colSums(DHseason))
  Nhat <- n / pStarHat                  # return this
  # Kendall et al 1995 Eqns 2 and 3:
  # Btilde <- Nhat[-1] - phiHat * Nhat[-K]
  bHat <- Nhat[-1] / Nhat[-K] - phiHat  # return this

  return(list(
    phiHat = phiHat,
    bHat = bHat,
    pStarHat = pStarHat,
    Nhat = Nhat,
    pHat = pHat))
}
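
# A hedged sketch with toy robust-design data (3 seasons of 4 occasions,
# deaths only, no recruitment); only point estimates are returned, since
# precision needs a bootstrap (see note above):
if(FALSE) {
  set.seed(5)
  K <- 3 ; nOcc <- 4 ; N1 <- 100
  alive <- matrix(FALSE, N1, K) ; alive[, 1] <- TRUE
  for(k in 2:K)
    alive[, k] <- alive[, k - 1] & rbinom(N1, 1, 0.8) == 1   # phi = 0.8
  DH <- matrix(0, N1, K * nOcc)
  for(k in 1:K)
    DH[, (k - 1) * nOcc + 1:nOcc] <- rbinom(N1 * nOcc, 1, 0.4) * alive[, k]
  DH <- DH[rowSums(DH) > 0, ]   # keep animals caught at least once
  survRD(DH, occsPerSeason = 4)
}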
/scratch/gouwar.j/cran-all/cranData/wiqid/R/survRD.R
# Function to estimate parameters of Robust Design model # with "ad hoc" strategy: # N and p are estimated beforehand with a closedCap* function survRDah <- function(DH, freq=1, occsPerSeason, N, pStar) { # Do sanity checks here if(length(occsPerSeason) > 1) # For the moment! stop("Different occasions per season are not supported: 'occsPerSeason' must be scalar.") K <- ncol(DH) / occsPerSeason # Number of seasons if(length(N) != K) stop("'N' must have one value per season.") if(length(pStar) != K) stop("'pStar' must have one value per season.") seasonID <- rep(1:K, each=occsPerSeason) if(length(seasonID) != ncol(DH)) stop("The number of columns of 'DH' does not match the season data.") # turn the DH into a season-wise DH and do m-array getDHseason <- function(dh) tapply(dh, as.factor(seasonID), max) DHseason <- t(apply(DH, 1, getDHseason)) mMat <- ch2mArray(DHseason, freq=freq) param <- rep(0, K-1) # Log likelihood function nll <- function(param) { log_phi <- plogis(param, log.p=TRUE) nll <- -sum(mMat * log_qArray(log_phi, log(pStar[-1]), log(1 - pStar[-1]))) return(min(nll, .Machine$double.xmax)) } res <- nlm(nll, param) ## , hessian=TRUE) phiHat <- plogis(res$estimate) bHat <- N[-1] / N[-K] - phiHat # return this return(list( phiHat = phiHat, bHat = bHat, pStarHat = pStar, Nhat = N)) }
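# A minimal usage sketch (commented out). 'DH' is a detection history as for
# survRD(); 'N' and 'pStar' would normally come from a closedCap* fit for each
# season, as noted above -- here they are hypothetical values:
# survRDah(DH, occsPerSeason = 3, N = c(48, 45), pStar = c(0.80, 0.75))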
/scratch/gouwar.j/cran-all/cranData/wiqid/R/survRDah.R
# This file contains utilities used in several places in the code # and NOT exported: # modelMatrix : wrapper for model.matrix, changes "(Intercept)" to "Intrcpt" # getVar0, getFittedVar : get variance for fitted values # getScaling, doScaling, scaleToMatch : functions to deal with scaling # signifish : an alternative to signif (added 10-02-2015) # fixCI : Calculate critical values for CI. # getMARKci : Calculate MARK-style confidence intervals for N # stdModel : Regularize a list of formulae, ensuring it is a named list of one-sided formulae. # stddata : Convert a data frame of site and survey data into a list and standardise # selectCovars : Pull the covars needed for a model matrix into a specific data frame # matchStart : has its own file # fixNames : removed, use 'make.names(., unique=TRUE) instead (2019-04-17) # AICtable moved to file AICc.R # logSumExp etc are now in file UnderOverflow.R # Functions to convert parameters of distributions (eg mean and sd to shape and rate) # are in converters.R # Variants of the t-distribution are in TDist.R # ............................................................................... # Produces a model matrix but the intercept column is named "Intrcpt" without # parentheses modelMatrix <- function(formula, data, ...) { mm <- model.matrix(formula, data, ...) colnames(mm)[colnames(mm) == "(Intercept)"] <- "Intrcpt" return(mm) } # ............................................................................... # Functions to calculate the variance of fitted values from model matrix and var-covar matrix. # added 2017-10-16 # Output of getFittedVar is equivalent to # diag(MM %*% varcov %*% t(MM)) # but does not require calculation of the full matrix, which can be huge. getVar0 <- function(x, vcv) x %*% vcv %*% x # x : one row of a model matrix # vcv : variance-covariance matrix getFittedVar <- function(MM, vcv) apply(MM, 1, getVar0, vcv=vcv) # MM : a model matrix # vcv : variance-covariance matrix # ............................................................................... # Functions to deal with scaling, can be used with s/lapply getScaling <- function(x, scaleBy) c(mean(x, na.rm=TRUE), sd(x, na.rm=TRUE) / scaleBy) doScaling <- function(x, scaleBy) if(is.numeric(x)) (x - mean(x, na.rm=TRUE)) / sd(x, na.rm=TRUE) * scaleBy else x # This takes a whole data frame scaleToMatch <- function(target, scaling) { for(i in seq_along(target)) { if(is.numeric(target[[i]])) { pos <- match(names(target)[i], names(scaling)) if(!is.na(pos)) { sc <- scaling[[pos]] target[[i]] <- (target[[i]] - sc[1]) / sc[2] } } } return(target) } # ............................................................................... # A more sensible version of signif signifish <- function(x, digits=3) ifelse(x < 10^digits, signif(x, digits=digits), round(x)) # ............................................................................... # Deal with confidence interval specification: fixCI <- function(ci) { if(ci > 1 | ci < 0.5) stop("ci must be between 0.5 and 1") alf <- (1 - ci[1]) / 2 return(qnorm(c(alf, 1 - alf))) } # ..................................................................... # Function to calculate the MARK-style confidence intervals for N # See help for Closed Captures getMARKci <- function(beta, SE.beta, ci) { f0.hat <- exp(beta) crit <- qnorm((1 - ci[1]) / 2, lower.tail=FALSE) C <- exp(crit * sqrt(log(1 + SE.beta^2))) # See the Burnham et al reference, p212! 
return(c(f0.hat, f0.hat/C, f0.hat*C)) } # ......................................................................... ## Regularize a list of formulae, ensuring it is a named list of one-sided formulae. # New version: stdModel <- function (model1, defaultModel) { if(is.null(model1)) return(defaultModel) if(inherits(model1, "formula")) model1 <- list(model1) if(!is.list(model1)) stop("The 'model' argument must be a formula or a list of formulae.") LHS <- function (form) { trms <- as.character (form) if (length(trms)==2) '' else trms[2] } RHS <- function (form) { trms <- as.character (form) if (length(trms)==3) as.formula(paste(trms[c(1,3)], collapse=" ")) else form } lhs <- sapply(model1, LHS) temp <- lapply(model1, RHS) if (is.null(names(model1))) { names(temp) <- lhs } else { names(temp) <- ifelse(names(model1) == '', lhs, names(model1)) } newModel <- replace (defaultModel, names(temp), temp) return(newModel) } # ............................................................................. ## Convert a data frame of site and survey data into a list # ** Site covars will each have a single column in the data frame, # ** survey covars will have a column for each survey occasion, and # column names end with the number of the occasion, eg, temperature # will be in columns named "temp1", "temp2", etc. stddata <- function(df, nocc=NULL, scaleBy=1) { if (is.null(df)) return(NULL) if(!is.data.frame(df)) stop("The 'data' argument must be a data frame.") dataList <- as.list(df) ## Group variables spread over > 1 column into a single vector if (!is.null(nocc)) { nocc <- sort(nocc, decreasing=TRUE) # start with biggest for (this.nocc in nocc) { # look for names ending with number of occasions nam <- names(df) clue <- paste0(this.nocc, "$", collapse="") clueDo <- grep(clue, nam) if(length(clueDo) > 0) { for(i in clueDo) { # get stem, generate set of names stem <- sub(clue, "", nam[i]) subnames <- paste0(stem, 1:this.nocc) subtable <- df[, subnames] # check that there's a column for each occasion if(ncol(subtable) != this.nocc) stop("Survey covariates must have 1 column for each survey occasion.") # check that all have same class classes <- sapply(subtable, class) if(length(unique(classes)) != 1) stop("All columns of the survey covariates must have the same class (all factor or all numeric).") # remove original columns from the list: dataList <- replace(dataList, subnames, NULL) # convert to a matrix, then a vector; # fortunately this also converts factors to character tmp <- as.vector(as.matrix(subtable)) if(is.character(tmp)) tmp <- as.factor(tmp) # convert to factor AFTER combining columns dataList <- c(dataList, list(tmp)) names(dataList)[length(dataList)] <- stem } } } } ## Standardize numeric variables to mean = 0 and sd = scaleBy if (!is.null(scaleBy)) { doScale <- function(x) { if (is.numeric(x)) x <- standardize(x) * scaleBy return(x) } dataList <- lapply(dataList, doScale) } return(dataList) } # ........................................................................... # Pull the covars needed for a model matrix into a specific data frame selectCovars <- function(formula, dataList, minrows) { wanted <- rownames(attr(terms(formula), "factors")) found <- wanted %in% names(dataList) wanted <- wanted[found] if (length(wanted) > 0) { df <- as.data.frame(dataList[wanted]) df <- cbind(df, .dummy = rep(NA, minrows)) } else { df <- data.frame(.dummy = rep(NA, minrows)) } stopifnot(nrow(df) %% minrows == 0) return(df) } # ........................................................
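# Quick illustrations of two of the helpers above (commented out, for
# reference only):
# signifish(c(3.14159, 123456.7))
# # -> 3.14 and 123457: signif() below 10^digits, round() above
# stdModel(list(p ~ .Time), defaultModel = list(psi = ~1, p = ~1))
# # -> list(psi = ~1, p = ~.Time): the two-sided formula replaces the
# #    default for 'p', leaving the other components untouched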
/scratch/gouwar.j/cran-all/cranData/wiqid/R/utils.R
# Performs partial matching, finding the elements in 'big' # where the start of 'big' matches 'little'. # eg. matchStart(c("Ju", "Nov", "J"), month.name) returns 6, 7, 11, 1. # June and July match both 'Ju' and 'J', but only the first match is returned. # The order of the output depends on 'little'. # Does not use 'grep', and brackets "[ ]" are treated as normal characters. # Returns integer(0) if no matches. # 'little' may also be a vector of numerical indices, which will be checked and returned. matchStart <- function(little, big) { if(is.numeric(little)) { little <- round(little) if(all(little < 0)) return(seq_along(big)[little]) out <- little[little > 0 & little <= length(big)] } else { little <- as.character(little) out <- NULL for(i in seq_along(little)) { nc <- nchar(little[i]) big1 <- substr(big, 1, nc) out <- c(out, which(big1 == little[i])) } } unique(out) }
/scratch/gouwar.j/cran-all/cranData/wiqid/R/utils_matchStart.R
# Functions to sanity-check different arguments to 'wiqid' functions. # Not exported. # Check detection histories # ------------------------- verifyDH <- function(DH, allowNA=FALSE, allowNArow=FALSE) { if (!is.matrix(DH) && !is.data.frame(DH)) stop("Detection history must be a matrix or data frame") DH <- as.matrix(DH) if(!is.numeric(DH)) stop("Detection history has non-numeric values.") if(!allowNA && any(is.na(DH))) stop("Detection history has NA values.") if(!any(is.finite(DH))) stop("Detection history has no non-missing values.") n <- rowSums(!is.na(DH)) if(!allowNArow && any(n == 0)) stop("Detection history has a row with all NAs.") # added 2017-10-17 DH <- round(DH) range <- range(DH, na.rm=TRUE) if(range[1] < 0) stop("Detection history has negative values.") if(range[2] > 1) stop("Detection history has values > 1.") return(DH) }
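# Quick illustration (commented out):
# DH <- matrix(c(1, 0, NA, 1), 2, 2)
# verifyDH(DH, allowNA = TRUE)   # ok, returns the (rounded) matrix
# verifyDH(DH)                   # error: "Detection history has NA values."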
/scratch/gouwar.j/cran-all/cranData/wiqid/R/verify.R
# Various methods for class wiqid, i.e. output of wiqid MLE functions.

# coef and vcov added 2017-02-09
coef.wiqid <- function(object, ...) {
  object$beta[,1]
}

vcov.wiqid <- function(object, ...) {
  object$beta.vcv
}

print.wiqid <- function(x, digits=4, ...)  {
  cat("Call: ")
  print(x$call)
  if(anyDuplicated(x$real) > 0) {
    cat("\nReal values (duplicates omitted):\n")
    print(unique(x$real), digits=digits, ...)
  } else {
    cat("\nReal values:\n")
    print(x$real, digits=digits, ...)
  }
  cat("\nAIC:", AIC(x), "\n")
}

logLik.wiqid <- function(object, ...) {
  tmp <- as.vector(object$logLik)
  ll <- tmp[1]
  attr(ll, 'df') <- tmp[2]
  attr(ll, 'nobs') <- tmp[3]
  class(ll) <- "logLik"
  return(ll)
}

nobs.wiqid <- function(object, ...)  {
  nobs <- as.numeric(object$logLik)[3]
  # Subsetting with [3] yields NA (never NULL) when the element is absent,
  # so test for NA:
  return(if(is.na(nobs)) NA_integer_ else nobs)
}
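# These methods let base generics work directly on 'wiqid' fits, e.g.
# (commented out; 'fit' is a hypothetical fitted model object):
# coef(fit); vcov(fit)    # estimates and var-covar matrix on the link scale
# logLik(fit); AIC(fit)   # AIC works via the logLik method above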
/scratch/gouwar.j/cran-all/cranData/wiqid/R/wiqid_methods.R
# Helper functions for shiny app "visBeta"
# =======================================

# The plotting function
# ---------------------
visBeta <- function (shapes, data, showPost=FALSE, npoints=100, param) {
  # if(length(shapes) < 1)
  #   shapes <- c(0, 0)
  shapes <- pmax(shapes, 0.0001)

  ## Calculation of the binomial likelihood curve
  xx <- c(0, seq(0.0002, 0.9998, length=npoints-2), 1)
  if(data[2] > 0)  {
    binom.curve <- dbinom(data[1], data[2], xx) * (data[2]+1)
    binom.max <- max(binom.curve)
  } else {
    binom.max <- 0
  }
  ## Calculation of the posterior curve
  if(data[2] > 0 && showPost == 1)  {
    post.curve <- pmin(dbeta(xx, shapes[1]+data[1], shapes[2]+diff(data)), 1e100)
    post.mean <- (shapes[1]+data[1]) / (sum(shapes)+data[2])
    post.max <- min(max(post.curve), 20)
  } else {
    post.max <- 0
  }
  beta.curve <- pmin(dbeta(xx, shapes[1], shapes[2]), 1e100)
  beta.max <- min(max(beta.curve), 20)
  mu <- shapes[1] / sum(shapes)

  ylim <- range(0, beta.max, binom.max, post.max)
  plot(0:1, ylim, type='n', las=1,
    xlab = expression(theta), ylab = "Probability density")
  segments(0,0,1,0, col='lightgrey')
  if(data[2] > 0) {
    lines(xx, binom.curve, lwd=3, col='lightgrey')
    points(rep(data[1]/data[2], 2), c(0, binom.max), col='lightgrey', pch=19, cex=2)
  }
  lines(xx, beta.curve, col='red', lwd=2)
  if(data[2] > 0 && showPost == 1) {
    lines(xx, post.curve, lwd=2, lty=2, col='blue')
    abline(v=post.mean, col='blue', lty=4)
  }
  abline(v=mu, col='red', lty=3)
  msg <- if(param == "useMode") {"Prior controlled by Mode and Concentration"
    } else {"Prior controlled by the shape parameters"}
  mtext(msg, 3)
}
# ......................................................................

# The results table function
# --------------------------
resultsBeta <- function (shapes, data, showPost=FALSE) {
  shapes <- pmax(shapes, 0.0001)
  priorMean <- round(shapes[1] / sum(shapes), 2)
  # SD of a beta(a, b) is sqrt(a*b / ((a+b)^2 * (a+b+1))); the sqrt must
  # cover the whole ratio, as in the posterior SD calculation below:
  priorSD <- round(sqrt(prod(shapes) / (sum(shapes)^2 * (sum(shapes) + 1))), 2)
  priorMode <- round((shapes[1] -1) / sum(shapes -1), 2)
  priorConc <- round(sum(shapes), 2)
  res <- matrix(c("Beta prior(shape1, shape2)",
    as.character(c(shapes, priorMean, priorSD, priorMode, priorConc))), nrow=1)
  if(data[2] > 0) {
    fail <- diff(data)
    likMode <- round(data[1] / data[2], 2)
    res <- rbind(res,
      as.character(c("Binomial data (successes, failures)", data[1], fail,
        "", "", likMode, "")))
    if(showPost) {
      postShape1 <- shapes[1] + data[1]
      postShape2 <- shapes[2] + fail
      postMean <- round(postShape1 / (postShape1 + postShape2), 2)
      postSD <- round(sqrt((postShape1 * postShape2) /
          ((postShape1+postShape2)^2 * (postShape1 + postShape2 + 1))), 2)
      postMode <- round((postShape1-1) / (postShape1 + postShape2-2), 2)
      postConc <- round(postShape1+postShape2, 2)
      res <- rbind(res,
        as.character(c("Posterior beta(shape1, shape2)", postShape1, postShape2,
          postMean, postSD, postMode, postConc)))
    }
  }
  # Convert to data frame
  resDF <- as.data.frame(res, stringsAsFactors=FALSE)
  colnames(resDF) <- c("Component", "Param1", "Param2", "Mean", "SD", "Mode", "Concentration")
  return(resDF)
}
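# Stand-alone check outside shiny (commented out): a beta(2, 2) prior with
# 3 successes in 5 trials, posterior displayed:
# visBeta(shapes = c(2, 2), data = c(3, 5), showPost = TRUE, param = "useShape")
# resultsBeta(shapes = c(2, 2), data = c(3, 5), showPost = TRUE)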
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Beta/helper_visBeta.R
# visBeta library(shiny) source("helper_visBeta.R") shinyServer(function(input, output, session) { observe({ val <- input$maxValue # Control the maximum value of the data and Concentration sliders. updateSliderInput(session, "Conc", max = val) updateSliderInput(session, "data", max = val) }) shapes <- reactive({ if(input$param == "useMode") { sh <- c(input$Mode * (input$Conc-2) + 1, (1-input$Mode) * (input$Conc-2) + 1) } else { sh <- c(input$Shape1, input$Shape2) } if(length(sh) < 2) sh <- c(0, 0) sh }) output$distPlot <- renderPlot({ # draw a beta curve visBeta(shapes = shapes(), data=input$data, showPost = input$showPost, param = input$param) }) results <- reactive({ resultsBeta(shapes = shapes(), data = input$data, showPost = input$showPost) }) # Show the values using an HTML table output$results <- renderTable({ results() }) })
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Beta/server.R
# code for shiny app "visBeta" this version with radio buttons shinyUI(fluidPage( titlePanel("Beta priors"), sidebarLayout( sidebarPanel( radioButtons("param", "Parameters to use:", c("Mode+Concentration"="useMode", "Shapes"="useShape"), inline=TRUE), sliderInput("Mode", label = "Prior mode:", min = 0, max = 1, step = 0.01, value = 0.2), sliderInput("Conc", label = "Prior concentration:", min = 2, max = 20, step = 0.1, value = 5), sliderInput("Shape1", label = "Prior shape1:", min = 0, max = 10, step = 0.1, value = 0.8), sliderInput("Shape2", label = "Prior shape2:", min = 0, max = 10, step = 0.1, value = 0.8), hr(), sliderInput("data", label = "Data: number of successes and trials:", min = 0, max = 10, step = 1, value = c(0, 0)), checkboxInput("showPost", label = "Display the posterior", value = FALSE), sliderInput("maxValue", label = "Change upper limits for trials and concentration", min = 10, max = 500, step = 10, value = 10), p(em("Code by Mike Meredith"), align="center") ), mainPanel( h3("Display beta distributions, binomial likelihoods, and beta posteriors"), p("Choose which parameters to use, then control the curve with the mode and concentration sliders, or the shape sliders."), plotOutput("distPlot"), tableOutput("results"), h3("Notes"), p("The mode+concentration parameterisation cannot display U-shaped forms of the beta distribution; use the shape parameters for that."), p("The beta distribution has two shape parameters, usually denoted by 'alpha' and 'beta', but here we use the R argument names, 'shape1' and 'shape2'."), p("Both shape parameters must be > 0; when the slider is at 0, the value used is 0.0001"), p("The data consist of the number of trials and the number of successes, eg, 3 heads in 5 coin tosses, or a species observed on 3 visits to a site out of 5."), p("The likelihood curve is scaled so that the area under the curve = 1. This is done by multiplying the output from dbinom by the number of trials + 1."), br() ) ) ))
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Beta/ui.R
# Helper functions for shiny app "visGamma" # ========================================= # The plotting function # --------------------- visGamma <- function (shape=2, rate=2, count=0, units=0, showPost=FALSE, npoints=100, xmax=10, param) { shape <- max(shape, 0.0001) rate <- max(rate, 0.0001) if(units > 0) xmax <- max(xmax, round(2 * count / units)) ## Calculation of the Poisson likelihood curve xx <- c(0, seq(0.0002, xmax, length=npoints - 1)) if(units > 0) { lik.curve <- dpois(count, xx*units) * units lik.max <- max(lik.curve) } else { lik.max <- 0 } ## Calculation of the posterior curve if(units > 0 && showPost) { post.curve <- pmin(dgamma(xx, shape+count, rate+units), 1e100) post.max <- min(max(post.curve), 3) post.mean <- (shape+count) / (rate+units) } else { post.max <- 0 } prior.curve <- pmin(dgamma(xx, shape, rate), 1e100) prior.max <- min(max(prior.curve), 3) prior.mean <- shape / rate ylim <- range(0, prior.max, lik.max, post.max) plot(c(0, xmax), ylim, type='n', las=1, xlab = expression(lambda), ylab = "Probability density") segments(0,0,xmax,0, col='lightgrey') if(units > 0) { lines(xx, lik.curve, lwd=3, col='lightgrey') points(rep(count/units, 2), c(0, lik.max), col='lightgrey', pch=19, cex=2) } lines(xx, prior.curve, col='red', lwd=2) if(units > 0 && showPost == 1) { lines(xx, post.curve, lwd=2, lty=2, col='blue') abline(v=post.mean, col='blue', lty=4) } abline(v=prior.mean, col='red', lty=3) msg <- if(param == "useMode") { "Prior controlled by Mode and Rate" } else { "Prior controlled by Shape and Rate"} mtext(msg, 3) } # ...................................................................... # The results table function # -------------------------- resultsGamma <- function (shape=2, rate=2, count=0, units=0, showPost=FALSE) { shape <- max(shape, 0.0001) rate <- max(rate, 0.0001) priorMean <- round(shape / rate, 2) priorSD <- round(sqrt(shape) / rate, 2) priorMode <- round(max((shape-1)/rate, 0), 2) res <- matrix(c("Gamma prior(shape, rate)", as.character(c(shape, rate, priorMean, priorSD, priorMode))), nrow=1) if(units > 0) { res <- rbind(res, c("Poisson data (count, units)", as.character(c(count, units)), "", "", round(count/units,2))) if(showPost) { postShape <- shape + count postRate <- rate + units postMean <- round(postShape / postRate, 2) postSD <- round(sqrt(postShape) / postRate, 2) postMode <- round(max((postShape-1)/postRate, 0), 2) res <- rbind(res, c("Posterior gamma(shape, rate)", as.character(c(postShape, postRate, postMean, postSD, postMode)))) } } # Convert to data frame resDF <- as.data.frame(res, stringsAsFactors=FALSE) colnames(resDF) <- c("Component", "Param1", "Param2", "Mean", "SD", "Mode") return(resDF) }
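# Stand-alone check outside shiny (commented out): a gamma(2, 1) prior with a
# count of 10 over 6 sample units, posterior displayed:
# visGamma(shape = 2, rate = 1, count = 10, units = 6, showPost = TRUE, param = "useShape")
# resultsGamma(shape = 2, rate = 1, count = 10, units = 6, showPost = TRUE)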
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Gamma/helper_visGamma.R
# code for shiny app "visGamma" library(shiny) source("helper_visGamma.R") shinyServer(function(input, output, session) { observe({ val <- input$maxValue # Control the maximum value of units and counts sliders. updateSliderInput(session, "units", max = val) # , value=min(val, input$units)) updateSliderInput(session, "count", max = val) # , value=min(val, input$count)) }) observe({ max <- 10 if(input$units > 0) max <- max(10, round(2 * input$count / input$units)) # Control the maximum value of the other sliders. updateSliderInput(session, "Mode", max = max, value=min(max, input$Mode)) updateSliderInput(session, "Shape", max = max, value=min(max, input$Shape)) }) shape <- reactive({ if(input$param == "useMode") { input$Mode * input$Rate + 1 } else { input$Shape } }) output$distPlot <- renderPlot({ # draw a gamma curve visGamma(shape = shape(), rate = input$Rate, count = input$count, units = input$units, showPost = input$showPost, param = input$param) }) results <- reactive({ resultsGamma(shape = shape(), rate = input$Rate, count = input$count, units = input$units, showPost = input$showPost) }) # Show the values using an HTML table output$results <- renderTable({ results() }) })
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Gamma/server.R
# code for shiny app "visGamma" this version with radio buttons shinyUI(fluidPage( titlePanel("Gamma priors"), sidebarLayout( sidebarPanel( radioButtons("param", "Parameters to use:", c("Mode+Rate"="useMode", "Shape+Rate"="useShape"), inline=TRUE), sliderInput("Mode", label = "Prior mode:", min = 0, max = 10, step = 0.1, value = 2), sliderInput("Rate", label = "Prior rate:", min = 0, max = 10, step = 0.1, value = 1), sliderInput("Shape", label = "Prior shape:", min = 0, max = 10, step = 0.1, value = 1), hr(), sliderInput("units", label = "Data: number of sample units", min = 0, max = 10, step = 1, value = 0), sliderInput("count", label = "Data: total count", min = 0, max = 10, step = 1, value = 0), checkboxInput("showPost", label = "Display the posterior", value = FALSE), sliderInput("maxValue", label = "Change upper limits for data", min = 10, max = 500, step = 10, value = 10), p(em("Code by Mike Meredith"), align="center") ), mainPanel( h3("Display gamma distributions, Poisson likelihoods, and gamma posteriors"), p("Choose which parameters to use, then control the curve with the rate slider and either the mode or shape sliders."), plotOutput("distPlot"), tableOutput("results"), h3("Notes"), p("The rate and shape must be > 0; when the slider is at 0, the value used is 0.0001"), p("The data consist of the number of units in the sample and the total count, eg, 10 goals in 6 football matches, or 10 ticks on 6 rats examined."), p("The likelihood curve is scaled so that the area under the curve = 1. This is done by multiplying the output from dpois by the number of units."), p("The mode of the likelihood curve is the Maximum Likelihood Estimate of the rate, lambda."), br() ) ) ))
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Gamma/ui.R
# Helper functions for shiny app "visQuad"
# ========================================

# The plotting function
# ---------------------
visQuad <- function (beta0, beta1, beta2, npoints=100) {

  xx <- seq(-1, 1, length=npoints)
  y2 <- beta0 + beta1*xx + beta2*xx^2
  y1 <- beta0 + beta1*xx
  plot(xx, y2, type='l', las=1, ylim=c(-2, 2),
    xlab = "x", ylab = "y")
  lines(xx, y1, col='red')   # the linear component, for comparison
  abline(h = beta0, v=0, lty=3)
  if(beta2 != 0) {
    # Mark the vertex (minimum or maximum) of the parabola
    xmin <- -beta1 / (2*beta2)
    ymin <- beta0 + beta1*xmin + beta2*xmin^2
    points(xmin, ymin, cex=2)
  }
  msg <- sprintf("%1.1f %+1.1fx %+1.1f", beta0, beta1, beta2)
  title(main = bquote(y == .(msg)*x^2))
}
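# Stand-alone check outside shiny (commented out): an inverted parabola with
# its vertex marked:
# visQuad(beta0 = 0.5, beta1 = 1, beta2 = -2)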
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Quadratic/helper_visQuad.R
# visQuad

library(shiny)

source("helper_visQuad.R")

shinyServer(function(input, output, session) {

  output$distPlot <- renderPlot({
    # draw the quadratic curve
    visQuad(beta0 = input$beta0, beta1 = input$beta1,
      beta2 = input$beta2)
  })
})
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Quadratic/server.R
# code for shiny app "visQuad" shinyUI(fluidPage( titlePanel("Quadratic functions"), sidebarLayout( sidebarPanel( sliderInput("beta0", label = "Intercept:", min = -2, max = 2, step = 0.1, value = 0), sliderInput("beta1", label = "Linear coefficient:", min = -10, max = 10, step = 0.1, value = 1), sliderInput("beta2", label = "Quadratic coefficient:", min = -10, max = 10, step = 0.1, value = 0) ), mainPanel( plotOutput("distPlot") ) ) ))
/scratch/gouwar.j/cran-all/cranData/wiqid/inst/shiny/Quadratic/ui.R
#' Give a [function] [`...`][dots] if it does not have it #' #' Adds [`...`][dots] to a [closure]'s [`args`] if it does not have it already. #' #' If `f` already has [`...`][dots] in its [`args`], then it is returned with no #' changes. Otherwise, [`...`][dots] is added to `f`'s [formals] and then `f` is #' returned. See **Handling of [primitive]s** below. #' #' @section How [`...`][dots] is added to [closure]s: These are the steps that #' `withdots()` takes **only** if `f` is a [closure] without [`...`][dots] in #' its [`formals`]: #' #' 1. [`attributes`]`(f)` are temporarily saved and set aside. #' #' 1. If there is a [`srcref`] [`attribute`][attr] among the set-aside #' [`attributes`]`(f)`, it is removed (see **Why the [`srcref`] #' [`attribute`][attr] is removed** below). #' #' 1. [`...`][dots] is added to the [`formals`] of `f` using [`formals<-`]. #' #' 1. The remaining set-aside [`attributes`] are added back to `f` with #' [`attributes<-`]. #' #' 1. `f` is returned. #' #' @section Handling of [primitive]s: If `f` is [primitive] and already has #' [`...`][dots] in its [`args`] (e.g., [c()], [rep()], [max()]), then it is #' returned as is. #' #' If `f` is [primitive] and does **not** have [`...`][dots] in its [`args`], #' then an error will be thrown. The user can bypass this error by processing #' `f` with [rlang::as_closure()] before passing it to `withdots()`. #' **However, keep in mind that the argument matching behavior of the #' resulting [closure] may be different from what is expected, since #' [primitive]s may use nonstandard argument matching.** #' #' @section Why the [`srcref`] [`attribute`][attr] is removed: Many #' [function]s---including those created with [function()]---have a [`srcref`] #' [`attribute`][attr]. When a [function] is [print]ed, [print.function()] #' relies on this [`attribute`][attr] by default to depict the [function]'s #' [formals] and [body]. #' #' `withdots()` adds [`...`][dots] via [`formals<-`], which expressly drops #' [`attributes`] (see its [documentation page][formals<-]). To prevent this #' loss, `withdots()` sets [`attributes`]`(f)` aside at the beginning and #' re-attaches them to `f` at the end. Normally, this would re-attach the #' original `f`'s [`srcref`] [`attribute`][attr] to the new `f`, making it so #' that the newly added [`...`][dots] would not be depicted when the new `f` #' is [print]ed. For this reason, the old [`srcref`] [`attribute`][attr] is #' dropped, and only the remaining [`attributes`] are re-attached to the new #' `f`. #' #' Observe what would happen during [print]ing if **all** original #' [`attributes`]`(f)` were naively added to the modified `f`: #' #' ```{r naive_withdots} #' # Create a function with no dots: #' foo <- function(a = 1) { #' # Helpful comment #' a #' } #' #' # Give it important attributes that we can't afford to lose: #' attr(foo, "important_attribute") <- "crucial information" #' class(foo) <- "very_special_function" #' #' # Print foo, which also prints its important attributes: #' foo #' #' # Save its attributes: #' old_attributes <- attributes(foo) #' #' # Add dots: #' formals(foo)[["..."]] <- quote(expr = ) #' #' # See that the important attributes have been dropped: #' foo #' #' # Add the attributes back: #' attributes(foo) <- old_attributes #' #' # Print it again, and we see that the attributes have returned. #' # However, the ... disappears from the argument list. 
#' foo #' #' # We know the actual function definitely has dots, since it can handle #' # extraneous arguments: #' foo(1, 2, junk, "arguments", NULL) #' #' # Remove the "srcref" attribute, and the function is printed accurately. #' # Furthermore, its important attributes are intact: #' attr(foo, "srcref") <- NULL #' foo #' #' # Success (although the comments in the body() of the function are lost) #' ``` #' #' @param f A [function]. See **Handling of [primitive]s** in case `f` is #' [primitive]. #' #' @return If `f` has [`...`][dots] in its [`args`], then `f`. #' #' Otherwise, a [closure]: a tweaked version of `f`, whose only differences #' are: #' #' 1. [`...`][dots] has been appended to the end of its [`formals`], and #' #' 1. any [`srcref`] [`attribute`][attr] has been removed (see **Why the #' [`srcref`] [`attribute`][attr] is removed** below). #' #' @examples #' # The base::match() function has no ... and can't handle extraneous arguments #' if (FALSE) { #' match("z", letters, cannot_handle_ = "junk arguments") #' } #' #' # But if we give it dots... #' match_with_dots <- withdots(match) #' #' # ...it can now handle extraneous arguments: #' match_with_dots("z", letters, can_now_handle = "junk arguments") #' @export withdots <- function(f) { if (!is.function(f)) { stop("f must be a function.", "\nConsider passing to rlang::as_function() first.", call. = FALSE) } if (is.primitive(f)) { args_fn <- args(f) if (!is.function(args_fn) || !any(names(formals(args_fn)) == "...")) { stop("f must be a closure (non-primitive) or a primitive with a", "\nwell-defined argument list that already contains ...", "\nConsider passing f to rlang::as_closure() first.", call. = FALSE) } return(f) } if (any(names(formals(f)) == "...")) { return(f) } a <- attributes(f) a[["srcref"]] <- NULL formals(f)[["..."]] <- quote(expr = ) attributes(f) <- a f }
/scratch/gouwar.j/cran-all/cranData/withdots/R/withdots.R
# From https://github.com/r-lib/rlang/blob/main/R/aaa.R on_load <- function(expr, env = parent.frame(), ns = topenv(env)) { expr <- substitute(expr) force(env) callback <- function() { # Evaluate with promise semantics rather than `base::eval()` do <- NULL do.call(delayedAssign, list("do", expr, env)) do } ns$.__rlang_hook__. <- c(ns$.__rlang_hook__., list(callback)) } run_on_load <- function(ns = topenv(parent.frame())) { hook <- ns$.__rlang_hook__. rm(envir = ns, list = ".__rlang_hook__.") # FIXME: Transform to `while` loop to allow hooking into on-load # from an on-load hook? for (callback in hook) { callback() } } .onLoad <- function(...) { run_on_load() } the <- new.env() on_package_load <- function(pkg, expr, env = parent.frame()) { expr <- substitute(expr) force(env) run <- function(...) { # Evaluate with promise semantics rather than `base::eval()` do <- NULL do.call(delayedAssign, list("do", expr, env)) do } # Always register hook in case package is later unloaded & reloaded setHook(packageEvent(pkg, "onLoad"), run) # For compatibility with R < 4.0 where base isn't locked is_sealed <- function(pkg) { identical(pkg, "base") || environmentIsLocked(asNamespace(pkg)) } # Run right away if package is already loaded but only if its # namespace is locked. The registering package might be a dependency # of `package`. In that case, `package` might not be fully populated # yet (#1225). if (isNamespaceLoaded(pkg) && is_sealed(pkg)) { run() } }
/scratch/gouwar.j/cran-all/cranData/withr/R/aaa.R
#' Collation Order #' #' Temporarily change collation order by changing the value of the #' `LC_COLLATE` locale. #' #' @template with #' @param new `[character(1)]`\cr New collation order #' @param .local_envir `[environment]`\cr The environment to use for scoping. #' @examples #' #' # Modify collation order: #' x <- c("bernard", "bérénice", "béatrice", "boris") #' with_collate("fr_FR", sort(x)) #' with_collate("C", sort(x)) #' #' @export with_collate <- function(new, code) { with_locale(c(LC_COLLATE = new), code) } #' @rdname with_collate #' @export local_collate <- function(new = list(), .local_envir = parent.frame()) { local_locale(c(LC_COLLATE = new), .local_envir = .local_envir) }
/scratch/gouwar.j/cran-all/cranData/withr/R/collate.R
#' Connections which close themselves #' #' R file connections which are automatically closed. #' #' @template with #' @param con For `with_connection()` a named list with the connection(s) to #' create. For `local_connection()` the code to create a single connection, #' which is then returned. #' @param .local_envir `[environment]`\cr The environment to use for scoping. #' @examples #' with_connection(list(con = file("foo", "w")), { #' writeLines(c("foo", "bar"), con) #' }) #' #' read_foo <- function() { #' readLines(local_connection(file("foo", "r"))) #' } #' read_foo() #' #' unlink("foo") #' @export with_connection <- function(con, code) { stopifnot(all(is.named(con))) on.exit({ for (connection in con) close(connection) }) eval(substitute(code), envir = con, enclos = parent.frame()) } #' @rdname with_connection #' @export local_connection <- function(con, .local_envir = parent.frame()) { defer(close(con), envir = .local_envir) con }
/scratch/gouwar.j/cran-all/cranData/withr/R/connection.R
#' DBMS Connections which disconnect themselves. #' #' Connections to Database Management Systems which automatically disconnect. In #' particular connections which are created with `DBI::dbConnect()` and closed #' with `DBI::dbDisconnect()`. #' #' @template with #' @param con For `with_db_connection()` a named list with the connection(s) to #' create. For `local_db_connection()` the code to create a single connection, #' which is then returned. #' @param .local_envir `[environment]`\cr The environment to use for scoping. #' @examples #' db <- tempfile() #' with_db_connection( #' list(con = DBI::dbConnect(RSQLite::SQLite(), db)), { #' DBI::dbWriteTable(con, "mtcars", mtcars) #' }) #' #' head_db_table <- function(...) { #' con <- local_db_connection(DBI::dbConnect(RSQLite::SQLite(), db)) #' head(DBI::dbReadTable(con, "mtcars"), ...) #' } #' head_db_table() #' unlink(db) #' @export with_db_connection <- function(con, code) { requireNamespace("DBI", quietly = TRUE) stopifnot(all(is.named(con))) stopifnot(all(vlapply(con, methods::is, "DBIConnection"))) nme <- tempfile() (get("attach", baseenv()))(con, name = nme, warn.conflicts = FALSE) on.exit({ for (connection in con) DBI::dbDisconnect(connection) detach(nme, character.only = TRUE) }) force(code) } #' @rdname with_db_connection #' @export local_db_connection <- function(con, .local_envir = parent.frame()) { requireNamespace("DBI", quietly = TRUE) stopifnot(methods::is(con, "DBIConnection")) defer(DBI::dbDisconnect(con), envir = .local_envir) con }
/scratch/gouwar.j/cran-all/cranData/withr/R/db.R
# Find first knitr frame on the stack
knitr_exit_frame <- function(envir) {
  frames <- as.list(sys.frames())

  frame_loc <- frame_loc(envir, frames)
  if (!frame_loc) {
    return(envir)
  }

  knitr_ns <- asNamespace("knitr")

  # This doesn't correctly handle the recursive case (knitr called
  # within a chunk). Handling this would be a little fiddly for an
  # uncommon edge case.
  for (i in seq(1, frame_loc)) {
    if (identical(topenv(frames[[i]]), knitr_ns)) {
      return(frames[[i]])
    }
  }

  NULL
}

source_exit_frame <- function(envir) {
  source_exit_frame_option(envir) %||% envir
}

# Returns an environment if the expression is called directly from `source()`.
# Otherwise returns `NULL`.
source_exit_frame_option <- function(envir, frames = as.list(sys.frames())) {
  calls <- as.list(sys.calls())

  i <- frame_loc(envir, frames)
  if (!i) {
    return(NULL)
  }

  if (i < 4) {
    return(NULL)
  }

  is_call <- function(x, fn) {
    is.call(x) && identical(x[[1]], fn)
  }

  calls <- as.list(calls)

  if (!is_call(calls[[i - 3]], quote(source))) {
    return(NULL)
  }
  if (!is_call(calls[[i - 2]], quote(withVisible))) {
    return(NULL)
  }
  if (!is_call(calls[[i - 1]], quote(eval))) {
    return(NULL)
  }
  if (!is_call(calls[[i - 0]], quote(eval))) {
    return(NULL)
  }

  frames[[i - 3]]
}

frame_loc <- function(envir, frames) {
  n <- length(frames)
  if (!n) {
    return(0)
  }

  for (i in seq_along(frames)) {
    if (identical(frames[[n - i + 1]], envir)) {
      return(n - i + 1)
    }
  }

  0
}

in_knitr <- function(envir) {
  knitr_in_progress() && identical(knitr::knit_global(), envir)
}

knitr_in_progress <- function() {
  isTRUE(getOption("knitr.in.progress"))
}
/scratch/gouwar.j/cran-all/cranData/withr/R/defer-exit.R
# Include standalone defer to overwrite it:
#' @include standalone-defer.R
NULL

#' Defer Evaluation of an Expression
#'
#' Similar to [on.exit()], but allows one to attach
#' an expression to be evaluated when exiting any frame currently
#' on the stack. This provides a nice mechanism for scoping side
#' effects for the duration of a function's execution.
#'
#' @param expr `[expression]`\cr An expression to be evaluated.
#' @param envir `[environment]`\cr Attach exit handlers to this environment.
#'   Typically, this should be either the current environment or
#'   a parent frame (accessed through [parent.frame()]).
#' @param priority `[character(1)]`\cr Specify whether this handler should
#'   be executed `"first"` or `"last"`, relative to any other
#'   registered handlers on this environment.
#'
#' @section Running handlers within `source()`:
#' withr handlers run within `source()` are run when `source()` exits
#' rather than line by line.
#'
#' This is only the case when the script is sourced in `globalenv()`.
#' For a local environment, the caller needs to set
#' `options(withr.hook_source = TRUE)`. This is to avoid paying the
#' penalty of detecting `source()` in the normal usage of `defer()`.
#'
#' @details
#' `defer()` works by registering the expression as an exit handler on the
#' requested environment's frame with [on.exit()], so that it is executed
#' when the function associated with that environment finishes execution.
#'
#' Deferred events can be set on the global environment, primarily to facilitate
#' the interactive development of code that is intended to be executed inside a
#' function or test. A message alerts the user to the fact that an explicit
#' `deferred_run()` is the only way to trigger these deferred events. Use
#' `deferred_clear()` to clear them without evaluation. The global environment
#' scenario is the main motivation for these functions.
#'
#' @family local-related functions
#' @export
#' @examples
#' # define a 'local' function that creates a file, and
#' # removes it when the parent function has finished executing
#' local_file <- function(path) {
#'   file.create(path)
#'   defer_parent(unlink(path))
#' }
#'
#' # create tempfile path
#' path <- tempfile()
#'
#' # use 'local_file' in a function
#' local({
#'   local_file(path)
#'   stopifnot(file.exists(path))
#' })
#'
#' # file is deleted as we leave 'local'
#' stopifnot(!file.exists(path))
#'
#' # note that 'defer' does not modify the executing function's
#' # environment: handlers are registered on its frame with 'on.exit()'
#' local({
#'   local_file(path)
#'   print(attributes(environment()))
#' })
#'
#' # Note that examples lack function scoping so deferred calls are
#' # generally executed immediately
#' defer(print("one"))
#' defer(print("two"))
defer <- function(expr, envir = parent.frame(), priority = c("first", "last")) {
  if (identical(envir, globalenv())) {
    source_frame <- source_exit_frame_option(envir)

    if (!is.null(source_frame)) {
      # Automatically enable `source()` special-casing for the global
      # environment. This is the default for `source()` and the normal
      # case when users run scripts. This also happens in R CMD check
      # when withr is used inside an example because an R example is
      # run inside `withAutoprint()` which uses `source()`.
      local_options(withr.hook_source = TRUE)

      # And fallthrough to the default `defer()` handling. Within
      # `source()` we don't require manual calling of
      # `deferred_run()`.
} else if (is_top_level_global_env(envir)) { global_defer(expr, priority = priority) return(invisible(NULL)) } } priority <- match.arg(priority, choices = c("first", "last")) if (knitr_in_progress() && identical(envir, knitr::knit_global())) { return(defer_knitr(expr, envir, priority = priority)) } # Don't handle `source()` by default to avoid a performance hit if (!is.null(getOption("withr.hook_source"))) { envir <- source_exit_frame(envir) } thunk <- as.call(list(function() expr)) after <- priority == "last" do.call( base::on.exit, list(thunk, TRUE, after), envir = envir ) } # Inline formals for performance formals(defer)[["priority"]] <- eval(formals(defer)[["priority"]]) #' @rdname defer #' @export defer_parent <- function(expr, priority = c("first", "last")) { defer(expr, parent.frame(2), priority = priority) } #' @rdname defer #' @export deferred_run <- function(envir = parent.frame()) { if (knitr_in_progress() && identical(envir, knitr::knit_global())) { # The handlers are thunks so we don't need to clear them. # They will only be run once. frame <- knitr_exit_frame(envir) handlers <- knitr_handlers(frame) } else { if (is_top_level_global_env(envir)) { handlers <- the$global_exits } else { handlers <- frame_exits(envir) } deferred_clear(envir) } n <- length(handlers) i <- 0L if (!n) { message("No deferred expressions to run") return(invisible(NULL)) } defer(message( sprintf("Ran %s/%s deferred expressions", i, n) )) for (expr in handlers) { eval(expr, envir) i <- i + 1L } } frame_exits <- function(frame = parent.frame()) { exits <- do.call(sys.on.exit, list(), envir = frame) # The exit expressions are stored in a single object that is # evaluated on exit. This can be NULL, an expression, or multiple # expressions wrapped in {. We convert this data structure to a list # of expressions. if (is.null(exits)) { list() } else if (identical(exits[[1]], quote(`{`))) { as.list(exits[-1]) } else { list(exits) } } frame_clear_exits <- function(frame = parent.frame()) { do.call(on.exit, list(), envir = frame) } #' @rdname defer #' @export deferred_clear <- function(envir = parent.frame()) { if (is_top_level_global_env(envir)) { the$global_exits <- list() } else { frame_clear_exits(envir) } invisible() } #' Defer expression globally #' #' This function is mostly internal. It is exported to be called in #' standalone `defer()` implementations to defer expressions from the #' global environment. #' #' @inheritParams defer #' @keywords internal #' @export global_defer <- function(expr, priority = c("first", "last")) { priority <- match.arg(priority, choices = c("first", "last")) env <- globalenv() handlers <- the$global_exits if (!length(handlers)) { # For session scopes we use reg.finalizer() if (is_interactive()) { message( sprintf("Setting global deferred event(s).\n"), "i These will be run:\n", " * Automatically, when the R session ends.\n", " * On demand, if you call `withr::deferred_run()`.\n", "i Use `withr::deferred_clear()` to clear them without executing." 
) } reg.finalizer(env, function(env) deferred_run(env), onexit = TRUE) } handler <- as.call(list(function() expr)) if (priority == "first") { the$global_exits <- c(list(handler), handlers) } else { the$global_exits <- c(handlers, list(handler)) } invisible(NULL) } the$global_exits <- list() # Evaluate `frames` lazily to avoid expensive `sys.frames()` # call for the default case of a local environment is_top_level_global_env <- function(envir, frames = sys.frames()) { if (!identical(envir, globalenv())) { return(FALSE) } # Check if another global environment is on the stack !any(vapply(frames, identical, NA, globalenv())) } # This picks up knitr's first frame on the stack and registers the # handler there. To avoid mixing up knitr's own exit handlers with # ours, we don't hook directly but instead save the list of handlers # as an attribute on the frame environment. This allows `deferred_run()` # to run our handlers without running the knitr ones. defer_knitr <- function(expr, envir, priority = c("first", "last")) { priority <- match.arg(priority, choices = c("first", "last")) envir <- knitr_exit_frame(envir) handler <- as.call(list(function() expr)) handlers <- knitr_handlers(envir) # Add `on.exit` hook if run for first time if (!length(handlers)) { defer_knitr_run(envir) } if (priority == "first") { handlers <- c(list(handler), handlers) } else { handlers <- c(handlers, list(handler)) } attr(envir, "withr_knitr_handlers") <- handlers invisible(NULL) } knitr_handlers <- function(envir) { attr(envir, "withr_knitr_handlers") %||% list() } # Evaluate `handlers` lazily so we get the latest version defer_knitr_run <- function( envir, handlers = knitr_handlers(envir) ) { defer(envir = envir, { for (expr in handlers) { eval(expr, envir) } }) } # Augment rlang with withr features such as knitr support on_load({ on_package_load("rlang", local({ if (is.null(getOption("withr:::inject_defer_override"))) { ns <- asNamespace("rlang") do.call("unlockBinding", list("defer", ns)) defer(lockBinding("defer", ns)) ns$defer <- defer } })) })
/scratch/gouwar.j/cran-all/cranData/withr/R/defer.R
#' @include with_.R #' @include wrap.R NULL # Internal *_dev functions ------------------------------------------------ pdf_dev <- wrap(grDevices::pdf, NULL, grDevices::dev.cur()) postscript_dev <- wrap(grDevices::postscript, NULL, grDevices::dev.cur()) svg_wrapper <- function(filename, width = 7, height = 7, pointsize = 12, onefile = FALSE, family = "sans", bg = "white", antialias = c("default", "none", "gray", "subpixel"), ...) { grDevices::svg(filename, width, height, pointsize, onefile, family, bg, antialias, ...) } svg_dev <- wrap(svg_wrapper, NULL, grDevices::dev.cur()) xfig_dev <- wrap(grDevices::xfig, NULL, grDevices::dev.cur()) # These functions arguments differ between R versions, so just use ... cairo_pdf_dev <- function(filename, ...) { grDevices::cairo_pdf(filename = filename, ...) grDevices::dev.cur() } cairo_ps_dev <- function(filename, ...) { grDevices::cairo_ps(filename = filename, ...) grDevices::dev.cur() } # These functions arguments differ between unix and windows, so just use ... bmp_dev <- function(filename, ...) { grDevices::bmp(filename = filename, ...) grDevices::dev.cur() } tiff_dev <- function(filename, ...) { grDevices::tiff(filename = filename, ...) grDevices::dev.cur() } png_dev <- function(filename, ...) { grDevices::png(filename = filename, ...) grDevices::dev.cur() } jpeg_dev <- function(filename, ...) { grDevices::jpeg(filename = filename, ...) grDevices::dev.cur() } # User-level with_* fns --------------------------------------------------- #' Graphics devices #' #' Temporarily use a graphics device. #' #' @name devices #' @aliases with_dev with_device #' @template with #' @param new \code{[named character]}\cr New graphics device #' @param ... Additional arguments passed to the graphics device. #' @param .local_envir `[environment]`\cr The environment to use for scoping. #' @seealso \code{\link[grDevices]{Devices}} #' @examples #' # dimensions are in inches #' with_pdf(file.path(tempdir(), "test.pdf"), width = 7, height = 5, #' plot(runif(5)) #' ) #' #' # dimensions are in pixels #' with_png(file.path(tempdir(), "test.png"), width = 800, height = 600, #' plot(runif(5)) #' ) NULL dev_close <- function(which) { prev <- grDevices::dev.prev(which) grDevices::dev.off(which) # No devices active if (prev != which) { grDevices::dev.set(prev) } prev } #' @describeIn devices BMP device #' @export with_bmp <- with_(bmp_dev, dev_close) #' @rdname devices #' @export local_bmp <- local_(bmp_dev, dev_close) #' @describeIn devices CAIRO_PDF device #' @inheritParams grDevices::cairo_pdf #' @export with_cairo_pdf <- with_(cairo_pdf_dev, dev_close) #' @rdname devices #' @export local_cairo_pdf <- local_(cairo_pdf_dev, dev_close) #' @describeIn devices CAIRO_PS device #' @inheritParams grDevices::cairo_ps #' @export with_cairo_ps <- with_(cairo_ps_dev, dev_close) #' @rdname devices #' @export local_cairo_ps <- local_(cairo_ps_dev, dev_close) #' @describeIn devices PDF device #' @inheritParams grDevices::pdf #' @export with_pdf <- with_(pdf_dev, dev_close) #' @rdname devices #' @export local_pdf <- local_(pdf_dev, dev_close) #' @describeIn devices POSTSCRIPT device #' @inheritParams grDevices::postscript #' @param command the command to be used for \sQuote{printing}. Defaults #' to \code{"default"}, the value of option \code{"printcmd"}. The #' length limit is \code{2*PATH_MAX}, typically 8096 bytes on unix systems and #' 520 bytes on windows. 
#' @export with_postscript <- with_(postscript_dev, dev_close) #' @rdname devices #' @export local_postscript <- local_(postscript_dev, dev_close) #' @describeIn devices SVG device #' @inheritParams grDevices::svg #' @export with_svg <- with_(svg_dev, dev_close) #' @rdname devices #' @export local_svg <- local_(svg_dev, dev_close) #' @describeIn devices TIFF device #' @export with_tiff <- with_(tiff_dev, dev_close) #' @rdname devices #' @export local_tiff <- local_(tiff_dev, dev_close) #' @describeIn devices XFIG device #' @inheritParams grDevices::xfig #' @export with_xfig <- with_(xfig_dev, dev_close) #' @rdname devices #' @export local_xfig <- local_(xfig_dev, dev_close) #' @describeIn devices PNG device #' @export with_png <- with_(png_dev, dev_close) #' @rdname devices #' @export local_png <- local_(png_dev, dev_close) #' @describeIn devices JPEG device #' @export with_jpeg <- with_(jpeg_dev, dev_close) #' @rdname devices #' @export local_jpeg <- local_(jpeg_dev, dev_close)
/scratch/gouwar.j/cran-all/cranData/withr/R/devices.R
#' @include with_.R NULL # working directory ---------------------------------------------------------- #' Working directory #' #' Temporarily change the current working directory. #' #' @template with #' @param new `[character(1)]`\cr New working directory #' @inheritParams with_collate #' @seealso [setwd()] #' @export #' @examples #' getwd() #' #' with_dir(tempdir(), getwd()) with_dir <- with_(setwd) #' @rdname with_dir #' @export local_dir <- local_(setwd)
/scratch/gouwar.j/cran-all/cranData/withr/R/dir.R
# env ------------------------------------------------------------------------ get_envvar <- function(envs, action = "replace") { envs <- as_envvars(envs) Sys.getenv(names(envs), names = TRUE, unset = NA) } set_envvar <- function(envs, action = "replace") { envs <- as_envvars(envs) stopifnot(is.character(action), length(action) == 1) action <- match.arg(action, c("replace", "prefix", "suffix")) if (length(envs) == 0) { return() } old <- Sys.getenv(names(envs), names = TRUE, unset = NA) set <- !is.na(envs) both_set <- set & !is.na(old) if (any(both_set)) { if (action == "prefix") { envs[both_set] <- paste(envs[both_set], old[both_set]) } else if (action == "suffix") { envs[both_set] <- paste(old[both_set], envs[both_set]) } } if (any(set)) do.call("Sys.setenv", as.list(envs[set])) if (any(!set)) Sys.unsetenv(names(envs)[!set]) invisible(old) } as_envvars <- function(envs) { if (length(envs) == 0) { return(envs) } stopifnot(is.named(envs)) # if any envs are null make them NA instead envs[vlapply(envs, is.null)] <- NA # if there are duplicated entries keep only the last one envs <- envs[!duplicated(names(envs), fromLast = TRUE)] envs } #' Environment variables #' #' Temporarily change system environment variables. #' #' @template with #' @param new,.new `[named character]`\cr New environment variables #' @param ... Named arguments with new environment variables. #' @param action should new values `"replace"`, `"prefix"` or #' `"suffix"` existing variables with the same name. #' @inheritParams with_collate #' @details if `NA` is used those environment variables will be unset. #' If there are any duplicated variable names only the last one is used. #' @seealso [Sys.setenv()] #' @examples #' with_envvar(new = c("GITHUB_PAT" = "abcdef"), Sys.getenv("GITHUB_PAT")) #' #' # with_envvar unsets variables after usage #' Sys.getenv("TEMP_SECRET") #' with_envvar(new = c("TEMP_SECRET" = "secret"), Sys.getenv("TEMP_SECRET")) #' Sys.getenv("TEMP_SECRET") #' @export with_envvar <- with_(set_envvar, get = get_envvar) #' @rdname with_envvar #' @export local_envvar <- local_(set_envvar, get = get_envvar, dots = TRUE)
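# Illustration of the 'action' argument (commented out, for reference only):
# with "prefix" the new value is pasted in front of any existing value,
# separated by a space, as set_envvar() above does:
# with_envvar(c(PATH = "/my/bin"), Sys.getenv("PATH"), action = "prefix")
# # -> "/my/bin <old PATH>" while the code runs; PATH is restored afterwards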
/scratch/gouwar.j/cran-all/cranData/withr/R/env.R
#' Files which delete themselves #' #' Create files, which are then automatically removed afterwards. #' @template with #' @param file,.file `[named list]`\cr Files to create. #' @param ... Additional (possibly named) arguments of files to create. #' @param .local_envir `[environment]`\cr The environment to use for scoping. #' @examples #' with_file("file1", { #' writeLines("foo", "file1") #' readLines("file1") #' }) #' #' with_file(list("file1" = writeLines("foo", "file1")), { #' readLines("file1") #' }) #' @export with_file <- function(file, code) { file_nms <- names2(file) unnamed <- file_nms == "" file_nms[unnamed] <- as.character(file[unnamed]) on.exit(unlink(file_nms, recursive = TRUE)) force(code) invisible(file) } #' @rdname with_file #' @export local_file <- function(.file, ..., .local_envir = parent.frame()) { .file <- utils::modifyList(as.list(.file), list(...)) .file <- as_character(.file) file_nms <- names2(.file) unnamed <- file_nms == "" file_nms[unnamed] <- as.character(.file[unnamed]) defer(unlink(file_nms, recursive = TRUE), envir = .local_envir) invisible(.file) }
/scratch/gouwar.j/cran-all/cranData/withr/R/file.R
#' Language #' #' Temporarily change the language used for translations. #' #' @param lang A BCP47 language code like "en" (English), "fr" (French), #' "fr_CA" (French Canadian). Formally, this is a lower case two letter #' [ISO 639 country code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes), #' optionally followed by "_" or "-" and an upper case two letter #' [ISO 3166 region code](https://en.wikipedia.org/wiki/ISO_3166-2). #' @inheritParams with_collate #' @export #' @examples #' with_language("en", try(mean[[1]])) #' with_language("fr", try(mean[[1]])) #' with_language("es", try(mean[[1]])) with_language <- function(lang, code) { local_language(lang) code } #' @export #' @rdname with_language local_language <- function(lang, .local_envir = parent.frame()) { if (!is.character(lang) || length(lang) != 1) { stop("`lang` must be a string") } # Reset a first time in case the cache was populated beforehand reset_gettext_cache() # Reset afterwards to clear any translation we might cache defer(reset_gettext_cache(), envir = .local_envir) # https://stackoverflow.com/questions/6152321 lang <- gsub("-", "_", lang, fixed = TRUE) if (!has_nls()) { warning("Changing language has no effect when R installed without NLS") } # > Note: The variable LANGUAGE is ignored if the locale is set to ‘C’. # > In other words, you have to first enable localization, by setting LANG # > (or LC_ALL) to a value other than ‘C’, before you can use a language # > priority list through the LANGUAGE variable. # --- https://www.gnu.org/software/gettext/manual/html_node/The-LANGUAGE-variable.html # `LC_ALL` has precedence over `LANG`. Check for the latter if the # former is unset, otherwise check for the former. if (Sys.getenv("LC_ALL", "") == "") { # Causes too many failures because testthat sets `LANG` to "C" # check_language_envvar("LANG") } else { check_language_envvar("LC_ALL") } local_envvar(LANGUAGE = lang, .local_envir = .local_envir) invisible() } # Reset cache to avoid gettext() retrieving cached value from a previous # language (idea from https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=931456) # See https://github.com/r-lib/withr/issues/213. reset_gettext_cache <- function() { bindtextdomain("reset", local_tempdir()) } check_language_envvar <- function(var) { if (Sys.getenv(var) %in% c("C", "C.UTF-8")) { warning(sprintf( "Changing language has no effect when envvar %s='%s'", var, Sys.getenv(var) )) } } has_nls <- function() capabilities("NLS")[[1]]
/scratch/gouwar.j/cran-all/cranData/withr/R/language.R
#' @include with_.R # lib ------------------------------------------------------------------------ set_libpaths <- function(paths, action = "replace") { paths <- as_character(paths) paths <- normalizePath(paths, mustWork = TRUE) old <- .libPaths() paths <- merge_new(old, paths, action) .libPaths(paths) invisible(old) } get_libpaths <- function(...) { .libPaths() } set_temp_libpath <- function(action = "prefix") { paths <- tempfile("temp_libpath") dir.create(paths) set_libpaths(paths, action = action) } #' Library paths #' #' Temporarily change library paths. #' #' @template with #' @param new `[character]`\cr New library paths #' @param action `[character(1)]`\cr should new values `"replace"`, `"prefix"` or #' `"suffix"` existing paths. #' @inheritParams with_collate #' @seealso [.libPaths()] #' @family libpaths #' @examples #' .libPaths() #' new_lib <- tempfile() #' dir.create(new_lib) #' with_libpaths(new_lib, print(.libPaths())) #' unlink(new_lib, recursive = TRUE) #' @export with_libpaths <- with_(set_libpaths, .libPaths, get = get_libpaths) #' @rdname with_libpaths #' @export local_libpaths <- local_(set_libpaths, .libPaths, get = get_libpaths) #' Library paths #' #' Temporarily prepend a new temporary directory to the library paths. #' #' @template with #' @seealso [.libPaths()] #' @inheritParams with_libpaths #' @family libpaths #' @export with_temp_libpaths <- with_( set_temp_libpath, .libPaths, get = get_libpaths, new = FALSE ) #' @rdname with_temp_libpaths #' @export local_temp_libpaths <- local_( set_temp_libpath, .libPaths, get = get_libpaths, new = FALSE )
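# Illustration (commented out, for reference only): with_temp_libpaths()
# prefixes a freshly created temporary library for the duration of the code,
# then the original .libPaths() is restored:
# with_temp_libpaths(print(.libPaths()))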
/scratch/gouwar.j/cran-all/cranData/withr/R/libpaths.R
#' @include aaa.R
NULL

#' @rdname with_
#' @export
local_ <- function(set,
                   reset = set,
                   get = NULL,
                   ...,
                   envir = parent.frame(),
                   new = TRUE,
                   dots = FALSE) {
  if (!missing(...)) {
    stop("`...` must be empty.")
  }

  fmls <- formals(set)

  if (length(fmls) > 0L) {
    # When called, pass all extra formals of `set()` on
    called_fmls <- setNames(lapply(names(fmls), as.symbol), names(fmls))

    # Special case for dots. If `set()` and/or `get()` take dots, it
    # is assumed they implement `options()`-like semantics: a list
    # passed as first argument is automatically spliced in the dots.
    names(called_fmls)[names(called_fmls) == "..."] <- ""

    if (new) {
      if (dots) {
        called_fmls[[1]] <- as.symbol(".new")
        fun_args <- c(alist(.new = list(), ... = ), fmls[-1L])
      } else {
        called_fmls[[1]] <- as.symbol("new")
        fun_args <- c(alist(new = list()), fmls[-1L])
      }
    } else {
      fun_args <- fmls
    }
  } else {
    # `set()` has no formals
    called_fmls <- NULL
    fun_args <- alist()
  }

  set_call <- as.call(c(substitute(set), called_fmls))

  reset <- if (missing(reset)) substitute(set) else substitute(reset)

  if (dots) {
    modify_call <- quote(.new <- list_combine(as.list(.new), list(...)))
  } else {
    modify_call <- NULL
  }

  if (is.null(get)) {
    fun <- eval(bquote(function(args) {
      .(modify_call)
      old <- .(set_call)
      withr::defer(.(reset)(old), envir = .local_envir)
      invisible(old)
    }))
  } else {
    get_call <- as.call(c(substitute(get), called_fmls))

    fun <- eval(bquote(function(args) {
      .(modify_call)
      old <- .(get_call)
      withr::defer(.(reset)(old), envir = .local_envir)
      .(set_call)
      invisible(old)
    }))
  }

  # `substitute()` does not work on formal arguments, so fix them up manually
  formals(fun) <- c(fun_args, alist(.local_envir = parent.frame()))

  environment(fun) <- envir

  fun
}
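# A minimal usage sketch (`local_demo_option()` is hypothetical, for
# illustration only): `local_()` turns a setter that returns the previous
# value into a `local_*()` function whose effect is undone when the calling
# frame exits.
local_demo_option <- local_(
  set = function(new) {
    old <- getOption("OutDec")
    options(OutDec = new)
    old
  }
)

f <- function() {
  local_demo_option(",")
  format(pi) # formatted with a decimal comma inside f()
}
f()
getOption("OutDec") # back to the original value after f() returns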
/scratch/gouwar.j/cran-all/cranData/withr/R/local_.R
#' Locale settings #' #' Temporarily change locale settings. #' #' Setting the `LC_ALL` category is currently not implemented. #' #' @template with #' @param new,.new `[named character]`\cr New locale settings #' @param ... Additional arguments with locale settings. #' @inheritParams with_collate #' @seealso [Sys.setlocale()] #' @examples #' #' ## Change locale for time: #' df <- data.frame( #' stringsAsFactors = FALSE, #' date = as.Date(c("2019-01-01", "2019-02-01")), #' value = c(1, 2) #' ) #' with_locale(new = c("LC_TIME" = "es_ES"), code = plot(df$date, df$value)) #' ## Compare with: #' # plot(df$date, df$value) #' #' ## Month names: #' with_locale(new = c("LC_TIME" = "en_GB"), format(ISOdate(2000, 1:12, 1), "%B")) #' with_locale(new = c("LC_TIME" = "es_ES"), format(ISOdate(2000, 1:12, 1), "%B")) #' #' ## Change locale for currencies: #' with_locale(new = c("LC_MONETARY" = "it_IT"), Sys.localeconv()) #' with_locale(new = c("LC_MONETARY" = "en_US"), Sys.localeconv()) #' #' ## Ordering: #' x <- c("bernard", "bérénice", "béatrice", "boris") #' with_locale(c(LC_COLLATE = "fr_FR"), sort(x)) #' with_locale(c(LC_COLLATE = "C"), sort(x)) #' #' @export with_locale <- function(new, code) { local_locale(new) code } #' @rdname with_locale #' @export local_locale <- function(.new = list(), ..., .local_envir = parent.frame()) { new <- list_combine(as.list(.new), list(...)) cats <- as_locale_cats(new) # <https://github.com/r-lib/withr/issues/179> # R supports setting LC_COLLATE to C via envvar. When that is the # case, it takes precedence over the currently set locale. We need # to set both the envvar and the locale for collate to fully take # effect. if ("LC_COLLATE" %in% names(cats)) { collate <- cats["LC_COLLATE"] local_envvar(collate, .local_envir = .local_envir) } old <- get_locale(cats) defer(set_locale(old), envir = .local_envir) set_locale(cats) invisible(old) } set_locale <- function(cats) { mapply(Sys.setlocale, names(cats), cats) } get_locale <- function(cats) { vapply(names(cats), Sys.getlocale, character(1)) } as_locale_cats <- function(cats) { cats <- as_character(cats) stopifnot(is.named(cats)) if ("LC_ALL" %in% names(cats)) { stop("Setting LC_ALL category not implemented.", call. = FALSE) } cats }
/scratch/gouwar.j/cran-all/cranData/withr/R/locale.R
#' @include with_.R
NULL

# Makevars --------------------------------------------------------------------

#' Create a new `Makevars` file, by adding new variables
#'
#' You probably want [with_makevars()] instead of this function.
#'
#' Unlike [with_makevars()], it does not activate the new `Makevars`
#' file, i.e. it does not set the `R_MAKEVARS_USER` environment variable.
#'
#' @param variables `[named character]`\cr new variables and their values
#' @param old_path `[character(1)]`\cr location of existing `Makevars`
#'   file to modify.
#' @param new_path `[character(1)]`\cr location of the new `Makevars` file
#' @param assignment `[character(1)]`\cr assignment type to use.
#'
#' @keywords internal
#' @export
set_makevars <- function(variables,
                         old_path = makevars_user(),
                         new_path = tempfile(),
                         assignment = c("=", ":=", "?=", "+=")) {
  if (length(variables) == 0) {
    return()
  }
  stopifnot(is.named(variables))
  assignment <- match.arg(assignment)
  old <- NULL
  if (length(old_path) == 1 && file.exists(old_path)) {
    lines <- readLines(old_path)
    old <- lines
    for (var in names(variables)) {
      loc <- grep(paste(c("^[[:space:]]*", var, "[[:space:]]*", "="), collapse = ""), lines)
      if (length(loc) == 0) {
        lines <- append(lines, paste(var, variables[var], sep = assignment))
      } else if (length(loc) == 1) {
        lines[loc] <- paste(var, variables[var], sep = assignment)
      } else {
        stop("Multiple results for ", var, " found, something is wrong.", call. = FALSE)
      }
    }
  } else {
    lines <- paste(names(variables), variables, sep = assignment)
  }

  if (!identical(old, lines)) {
    writeLines(con = new_path, lines)
  }

  old
}

#' Makevars variables
#'
#' Temporarily change contents of an existing `Makevars` file.
#'
#' @details If no `Makevars` file exists, or the fields in `new` do not
#'   exist in the existing `Makevars` file, then the fields are added to the
#'   new file. Existing fields which are not included in `new` are carried
#'   over unchanged. Fields which exist in `Makevars` and in `new` are
#'   modified to use the value in `new`.
#'
#' @template with
#' @param new,.new `[named character]`\cr New variables and their values
#' @param path,.path `[character(1)]`\cr location of existing `Makevars` file to modify.
#' @param ... Additional new variables and their values.
#' @param assignment,.assignment `[character(1)]`\cr assignment type to use.
#' @inheritParams with_collate
#' @examples
#' writeLines("void foo(int* bar) { *bar = 1; }\n", "foo.c")
#' system("R CMD SHLIB --preclean -c foo.c")
#' with_makevars(c(CFLAGS = "-O3"), system("R CMD SHLIB --preclean -c foo.c"))
#' unlink(c("foo.c", "foo.so"))
#' @export
with_makevars <- function(new, code, path = makevars_user(), assignment = c("=", ":=", "?=", "+=")) {
  assignment <- match.arg(assignment)
  makevars_file <- tempfile()
  on.exit(unlink(makevars_file), add = TRUE)
  force(path)
  with_envvar(c(R_MAKEVARS_USER = makevars_file), {
    set_makevars(new, path, makevars_file, assignment = assignment)
    force(code)
  })
}

#' @rdname with_makevars
#' @export
local_makevars <- function(.new = list(), ..., .path = makevars_user(), .assignment = c("=", ":=", "?=", "+="), .local_envir = parent.frame()) {
  .new <- utils::modifyList(as.list(.new), list(...))
  .new <- as_character(.new)

  .assignment <- match.arg(.assignment)
  makevars_file <- tempfile()
  defer(unlink(makevars_file), envir = .local_envir)
  force(.path)

  local_envvar(c(R_MAKEVARS_USER = makevars_file), .local_envir = .local_envir)

  invisible(set_makevars(.new, .path, makevars_file, assignment = .assignment))
}
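# A small runnable sketch of set_makevars() behaviour (the file names below
# are hypothetical temporary files, not withr defaults): variables already
# present are overwritten in place, everything else is carried over unchanged,
# and the previous lines are returned.
old_mk <- tempfile()
new_mk <- tempfile()
writeLines(c("CFLAGS=-O2", "CXXFLAGS=-g"), old_mk)
set_makevars(c(CFLAGS = "-O3"), old_path = old_mk, new_path = new_mk)
readLines(new_mk) # "CFLAGS=-O3" "CXXFLAGS=-g"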
/scratch/gouwar.j/cran-all/cranData/withr/R/makevars.R
#' Execute code with a modified search path #' #' `with_package()` attaches a package to the search path, executes the code, then #' removes the package from the search path. The package namespace is _not_ #' unloaded however. `with_namespace()` does the same thing, but attaches the #' package namespace to the search path, so all objects (even unexported ones) are also #' available on the search path. #' @param package \code{[character(1)]}\cr package name to load. #' @param env \code{[environment()]}\cr Environment to attach. #' @param .local_envir `[environment]`\cr The environment to use for scoping. #' @inheritParams defer #' @inheritParams base::library #' @template with #' @examples #' \dontrun{ #' with_package("ggplot2", { #' ggplot(mtcars) + geom_point(aes(wt, hp)) #' }) #' } #' @export with_package <- function(package, code, pos = 2, lib.loc = NULL, character.only = TRUE, logical.return = FALSE, warn.conflicts = FALSE, quietly = TRUE, verbose = getOption("verbose")) { # Only try to attach (and detach) the package if it is not already attached. if (!(package %in% .packages())) { suppressPackageStartupMessages( (get("library"))(package, pos = pos, lib.loc = lib.loc, character.only = character.only, logical.return = logical.return, warn.conflicts = warn.conflicts, quietly = quietly, verbose = verbose)) on.exit(detach(paste0("package:", package), character.only = TRUE)) } force(code) } #' @rdname with_package #' @export local_package <- function(package, pos = 2, lib.loc = NULL, character.only = TRUE, logical.return = FALSE, warn.conflicts = FALSE, quietly = TRUE, verbose = getOption("verbose"), .local_envir = parent.frame()) { suppressPackageStartupMessages( (get("library"))(package, pos = pos, lib.loc = lib.loc, character.only = character.only, logical.return = logical.return, warn.conflicts = warn.conflicts, quietly = quietly, verbose = verbose)) defer(detach(paste0("package:", package), character.only = TRUE), envir = .local_envir) } #' @rdname with_package #' @export with_namespace <- function(package, code, warn.conflicts = FALSE) { ns <- asNamespace(package) name <- format(ns) (get("attach"))(ns, name = name, warn.conflicts = warn.conflicts) on.exit(detach(name, character.only = TRUE)) force(code) } #' @rdname with_package #' @export local_namespace <- function(package, .local_envir = parent.frame(), warn.conflicts = FALSE) { ns <- asNamespace(package) name <- format(ns) (get("attach"))(ns, name = name, warn.conflicts = warn.conflicts) defer(detach(name, character.only = TRUE), envir = .local_envir) } #' @rdname with_package #' @inheritParams base::attach #' @export with_environment <- function(env, code, pos = 2L, name = format(env), warn.conflicts = FALSE) { (get("attach"))(env, name = name, pos = pos, warn.conflicts = warn.conflicts) on.exit(detach(name, character.only = TRUE)) force(code) } #' @rdname with_package #' @export local_environment <- function(env, pos = 2L, name = format(env), warn.conflicts = FALSE, .local_envir = parent.frame()) { (get("attach"))(env, name = name, pos = pos, warn.conflicts = warn.conflicts) defer(detach(name, character.only = TRUE), envir = .local_envir) }
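# A brief runnable sketch (the environment `e` and variable `answer` are made
# up for illustration): with_environment() attaches a plain environment so
# its objects are visible on the search path only while `code` runs.
e <- new.env()
e$answer <- 42
with_environment(e, exists("answer")) # TRUE while `e` is attached
exists("answer")                      # FALSE once it is detached again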
/scratch/gouwar.j/cran-all/cranData/withr/R/namespace.R
#' @include with_.R

# options --------------------------------------------------------------------

get_options <- function(new_options) {
  do.call(options, as.list(names(new_options)))
}

set_options <- function(new_options) {
  do.call(options, as.list(new_options))
}

reset_options <- function(old_options) {
  options(old_options)
}

#' Options
#'
#' Temporarily change global options.
#'
#' @template with
#' @param new,.new `[named list]`\cr New options and their values
#' @param ... Additional options and their values
#' @inheritParams with_collate
#' @seealso [options()]
#' @examples
#' # number of significant digits to print
#' getOption("digits")
#' # modify temporarily the number of significant digits to print
#' with_options(list(digits = 3), getOption("digits"))
#' with_options(list(digits = 3), print(pi))
#'
#' # modify temporarily the character to be used as the decimal point
#' getOption("OutDec")
#' with_options(list(OutDec = ","), print(pi))
#'
#' # modify temporarily multiple options
#' with_options(list(OutDec = ",", digits = 3), print(pi))
#'
#' # modify, within the scope of the function, the number of
#' # significant digits to print
#' print_3_digits <- function(x) {
#'   # assign 3 to the option "digits" for the rest of this function
#'   # after the function exits, the option will return to its previous
#'   # value
#'   local_options(list(digits = 3))
#'   print(x)
#' }
#'
#' print_3_digits(pi) # returns 3.14
#' print(pi) # returns 3.141593
#' @export
with_options <- with_(
  set_options,
  reset_options
)

#' @rdname with_options
#' @export
local_options <- local_(
  set_options,
  reset_options,
  get = get_options,
  dots = TRUE
)
/scratch/gouwar.j/cran-all/cranData/withr/R/options.R
#' @include with_.R NULL # par ------------------------------------------------------------------------ get_par <- function(...) { new <- auto_splice(list(...)) if (length(new) == 0) { # Only retrieve settable values out <- graphics::par(no.readonly = TRUE) } else { out <- do.call(graphics::par, as.list(names(new))) } # `par()` doesn't wrap in a list if input is length 1 if (length(new) == 1) { out <- list(out) names(out) <- names(new) } out } # `get_par()` must have exactly the same signature as `par()` to be # compatible with `with_()` and `local_()` formals(get_par) <- formals(graphics::par) #' Graphics parameters #' #' Temporarily change graphics parameters. #' #' @template with #' @param new,.new `[named list]`\cr New graphics parameters and their values #' @param no.readonly `[logical(1)]`\cr see [par()] documentation. #' @param ... Additional graphics parameters and their values. #' @inheritParams with_collate #' @seealso [par()] #' @export #' @examples #' old <- par("col" = "black") #' #' # This will be in red #' with_par(list(col = "red", pch = 19), #' plot(mtcars$hp, mtcars$wt) #' ) #' #' # This will still be in black #' plot(mtcars$hp, mtcars$wt) #' #' par(old) with_par <- with_(graphics::par, get = get_par) #' @rdname with_par #' @export local_par <- local_(graphics::par, get = get_par, dots = TRUE)
/scratch/gouwar.j/cran-all/cranData/withr/R/par.R
# path ----------------------------------------------------------------------- get_path <- function(...) { strsplit(Sys.getenv("PATH"), .Platform$path.sep)[[1]] } set_path <- function(path, action = c("prefix", "suffix", "replace")) { action <- match.arg(action) path <- as_character(path) path <- normalizePath(path, mustWork = FALSE) old <- get_path() path <- merge_new(old, path, action) path <- paste(path, collapse = .Platform$path.sep) Sys.setenv(PATH = path) invisible(old) } #' PATH environment variable #' #' Temporarily change the system search path. #' #' @template with #' @param new `[character]`\cr New `PATH` entries #' @param action `[character(1)]`\cr Should new values `"replace"`, `"prefix"` #' (the default) or `"suffix"` existing paths #' @inheritParams with_collate #' @seealso [Sys.setenv()] #' @examples #' # temporarily modify the system PATH, *prefixing* the current path #' with_path(getwd(), Sys.getenv("PATH")) #' # temporarily modify the system PATH, *appending* to the current path #' with_path(getwd(), Sys.getenv("PATH"), "suffix") #' @export with_path <- with_( set_path, reset = function(old) set_path(old, "replace"), get = get_path ) #' @rdname with_path #' @export local_path <- local_( set_path, reset = function(old) set_path(old, "replace"), get = get_path )
/scratch/gouwar.j/cran-all/cranData/withr/R/path.R
#' RNG version #' #' Change the RNG version and restore it afterwards. #' #' `with_rng_version()` runs the code with the specified RNG version and #' resets it afterwards. #' #' `local_rng_version()` changes the RNG version for the caller #' execution environment. #' #' @template with #' @param version `[character(1)]` an R version number, e.g. #' `"3.5.0"`, to switch to the RNG this version of R uses. #' See [RNGversion()]. #' #' @seealso [RNGversion()], [RNGkind()], [with_seed()]. #' @export #' @examples #' RNGkind() #' with_rng_version("3.0.0", RNGkind()) #' with_rng_version("1.6.0", RNGkind()) #' #' with_rng_version("3.0.0", #' with_seed(42, sample(1:100, 3))) #' #' with_rng_version("1.6.0", #' with_seed(42, sample(1:100, 3))) #' #' RNGkind() #' with_rng_version <- function(version, code) { old <- RNGkind() on.exit(restore_rng_kind(old), add = TRUE) suppressWarnings(RNGversion(version)) code } #' @rdname with_rng_version #' @param .local_envir The environment to apply the change to. #' @export #' @examples #' fun1 <- function() { #' local_rng_version("3.0.0") #' with_seed(42, sample(1:100, 3)) #' } #' #' fun2 <- function() { #' local_rng_version("1.6.0") #' with_seed(42, sample(1:100, 3)) #' } #' #' RNGkind() #' fun1() #' fun2() #' RNGkind() local_rng_version <- function(version, .local_envir = parent.frame()) { old <- RNGkind() defer(restore_rng_kind(old), envir = .local_envir) suppressWarnings(RNGversion(version)) old } on_load( is_before_3.6 <- getRversion() < "3.6" ) restore_rng_kind <- function(kind) { # Silence static analysis linting about `RNGkind()` signature on old # R versions RNGkind <- get("RNGkind") RNGkind(kind[[1]], normal.kind = kind[[2]]) # No sample argument on old R versions if (is_before_3.6) { return() } # Within a `local_rng_version("3.5.0")`, we restore to `"Rounding"`, # which causes a warning. See https://github.com/r-lib/withr/issues/167 sample_kind <- kind[[3]] if (identical(sample_kind, "Rounding")) { suppressWarnings( RNGkind(sample.kind = sample_kind) ) } else { RNGkind(sample.kind = sample_kind) } NULL }
/scratch/gouwar.j/cran-all/cranData/withr/R/rng.R
#' Random seed
#'
#' `with_seed()` runs code with a specific random seed and resets it afterwards.
#'
#' @template with
#' @param seed `[integer(1)]`\cr The random seed to use to evaluate the code.
#' @param .local_envir `[environment]`\cr The environment to use for scoping.
#' @param .rng_kind,.rng_normal_kind,.rng_sample_kind
#'   `[character(1)]`\cr Kind of RNG to use. Passed as the `kind`,
#'   `normal.kind`, and `sample.kind` arguments of [RNGkind()].
#' @examples
#' # Same random values:
#' with_preserve_seed(runif(5))
#' with_preserve_seed(runif(5))
#'
#' # Use a pseudorandom value as seed to advance the RNG and pick a different
#' # value for the next call:
#' with_seed(seed <- sample.int(.Machine$integer.max, 1L), runif(5))
#' with_seed(seed, runif(5))
#' with_seed(seed <- sample.int(.Machine$integer.max, 1L), runif(5))
#' @export
with_seed <- function(seed,
                      code,
                      .rng_kind = NULL,
                      .rng_normal_kind = NULL,
                      .rng_sample_kind = NULL) {
  force(seed)
  rng_kind <- list(.rng_kind, .rng_normal_kind, .rng_sample_kind)
  with_preserve_seed({
    set_seed(list(seed = seed, rng_kind = rng_kind))
    code
  })
}

#' @rdname with_seed
#' @export
local_seed <- function(seed,
                       .local_envir = parent.frame(),
                       .rng_kind = NULL,
                       .rng_normal_kind = NULL,
                       .rng_sample_kind = NULL) {
  old_seed <- get_seed()

  # Restore (or remove) the previous RNG state when the calling frame exits
  defer(envir = .local_envir, {
    if (is.null(old_seed)) {
      rm_seed()
    } else {
      set_seed(old_seed)
    }
  })

  rng_kind <- list(.rng_kind, .rng_normal_kind, .rng_sample_kind)
  set_seed(list(seed = seed, rng_kind = rng_kind))

  invisible(seed)
}

#' @rdname with_seed
#' @description
#' `with_preserve_seed()` runs code with the current random seed and resets it
#' afterwards.
#'
#' @export
with_preserve_seed <- function(code) {
  old_seed <- get_seed()
  if (is.null(old_seed)) {
    on.exit(rm_seed(), add = TRUE)
  } else {
    on.exit(set_seed(old_seed), add = TRUE)
  }
  code
}

#' @rdname with_seed
#' @export
local_preserve_seed <- function(.local_envir = parent.frame()) {
  old_seed <- get_seed()

  defer(
    if (is.null(old_seed)) {
      rm_seed()
    } else {
      set_seed(old_seed)
    },
    envir = .local_envir
  )

  invisible(old_seed)
}

has_seed <- function() {
  exists(".Random.seed", globalenv(), mode = "integer", inherits = FALSE)
}

get_seed <- function() {
  if (!has_seed()) {
    return(NULL)
  }
  list(
    random_seed = get(".Random.seed", globalenv(), mode = "integer", inherits = FALSE),
    rng_kind = RNGkind()
  )
}

set_seed <- function(seed) {
  restore_rng_kind(seed$rng_kind)
  if (is.null(seed$seed)) {
    assign(".Random.seed", seed$random_seed, globalenv())
  } else {
    set.seed(seed$seed)
  }
}

rm_seed <- function() {
  if (!has_seed()) {
    return(NULL)
  }
  set.seed(seed = NULL) # also reset Normal RNG state (cf. #162)
  rm(".Random.seed", envir = globalenv())
}
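# A short usage sketch (`peek()` is a hypothetical helper for illustration):
# local_preserve_seed() snapshots the RNG state for the calling frame, so
# draws inside the function do not advance the global stream.
peek <- function() {
  local_preserve_seed()
  runif(1) # consumes RNG state inside peek() only
}
set.seed(1)
x <- peek()
y <- runif(1)
identical(x, y) # TRUE: peek() did not advance the global RNG state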
/scratch/gouwar.j/cran-all/cranData/withr/R/seed.R
# sink -----------------------------------------------------------------------

# FIXME: Use (a better version of) pryr:::partial2 when available
output_sink <- function(file = NULL, append = FALSE, split = FALSE) {
  sink(file = file, append = append, type = "output", split = split)
}

message_sink <- function(file = NULL, append = FALSE) {
  sink(file = file, append = append, type = "message", split = FALSE)
}

#' @include wrap.R
set_output_sink <- wrap(
  output_sink,
  if (is.null(file)) {
    stop("file cannot be NULL", call. = FALSE)
  },
  list(n = sink.number()))

set_message_sink <- wrap(
  message_sink,
  {
    if (is.null(file)) {
      stop("file cannot be NULL", call. = FALSE)
    }
    if (sink.number(type = "message") != 2L) {
      stop("Cannot establish message sink when another sink is active.",
           call. = FALSE)
    }
    # If given a path, open the connection ourselves so that it can be
    # closed on reset; if given a connection, `con` stays NULL and the
    # caller remains responsible for it.
    con <- if (is.character(file)) {
      file <- file(file, if (append) "a" else "w")
    }
  },
  {
    list(n = sink.number(type = "message"), con = con)
  })

reset_output_sink <- function(sink_info) {
  # Pop sinks until the stack is back to the depth recorded when this sink
  # was established, warning if the stack looks different than expected.
  repeat {
    n <- sink.number()
    delta <- n - sink_info$n
    if (delta >= 0L) {
      sink()
      if (delta > 0L) {
        warning("Removing a different sink.", call. = FALSE)
      } else {
        return()
      }
    } else {
      warning("Sink #", sink_info$n, " already removed.", call. = FALSE)
      return()
    }
  }
}

reset_message_sink <- function(sink_info) {
  if (!is.null(sink_info$con)) {
    on.exit(close(sink_info$con), add = TRUE)
  }
  do_reset_message_sink(sink_info)
}

do_reset_message_sink <- function(sink_info) {
  n <- sink.number(type = "message")
  if (n == 2L) {
    warning("No message sink to remove.", call. = FALSE)
  } else if (n == sink_info$n) {
    sink(type = "message")
  } else {
    warning("Not removing a different message sink.", call. = FALSE)
  }
}

#' Output redirection
#'
#' Temporarily divert output to a file via [sink()]. For
#' sinks of type `message`, an error is raised if such a sink is already
#' active.
#'
#' @template with
#' @param new `[character(1)|connection]`\cr
#'   A writable \link{connection} or a character string naming the file to write
#'   to. Passing `NULL` will throw an error.
#' @inheritParams base::sink
#' @inheritParams with_collate
#' @seealso [sink()]
#' @export
#' @name with_sink
with_output_sink <- with_(set_output_sink, reset_output_sink)

#' @rdname with_sink
#' @export
local_output_sink <- local_(set_output_sink, reset_output_sink)

#' @rdname with_sink
#' @export
with_message_sink <- with_(set_message_sink, reset_message_sink)

#' @rdname with_sink
#' @export
local_message_sink <- local_(set_message_sink, reset_message_sink)
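# A minimal usage sketch (`out` and `msgs` are hypothetical temporary files):
# with_output_sink() diverts printed output, while messages go through the
# separate "message" sink type.
out <- tempfile()
with_output_sink(out, print("diverted"))
readLines(out) # [1] "[1] \"diverted\""

msgs <- tempfile()
with_message_sink(msgs, message("also diverted"))
readLines(msgs) # [1] "also diverted"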
/scratch/gouwar.j/cran-all/cranData/withr/R/sink.R
# --- # repo: r-lib/withr # file: standalone-defer.R # last-updated: 2024-01-15 # license: https://unlicense.org # --- # # `defer()` is similar to `on.exit()` but with a better default for # `add` (hardcoded to `TRUE`) and `after` (`FALSE` by default). # It also supports adding handlers to other frames which is useful # to implement `local_` functions. # # # ## Changelog # # 2024-01-15: # * Rewritten to be pure base R. # # nocov start defer <- function(expr, envir = parent.frame(), after = FALSE) { thunk <- as.call(list(function() expr)) do.call( on.exit, list(thunk, add = TRUE, after = after), envir = envir ) } # nocov end
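# A tiny illustration (`f()` is hypothetical): handlers registered with
# defer() run when the target frame exits, and `after = FALSE` means the most
# recently registered handler runs first (LIFO order).
f <- function() {
  defer(message("cleanup A"))
  defer(message("cleanup B"))
  message("body")
}
f()
#> body
#> cleanup B
#> cleanup A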
/scratch/gouwar.j/cran-all/cranData/withr/R/standalone-defer.R
#' Temporary files and directories
#'
#' Temporarily create a file or directory, which will automatically be
#' deleted once you're finished with it.
#'
#' @template with
#' @param new `[character vector]`\cr (Deprecated for `local_tempfile()`) Names of temporary file handles to create.
#' @param envir `[environment]`\cr Deprecated in favor of `.local_envir`.
#' @param clean `[logical(1)]`\cr A logical indicating if the temporary
#'   directory should be deleted after use (`TRUE`, default) or left alone (`FALSE`).
#' @inheritParams with_collate
#' @inheritParams base::tempfile
#' @examples
#' # local_tempfile() is the easiest to use because it returns a path
#' local({
#'   path1 <<- local_tempfile(lines = c("x,y", "1,2"))
#'   readLines(path1)
#' })
#' # the file is deleted automatically
#' file.exists(path1)
#'
#' # with_tempfile() is a bit trickier; the first argument gives the name
#' # of a variable that will contain the path:
#' with_tempfile("path2", {
#'   print(path2)
#'   write.csv(iris, path2)
#'   file.size(path2)
#' })
#'
#' # Note that this variable is only available in the scope of with_tempfile
#' try(path2)
#' @export
with_tempfile <- function(new, code, envir = parent.frame(),
                          .local_envir = parent.frame(),
                          pattern = "file", tmpdir = tempdir(), fileext = "") {
  if (!missing(envir)) {
    .Deprecated(msg = "`envir` argument of with_tempfile() is deprecated.\n Use `with_tempfile(.local_envir=)` instead.")
    .local_envir <- envir
  }

  env <- new.env(parent = .local_envir)
  for (f in new) {
    assign(f,
      tempfile(pattern = pattern, tmpdir = tmpdir, fileext = fileext),
      envir = env)
  }
  on.exit(unlink(mget(new, envir = env), recursive = TRUE))
  eval(substitute(code), envir = env)
}

#' @rdname with_tempfile
#' @param lines Optionally, supply a character vector of lines to be written to
#'   `path`. This is useful if you want to seed the file with some default
#'   content.
#' @export
local_tempfile <- function(new = NULL, lines = NULL, envir = parent.frame(),
                           .local_envir = parent.frame(),
                           pattern = "file", tmpdir = tempdir(), fileext = "") {
  if (!missing(envir)) {
    .Deprecated(msg = "`envir` argument of local_tempfile() is deprecated.\n Use `local_tempfile(.local_envir=)` instead.")
    .local_envir <- envir
  }

  if (is.null(new)) {
    path <- tempfile(pattern = pattern, tmpdir = tmpdir, fileext = fileext)

    if (!is.null(lines)) {
      con <- file(path, open = "wb", encoding = "native.enc")
      defer(close(con))
      writeLines(enc2utf8(lines), con, useBytes = TRUE)
    }

    defer(unlink(path, recursive = TRUE), envir = .local_envir)
    return(path)
  }

  .Deprecated(msg = "`new` argument of local_tempfile() is deprecated.\n Use `path <- local_tempfile()` instead.")
  for (f in new) {
    assign(f,
      tempfile(pattern = pattern, tmpdir = tmpdir, fileext = fileext),
      envir = .local_envir)
  }
  defer(unlink(mget(new, envir = .local_envir), recursive = TRUE), envir = .local_envir)
}

#' @rdname with_tempfile
#' @export
with_tempdir <- function(code, clean = TRUE,
                         pattern = "file", tmpdir = tempdir(), fileext = "") {
  if (length(clean) > 1 || !is.logical(clean)) {
    stop("`clean` must be a single TRUE or FALSE", call. = FALSE)
  }
  tmp <- tempfile(pattern = pattern, tmpdir = tmpdir, fileext = fileext)
  dir.create(tmp)
  if (clean) {
    on.exit(unlink(tmp, recursive = TRUE), add = TRUE)
  }
  withr::with_dir(tmp, code)
}

#' @rdname with_tempfile
#' @export
local_tempdir <- function(pattern = "file", tmpdir = tempdir(), fileext = "",
                          .local_envir = parent.frame(), clean = TRUE) {
  if (length(clean) > 1 || !is.logical(clean)) {
    stop("`clean` must be a single TRUE or FALSE", call. = FALSE)
  }
  path <- tempfile(pattern = pattern, tmpdir = tmpdir, fileext = fileext)

  dir.create(path, recursive = TRUE)

  if (isTRUE(clean)) {
    defer(unlink(path, recursive = TRUE), envir = .local_envir)
  }

  path
}
/scratch/gouwar.j/cran-all/cranData/withr/R/tempfile.R