Contact author: brianrolek at gmail.com.

Supplemental materials for: Larned, A. F., B. W. Rolek, K. Silaphone, S. Pruett, R. Bowman, B. Lohr. 2023. Habitat use and dynamics of an Endangered passerine, the Florida Grasshopper Sparrow (Ammodramus savannarum floridanus), after prescribed fires.

GitHub repository located online at https://github.com/BrianRolek/Misclassification-models and archived here: DOI:10.5281/zenodo.7987218

We used dynamic occupancy models that accounted for misclassification and detection probability. We implemented the models in R with the Bayesian software NIMBLE.

1. Metadata

Metadata associated with the file “final-data.Rdata”. Once loaded into R, this file contains the object “dat.conv”, a list containing data for input into the NIMBLE models. Data fields include:

2. Basic Model Without Covariates

# Label for this model, used when saving output files (see commented save below).
m <- "misclass_basic"
library (nimble)
# Loads the object "dat.conv", a list with the detection/covariate data.
load(".\\data\\final-data.Rdata")

# Dynamic occupancy model with misclassification, no covariates.
# Observed states Y: 1 = no detection, 2 = uncertain detection,
# 3 = certain detection (presumably; inferred from the pi[] rows — confirm
# against the paper). z[i,t] is true occupancy of site i in year t.
code <- nimbleCode(
  {
    ## PRIORS 
    # Mean probabilities on the unit scale, transformed to logit intercepts.
    mean.b ~ dunif(0, 1)            # b: P(certain | detected & occupied)
    b.b[1] <- logit(mean.b)
    mean.p10 ~dunif(0, 1)           # p10: false-positive detection prob.
    p10.b[1] <- logit(mean.p10)
    mean.p11 ~ dunif(0, 1)          # p11: true-positive detection prob.
    p11.b[1] <- logit(mean.p11)
    mean.phi ~ dunif(0, 1)          # phi: persistence probability
    phi.alpha[1] <- logit(mean.phi)
    mean.gamma ~ dunif(0, 1)        # gamma: colonization probability
    gam.alpha[1] <- logit(mean.gamma)
    mean.psi ~ dunif(0, 1)          # psi: first-year occupancy probability
    psi.b <- logit(mean.psi)
    
    ## LIKELIHOOD
    ## first year
    for(i in 1:nsite){
      logit(psi1[i,1]) <- psi.b
      z[i,1] ~ dbern(psi1[i,1])
      for (j in 1:nvisit){
        # Multinomial cell probabilities for the three observed states:
        # row 1: no detection (missed when occupied, or no false positive)
        pi[i,j,1,1] <- z[i,1]*(1-p11[i,j,1]) + (1-z[i,1])*(1-p10[i,j,1])      
        # row 2: uncertain detection (true but unconfirmed, or false positive)
        pi[i,j,1,2] <- z[i,1]*(1-b[i,j,1])*p11[i,j,1] + (1-z[i,1])*p10[i,j,1]
        # row 3: certain detection (only possible when truly occupied)
        pi[i,j,1,3] <- z[i,1]*b[i,j,1]*p11[i,j,1]   
        Y[i,j,1] ~ dcat(pi[i,j,1,1:3])
      } #j
      
      # subsequent years
      for (t in 2:nyear){
        for (j in 1:nvisit){ 
          pi[i,j,t,1] <- z[i,t]*(1-p11[i,j,t]) + (1-z[i,t])*(1-p10[i,j,t])      
          pi[i,j,t,2] <- z[i,t]*(1-b[i,j,t])*p11[i,j,t] + (1-z[i,t])*p10[i,j,t]
          pi[i,j,t,3] <- z[i,t]*b[i,j,t]*p11[i,j,t]
          Y[i,j,t] ~ dcat(pi[i,j,t,1:3])
        } # j
        # dynamics: occupied sites persist with phi, empty sites are
        # colonized with gamma
        z[i,t] ~ dbern(muZ[i,t])
        muZ[i,t]<- z[i,t-1]*phi[i,t-1] + (1-z[i,t-1])*gamma[i,t-1]
        logit(phi[i,t-1]) <- phi.alpha[1]
        logit(gamma[i,t-1]) <- gam.alpha[1]
      } # t nyear 
      
      for (t in 1:nyear){
        for (j in 1:nvisit){
          # detection models (intercept-only in this basic version)
          logit(b[i,j,t]) <- b.b[1] 
          logit(p11[i,j,t]) <- p11.b[1] 
          logit(p10[i,j,t]) <- p10.b[1]  
        } } }# t j i
    
    # Predicted values for certainty, detection, and misclassification
    psi[1] <- mean.psi
    n.occ[1] <- sum(z[1:nsite,1])         # realized number of occupied sites
    for (t in 2:nyear){
      n.occ[t] <- sum(z[1:nsite,t])
      logit(phi.est[t-1]) <- phi.alpha[1] 
      logit(gam.est[t-1]) <- gam.alpha[1] 
      # expected occupancy trajectory, growth rate, and turnover
      psi[t] <- psi[t-1]*phi.est[t-1] + (1-psi[t-1])*gam.est[t-1]
      growthr[t-1] <- psi[t]/psi[t-1] 
      turnover[t-1] <- (1-psi[t-1]) * gam.est[t-1]/psi[t]
    } # t
  }
)

## Data passed to NIMBLE: the observation array plus its loop bounds.
datl <- list(
  Y      = dat.conv$Y,
  nsite  = nrow(dat.conv$Y),
  nvisit = ncol(dat.conv$Y),
  nyear  = dim(dat.conv$Y)[3]
)

## Parameters monitored during MCMC.
params <- c(
  "mean.p11", "p11.b",
  "mean.b", "b.b",
  "mean.p10", "p10.b",
  "psi.b",
  "mean.phi", "phi.alpha", "phi.est",
  "mean.gamma", "gam.alpha", "gam.est",
  "psi", "n.occ", "growthr", "turnover",
  "z"
)

# Inits
maxstates <- apply(datl$Y, c(1, 3), max, na.rm=T )
z.inits <- ifelse(maxstates>1, 1, 0)
z.inits[is.na(z.inits)] <- 0
Y.inits <- array(NA, dim=dim(dat.conv$Y))
Y.inits[is.na(dat.conv$Y)] <- 1
occ1 <- apply(datl$Y, c(1,3), sum)

## Random starting values for one MCMC chain. Relies on datl, Y.inits,
## z.inits, and occ1 created by the surrounding script.
inits <- function() {
  adim  <- c(datl$nsite, datl$nvisit, datl$nyear)  # site x visit x year
  ncell <- prod(adim)
  list(
    Y   = Y.inits,
    z   = z.inits,
    muZ = ifelse(occ1 > 0, 0.1, 0.8),
    p10 = array(runif(ncell, 0.001, 0.1), dim = adim),
    p11 = array(runif(ncell, 0.3, 0.7), dim = adim),
    b   = array(runif(ncell, 0.5, 0.99), dim = adim),
    mean.psi   = runif(1),
    mean.p11   = runif(1),
    mean.p10   = runif(1),
    mean.b     = runif(1),
    psi.b      = runif(1, -5, 5),
    b.b        = runif(1, -5, 5),
    p11.b      = runif(1, -5, 5),
    p10.b      = runif(1, -5, 5),
    mean.gamma = runif(1),
    mean.phi   = runif(1),
    phi.alpha  = runif(1, -5, 5),
    gam.alpha  = runif(1, -5, 5)
  )
}
# MCMC settings: 600k iterations, 400k burn-in, thin by 200
# -> 1,000 retained samples per chain.
n.chains=3; n.thin=200; n.iter=600000; n.burnin=400000

# Build the model; constants are every datl entry except Y (element 1),
# which enters as data.
mod <- list()
mod<- nimbleModel(code, calculate=T, constants = datl[-1], 
                  data = list(Y=datl$Y), inits = inits())

out <- nimbleMCMC(
  model = mod,
  code = code,
  monitors = params,
  nchains = n.chains,
  thin = n.thin,
  niter = n.iter,
  nburnin = n.burnin,
  progressBar = T,
  summary = T,
  WAIC = F,
  samplesAsCodaMCMC = T,
  samples=T
)

#flnm <- paste(".\\",m, "_", Sys.Date(), ".Rdata", sep="")
#save(out, mod, file=flnm)

3. Global Detection Model Including All Covariates for Detection

# Label for this model, used when saving output files.
m <- "misclass_global_detection"
library (nimble)
# Loads the object "dat.conv", a list with the detection/covariate data.
load(".\\data\\final-data.Rdata")

# Dynamic occupancy model with misclassification and a "global" detection
# submodel: date polynomials, hour-of-day effects, and yearly random effects
# on the three detection parameters. Occupancy dynamics remain intercept-only.
code <- nimbleCode(
  {
    ## PRIORS 
    mean.b ~ dunif(0, 1)            # b: P(certain | detected & occupied)
    b.b[1] <- logit(mean.b)
    mean.p10 ~dunif(0, 1)           # p10: false-positive detection prob.
    p10.b[1] <- logit(mean.p10)
    mean.p11 ~ dunif(0, 1)          # p11: true-positive detection prob.
    p11.b[1] <- logit(mean.p11)
    mean.phi ~ dunif(0, 1)          # phi: persistence
    phi.alpha[1] <- logit(mean.phi)
    mean.gamma ~ dunif(0, 1)        # gamma: colonization
    gam.alpha[1] <- logit(mean.gamma)
    mean.psi ~ dunif(0, 1)          # psi: first-year occupancy
    psi.b <- logit(mean.psi)
    # Covariate slopes: b gets date^1-3, p10 date^1-2, p11 date^1-2 + hr^1-2.
    for (xx in 2:4) { b.b[xx] ~ dnorm(0, sd=100) }
    for (xx in 2:3) { p10.b[xx] ~ dnorm(0, sd=100) }
    for (xx in 2:5) { p11.b[xx] ~ dnorm(0, sd=100) }
    # Yearly random intercepts on each detection parameter.
    for (t in 1:nyear){ 
      eps.p10[t] ~ dnorm(0, sd=sig.p10) 
      eps.p11[t] ~ dnorm(0, sd=sig.p11)
      eps.b[t] ~ dnorm(0, sd=sig.b)} # t 
    # Half-normal priors on the random-effect SDs.
    sig.p10 ~ T(dnorm(0,sd=10),0, )
    sig.p11 ~ T(dnorm(0,sd=10),0, )
    sig.b ~ T(dnorm(0,sd=10),0, )
    
    ## LIKELIHOOD
    ## first year
    for(i in 1:nsite){
      logit(psi1[i,1]) <- psi.b
      z[i,1] ~ dbern(psi1[i,1])
      for (j in 1:nvisit){
        # Cell probabilities for observed states 1/2/3
        # (none / uncertain / certain detection).
        pi[i,j,1,1] <- z[i,1]*(1-p11[i,j,1]) + (1-z[i,1])*(1-p10[i,j,1])      
        pi[i,j,1,2] <- z[i,1]*(1-b[i,j,1])*p11[i,j,1] + (1-z[i,1])*p10[i,j,1]
        pi[i,j,1,3] <- z[i,1]*b[i,j,1]*p11[i,j,1]   
        Y[i,j,1] ~ dcat(pi[i,j,1,1:3])
      } #j
      
      # subsequent years
      for (t in 2:nyear){
        for (j in 1:nvisit){ 
          pi[i,j,t,1] <- z[i,t]*(1-p11[i,j,t]) + (1-z[i,t])*(1-p10[i,j,t])      
          pi[i,j,t,2] <- z[i,t]*(1-b[i,j,t])*p11[i,j,t] + (1-z[i,t])*p10[i,j,t]
          pi[i,j,t,3] <- z[i,t]*b[i,j,t]*p11[i,j,t]
          Y[i,j,t] ~ dcat(pi[i,j,t,1:3])
        } # j
        # dynamics: persistence phi or colonization gamma
        z[i,t] ~ dbern(muZ[i,t])
        muZ[i,t]<- z[i,t-1]*phi[i,t-1] + (1-z[i,t-1])*gamma[i,t-1]
        logit(phi[i,t-1]) <- phi.alpha[1]
        logit(gamma[i,t-1]) <- gam.alpha[1]
      } # t nyear 
      
      for (t in 1:nyear){
        for (j in 1:nvisit){
          # detection models: cubic date for b; quadratic date for p10;
          # quadratic date + quadratic hour for p11; all with year effects
          logit(b[i,j,t]) <- b.b[1] + b.b[2]*date[i,j,t] + b.b[3]*date[i,j,t]^2+ b.b[4]*date[i,j,t]^3 + eps.b[t]
          logit(p11[i,j,t]) <- p11.b[1] + p11.b[2]*date[i,j,t] + p11.b[3]*date[i,j,t]^2 + p11.b[4]*hr[i,j,t] + p11.b[5]*hr[i,j,t]^2 + eps.p11[t]
          logit(p10[i,j,t]) <- p10.b[1]  + p10.b[2]*date[i,j,t] + p10.b[3]*date[i,j,t]^2 + eps.p10[t]
        } } }# t j i
    
    # Predicted values for certainty, detection, and misclassification
    psi[1] <- mean.psi
    n.occ[1] <- sum(z[1:nsite,1])
    for (t in 2:nyear){
      n.occ[t] <- sum(z[1:nsite,t])
      logit(phi.est[t-1]) <- phi.alpha[1] 
      logit(gam.est[t-1]) <- gam.alpha[1] 
      psi[t] <- psi[t-1]*phi.est[t-1] + (1-psi[t-1])*gam.est[t-1]
      growthr[t-1] <- psi[t]/psi[t-1] 
      turnover[t-1] <- (1-psi[t-1]) * gam.est[t-1]/psi[t]
    } # t
  }
)

## Data passed to NIMBLE: observations, survey covariates, and loop bounds.
datl <- list(
  Y      = dat.conv$Y,
  date   = dat.conv$date,
  hr     = dat.conv$hr,
  nsite  = nrow(dat.conv$Y),
  nvisit = ncol(dat.conv$Y),
  nyear  = dim(dat.conv$Y)[3]
)

## Parameters monitored during MCMC.
params <- c(
  "mean.p11", "p11.b", "sig.p11",
  "mean.b", "b.b", "eps.b", "sig.b",
  "mean.p10", "p10.b", "sig.p10",
  "psi.b",
  "mean.phi", "phi.alpha", "phi.est",
  "mean.gamma", "gam.alpha", "gam.est",
  "psi", "n.occ", "growthr", "turnover",
  "z"
)

# Inits
maxstates <- apply(datl$Y, c(1, 3), max, na.rm=T )
z.inits <- ifelse(maxstates>1, 1, 0)
z.inits[is.na(z.inits)] <- 0
Y.inits <- array(NA, dim=dim(dat.conv$Y))
Y.inits[is.na(dat.conv$Y)] <- 1
occ1 <- apply(datl$Y, c(1,3), sum)

## Random starting values for one MCMC chain. Relies on datl, Y.inits,
## z.inits, and occ1 created by the surrounding script.
inits <- function() {
  adim  <- c(datl$nsite, datl$nvisit, datl$nyear)  # site x visit x year
  ncell <- prod(adim)
  list(
    Y   = Y.inits,
    z   = z.inits,
    muZ = ifelse(occ1 > 0, 0.1, 0.8),
    p10 = array(runif(ncell, 0.001, 0.1), dim = adim),
    p11 = array(runif(ncell, 0.3, 0.7), dim = adim),
    b   = array(runif(ncell, 0.5, 0.99), dim = adim),
    mean.psi   = runif(1),
    mean.p11   = runif(1),
    mean.p10   = runif(1),
    mean.b     = runif(1),
    psi.b      = runif(1, -5, 5),
    b.b        = runif(4, -5, 5),
    p11.b      = runif(5, -5, 5),
    p10.b      = runif(3, -5, 5),
    mean.gamma = runif(1),
    mean.phi   = runif(1),
    phi.alpha  = runif(1, -5, 5),
    gam.alpha  = runif(1, -5, 5),
    sig.p10    = runif(1),
    sig.p11    = runif(1),
    sig.b      = runif(1),
    eps.p10    = runif(datl$nyear, -0.1, 0.1),
    eps.p11    = runif(datl$nyear, -0.1, 0.1),
    eps.b      = runif(datl$nyear, -0.1, 0.1)
  )
}
# MCMC settings: 600k iterations, 400k burn-in, thin by 200
# -> 1,000 retained samples per chain.
n.chains=3; n.thin=200; n.iter=600000; n.burnin=400000

# Build the model; constants are every datl entry except Y (element 1).
mod <- list()
mod<- nimbleModel(code, calculate=T, constants = datl[-1], 
                  data = list(Y=datl$Y), inits = inits())

out <- nimbleMCMC(
  model = mod,
  code = code,
  monitors = params,
  nchains = n.chains,
  thin = n.thin,
  niter = n.iter,
  nburnin = n.burnin,
  progressBar = T,
  summary = T,
  WAIC = F,
  samplesAsCodaMCMC = T,
  samples=T
)

# flnm <- paste(".\\outputs\\",m, "_", Sys.Date(), ".Rdata", sep="")
# save(out, mod, file=flnm)

4. Bayesian latent indicator scale selection (BLISS) to Select Spatial Scales and Summary Statistics of Covariates

library (nimble)
# Loads "dat.conv" plus covariate arrays (YSF, SEAS) used below.
load(".\\data\\final-data.Rdata")
# BLISS model: dynamic occupancy with misclassification where latent
# indicators bliss[1:4] select among 12 years-since-fire (YSF) covariate
# versions (6 scales x {quadratic, linear}) and 2 season (SEAS) versions
# for persistence and colonization.
code <- nimbleCode(
  {
    ## PRIORS 
    mean.b ~ dunif(0, 1)
    b.b[1] <- logit(mean.b)
    mean.p10 ~dunif(0, 1)
    p10.b[1] <- logit(mean.p10)
    mean.p11 ~ dunif(0, 1)
    p11.b[1] <- logit(mean.p11)
    mean.phi ~ dunif(0, 1)
    phi.alpha[1] <- logit(mean.phi)
    mean.gamma ~ dunif(0, 1)
    gam.alpha[1] <- logit(mean.gamma)
    mean.psi ~ dunif(0, 1)
    psi.b[1] <- logit(mean.psi)
    for (xx in 2:4) { b.b[xx] ~ dnorm(0, sd=100) }
    for (xx in 2:3) { p10.b[xx] ~ dnorm(0, sd=100) }
    for (xx in 2) { p11.b[xx] ~ dnorm(0, sd=100) }
    # FIX: the intercepts phi.alpha0/gamma.alpha0 and the coefficients
    # phi.alpha[2:4]/gam.alpha[2:4] are used in the linear predictors below
    # but had no priors in the original code (they are initialized and
    # monitored, so they are clearly meant to be estimated).
    phi.alpha0 ~ dnorm(0, sd=100)
    gamma.alpha0 ~ dnorm(0, sd=100)
    for (xx in 2:4) { phi.alpha[xx] ~ dnorm(0, sd=100) }
    for (xx in 2:4) { gam.alpha[xx] ~ dnorm(0, sd=100) }
    # NOTE(review): phi.alpha[1]/gam.alpha[1] are derived from mean.phi/
    # mean.gamma yet act as YSF slopes below — looks like a leftover from
    # the intercept-only template; confirm against the archived repository.
    
    for (t in 1:(nyear-1)){ 
      eps.phi[t] ~ dnorm(0, sd=sig.phi) 
      eps.gam[t] ~ dnorm(0, sd=sig.gam) 
    } # t
    for (t in 1:nyear){ 
      eps.p10[t] ~ dnorm(0, sd=sig.p10) 
      eps.p11[t] ~ dnorm(0, sd=sig.p11)
      eps.b[t] ~ dnorm(0, sd=sig.b)} # t 
    sig.p10 ~ T(dnorm(0,sd=10),0, )
    sig.p11 ~ T(dnorm(0,sd=10),0, )
    sig.b ~ T(dnorm(0,sd=10),0, )
    # FIX: priors for sig.phi and sig.gam were missing although the
    # eps.phi/eps.gam random effects above use them.
    sig.phi ~ T(dnorm(0,sd=10),0, )
    sig.gam ~ T(dnorm(0,sd=10),0, )
    # Scale-selection indicators: 1-6 = quadratic YSF at scale 1-6,
    # 7-12 = linear-only YSF at scale 1-6; seasons have 2 versions.
    bliss[1] ~ dcat( priors1[1:12] )
    bliss[3] ~ dcat( priors3[1:12] )
    for (jj in 1:12){
    priors1[jj] <-  0.0833
    priors3[jj] <-  0.0833
    } # jj
    bliss[2] ~ dcat( priors2[1:2] )
    bliss[4] ~ dcat( priors4[1:2] )
    for (kk in 1:2){
      priors2[kk] <- 0.5
      priors4[kk] <- 0.5
    } # kk
    ## LIKELIHOOD
    ## first year
    for(i in 1:nsite){
      logit(psi1[i,1]) <- psi.b[1]
      z[i,1] ~ dbern(psi1[i,1])
      for (j in 1:nvisit){
        pi[i,j,1,1] <- z[i,1]*(1-p11[i,j,1]) + (1-z[i,1])*(1-p10[i,j,1])      
        pi[i,j,1,2] <- z[i,1]*(1-b[i,j,1])*p11[i,j,1] + (1-z[i,1])*p10[i,j,1]
        pi[i,j,1,3] <- z[i,1]*b[i,j,1]*p11[i,j,1]   
        Y[i,j,1] ~ dcat(pi[i,j,1,1:3])
      } #j
      
      # subsequent years
      for (t in 2:nyear){
        for (j in 1:nvisit){ 
          pi[i,j,t,1] <- z[i,t]*(1-p11[i,j,t]) + (1-z[i,t])*(1-p10[i,j,t])      
          pi[i,j,t,2] <- z[i,t]*(1-b[i,j,t])*p11[i,j,t] + (1-z[i,t])*p10[i,j,t]
          pi[i,j,t,3] <- z[i,t]*b[i,j,t]*p11[i,j,t]
          Y[i,j,t] ~ dcat(pi[i,j,t,1:3])
        } # j
        # dynamics
        z[i,t] ~ dbern(muZ[i,t])
        muZ[i,t]<- z[i,t-1]*phi[i,t-1] + (1-z[i,t-1])*gamma[i,t-1]
        # equals(bliss[k], v) switches on exactly one covariate version.
        logit(phi[i,t-1]) <- 
          phi.alpha0 + eps.phi[t-1] +
          equals(bliss[1],1) * phi.alpha[1] *  YSF.std[i,t, 1 ]  +  
          equals(bliss[1],1) * phi.alpha[2]*  YSF.std[i,t, 1 ]^2  +
          equals(bliss[1],2) * phi.alpha[1] *  YSF.std[i,t, 2 ]  +  
          equals(bliss[1],2) * phi.alpha[2]*  YSF.std[i,t, 2 ]^2  +
          equals(bliss[1],3) * phi.alpha[1] *  YSF.std[i,t, 3 ]  +  
          equals(bliss[1],3) * phi.alpha[2]*  YSF.std[i,t, 3 ]^2  +
          equals(bliss[1],4) * phi.alpha[1] *  YSF.std[i,t, 4 ]  +  
          equals(bliss[1],4) * phi.alpha[2]*  YSF.std[i,t, 4 ]^2  +
          equals(bliss[1],5) * phi.alpha[1] *  YSF.std[i,t, 5 ]  +  
          equals(bliss[1],5) * phi.alpha[2]*  YSF.std[i,t, 5 ]^2  +
          equals(bliss[1],6) * phi.alpha[1] *  YSF.std[i,t, 6 ]  +  
          equals(bliss[1],6) * phi.alpha[2]*  YSF.std[i,t, 6 ]^2 +
          equals(bliss[1],7) * phi.alpha[1] *  YSF.std[i,t, 1 ]  +  
          equals(bliss[1],8) * phi.alpha[1] *  YSF.std[i,t, 2 ]  +  
          equals(bliss[1],9) * phi.alpha[1] *  YSF.std[i,t, 3 ]  +  
          equals(bliss[1],10) * phi.alpha[1] *  YSF.std[i,t, 4 ]  +  
          equals(bliss[1],11) * phi.alpha[1] *  YSF.std[i,t, 5 ]  +  
          equals(bliss[1],12) * phi.alpha[1] *  YSF.std[i,t, 6 ]  +  
          equals(bliss[2],1) * phi.alpha[3] * sin(SEAS[i,t, 1 ]*2*3.1416) + equals(bliss[2],1) * phi.alpha[4] * cos(SEAS[i,t, 1 ]*2*3.1416) +
          equals(bliss[2],2) * phi.alpha[3] * sin(SEAS[i,t, 2 ]*2*3.1416) + equals(bliss[2],2) * phi.alpha[4] * cos(SEAS[i,t, 2 ]*2*3.1416)
        
        logit(gamma[i,t-1]) <- 
          gamma.alpha0 + eps.gam[t-1] +
          equals(bliss[3],1) * gam.alpha[1] *  YSF.std[i,t, 1 ]  +  
          equals(bliss[3],1) * gam.alpha[2]*  YSF.std[i,t, 1 ]^2  +
          equals(bliss[3],2) * gam.alpha[1] *  YSF.std[i,t, 2 ]  +  
          equals(bliss[3],2) * gam.alpha[2]*  YSF.std[i,t, 2 ]^2  +
          equals(bliss[3],3) * gam.alpha[1] *  YSF.std[i,t, 3 ]  +  
          equals(bliss[3],3) * gam.alpha[2]*  YSF.std[i,t, 3 ]^2  +
          equals(bliss[3],4) * gam.alpha[1] *  YSF.std[i,t, 4 ]  +  
          equals(bliss[3],4) * gam.alpha[2]*  YSF.std[i,t, 4 ]^2  +
          equals(bliss[3],5) * gam.alpha[1] *  YSF.std[i,t, 5 ]  +  
          equals(bliss[3],5) * gam.alpha[2]*  YSF.std[i,t, 5 ]^2  +
          equals(bliss[3],6) * gam.alpha[1] *  YSF.std[i,t, 6 ]  +  
          equals(bliss[3],6) * gam.alpha[2]*  YSF.std[i,t, 6 ]^2 +
          equals(bliss[3],7) * gam.alpha[1] *  YSF.std[i,t, 1 ]  +  
          equals(bliss[3],8) * gam.alpha[1] *  YSF.std[i,t, 2 ]  +  
          equals(bliss[3],9) * gam.alpha[1] *  YSF.std[i,t, 3 ]  +  
          equals(bliss[3],10) * gam.alpha[1] *  YSF.std[i,t, 4 ]  +  
          equals(bliss[3],11) * gam.alpha[1] *  YSF.std[i,t, 5 ]  +  
          equals(bliss[3],12) * gam.alpha[1] *  YSF.std[i,t, 6 ]  +  
          equals(bliss[4],1) * gam.alpha[3] * sin(SEAS[i,t, 1 ]*2*3.1416) + equals(bliss[4],1) * gam.alpha[4] * cos(SEAS[i,t, 1 ]*2*3.1416) +
          equals(bliss[4],2) * gam.alpha[3] * sin(SEAS[i,t, 2 ]*2*3.1416) + equals(bliss[4],2) * gam.alpha[4] * cos(SEAS[i,t, 2 ]*2*3.1416)
         } # t nyear 
      
        for (t in 1:nyear){
          for (j in 1:nvisit){
          # detection models
          # FIX: date[i,j,t]*2 was a typo for date[i,j,t]^2 (the quadratic
          # term of the cubic date polynomial used by the other models).
          logit(b[i,j,t]) <- b.b[1] + b.b[2]*date[i,j,t] + b.b[3]*date[i,j,t]^2+ b.b[4]*date[i,j,t]^3 + eps.b[t]
          logit(p11[i,j,t]) <- p11.b[1] + p11.b[2]*hr[i,j,t] + eps.p11[t]
          logit(p10[i,j,t]) <- p10.b[1]  + p10.b[2]*date[i,j,t] + p10.b[3]*date[i,j,t]^2 + eps.p10[t]
        } } }# t j i

    # Derived quantities: occupancy trajectory and turnover.
    psi[1] <- mean.psi
    n.occ[1] <- sum(z[1:nsite,1])
    for (t in 2:nyear){
      n.occ[t] <- sum(z[1:nsite,t])
      logit(phi.est[t-1]) <- phi.alpha0 + eps.phi[t-1]
      logit(gam.est[t-1]) <- gamma.alpha0 + eps.gam[ t-1]
      psi[t] <- psi[t-1]*phi.est[t-1] + (1-psi[t-1])*gam.est[t-1]
      growthr[t-1] <- psi[t]/psi[t-1] 
      turnover[t-1] <- (1-psi[t-1]) * gam.est[t-1]/psi[t]
      } # t
  }
)

# Standardize (z-score) the years-since-fire covariate at each of the six
# spatial scales/summaries stored in the third array dimension.
# NOTE(review): YSF2.std is created but never filled or used here.
YSF.std <- YSF2.std <- array(NA, dim=dim(YSF))
for (i in 1:6){
  # FIX: added na.rm=TRUE to sd(); mean() already dropped NAs, but a single
  # NA made sd() return NA and turned the entire standardized slice into NA.
  YSF.std[,,i] <- (YSF[,,i]-mean(YSF[,,i], na.rm=T)) / sd(YSF[,,i], na.rm=TRUE)
}

## Data passed to NIMBLE: observations, covariates, and loop bounds.
datl <- list(
  Y       = dat.conv$Y,
  date    = dat.conv$date,
  hr      = dat.conv$hr,
  nsite   = nrow(dat.conv$Y),
  nvisit  = ncol(dat.conv$Y),
  nyear   = dim(dat.conv$Y)[3],
  YSF.std = YSF.std,
  SEAS    = SEAS,
  nYSF    = dim(YSF)[3],
  nSEAS   = dim(SEAS)[3]
)

## Parameters monitored during MCMC.
params <- c(
  "mean.p11", "p11.b", "eps.p11", "sig.p11",
  "mean.b", "b.b", "eps.b", "sig.b",
  "mean.p10", "p10.b", "eps.p10", "sig.p10",
  "psi.b",
  "mean.phi", "phi.alpha0", "phi.alpha", "eps.phi", "sig.phi", "phi.est",
  "mean.gamma", "gamma.alpha0", "gam.alpha", "eps.gam", "sig.gam", "gam.est",
  "psi", "n.occ", "growthr", "turnover",
  "bliss",
  "z"
)

# Inits
maxstates <- apply(datl$Y, c(1, 3), max, na.rm=T )
z.inits <- ifelse(maxstates>1, 1, 0)
z.inits[is.na(z.inits)] <- 0
Y.inits <- array(NA, dim=dim(dat.conv$Y))
Y.inits[is.na(dat.conv$Y)] <- 1
occ1 <- apply(datl$Y, c(1,3), sum)

## Random starting values for one MCMC chain; relies on datl, Y.inits,
## z.inits, and occ1 from the surrounding script.
inits <- function() {
  adim  <- c(datl$nsite, datl$nvisit, datl$nyear)  # site x visit x year
  ncell <- prod(adim)
  list(
    Y   = Y.inits,
    z   = z.inits,
    muZ = ifelse(occ1 > 0, 0.1, 0.8),
    p10 = array(runif(ncell, 0.001, 0.1), dim = adim),
    p11 = array(runif(ncell, 0.3, 0.7), dim = adim),
    b   = array(runif(ncell, 0.5, 0.99), dim = adim),
    mean.psi     = runif(1),
    mean.p11     = runif(1),
    mean.p10     = runif(1, 0.01, 0.1),
    mean.b       = runif(1),
    psi.b        = runif(1, -5, 5),
    b.b          = runif(4, -5, 5),
    p11.b        = runif(2, -5, 5),
    p10.b        = runif(3, -5, 5),
    mean.gamma   = runif(1),
    mean.phi     = runif(1),
    phi.alpha0   = runif(1, -5, 0),
    gamma.alpha0 = runif(1, 0, 5),
    phi.alpha    = runif(4, -2, 2),
    gam.alpha    = runif(4, -2, 2),
    sig.p10      = runif(1),
    sig.p11      = runif(1),
    sig.b        = runif(1),
    sig.phi      = runif(1),
    sig.gam      = runif(1),
    eps.phi      = runif(datl$nyear - 1, -0.1, 0.1),
    eps.gam      = runif(datl$nyear - 1, -0.1, 0.1),
    eps.p10      = runif(datl$nyear, -0.1, 0.1),
    eps.p11      = runif(datl$nyear, -0.1, 0.1),
    eps.b        = runif(datl$nyear, -0.1, 0.1),
    # one random scale/summary choice per selection indicator
    bliss = c(sample(1:12, 1), sample(c(1, 2), 1),
              sample(1:12, 1), sample(c(1, 2), 1))
  )
}

# MCMC settings: 600k iterations, 400k burn-in, thin by 200
# -> 1,000 retained samples per chain.
n.chains=3; n.thin=200; n.iter=600000; n.burnin=400000
# Build the model; constants are every datl entry except Y (element 1).
mod<- nimbleModel(code, calculate=T, constants = datl[-1], 
                  data = list(Y=datl$Y), inits = inits())

out <- nimbleMCMC(
  model=mod,
  code = code,
  monitors = params,
  nchains=n.chains,
  thin = n.thin,
  niter = n.iter,
  nburnin = n.burnin,
  progressBar=T,
  summary=T,
  WAIC=F,
  samplesAsCodaMCMC = T,
  samples=T
)

# flnm <- paste(".\\outputs\\misclass-BLISS_", Sys.Date(), ".Rdata", sep="")
# save(out, file=flnm)

5. Gibbs Variable Selection (GVS)

5.1. Calculate Prior Probabilities (step 1)

library(MuMIn)  # provides dredge() for enumerating candidate models
# Loads "dat.conv" (kept for consistency; this step uses dummy data only).
load( ".\\data\\final-data.Rdata")

## Dummy data so dredge() can enumerate candidate models; only the model
## STRUCTURE matters here, the values are placeholders.
vars <- c("eps.phi", "YSF.std", "sin.SEAS", "cos.SEAS")
tf <- c(TRUE, FALSE)
eg <- expand.grid(rep(list(tf), 4))   # all 16 inclusion patterns
colnames(eg) <- vars

test1 <- model.matrix(~ YSF.std + sin.SEAS + cos.SEAS, eg)

# 100 rows of standard-normal noise for the placeholder predictors and y.
dat <- as.data.frame(matrix(rnorm(100 * length(vars)), ncol = length(vars)))
names(dat) <- vars
dat$y <- rnorm(nrow(dat))

# Global (most complex) PERSISTENCE model; fit only so dredge() can
# enumerate its submodels (eval=F builds the model calls without fitting).
global <- lm(y ~ YSF.std + sin.SEAS + cos.SEAS +
               YSF.std:sin.SEAS + YSF.std:cos.SEAS, 
             data = dat, na.action = "na.fail")

# dummy dat

# dc() enforces marginality: an interaction may enter a model only when
# its component main effects are also present.
combos <- dredge(global, eval=F,
                 subset = 
                   dc(YSF.std, YSF.std:sin.SEAS) &&
                   dc(YSF.std, YSF.std:cos.SEAS) &&
                   dc(sin.SEAS, YSF.std:sin.SEAS) &&
                   dc(cos.SEAS, YSF.std:cos.SEAS) 
)

# Terms of the largest (last) model define the columns of the
# model-by-term inclusion matrix built below.
n.mods <- length(combos)
dat1 <- dat[1,]*0 + 1
terms <- colnames(model.matrix(formula(combos[[n.mods]]),dat1))
n.terms <- length(terms)

combo.mat <- matrix(0,nrow=n.mods,ncol=n.terms)
colnames(combo.mat) <- terms

# Mark with 1 every term that appears in each candidate model.
for (i in 1:n.mods){
  combo.mat[i,match(colnames(model.matrix(formula(combos[[i]]),dat1)),
                    terms)] <- 1
}

combo.mat

# total model probs
# (column means = prior inclusion probability of each term)
apply(combo.mat,2,mean)

# highest order terms
mean(combo.mat[,"cos.SEAS:YSF.std"])
mean(combo.mat[,"sin.SEAS:YSF.std"])

# probabilities without higher order terms
# ba:sf
mean(combo.mat[which(combo.mat[,"cos.SEAS:YSF.std"]==0 & combo.mat[,"sin.SEAS:YSF.std"]==0),"YSF.std"])

##################
# COLONIZATION
#################
# Global COLONIZATION model adds quadratic YSF and its interactions with
# the season terms; again only used to enumerate candidate models.
global2 <- lm(y ~ YSF.std + I(YSF.std^2) + 
                sin.SEAS + cos.SEAS +
                YSF.std:sin.SEAS + YSF.std:cos.SEAS + 
                I(YSF.std^2):sin.SEAS + I(YSF.std^2):cos.SEAS,
              data = dat, na.action = "na.fail")

# dc() chains keep linear/quadratic YSF terms (and their interactions)
# marginal to each other and to the season main effects.
combos2 <- dredge(global2, eval=F,
                  subset = 
                    dc(YSF.std, sin.SEAS:YSF.std, {sin.SEAS:I(YSF.std^2)}) &&
                    dc(YSF.std, cos.SEAS:YSF.std, {cos.SEAS:I(YSF.std^2)}) &&
                    dc(sin.SEAS, sin.SEAS:YSF.std) &&
                    dc(cos.SEAS, cos.SEAS:YSF.std) &&
                    dc(YSF.std, {I(YSF.std^2)}, {sin.SEAS:I(YSF.std^2)}) &&
                    dc(YSF.std, {I(YSF.std^2)}, {cos.SEAS:I(YSF.std^2)}) &&
                    dc({I(YSF.std^2)}, YSF.std ) &&
                    dc({sin.SEAS:I(YSF.std^2)}, sin.SEAS:YSF.std) #&&
                    # dc({cos.SEAS:I(YSF.std^2)}, cos.SEAS:YSF.std)
                  )

# Build the model-by-term inclusion matrix, as for persistence above.
n.mods2 <- length(combos2)
dat2 <- dat[1,]*0 + 1
terms2 <- colnames(model.matrix(formula(combos2[[n.mods2]]),dat2))
n.terms2 <- length(terms2)

combo.mat2 <- matrix(0, nrow=n.mods2, ncol=n.terms2)
colnames(combo.mat2) <- terms2

for (i in 1:n.mods2){
  combo.mat2[i,match(colnames(model.matrix(formula(combos2[[i]]),dat2)),
                    terms2)] <- 1
}

combo.mat2

# total model probs
# Inclusion probabilities computed conditionally: main effects among
# models without interactions (cols 6:9), etc.
sub2 <- apply(combo.mat2[,6:9]==0, 1, all)
apply(combo.mat2[sub2,],2,mean)[1:5]
sub3 <- apply(combo.mat2[,8:9]==0, 1, all)
apply(combo.mat2[sub3,],2,mean)[6:7]
apply(combo.mat2,2,mean)[8:9]


# Manual enumeration of the colonization model space (including the yearly
# random effect eps.gam) to compute the prior inclusion probabilities used
# as Bernoulli priors in the GVS model.
vars <- c("eps.gam", "YSF","sin.SEAS", "cos.SEAS",
          "YSF2", "YSF:sin.SEAS", "YSF:cos.SEAS",
          "YSF2:sin.SEAS", "YSF2:cos.SEAS")
tf <- c(TRUE, FALSE)
tfl <- list()
for (i in 1:length(vars)){ tfl[[i]] <- tf }
eg <- expand.grid( tfl )            # all 2^9 inclusion patterns
colnames(eg) <- vars
# subset
# Keep only patterns respecting marginality: YSF and YSF2 enter together,
# interactions require their main effects, and the linear/quadratic
# interaction pairs enter together.
ind <- 
  !(eg$YSF==F & eg$YSF2==T) & !(eg$YSF==T & eg$YSF2==F) &
  !(eg$YSF==F & (eg$'YSF:sin.SEAS'==T | eg$'YSF2:sin.SEAS'==T)) &
  !(eg$YSF==F & (eg$'YSF:cos.SEAS'==T | eg$'YSF2:cos.SEAS'==T)) &
  !(eg$sin.SEAS==F & (eg$'YSF:sin.SEAS'==T | eg$'YSF2:sin.SEAS'==T)) &
  !(eg$cos.SEAS==F & (eg$'YSF:cos.SEAS'==T | eg$'YSF2:cos.SEAS'==T)) &
  !(eg$'YSF:sin.SEAS'==F & eg$'YSF2:sin.SEAS'==T) & !(eg$'YSF:sin.SEAS'==T & eg$'YSF2:sin.SEAS'==F) &
  !(eg$'YSF:cos.SEAS'==F & eg$'YSF2:cos.SEAS'==T) & !(eg$'YSF:cos.SEAS'==T & eg$'YSF2:cos.SEAS'==F)
eg <- eg[ind, ]

# Prior inclusion probabilities, conditional on higher-order terms being
# absent where appropriate (columns 6:9 are the four interactions).
wg.priors <- list()
wg.priors[[1]] <- 1 
sub1 <- !apply(eg[,6:9], 1, any)
wg.priors[[2]] <-  apply(eg[sub1, c("YSF", "YSF2")], 2, mean)
sub2 <- !apply(eg[,c(6,8)], 1, any)
wg.priors[[3]] <- mean(eg[sub2, c("sin.SEAS")])
sub3 <- !apply(eg[,c(7,9)], 1, any)
wg.priors[[4]] <- mean(eg[sub3, c("cos.SEAS")])
wg.priors[[5]] <- apply(eg[, c("YSF:sin.SEAS", "YSF2:sin.SEAS")], 2, mean)
wg.priors[[6]] <- apply(eg[, c("YSF:cos.SEAS", "YSF2:cos.SEAS")], 2, mean)
# NOTE(review): eps.gam reuses sub3 (the cos-interaction subset) — possibly
# a copy-paste leftover; an unconditional mean(eg$eps.gam) may have been
# intended. Confirm against the archived repository before reuse.
wg.priors[[7]] <- mean(eg[sub3, c("eps.gam")])
wg.priors

5.2 Run Global Model to Obtain Estimates for Pseudopriors (step 2)

# Label for this step, used when saving output files.
m <- "05-misclass_GVS_step2"
library (nimble)
# Loads "dat.conv" plus covariate arrays (YSF, SEAS).
load(".\\data\\final-data.Rdata")

# GVS step 2: fit the FULL model (all wtemp indicators forced to 1) to
# obtain posterior means/SDs that step 3 uses as pseudopriors for
# coefficients whose inclusion indicator is switched off.
code <- nimbleCode(
  {
    ## PRIORS 
    mean.b ~ dunif(0, 1)
    b.b[1] <- logit(mean.b)
    mean.p10 ~dunif(0, 1)
    p10.b[1] <- logit(mean.p10)
    mean.p11 ~ dunif(0, 1)
    p11.b[1] <- logit(mean.p11)
    mean.phi ~ dunif(0, 1)
    phi.alpha[1] <- logit(mean.phi)
    mean.gamma ~ dunif(0, 1)
    gam.alpha[1] <- logit(mean.gamma)
    mean.psi ~ dunif(0, 1)
    psi.b <- logit(mean.psi)
    sig.p10 ~ T(dnorm(0,sd=10),0, )
    sig.p11 ~ T(dnorm(0,sd=10),0, )
    # FIX: prior for sig.b was missing although eps.b[t] below uses it
    # (sig.b is initialized and monitored, so it is meant to be stochastic).
    sig.b ~ T(dnorm(0,sd=10),0, )
    bp.sig <- 100
    bg.sig <- 100
    for (xx in 2:4) { b.b[xx] ~ dnorm(0, sd=100) }
    for (xx in 2:3) { p10.b[xx] ~ dnorm(0, sd=100) }
    for (xx in 2) { p11.b[xx] ~ dnorm(0, sd=100) }
    for (t in 1:(nyear-1)){ 
      eps.phi[t] ~ dnorm(0, sd=sig.phi) 
      eps.gam[t] ~ dnorm(0, sd=sig.gam) 
    } # t
    for (t in 1:nyear){ 
      eps.p10[t] ~ dnorm(0, sd=sig.p10) 
      eps.p11[t] ~ dnorm(0, sd=sig.p11)
      eps.b[t] ~ dnorm(0, sd=sig.b)} # t 
    
    # priors for the w model inclusion terms, phi.alpha
    # this ensures that each of the 8 model combos has equal probability: Pr(m)= 1/8
    wp[7] ~ dbern(0.5)
    wp[6] ~ dbern(0.23)
    wp[5] ~ dbern(0.23) 
    p.wp4 <- (1-wp[6])*0.5 + wp[6] 
    wp[4] ~ dbern(p.wp4)
    p.wp3 <- (1-wp[5])*0.5 + wp[5] 
    wp[3] ~ dbern(p.wp3)
    p.wp2 <- equals(wp[5]+wp[6],0)*0.5 + (1-equals(wp[5]+wp[6],0)) 
    wp[2] ~ dbern(p.wp2)
    wp[1] ~ dbern(1)
    wptemp[1] <- wp[1]
    
    # priors for the w model inclusion terms, gam.alpha
    # this ensures that each of the 8 model combos has equal probability: Pr(m)= 1/8
    wg[7] ~ dbern(0.5)
    wg[6] ~ dbern(0.23)
    wg[5] ~ dbern(0.23) 
    p.wg4 <- (1-wg[6])*0.5 + wg[6] 
    wg[4] ~ dbern(p.wg4)
    p.wg3 <- (1-wg[5])*0.5 + wg[5] 
    wg[3] ~ dbern(p.wg3)
    p.wg2 <- equals(wg[5]+wg[6],0)*0.5 + (1-equals(wg[5]+wg[6],0)) 
    wg[2] ~ dbern(p.wg2)
    wg[1] ~ dbern(1)
    wgtemp[1] <- wg[1]
    #   posg = c(1, 2, 2, 3, 4, 5, 6, 5, 6, 7), for reference
    
    # set up the vectors/matrices for beta estimation, abundance
    for(b1 in 2:n.betasp){
      #wptemp[b1] <- wp[posp[b1]]                # this uses GVS
      wptemp[b1] <- 1                          # this forces you to fit the full model (all terms included)
      mean.bp[b1] <- post.bp[b1]*(1-wptemp[b1])  # prior is either 0 or full-model posterior mean
      for(b2 in 2:n.betasp){                   # set up the precision matrix (inverse variance) # allows for betas to be multivariate, if desired
        sig.bp[b1,b2] <- equals(b1,b2)*sd.bp[b1]*(1-wptemp[b1]) + (wptemp[b1]*bp.sig)
      } # b2
      phi.alpha[b1] ~ T(dnorm(mean.bp[b1], sd=sig.bp[b1,b1]), -30, 30)   # all beta coefficients
    } # b1
    wptemp[7] <- 1 
    #wptemp[7] <- wp[7]
    mean.sigphi <- post.bp[7]*(1-wptemp[7]) 
    sd.sigphi <- sd.bp[7]*(1-wptemp[7]) + (wptemp[7]*bp.sigphi)
    sig.phi ~ T(dnorm(mean.sigphi, sd=sd.sigphi), 0, )
    bp.sigphi <- 10
    # set up the vectors/matrices for beta estimation, abundance
    for(b1 in 2:n.betasg){
      #wgtemp[b1] <- wg[posg[b1]]                # this uses GVS
      wgtemp[b1] <- 1                          # this forces you to fit the full model (all terms included)
      mean.bg[b1] <- post.bg[b1]*(1-wgtemp[b1])  # prior is either 0 or full-model posterior mean
      for(b2 in 2:n.betasg){                   # set up the precision matrix (inverse variance) # allows for betas to be multivariate, if desired
        sig.bg[b1,b2] <- equals(b1,b2)*sd.bg[b1]*(1-wgtemp[b1]) + (wgtemp[b1]*bg.sig)
      } # b2
      gam.alpha[b1] ~ T(dnorm(mean.bg[b1], sd=sig.bg[b1,b1]), -30, 30)   # all beta coefficients
    } # b1
    wgtemp[10] <- 1 
    #wgtemp[10] <- wg[7]
    mean.siggam <- post.bg[10]*(1-wgtemp[10]) 
    sd.siggam <- sd.bg[10]*(1-wgtemp[10]) + (wgtemp[10]*bg.siggam)
    sig.gam ~ T(dnorm(mean.siggam, sd=sd.siggam), 0, )
    bg.siggam <- 10
    ## LIKELIHOOD
    ## first year
    for(i in 1:nsite){
      logit(psi1[i,1]) <- psi.b
      z[i,1] ~ dbern(psi1[i,1])
      for (j in 1:nvisit){
        pi[i,j,1,1] <- z[i,1]*(1-p11[i,j,1]) + (1-z[i,1])*(1-p10[i,j,1])      
        pi[i,j,1,2] <- z[i,1]*(1-b[i,j,1])*p11[i,j,1] + (1-z[i,1])*p10[i,j,1]
        pi[i,j,1,3] <- z[i,1]*b[i,j,1]*p11[i,j,1]   
        Y[i,j,1] ~ dcat(pi[i,j,1,1:3])
      } #j
      
      # subsequent years
      for (t in 2:nyear){
        for (j in 1:nvisit){ 
          pi[i,j,t,1] <- z[i,t]*(1-p11[i,j,t]) + (1-z[i,t])*(1-p10[i,j,t])      
          pi[i,j,t,2] <- z[i,t]*(1-b[i,j,t])*p11[i,j,t] + (1-z[i,t])*p10[i,j,t]
          pi[i,j,t,3] <- z[i,t]*b[i,j,t]*p11[i,j,t]
          Y[i,j,t] ~ dcat(pi[i,j,t,1:3])
        } # j
        # dynamics
        z[i,t] ~ dbern(muZ[i,t])
        muZ[i,t]<- z[i,t-1]*phi[i,t-1] + (1-z[i,t-1])*gamma[i,t-1]
        logit(phi[i,t-1]) <- 
          wptemp[1] * phi.alpha[1] + 
          wptemp[2] * phi.alpha[2] * YSF.std[i,t, 6 ] + 
          wptemp[3] * phi.alpha[3] * sin(SEAS[i,t, 1 ]*2*3.1416) + 
          wptemp[4] * phi.alpha[4] * cos(SEAS[i,t, 1 ]*2*3.1416) +
          wptemp[5] * phi.alpha[5] * sin(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 6 ] + 
          wptemp[6] * phi.alpha[6] * cos(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 6 ] +
          wptemp[7] * eps.phi[t-1]
        # NOTE(review): terms 8 and 9 below multiply YSF.std * YSF.std^2
        # (a cubic); the quadratic-by-season interaction may have been
        # intended — confirm against the archived repository.
        logit(gamma[i,t-1]) <- 
          wgtemp[1] * gam.alpha[1] + 
          wgtemp[2] * gam.alpha[2] * YSF.std[i,t, 5 ] + wgtemp[3] * gam.alpha[3] * YSF.std[i,t, 5 ]^2 +
          wgtemp[4] * gam.alpha[4] * sin(SEAS[i,t, 1 ]*2*3.1416) + 
          wgtemp[5] * gam.alpha[5] * cos(SEAS[i,t, 1 ]*2*3.1416) +
          wgtemp[6] * gam.alpha[6] * sin(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 5 ] +
          wgtemp[7] * gam.alpha[7] * cos(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 5 ] +
          wgtemp[8] * gam.alpha[8] * sin(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 5 ] * YSF.std[i,t, 5 ]^2 +
          wgtemp[9] * gam.alpha[9] * cos(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 5 ] * YSF.std[i,t, 5 ]^2 +
          wgtemp[10] * eps.gam[t-1]
      } # t nyear 
      
      for (t in 1:nyear){
        for (j in 1:nvisit){
          # detection models
          logit(b[i,j,t]) <-  b.b[1] + b.b[2]*date[i,j,t] + b.b[3]*date[i,j,t]^2+ b.b[4]*date[i,j,t]^3 + eps.b[t]
          logit(p11[i,j,t]) <- p11.b[1] + p11.b[2]*hr[i,j,t] + eps.p11[t]
          logit(p10[i,j,t]) <- p10.b[1]  + p10.b[2]*date[i,j,t] + p10.b[3]*date[i,j,t]^2 + eps.p10[t]
        } } }# t j i
    
    # Predicted occupancy values 
    psi[1] <- mean.psi
    n.occ[1] <- sum(z[1:nsite,1])
    for (t in 2:nyear){
      n.occ[t] <- sum(z[1:nsite,t])
      phi.est[t-1] <- mean(phi[1:nsite,t-1])
      gam.est[t-1] <- mean(gamma[1:nsite,t-1])
      psi[t] <- psi[t-1]*phi.est[t-1] + (1-psi[t-1])*gam.est[t-1]
      growthr[t-1] <- psi[t]/psi[t-1] 
      turnover[t-1] <- (1-psi[t-1]) * gam.est[t-1]/psi[t]
    } # t
  }
)

#  scale and center fire data
# Standardize (z-score) years-since-fire at each of the six scales/summaries.
YSF.std <- array(NA, dim=dim(YSF))
for (j in 1:6){
  # FIX: added na.rm=TRUE to sd(); mean() already dropped NAs, but a single
  # NA made sd() return NA and turned the entire standardized slice into NA.
  YSF.std[,,j] <- (YSF[,,j]-mean(YSF[,,j], na.rm=T)) / sd(YSF[,,j], na.rm=TRUE)
}

# Extract and pool posterior samples for all parameters whose names start
# with `var` from a nimbleMCMC result with $samples$chain1..chain3.
# Returns a matrix with one row per pooled draw; also prints the matched
# column names as a sanity check.
# FIX: the function previously ignored its `mod` argument and always read
# the global object `outg`, so it silently extracted from the wrong fit.
extr <- function (mod, var){
  # NOTE(review): `var` is used as a regex prefix; "." matches any
  # character, so e.g. "p11.b" also matches "p11xb" — harmless here.
  var2 <- paste0("^", var)
  col.ind <- grep(var2, colnames(mod$samples$chain1))
  mat1 <- as.matrix(mod$samples$chain1[,col.ind])
  mat2 <- as.matrix(mod$samples$chain2[,col.ind])
  mat3 <- as.matrix(mod$samples$chain3[,col.ind])
  all <- rbind(mat1, mat2, mat3) 
  print(colnames(all))
  return(all)
}

## Pseudoprior means and SDs for step 2: zeros with wide SDs, because the
## full model is fit here (coefficients get SD 100; the sigma slots get 10).
post.bp <- rep(0, 7)
sd.bp   <- c(rep(100, 6), 10)
post.bg <- rep(0, 11)
sd.bg   <- c(rep(100, 10), 10)

## Data passed to NIMBLE: observations, covariates, loop bounds, and the
## GVS bookkeeping vectors (term positions and pseudoprior moments).
datl <- list(
  Y       = dat.conv$Y,
  date    = dat.conv$date,
  hr      = dat.conv$hr,
  nsite   = nrow(dat.conv$Y),
  nvisit  = ncol(dat.conv$Y),
  nyear   = dim(dat.conv$Y)[3],
  YSF.std = YSF.std,
  SEAS    = SEAS,
  nYSF    = dim(YSF)[3],
  nSEAS   = dim(SEAS)[3],
  n.betasp = 6,
  n.betasg = 9,
  posp = c(1, 2, 3, 4, 5, 6),
  posg = c(1, 2, 2, 3, 4, 5, 6, 5, 6, 7), # forces YSF and YSF2 to occur together
  post.bp = post.bp,
  sd.bp   = sd.bp,
  post.bg = post.bg,
  sd.bg   = sd.bg
)

## Parameters monitored during MCMC.
params <- c(
  "mean.p11", "p11.b", "eps.p11", "sig.p11",
  "mean.b", "b.b", "eps.b", "sig.b",
  "mean.p10", "p10.b", "eps.p10", "sig.p10",
  "psi.b",
  "mean.phi", "phi.alpha", "eps.phi", "sig.phi",
  "mean.gamma", "gam.alpha", "eps.gam", "sig.gam",
  "psi", "n.occ",
  "wg", "wp", "wgtemp", "wptemp",
  "z"
)

# Inits
maxstates <- apply(datl$Y, c(1, 3), max, na.rm=T )
z.inits <- ifelse(maxstates>1, 1, 0)
z.inits[is.na(z.inits)] <- 0
Y.inits <- array(NA, dim=dim(dat.conv$Y))
Y.inits[is.na(dat.conv$Y)] <- 1

## Random starting values for one MCMC chain; also seeds the derived
## dynamics quantities (phi, gamma, psi, etc.) to keep NIMBLE happy.
inits <- function() {
  adim  <- c(datl$nsite, datl$nvisit, datl$nyear)  # site x visit x year
  ncell <- prod(adim)
  nyr   <- datl$nyear
  list(
    Y   = Y.inits,
    z   = z.inits,
    muZ = ifelse(z.inits > 0, 0.1, 0.8),
    p10 = array(runif(ncell, 0.001, 0.1), dim = adim),
    p11 = array(runif(ncell, 0.3, 0.7), dim = adim),
    b   = array(runif(ncell, 0.5, 0.99), dim = adim),
    mean.psi   = runif(1),
    mean.p11   = runif(1),
    mean.p10   = runif(1, 0.01, 0.1),
    mean.b     = runif(1),
    b.b        = c(runif(4, -5, 5)),
    p10.b      = c(runif(3, -5, 5)),
    p11.b      = runif(2, -5, 5),
    mean.gamma = runif(1),
    mean.phi   = runif(1),
    phi.alpha  = runif(6, -5, 5),
    gam.alpha  = runif(9, -5, 5),
    sig.p10    = runif(1),
    sig.p11    = runif(1),
    sig.b      = runif(1),
    sig.phi    = runif(1),
    sig.gam    = runif(1),
    eps.phi    = runif(nyr - 1, -0.1, 0.1),
    eps.gam    = runif(nyr - 1, -0.1, 0.1),
    eps.p10    = runif(nyr, -0.1, 0.1),
    eps.p11    = runif(nyr, -0.1, 0.1),
    eps.b      = runif(nyr, -0.1, 0.1),
    # step 2 forces every term in, so all indicators start at 1
    wp = rbinom(n = 7, size = 1, prob = 1),
    wg = rbinom(n = 7, size = 1, prob = 1),
    bp.sig = 100,
    bg.sig = 100,
    gamma = array(runif(datl$nsite * (nyr - 1), 0, 1),
                  dim = c(datl$nsite, nyr - 1)),
    phi   = array(runif(datl$nsite * (nyr - 1), 0, 1),
                  dim = c(datl$nsite, nyr - 1)),
    psi      = runif(nyr, 0, 1),
    gam.est  = runif(nyr - 1, 0, 1),
    phi.est  = runif(nyr - 1, 0, 1),
    growthr  = runif(nyr - 1, 0.95, 1.05),
    turnover = runif(nyr - 1, 0, 1),
    wgtemp = rep(1, 10),
    wptemp = rep(1, 7)
  )
}
# MCMC settings: six chains, 600k iterations, 400k burn-in, thin by 200
# -> 1,000 retained samples per chain.
n.chains=6; n.thin=200; n.iter=600000; n.burnin=400000
# n.chains=3; n.thin=1; n.iter=5000; n.burnin=100 # trial runs

# Build the model; constants are every datl entry except Y (element 1).
mod <- list()
mod<- nimbleModel(code, calculate=T, constants = datl[-1], 
                  data = list(Y=datl$Y), inits = inits())

out <- nimbleMCMC(
  model = mod,
  code = code,
  monitors = params,
  nchains = n.chains,
  thin = n.thin,
  niter = n.iter,
  nburnin = n.burnin,
  progressBar = T,
  summary = T,
  WAIC = F,
  samplesAsCodaMCMC = T,
  samples=T
)

#flnm <- paste(".\\outputs\\",m, "_", Sys.Date(), ".Rdata", sep="")
#save(out, mod, file=flnm)

5.3 Run Model with GVS (step 3)

# Model label used when saving outputs to disk (see the save() call at the end).
m <- "06-misclass_GVS_step3"
library (nimble)
# Loads the field data; presumably provides dat.conv plus the YSF and SEAS
# arrays used below — confirm against the .Rdata contents.
load(".\\data\\final-data.Rdata")
# load results from step 2 to use as pseudopriors
load(".\\outputs\\05-misclass_GVS_step2_2020-07-24.Rdata")
# Rename the step-2 result so `out` is free for this run's output.
outg <- out
rm(list="out")

# Dynamic occupancy model with misclassification and Gibbs Variable
# Selection (GVS, step 3): betas for persistence (phi) and colonization
# (gamma) use step-2 posteriors as pseudopriors when excluded.
code <- nimbleCode(
  {
    ## PRIORS
    ## Intercepts: uniform on the probability scale, then logit-transformed.
    mean.b ~ dunif(0, 1)          # confirmation probability b, intercept
    b.b[1] <- logit(mean.b)
    mean.p10 ~ dunif(0, 1)        # false-positive detection p10, intercept
    p10.b[1] <- logit(mean.p10)
    mean.p11 ~ dunif(0, 1)        # true-positive detection p11, intercept
    p11.b[1] <- logit(mean.p11)
    mean.phi ~ dunif(0, 1)        # persistence phi, intercept
    phi.alpha[1] <- logit(mean.phi)
    mean.gamma ~ dunif(0, 1)      # colonization gamma, intercept
    gam.alpha[1] <- logit(mean.gamma)
    mean.psi ~ dunif(0, 1)        # first-year occupancy
    psi.b <- logit(mean.psi)
    ## Half-normal priors for annual random-effect SDs of the detection submodels.
    sig.p10 ~ T(dnorm(0, sd=10), 0, )
    sig.p11 ~ T(dnorm(0, sd=10), 0, )
    ## BUGFIX: sig.b is used for eps.b below but had no prior in this block,
    ## so the model could not be built; give it the same half-normal as the
    ## other detection random-effect SDs.
    sig.b ~ T(dnorm(0, sd=10), 0, )
    bp.sig <- 100   # "slab" SD for phi betas when their term is included
    bg.sig <- 100   # "slab" SD for gamma betas when their term is included
    ## Vague priors for the detection covariate slopes.
    for (xx in 2:4) { b.b[xx] ~ dnorm(0, sd=100) }
    for (xx in 2:3) { p10.b[xx] ~ dnorm(0, sd=100) }
    for (xx in 2) { p11.b[xx] ~ dnorm(0, sd=100) }
    ## Annual random effects for the dynamics (nyear-1 transitions) ...
    for (t in 1:(nyear-1)){
      eps.phi[t] ~ dnorm(0, sd=sig.phi)
      eps.gam[t] ~ dnorm(0, sd=sig.gam)
    } # t
    ## ... and for detection (one per year).
    for (t in 1:nyear){
      eps.p10[t] ~ dnorm(0, sd=sig.p10)
      eps.p11[t] ~ dnorm(0, sd=sig.p11)
      eps.b[t] ~ dnorm(0, sd=sig.b)
    } # t

    # priors for the w model inclusion terms, phi.alpha
    # this ensures that each of the 8 model combos has equal probability: Pr(m)= 1/8
    # NOTE(review): wp[5]/wp[6] are interaction indicators; conditioning below
    # forces the corresponding main effects in whenever an interaction is in.
    # Confirm the 0.23 marginal yields the intended equal model probabilities.
    wp[7] ~ dbern(0.5)
    wp[6] ~ dbern(0.23)
    wp[5] ~ dbern(0.23)
    p.wp4 <- (1-wp[6])*0.5 + wp[6]     # wp[4] forced to 1 when wp[6] = 1
    wp[4] ~ dbern(p.wp4)
    p.wp3 <- (1-wp[5])*0.5 + wp[5]     # wp[3] forced to 1 when wp[5] = 1
    wp[3] ~ dbern(p.wp3)
    # wp[2] (YSF main effect) forced in when any interaction is in
    p.wp2 <- equals(wp[5]+wp[6],0)*0.5 + (1-equals(wp[5]+wp[6],0))
    wp[2] ~ dbern(p.wp2)
    wp[1] ~ dbern(1)                   # intercept always included
    wptemp[1] <- wp[1]

    # priors for the w model inclusion terms, gam.alpha
    # this ensures that each of the 8 model combos has equal probability: Pr(m)= 1/8
    wg[7] ~ dbern(0.5)
    wg[6] ~ dbern(0.23)
    wg[5] ~ dbern(0.23)
    p.wg4 <- (1-wg[6])*0.5 + wg[6]
    wg[4] ~ dbern(p.wg4)
    p.wg3 <- (1-wg[5])*0.5 + wg[5]
    wg[3] ~ dbern(p.wg3)
    p.wg2 <- equals(wg[5]+wg[6],0)*0.5 + (1-equals(wg[5]+wg[6],0))
    wg[2] ~ dbern(p.wg2)
    wg[1] ~ dbern(1)
    wgtemp[1] <- wg[1]
    #   posg = c(1, 2, 2, 3, 4, 5, 6, 5, 6, 7), for reference

    # GVS machinery for the phi betas: when a term is excluded (wptemp = 0)
    # its beta is drawn from the step-2 posterior mean/SD (pseudoprior);
    # when included it gets the vague slab prior bp.sig.
    for(b1 in 2:n.betasp){
      wptemp[b1] <- wp[posp[b1]]                # this uses GVS
      # wptemp[b1] <- 1                          # this forces you to fit the full model (all terms included)
      mean.bp[b1] <- post.bp[b1]*(1-wptemp[b1])  # prior is either 0 or full-model posterior mean
      for(b2 in 2:n.betasp){                   # diagonal SD matrix; off-diagonals unused here
        sig.bp[b1,b2] <- equals(b1,b2)*sd.bp[b1]*(1-wptemp[b1]) + (wptemp[b1]*bp.sig)
      } # b2
      phi.alpha[b1] ~ T(dnorm(mean.bp[b1], sd=sig.bp[b1,b1]), -30, 30)   # all beta coefficients
    } # b1
    # Inclusion of the annual random effect on phi (selected via wp[7]).
    #wptemp[7] <- 1
    wptemp[7] <- wp[7]
    mean.sigphi <- post.bp[7]*(1-wptemp[7])
    sd.sigphi <- sd.bp[7]*(1-wptemp[7]) + (wptemp[7]*bp.sigphi)
    sig.phi ~ T(dnorm(mean.sigphi, sd=sd.sigphi), 0, )
    bp.sigphi <- 10
    # Same GVS machinery for the gamma betas.
    for(b1 in 2:n.betasg){
      wgtemp[b1] <- wg[posg[b1]]                # this uses GVS
      # wgtemp[b1] <- 1                          # this forces you to fit the full model (all terms included)
      mean.bg[b1] <- post.bg[b1]*(1-wgtemp[b1])  # prior is either 0 or full-model posterior mean
      for(b2 in 2:n.betasg){                   # diagonal SD matrix; off-diagonals unused here
        sig.bg[b1,b2] <- equals(b1,b2)*sd.bg[b1]*(1-wgtemp[b1]) + (wgtemp[b1]*bg.sig)
      } # b2
      gam.alpha[b1] ~ T(dnorm(mean.bg[b1], sd=sig.bg[b1,b1]), -30, 30)   # all beta coefficients
    } # b1
    # Inclusion of the annual random effect on gamma (selected via wg[7]).
    #wgtemp[10] <- 1
    wgtemp[10] <- wg[7]
    mean.siggam <- post.bg[10]*(1-wgtemp[10])
    sd.siggam <- sd.bg[10]*(1-wgtemp[10]) + (wgtemp[10]*bg.siggam)
    sig.gam ~ T(dnorm(mean.siggam, sd=sd.siggam), 0, )
    bg.siggam <- 10
    ## LIKELIHOOD
    ## first year: occupancy from psi, then the categorical observation model.
    ## Observed states Y: 1 = no detection, 2 = unconfirmed detection,
    ## 3 = confirmed detection (probability 0 when z = 0, so never a false positive).
    for(i in 1:nsite){
      logit(psi1[i,1]) <- psi.b
      z[i,1] ~ dbern(psi1[i,1])
      for (j in 1:nvisit){
        pi[i,j,1,1] <- z[i,1]*(1-p11[i,j,1]) + (1-z[i,1])*(1-p10[i,j,1])
        pi[i,j,1,2] <- z[i,1]*(1-b[i,j,1])*p11[i,j,1] + (1-z[i,1])*p10[i,j,1]
        pi[i,j,1,3] <- z[i,1]*b[i,j,1]*p11[i,j,1]
        Y[i,j,1] ~ dcat(pi[i,j,1,1:3])
      } #j

      # subsequent years
      for (t in 2:nyear){
        for (j in 1:nvisit){
          pi[i,j,t,1] <- z[i,t]*(1-p11[i,j,t]) + (1-z[i,t])*(1-p10[i,j,t])
          pi[i,j,t,2] <- z[i,t]*(1-b[i,j,t])*p11[i,j,t] + (1-z[i,t])*p10[i,j,t]
          pi[i,j,t,3] <- z[i,t]*b[i,j,t]*p11[i,j,t]
          Y[i,j,t] ~ dcat(pi[i,j,t,1:3])
        } # j
        # dynamics: persist with phi if occupied, colonize with gamma if not
        z[i,t] ~ dbern(muZ[i,t])
        muZ[i,t]<- z[i,t-1]*phi[i,t-1] + (1-z[i,t-1])*gamma[i,t-1]
        # Persistence: YSF layer 6 plus seasonal sin/cos terms
        # (2*3.1416 approximates 2*pi), each gated by its wptemp indicator.
        logit(phi[i,t-1]) <-
          wptemp[1] * phi.alpha[1] +
          wptemp[2] * phi.alpha[2] * YSF.std[i,t, 6 ] +
          wptemp[3] * phi.alpha[3] * sin(SEAS[i,t, 1 ]*2*3.1416) +
          wptemp[4] * phi.alpha[4] * cos(SEAS[i,t, 1 ]*2*3.1416) +
          wptemp[5] * phi.alpha[5] * sin(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 6 ] +
          wptemp[6] * phi.alpha[6] * cos(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 6 ] +
          wptemp[7] * eps.phi[t-1]
        # Colonization: YSF layer 5 (linear + quadratic), seasonal terms,
        # and their interactions.
        # NOTE(review): terms 8 and 9 multiply YSF.std * YSF.std^2 = YSF^3;
        # confirm a cubic interaction was intended rather than sin/cos * YSF^2.
        logit(gamma[i,t-1]) <-
          wgtemp[1] * gam.alpha[1] +
          wgtemp[2] * gam.alpha[2] * YSF.std[i,t, 5 ] + wgtemp[3] * gam.alpha[3] * YSF.std[i,t, 5 ]^2 +
          wgtemp[4] * gam.alpha[4] * sin(SEAS[i,t, 1 ]*2*3.1416) +
          wgtemp[5] * gam.alpha[5] * cos(SEAS[i,t, 1 ]*2*3.1416) +
          wgtemp[6] * gam.alpha[6] * sin(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 5 ] +
          wgtemp[7] * gam.alpha[7] * cos(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 5 ] +
          wgtemp[8] * gam.alpha[8] * sin(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 5 ] * YSF.std[i,t, 5 ]^2 +
          wgtemp[9] * gam.alpha[9] * cos(SEAS[i,t, 1 ]*2*3.1416) * YSF.std[i,t, 5 ] * YSF.std[i,t, 5 ]^2 +
          wgtemp[10] * eps.gam[t-1]
        } # t nyear

      # detection submodels: cubic date effect on b, hour effect on p11,
      # quadratic date effect on p10, each with an annual random effect.
      for (t in 1:nyear){
        for (j in 1:nvisit){
          logit(b[i,j,t]) <-  b.b[1] + b.b[2]*date[i,j,t] + b.b[3]*date[i,j,t]^2+ b.b[4]*date[i,j,t]^3 + eps.b[t]
          logit(p11[i,j,t]) <- p11.b[1] + p11.b[2]*hr[i,j,t] + eps.p11[t]
          logit(p10[i,j,t]) <- p10.b[1]  + p10.b[2]*date[i,j,t] + p10.b[3]*date[i,j,t]^2 + eps.p10[t]
        } } }# t j i

    # Derived quantities: predicted occupancy, number of occupied sites,
    # mean persistence/colonization, growth rate, and turnover per interval.
    psi[1] <- mean.psi
    n.occ[1] <- sum(z[1:nsite,1])
    for (t in 2:nyear){
      n.occ[t] <- sum(z[1:nsite,t])
      phi.est[t-1] <- mean(phi[1:nsite,t-1])
      gam.est[t-1] <- mean(gamma[1:nsite,t-1])
      psi[t] <- psi[t-1]*phi.est[t-1] + (1-psi[t-1])*gam.est[t-1]
      growthr[t-1] <- psi[t]/psi[t-1]
      turnover[t-1] <- (1-psi[t-1]) * gam.est[t-1]/psi[t]
    } # t
  }
)

# Scale and center the years-since-fire data: z-score each layer of the
# site x year x layer array.
YSF.std <- array(NA, dim = dim(YSF))
for (j in seq_len(dim(YSF)[3])) {
  # BUGFIX: sd() also needs na.rm = TRUE — mean() had it but sd() did not,
  # so any NA in a layer made the entire standardized layer NA.
  YSF.std[, , j] <- (YSF[, , j] - mean(YSF[, , j], na.rm = TRUE)) /
    sd(YSF[, , j], na.rm = TRUE)
}

# Extract and pool posterior samples across MCMC chains.
#
# mod: a nimbleMCMC result containing a `samples` list of per-chain
#      coda/matrix objects (chain1, chain2, ...).
# var: parameter-name prefix, e.g. "phi.alpha"; matched as a regular
#      expression anchored at the start of each column name.
# Returns a matrix with chains stacked row-wise and one column per
# matching parameter; prints the column names as a sanity check.
extr <- function (mod, var){
  var2 <- paste("^", var, sep="")
  # BUGFIX: use the `mod` argument instead of the global `outg`, and iterate
  # over however many chains are present instead of hard-coding six.
  # drop = FALSE preserves column names when only one parameter matches.
  mats <- lapply(mod$samples, function(ch){
    col.ind <- grep(var2, colnames(ch))
    as.matrix(ch[, col.ind, drop = FALSE])
  })
  all <- do.call(rbind, mats)
  print(colnames(all))
  return(all)
}

# extract samples
# Pull the step-2 posterior draws needed to build GVS pseudopriors.
pa <- extr(outg, "phi.alpha")      # persistence betas
pa.sig <- extr(outg, "sig.phi")    # persistence random-effect SD
pg <- extr(outg, "gam.alpha")      # colonization betas
pg.sig <- extr(outg, "sig.gam")    # colonization random-effect SD
# Pseudoprior means/SDs: betas first, random-effect SD appended last,
# matching the indexing of post.bp[7] and post.bg[10] in the model code.
post.bp <-  c(apply(pa, 2, mean), mean(pa.sig))
sd.bp <-  c(apply(pa, 2, sd), sd(pa.sig))
post.bg <-  c(apply(pg, 2, mean), mean(pg.sig))
sd.bg <- c(apply(pg, 2, sd), sd(pg.sig))

# post.bp <-  c(rep(0, 6), 0)
# sd.bp <-  c(rep(100, 6), 10)
# post.bg <-  c(rep(0, 10), 0)
# sd.bg <-  c(rep(100, 10), 10)

# Bundle of data and constants for NIMBLE. Element 1 (Y) is passed as data;
# the remainder are passed as constants via datl[-1] in the nimbleModel call.
datl <- list(
  Y=dat.conv$Y,                  # observed states, site x visit x year (categories 1:3)
  date= dat.conv$date,           # survey date covariate — presumably standardized; confirm
  hr= dat.conv$hr,               # survey hour covariate — presumably standardized; confirm
  nsite=dim(dat.conv$Y)[[1]],    # number of sites
  nvisit=dim(dat.conv$Y)[[2]],   # number of visits per year
  nyear=dim(dat.conv$Y)[[3]],    # number of years
  YSF.std=YSF.std,               # standardized years-since-fire array
  SEAS=SEAS,                     # season array (used in sin/cos seasonal terms)
  nYSF= dim(YSF)[[3]],           # number of YSF layers
  nSEAS= dim(SEAS)[[3]],         # number of SEAS layers
  n.betasp = 6,                  # number of phi betas (incl. intercept)
  n.betasg = 9,                  # number of gamma betas (incl. intercept)
  posp = c(1, 2, 3, 4, 5, 6),    # maps each phi beta to its wp indicator
  posg = c(1, 2, 2, 3, 4, 5, 6, 5, 6, 7), # forces YSF and YSF2 to occur together
  post.bp = post.bp,             # pseudoprior means for phi (from step 2)
  sd.bp = sd.bp,                 # pseudoprior SDs for phi
  post.bg = post.bg,             # pseudoprior means for gamma
  sd.bg = sd.bg                  # pseudoprior SDs for gamma
)

# Nodes monitored by the MCMC sampler, grouped by submodel.
params <- c(
  # true-positive detection (p11)
  "mean.p11", "p11.b", "eps.p11", "sig.p11",
  # confirmation probability (b)
  "mean.b", "b.b", "eps.b", "sig.b",
  # false-positive detection (p10)
  "mean.p10", "p10.b", "eps.p10", "sig.p10",
  # first-year occupancy
  "psi.b",
  # persistence (phi)
  "mean.phi", "phi.alpha", "eps.phi", "sig.phi", "phi.est",
  # colonization (gamma)
  "mean.gamma", "gam.alpha", "eps.gam", "sig.gam", "gam.est",
  # derived occupancy quantities
  "psi", "n.occ", "growthr", "turnover",
  # GVS inclusion indicators
  "wg", "wp", "wgtemp", "wptemp",
  # latent occupancy states
  "z"
)

# Inits for the latent states and missing observations.
# A site-year starts occupied (z = 1) when any visit recorded a state > 1
# (i.e., some detection). Note: an all-NA site-year makes max(..., na.rm)
# return -Inf with a warning, which ifelse() maps to 0 (unoccupied).
maxstates <- apply(datl$Y, c(1, 3), max, na.rm = TRUE)
z.inits <- ifelse(maxstates > 1, 1, 0)
z.inits[is.na(z.inits)] <- 0
# Missing observations are initialized to category 1 ("no detection").
Y.inits <- array(NA, dim = dim(dat.conv$Y))
Y.inits[is.na(dat.conv$Y)] <- 1

# Generate one set of initial values for an MCMC chain. Reads `datl`,
# `z.inits`, and `Y.inits` from the surrounding script. Each call draws
# fresh random values so chains start from different points.
inits <- function()list ( 
  Y = Y.inits,                  # missing observations filled with category 1
  z = z.inits,                  # occupancy states inferred from the data
  # NOTE(review): occupied sites get 0.1 and unoccupied 0.8, which looks
  # inverted; muZ is deterministic in the model so this is likely ignored.
  muZ= ifelse(z.inits>0, 0.1, 0.8), 
  # Detection probabilities (site x visit x year arrays).
  p10 = array(runif(datl$nsite*datl$nvisit*datl$nyear, 0.001, 0.1), dim=c(datl$nsite,datl$nvisit,datl$nyear)),
  p11 = array(runif(datl$nsite*datl$nvisit*datl$nyear, 0.3, 0.7), dim=c(datl$nsite,datl$nvisit,datl$nyear)),
  b = array(runif(datl$nsite*datl$nvisit*datl$nyear, 0.5, 0.99), dim=c(datl$nsite,datl$nvisit,datl$nyear)),
  # Probability-scale intercepts.
  mean.psi=runif(1),
  mean.p11=runif(1),
  mean.p10=runif(1, 0.01, 0.1),
  mean.b=runif(1),
  # Logit-scale covariate slopes.
  b.b=c(runif(4, -5, 5)),
  p10.b=c(runif(3, -5, 5)),
  p11.b=runif(2, -5, 5),
  mean.gamma= runif(1),
  mean.phi=runif(1),
  phi.alpha= runif(6, -5, 5),
  gam.alpha= runif(9, -5, 5),
  # Random-effect SDs.
  sig.p10=runif(1),
  sig.p11=runif(1),
  sig.b=runif(1),
  sig.phi=runif(1),
  sig.gam=runif(1),
  # Annual random effects (nyear-1 for dynamics, nyear for detection).
  eps.phi = runif((datl$nyear-1), -0.1, 0.1),
  eps.gam = runif((datl$nyear-1), -0.1, 0.1),
  eps.p10 = runif((datl$nyear), -0.1, 0.1),
  eps.p11 = runif((datl$nyear), -0.1, 0.1),
  eps.b = runif((datl$nyear), -0.1, 0.1),
  # GVS inclusion indicators: prob = 1 means all start switched on.
  wp= rbinom(n=7, size=1, prob=1),
  wg= rbinom(n=7, size=1, prob=1),
  bp.sig=100,
  bg.sig=100,
  # Dynamics parameters and derived quantities.
  gamma= array(runif(datl$nsite*(datl$nyear-1), 0, 1), dim=c(datl$nsite,datl$nyear-1)),
  phi= array(runif(datl$nsite*(datl$nyear-1), 0, 1), dim=c(datl$nsite,datl$nyear-1)),
  psi= runif(datl$nyear, 0, 1),
  gam.est=runif(datl$nyear-1, 0, 1),
  phi.est=runif(datl$nyear-1, 0, 1),
  growthr=runif(datl$nyear-1, 0.95, 1.05),
  turnover=runif(datl$nyear-1, 0, 1), 
  wgtemp = rep(1, 10),
  wptemp = rep(1, 7)
) 
# MCMC settings: 6 chains, 600k iterations, 400k burn-in, thin by 200
# (1000 retained samples per chain).
n.chains <- 6; n.thin <- 200; n.iter <- 600000; n.burnin <- 400000
# n.chains=3; n.thin=1; n.iter=5000; n.burnin=100 # trial runs

# Build the NIMBLE model. datl[-1] drops the first list element (Y),
# which is supplied separately as data; the rest are constants.
# (The redundant `mod <- list()` that was immediately overwritten is removed.)
mod <- nimbleModel(code, calculate = TRUE, constants = datl[-1],
                   data = list(Y = datl$Y), inits = inits())

# Run the sampler; returns coda-compatible samples plus a summary table.
out <- nimbleMCMC(
  model = mod,
  code = code,
  monitors = params,
  nchains = n.chains,
  thin = n.thin,
  niter = n.iter,
  nburnin = n.burnin,
  progressBar = TRUE,   # TRUE/FALSE spelled out; T/F are reassignable
  summary = TRUE,
  WAIC = FALSE,
  samplesAsCodaMCMC = TRUE,
  samples = TRUE
)
  
# flnm <- paste(".\\outputs\\",m, "_", Sys.Date(), ".Rdata", sep="")
# save(out, mod, file=flnm)