## Load the compiled coral-health mosaic survey data.
## The commented-out readxl call shows the original Excel source;
## coral_health.csv is presumably a CSV export of the
## "Compiled_Mosaic_Coral_Health_Da" sheet -- confirm if re-deriving.
# library(readxl)
# coral_health <- read_excel("Ike_Wai_Research_Program_2019.xlsx",
# sheet = "Compiled_Mosaic_Coral_Health_Da")
coral_health <- read_csv("coral_health.csv")
## Parsed with column specification:
## cols(
## .default = col_double(),
## OBS_DATE = col_datetime(format = ""),
## MONTH_YEAR = col_character(),
## Site = col_character(),
## Quad_ID = col_character(),
## RECORDER = col_character(),
## SP_CODE = col_character(),
## M_CODE = col_character(),
## C_Condition_1 = col_character(),
## C_Condition_2 = col_character(),
## C_Condition_3 = col_character(),
## C_Condition_4 = col_character(),
## `1_User` = col_character(),
## `1_Species` = col_character(),
## `1_Morphology` = col_character(),
## `1_C_Cond_1` = col_character(),
## `1_C_Cond_2` = col_character(),
## `1_C_Cond_3` = col_character(),
## `1_C_Cond_4` = col_character(),
## `2_User` = col_character(),
## `2_Species` = col_character()
## # ... with 33 more columns
## )
## See spec(...) for full column specifications.
## Helper functions to summarize the raw data
##' Count the samples where scuba and digital approach (dis)agree
##' Count samples where the scuba and (single-user) digital approaches
##' (dis)agree on the presence of one condition.
##'
##' @param cond_name Character scalar, a condition code (e.g. "BLE").
##' @param data Data frame with scuba `*_Condition_*` columns and
##'   digital `<user>_C_Cond_<n>` / `<user>_P_Cond_<n>` columns.
##' @return Named numeric vector c("++", "+-", "-+", "--") giving the
##'   counts of each (scuba, digital) presence/absence combination.
summarize_condition <- function(cond_name, data){
  ## Pipeline: row-wise TRUE (as 1) when any column of the piped-in
  ## data frame equals cond_name; NA cells count as "not observed".
  has_condition <- . %>%
    equals(cond_name) %>%
    replace_na(FALSE) %>%
    apply(1, any) %>%
    as.numeric
  ## Presence according to the scuba columns ...
  scuba <- data %>% select(contains("_Condition_")) %>% has_condition
  ## ... and according to digital user 1 only. The regex is anchored
  ## with "^" so user 1 cannot also match columns of users 11, 21, ...
  dig <- data %>% select(matches("^1_[CP]_Cond_[0-9]+")) %>% has_condition
  ## Tabulate ++, +-, -+, -- occurrences of condition (scuba, digital).
  c("++"= sum(scuba & dig),
    "+-"= sum(scuba & !dig),
    "-+"= sum(!scuba & dig),
    "--"= sum(!scuba & !dig))
}
##' As summarize_condition(), but a digital "positive" requires that a
##' majority (> 50%) of the three digital users recorded the condition.
##'
##' @param cond_name Character scalar, a condition code (e.g. "BLE").
##' @param data Data frame with scuba `*_Condition_*` columns and
##'   digital `<user>_C_Cond_<n>` / `<user>_P_Cond_<n>` columns.
##' @return Named numeric vector c("++", "+-", "-+", "--") giving the
##'   counts of each (scuba, majority-digital) combination.
summarize_majority <- function(cond_name, data){
  ## Pipeline: row-wise TRUE (as 1) when any column of the piped-in
  ## data frame equals cond_name; NA cells count as "not observed".
  has_condition <- . %>%
    equals(cond_name) %>%
    replace_na(FALSE) %>%
    apply(1, any) %>%
    as.numeric
  ## Presence according to the scuba columns.
  scuba <- data %>% select(contains("_Condition_")) %>% has_condition
  ## One 0/1 column per digital user (1-3); a sample is digitally
  ## positive when more than half of the users saw the condition.
  ## Regexes are anchored with "^" so user 1 cannot also match
  ## columns of users 11, 21, ...
  dig <- sapply(paste0("^", 1:3, "_[CP]_Cond_[0-9]+"),
                . %>% {select(data, matches(.))} %>% has_condition) %>%
    apply(1, mean) %>%
    is_greater_than(0.5)
  ## Tabulate ++, +-, -+, -- occurrences of condition (scuba, digital).
  c("++"= sum(scuba & dig),
    "+-"= sum(scuba & !dig),
    "-+"= sum(!scuba & dig),
    "--"= sum(!scuba & !dig))
}
## Enumerate every distinct condition code recorded in either the
## scuba or the digital columns (NA appears for empty cells).
coral_health %>% select(contains("_Condition_"), matches("_[CP]_Cond_[0-9]+")) %>% unlist %>% unique
## [1] "BLE" "PRS" NA "TIN" "MACA" "PTR" "TLS" "PRED" "DIS" "SGA"
## [11] "SEDI" "DAMA"
## All observed condition codes, excluding NA, in the order printed above.
disease_names <- c("BLE", "PRS", "TIN", "MACA", "PTR", "TLS", "PRED", "DIS", "SGA", "SEDI", "DAMA")
## Agreement tables: one column per condition, rows are the counts of
## (scuba, digital) = (+,+), (+,-), (-,+), (-,-) samples.
## First with digital presence judged from user 1 alone ...
single_digital <- sapply(disease_names, summarize_condition, coral_health)
single_digital
## BLE PRS TIN MACA PTR TLS PRED DIS SGA SEDI DAMA
## ++ 53 21 15 38 0 7 6 0 0 1 0
## +- 29 24 17 36 4 16 18 8 12 0 1
## -+ 15 31 14 25 2 29 7 7 2 6 1
## -- 94 115 145 92 185 139 160 176 177 184 189
## ... then with digital presence requiring a majority of the 3 users.
majority_digital <- sapply(disease_names, summarize_majority, coral_health)
majority_digital
## BLE PRS TIN MACA PTR TLS PRED DIS SGA SEDI DAMA
## ++ 56 26 11 41 0 5 5 0 0 1 0
## +- 26 19 21 33 4 18 19 8 12 0 1
## -+ 9 27 14 21 0 17 7 3 0 6 0
## -- 100 119 145 96 187 151 160 180 179 184 190
## Per-condition prior parameters for prevalence, one row per condition
## in the same order as disease_names. Each pair is presumably the
## (alpha, beta) shape parameters of a Beta prior consumed as
## `pie_priors` by hui_walter.stan -- confirm against the Stan file.
pie_priors <- rbind(ble=c(8,92),
prs=c(19,31),
tin=c(10,40),
maca=c(32,68),
ptr=c(5,95),
tls=c(3,47),
pred=c(15,35),
dis=c(1,24),
sga=c(19,81),
sedi=c(1,24),
dama=c(1,24))
## Compile the Hui-Walter latent-class model (two tests, no gold standard).
hui_model <- stan_model("hui_walter.stan")
## Data for the single-digital-user comparison: P conditions, the 4xP
## agreement table y, and Beta prior shape pairs. eta_prior/theta_prior
## are presumably sensitivity/specificity priors -- confirm in the
## Stan file.
stan_data_single <- list(P=length(disease_names),
y=single_digital,
pie_priors=pie_priors,
eta_prior=c(20,10),
theta_prior=c(9,1))
## 4 chains x 10000 iterations (half warmup by rstan default).
single_posterior <- sampling(hui_model, stan_data_single, chains=4, iter=10000)
##
## SAMPLING FOR MODEL 'hui_walter' NOW (CHAIN 1).
## Chain 1:
## Chain 1: Gradient evaluation took 4.3e-05 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.43 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1:
## Chain 1:
## Chain 1: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 1: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 1: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 1: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 1: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 1: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 1: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 1: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 1: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 1: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 1: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 1: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 1:
## Chain 1: Elapsed Time: 0.79729 seconds (Warm-up)
## Chain 1: 1.11787 seconds (Sampling)
## Chain 1: 1.91516 seconds (Total)
## Chain 1:
##
## SAMPLING FOR MODEL 'hui_walter' NOW (CHAIN 2).
## Chain 2:
## Chain 2: Gradient evaluation took 4.4e-05 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.44 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2:
## Chain 2:
## Chain 2: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 2: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 2: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 2: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 2: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 2: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 2: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 2: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 2: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 2: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 2: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 2: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 2:
## Chain 2: Elapsed Time: 0.848683 seconds (Warm-up)
## Chain 2: 0.737738 seconds (Sampling)
## Chain 2: 1.58642 seconds (Total)
## Chain 2:
##
## SAMPLING FOR MODEL 'hui_walter' NOW (CHAIN 3).
## Chain 3:
## Chain 3: Gradient evaluation took 2.1e-05 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.21 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3:
## Chain 3:
## Chain 3: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 3: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 3: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 3: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 3: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 3: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 3: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 3: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 3: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 3: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 3: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 3: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 3:
## Chain 3: Elapsed Time: 0.798715 seconds (Warm-up)
## Chain 3: 0.727499 seconds (Sampling)
## Chain 3: 1.52621 seconds (Total)
## Chain 3:
##
## SAMPLING FOR MODEL 'hui_walter' NOW (CHAIN 4).
## Chain 4:
## Chain 4: Gradient evaluation took 2.1e-05 seconds
## Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.21 seconds.
## Chain 4: Adjust your expectations accordingly!
## Chain 4:
## Chain 4:
## Chain 4: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 4: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 4: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 4: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 4: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 4: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 4: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 4: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 4: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 4: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 4: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 4: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 4:
## Chain 4: Elapsed Time: 0.799265 seconds (Warm-up)
## Chain 4: 0.847921 seconds (Sampling)
## Chain 4: 1.64719 seconds (Total)
## Chain 4:
## Posterior summaries; etas/etad and thetas/thetad are presumably the
## scuba/digital sensitivities and specificities -- confirm in
## hui_walter.stan.
print(single_posterior, pars=c("etas","etad", "thetas", "thetad"))
## Inference for Stan model: hui_walter.
## 4 chains, each with iter=10000; warmup=5000; thin=1;
## post-warmup draws per chain=5000, total post-warmup draws=20000.
##
## mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat
## etas 0.65 0 0.03 0.58 0.62 0.64 0.67 0.71 15280 1
## etad 0.58 0 0.03 0.51 0.55 0.58 0.60 0.64 15921 1
## thetas 0.98 0 0.01 0.96 0.97 0.98 0.98 0.99 13367 1
## thetad 0.97 0 0.01 0.96 0.97 0.97 0.98 0.99 14467 1
##
## Samples were drawn using NUTS(diag_e) at Mon Oct 7 13:40:04 2019.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
## Convergence diagnostics and interval plots for the single-user fit.
traceplot(single_posterior, pars=c("etas","etad", "thetas", "thetad"))
plot(single_posterior, pars=c("etas","etad", "thetas", "thetad"), ci_level=0.5, outer_level=0.9)
## ci_level: 0.5 (50% intervals)
## outer_level: 0.9 (90% intervals)
## Majority-rule diagnosis
## Same model, but the digital column of y now uses the majority-rule
## agreement table.
stan_data_majority <- list(P=length(disease_names),
y=majority_digital,
pie_priors=pie_priors,
eta_prior=c(20,10),
theta_prior=c(9,1))
majority_posterior <- sampling(hui_model, stan_data_majority, chains=4, iter=10000)
##
## SAMPLING FOR MODEL 'hui_walter' NOW (CHAIN 1).
## Chain 1:
## Chain 1: Gradient evaluation took 2.3e-05 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.23 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1:
## Chain 1:
## Chain 1: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 1: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 1: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 1: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 1: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 1: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 1: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 1: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 1: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 1: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 1: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 1: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 1:
## Chain 1: Elapsed Time: 0.786374 seconds (Warm-up)
## Chain 1: 1.08301 seconds (Sampling)
## Chain 1: 1.86939 seconds (Total)
## Chain 1:
##
## SAMPLING FOR MODEL 'hui_walter' NOW (CHAIN 2).
## Chain 2:
## Chain 2: Gradient evaluation took 2.3e-05 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.23 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2:
## Chain 2:
## Chain 2: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 2: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 2: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 2: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 2: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 2: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 2: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 2: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 2: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 2: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 2: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 2: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 2:
## Chain 2: Elapsed Time: 0.847178 seconds (Warm-up)
## Chain 2: 0.919403 seconds (Sampling)
## Chain 2: 1.76658 seconds (Total)
## Chain 2:
##
## SAMPLING FOR MODEL 'hui_walter' NOW (CHAIN 3).
## Chain 3:
## Chain 3: Gradient evaluation took 2.2e-05 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.22 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3:
## Chain 3:
## Chain 3: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 3: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 3: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 3: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 3: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 3: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 3: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 3: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 3: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 3: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 3: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 3: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 3:
## Chain 3: Elapsed Time: 0.853009 seconds (Warm-up)
## Chain 3: 1.06872 seconds (Sampling)
## Chain 3: 1.92172 seconds (Total)
## Chain 3:
##
## SAMPLING FOR MODEL 'hui_walter' NOW (CHAIN 4).
## Chain 4:
## Chain 4: Gradient evaluation took 2.1e-05 seconds
## Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.21 seconds.
## Chain 4: Adjust your expectations accordingly!
## Chain 4:
## Chain 4:
## Chain 4: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 4: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 4: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 4: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 4: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 4: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 4: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 4: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 4: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 4: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 4: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 4: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 4:
## Chain 4: Elapsed Time: 0.758138 seconds (Warm-up)
## Chain 4: 0.950869 seconds (Sampling)
## Chain 4: 1.70901 seconds (Total)
## Chain 4:
## Posterior summaries for the majority-rule fit (same parameters as
## the single-user fit above).
print(majority_posterior,pars=c("etas","etad", "thetas", "thetad"))
## Inference for Stan model: hui_walter.
## 4 chains, each with iter=10000; warmup=5000; thin=1;
## post-warmup draws per chain=5000, total post-warmup draws=20000.
##
## mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat
## etas 0.67 0 0.03 0.61 0.65 0.67 0.69 0.73 15111 1
## etad 0.59 0 0.03 0.53 0.57 0.59 0.61 0.66 14938 1
## thetas 0.97 0 0.01 0.96 0.97 0.97 0.98 0.99 13418 1
## thetad 0.99 0 0.01 0.98 0.98 0.99 0.99 1.00 13991 1
##
## Samples were drawn using NUTS(diag_e) at Mon Oct 7 13:40:21 2019.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
## Convergence diagnostics and interval plots for the majority-rule fit.
traceplot(majority_posterior, pars=c("etas","etad", "thetas", "thetad"))
plot(majority_posterior, pars=c("etas","etad", "thetas", "thetad"), ci_level=0.5, outer_level=0.9)
## ci_level: 0.5 (50% intervals)
## outer_level: 0.9 (90% intervals)
## Hierarchical variant: per-condition accuracy parameters pooled via a
## prior on precision (mean 75, sd 10) -- see hierarchical_hw.stan for
## the exact parameterization.
hierarchical_model <- stan_model("hierarchical_hw.stan")
stan_data_majority$precision_mean = 75
stan_data_majority$precision_sd = 10
hierarchical_posterior <- sampling(hierarchical_model, stan_data_majority, chains=4, iter=10000)
##
## SAMPLING FOR MODEL 'hierarchical_hw' NOW (CHAIN 1).
## Chain 1:
## Chain 1: Gradient evaluation took 5.6e-05 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.56 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1:
## Chain 1:
## Chain 1: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 1: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 1: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 1: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 1: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 1: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 1: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 1: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 1: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 1: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 1: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 1: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 1:
## Chain 1: Elapsed Time: 3.15472 seconds (Warm-up)
## Chain 1: 2.27416 seconds (Sampling)
## Chain 1: 5.42888 seconds (Total)
## Chain 1:
##
## SAMPLING FOR MODEL 'hierarchical_hw' NOW (CHAIN 2).
## Chain 2:
## Chain 2: Gradient evaluation took 3.5e-05 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.35 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2:
## Chain 2:
## Chain 2: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 2: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 2: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 2: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 2: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 2: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 2: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 2: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 2: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 2: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 2: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 2: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 2:
## Chain 2: Elapsed Time: 4.42933 seconds (Warm-up)
## Chain 2: 4.764 seconds (Sampling)
## Chain 2: 9.19333 seconds (Total)
## Chain 2:
##
## SAMPLING FOR MODEL 'hierarchical_hw' NOW (CHAIN 3).
## Chain 3:
## Chain 3: Gradient evaluation took 3.4e-05 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.34 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3:
## Chain 3:
## Chain 3: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 3: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 3: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 3: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 3: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 3: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 3: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 3: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 3: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 3: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 3: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 3: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 3:
## Chain 3: Elapsed Time: 3.52561 seconds (Warm-up)
## Chain 3: 2.44994 seconds (Sampling)
## Chain 3: 5.97555 seconds (Total)
## Chain 3:
##
## SAMPLING FOR MODEL 'hierarchical_hw' NOW (CHAIN 4).
## Chain 4:
## Chain 4: Gradient evaluation took 3.9e-05 seconds
## Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.39 seconds.
## Chain 4: Adjust your expectations accordingly!
## Chain 4:
## Chain 4:
## Chain 4: Iteration: 1 / 10000 [ 0%] (Warmup)
## Chain 4: Iteration: 1000 / 10000 [ 10%] (Warmup)
## Chain 4: Iteration: 2000 / 10000 [ 20%] (Warmup)
## Chain 4: Iteration: 3000 / 10000 [ 30%] (Warmup)
## Chain 4: Iteration: 4000 / 10000 [ 40%] (Warmup)
## Chain 4: Iteration: 5000 / 10000 [ 50%] (Warmup)
## Chain 4: Iteration: 5001 / 10000 [ 50%] (Sampling)
## Chain 4: Iteration: 6000 / 10000 [ 60%] (Sampling)
## Chain 4: Iteration: 7000 / 10000 [ 70%] (Sampling)
## Chain 4: Iteration: 8000 / 10000 [ 80%] (Sampling)
## Chain 4: Iteration: 9000 / 10000 [ 90%] (Sampling)
## Chain 4: Iteration: 10000 / 10000 [100%] (Sampling)
## Chain 4:
## Chain 4: Elapsed Time: 5.86138 seconds (Warm-up)
## Chain 4: 2.57839 seconds (Sampling)
## Chain 4: 8.43977 seconds (Total)
## Chain 4:
## Posterior summaries for the hierarchical fit: one eta/theta entry
## per condition (index order follows disease_names).
print(hierarchical_posterior, pars=c("etas","etad", "thetas", "thetad"))
## Inference for Stan model: hierarchical_hw.
## 4 chains, each with iter=10000; warmup=5000; thin=1;
## post-warmup draws per chain=5000, total post-warmup draws=20000.
##
## mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat
## etas[1] 0.79 0 0.05 0.69 0.76 0.79 0.82 0.88 3713 1
## etas[2] 0.63 0 0.06 0.51 0.58 0.62 0.67 0.76 3323 1
## etas[3] 0.66 0 0.07 0.52 0.61 0.66 0.71 0.81 3281 1
## etas[4] 0.71 0 0.05 0.61 0.67 0.71 0.75 0.82 3339 1
## etas[5] 0.69 0 0.07 0.54 0.64 0.69 0.74 0.82 3955 1
## etas[6] 0.68 0 0.08 0.52 0.62 0.68 0.73 0.82 3204 1
## etas[7] 0.66 0 0.07 0.52 0.62 0.67 0.71 0.80 3706 1
## etas[8] 0.68 0 0.07 0.53 0.63 0.69 0.74 0.82 3603 1
## etas[9] 0.69 0 0.07 0.54 0.64 0.69 0.74 0.83 4053 1
## etas[10] 0.68 0 0.07 0.53 0.63 0.68 0.73 0.82 3660 1
## etas[11] 0.69 0 0.07 0.54 0.64 0.69 0.74 0.83 3848 1
## etad[1] 0.68 0 0.06 0.58 0.65 0.68 0.72 0.80 4223 1
## etad[2] 0.62 0 0.06 0.51 0.58 0.62 0.66 0.73 4781 1
## etad[3] 0.57 0 0.07 0.43 0.52 0.57 0.62 0.72 3911 1
## etad[4] 0.62 0 0.06 0.51 0.58 0.61 0.65 0.73 4209 1
## etad[5] 0.58 0 0.08 0.42 0.52 0.58 0.63 0.73 4434 1
## etad[6] 0.58 0 0.08 0.42 0.53 0.58 0.64 0.74 3992 1
## etad[7] 0.53 0 0.08 0.38 0.48 0.53 0.58 0.69 3817 1
## etad[8] 0.59 0 0.08 0.43 0.53 0.59 0.64 0.74 4392 1
## etad[9] 0.52 0 0.08 0.36 0.47 0.52 0.58 0.69 4058 1
## etad[10] 0.60 0 0.08 0.44 0.55 0.60 0.65 0.75 4570 1
## etad[11] 0.59 0 0.08 0.44 0.54 0.59 0.64 0.74 4619 1
## thetas[1] 0.93 0 0.03 0.86 0.91 0.93 0.96 0.99 5552 1
## thetas[2] 0.97 0 0.02 0.92 0.96 0.97 0.98 1.00 6795 1
## thetas[3] 0.95 0 0.02 0.90 0.94 0.95 0.97 0.99 5687 1
## thetas[4] 0.94 0 0.03 0.86 0.92 0.95 0.97 0.99 5213 1
## thetas[5] 0.98 0 0.01 0.96 0.97 0.98 0.99 1.00 11166 1
## thetas[6] 0.94 0 0.02 0.89 0.92 0.94 0.95 0.98 8502 1
## thetas[7] 0.96 0 0.02 0.91 0.95 0.96 0.98 1.00 5376 1
## thetas[8] 0.96 0 0.01 0.93 0.95 0.96 0.97 0.98 12924 1
## thetas[9] 0.97 0 0.02 0.93 0.96 0.97 0.98 0.99 6975 1
## thetas[10] 0.99 0 0.01 0.97 0.99 0.99 1.00 1.00 11065 1
## thetas[11] 0.99 0 0.01 0.97 0.98 0.99 0.99 1.00 10938 1
## thetad[1] 0.98 0 0.02 0.94 0.97 0.98 0.99 1.00 4838 1
## thetad[2] 0.97 0 0.03 0.89 0.95 0.97 0.99 1.00 3636 1
## thetad[3] 0.97 0 0.02 0.93 0.96 0.97 0.99 1.00 4202 1
## thetad[4] 0.97 0 0.03 0.90 0.95 0.97 0.99 1.00 3819 1
## thetad[5] 0.99 0 0.01 0.98 0.99 1.00 1.00 1.00 7864 1
## thetad[6] 0.94 0 0.02 0.90 0.93 0.94 0.95 0.98 5025 1
## thetad[7] 0.99 0 0.01 0.96 0.98 0.99 1.00 1.00 5215 1
## thetad[8] 0.99 0 0.01 0.96 0.98 0.99 0.99 1.00 8612 1
## thetad[9] 0.99 0 0.01 0.98 0.99 1.00 1.00 1.00 7834 1
## thetad[10] 0.97 0 0.01 0.95 0.97 0.97 0.98 0.99 9845 1
## thetad[11] 0.99 0 0.01 0.98 0.99 1.00 1.00 1.00 8470 1
##
## Samples were drawn using NUTS(diag_e) at Mon Oct 7 13:41:46 2019.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
## Convergence diagnostics and interval plots for the hierarchical fit.
traceplot(hierarchical_posterior, pars=c("etas"))
traceplot(hierarchical_posterior, pars=c("thetas"))
traceplot(hierarchical_posterior, pars=c("etad"))
traceplot(hierarchical_posterior, pars=c("thetad"))
plot(hierarchical_posterior, pars=c("etas","etad"))
## ci_level: 0.8 (80% intervals)
## outer_level: 0.95 (95% intervals)
plot(hierarchical_posterior, pars=c("thetas","thetad"))
## ci_level: 0.8 (80% intervals)
## outer_level: 0.95 (95% intervals)
## Persist the inputs and the posterior samples for downstream use.
save(coral_health, pie_priors, single_digital, majority_digital, file="coral_health.Rdata")
save(single_posterior, majority_posterior, hierarchical_posterior, file="posterior_samples.Rdata")