Fix down-weighting
* fixing the weights parameterization
FBartos committed Jul 16, 2022
1 parent d40f77b commit 3a2995c
Showing 7 changed files with 15 additions and 10 deletions.
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,6 +1,6 @@
Package: RoBMA
Title: Robust Bayesian Meta-Analyses
-Version: 2.3.0
+Version: 2.3.1
Maintainer: František Bartoš <[email protected]>
Authors@R: c(
person("František", "Bartoš", role = c("aut", "cre"),
6 changes: 5 additions & 1 deletion NEWS.md
@@ -1,6 +1,10 @@
+## version 2.3.1
+### Fixes
+- fixing weighted meta-analysis parameterization
+
## version 2.3
### Features
-- weighted meta-analysis by specifying `study_ids` argument in `RoBMA` and setting `weighted = TRUE`. The likelihood contribution of estimates from each study is down-weighted proportionally to the number of estimates in that study. Note that this experimental feature is supposed to provide a conservative alternative for estimating RoBMA in cases with multiple estimates from a study where the multivariate option is not computationally feasible.
+- weighted meta-analysis by specifying `study_ids` argument in `RoBMA()` and setting `weighted = TRUE`. The likelihood contribution of estimates from each study is down-weighted proportionally to the number of estimates in that study. Note that this experimental feature is supposed to provide a conservative alternative for estimating RoBMA in cases with multiple estimates from a study where the multivariate option is not computationally feasible.

## version 2.2.3
### Fixes
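For context on the weighting scheme described in the NEWS entry above: each estimate receives the reciprocal of the number of estimates its study contributes, so every study contributes the total weight of a single estimate. A minimal sketch of that idea; the `study_ids` values and the `table()`-based bookkeeping here are illustrative, not the package's internal implementation:

```r
# Illustrative sketch of the down-weighting scheme from the NEWS entry:
# each estimate is weighted by 1 / (number of estimates from its study),
# so every study contributes the total weight of one estimate.
study_ids <- c("A", "A", "B", "C", "C", "C")              # hypothetical input
weights   <- as.numeric(1 / table(study_ids)[study_ids])
weights                                                   # 0.5 0.5 1.0 0.333 0.333 0.333
```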
10 changes: 5 additions & 5 deletions R/fit-and-marglik.R
@@ -133,7 +133,7 @@
marglik <- list()

if(attr(model, "weighted")){
-marglik$logml <- sum(stats::dnorm(fit_data[["y"]], priors$mu$parameters[["location"]], fit_data[["se"]], log = TRUE) + log(fit_data[["weight"]]))
+marglik$logml <- sum(stats::dnorm(fit_data[["y"]], priors$mu$parameters[["location"]], fit_data[["se"]], log = TRUE) * fit_data[["weight"]])
}else{
marglik$logml <- sum(stats::dnorm(fit_data[["y"]], priors$mu$parameters[["location"]], fit_data[["se"]], log = TRUE))
}
@@ -445,11 +445,11 @@
# the individual studies
if(!is.null(data[["weight"]])){
if(is.null(priors[["omega"]])){
-log_lik <- log_lik + sum(stats::dnorm(data[["y"]], mean = eff, sd = pop_sd, log = TRUE) + log(data[["weight"]]))
+log_lik <- log_lik + sum(stats::dnorm(data[["y"]], mean = eff, sd = pop_sd, log = TRUE) * data[["weight"]])
}else if(priors[["omega"]]$distribution == "one.sided"){
-log_lik <- log_lik + sum(.dwnorm_fast(data[["y"]], mean = eff, sd = pop_sd, omega = omega, crit_x = t(data[["crit_y"]]), type = "one.sided", log = TRUE) + log(data[["weight"]]))
+log_lik <- log_lik + sum(.dwnorm_fast(data[["y"]], mean = eff, sd = pop_sd, omega = omega, crit_x = t(data[["crit_y"]]), type = "one.sided", log = TRUE) * data[["weight"]])
}else if(priors[["omega"]]$distribution == "two.sided"){
-log_lik <- log_lik + sum(.dwnorm_fast(data[["y"]], mean = eff, sd = pop_sd, omega = omega, crit_x = t(data[["crit_y"]]), type = "two.sided", log = TRUE) + log(data[["weight"]]))
+log_lik <- log_lik + sum(.dwnorm_fast(data[["y"]], mean = eff, sd = pop_sd, omega = omega, crit_x = t(data[["crit_y"]]), type = "two.sided", log = TRUE) * data[["weight"]])
}
}else{
if(is.null(priors[["omega"]])){
@@ -654,7 +654,7 @@

# fill their weights
for(i in seq_along(ids_weights$id)){
-weights[!is.na(data[,"study_ids"]) & data[,"study_ids"] == ids_weights$id[i]] <- 1/ids_weights$weight[ids_weights$id == ids_weights$id[i]]
+weights[!is.na(data[,"study_ids"]) & data[,"study_ids"] == ids_weights$id[i]] <- ids_weights$weight[ids_weights$id == ids_weights$id[i]]
}

# assign all remaining studies weight 1
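The substance of the fix in R/fit-and-marglik.R (mirrored in the JAGS distributions below): adding `log(weight)` to a log density multiplies the likelihood by a constant factor, which shifts the marginal likelihood but does not down-weight the observation, whereas multiplying the log density by the weight raises the likelihood to the power of the weight, the usual power-likelihood form of down-weighting. A small numeric illustration with made-up values, not package code:

```r
# Numeric illustration (hypothetical values): the old parameterization added
# log(weight) to the log density, i.e. scaled the likelihood by a constant;
# the new one multiplies the log density by the weight, i.e. raises the
# likelihood to the power of the weight.
y      <- c(0.20, 0.35)   # two estimates from the same study
se     <- c(0.10, 0.12)
weight <- 0.5             # each of the two estimates counts as half a study

logml_old <- sum(dnorm(y, mean = 0, sd = se, log = TRUE) + log(weight))  # before the fix
logml_new <- sum(dnorm(y, mean = 0, sd = se, log = TRUE) * weight)       # after the fix
c(old = logml_old, new = logml_new)
```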
1 change: 1 addition & 0 deletions R/utilities.R
@@ -85,6 +85,7 @@ assign("max_cores", parallel::detectCores(logical = TRUE) - 1, envir = Ro
"2.2.2" = c("0.2.3", "999.999.999"),
"2.2.3" = c("0.2.3", "999.999.999"),
"2.3.0" = c("0.2.3", "999.999.999"),
"2.3.1" = c("0.2.3", "999.999.999"),
stop("New RoBMA version needs to be defined in '.check_BayesTools' function!")
)

2 changes: 1 addition & 1 deletion src/distributions/DWN.cc
@@ -47,7 +47,7 @@ double DWN::logDensity(double const *x, unsigned int length, PDFType type,
double weight = *par[2];

// compute the nominator
-double log_lik = dnorm(*x, mu, std::sqrt(var), true) + std::log(weight);
+double log_lik = dnorm(*x, mu, std::sqrt(var), true) * weight;

return log_lik;
}
2 changes: 1 addition & 1 deletion src/distributions/DWWN1.cc
@@ -108,7 +108,7 @@ double DWWN1::logDensity(double const *x, unsigned int length, PDFType type,
}

// compute the log likelihood
-double log_lik = nom-std::log(denom)+std::log(weight);
+double log_lik = (nom-std::log(denom)) * weight;

return log_lik;
}
2 changes: 1 addition & 1 deletion src/distributions/DWWN2.cc
@@ -120,7 +120,7 @@ double DWWN2::logDensity(double const *x, unsigned int length, PDFType type,
}

// compute the log likelihood
-double log_lik = nom-std::log(denom)+std::log(weight);
+double log_lik = (nom-std::log(denom)) * weight;

return log_lik;
}
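The same change is applied in the JAGS selection-model densities (DWWN1, DWWN2): the normalized log density `nom - log(denom)` is now multiplied by the study weight instead of being shifted by `log(weight)`. A rough R analogue of a one-sided weighted-normal log density under these assumptions; the cut-point and omega bookkeeping here is a simplified stand-in for the actual C++ implementation, whose indexing may differ:

```r
# Rough, simplified R analogue of the fixed DWWN* log density (one-sided case).
# crit_x are cut-points on the effect-size scale; omega holds one selection
# weight per interval (length(crit_x) + 1 values).
dwnorm_weighted <- function(x, mean, sd, omega, crit_x, weight) {
  j     <- findInterval(x, crit_x) + 1                      # interval containing x
  nom   <- dnorm(x, mean, sd, log = TRUE) + log(omega[j])   # log numerator
  probs <- diff(pnorm(c(-Inf, crit_x, Inf), mean, sd))      # interval probabilities
  denom <- sum(omega * probs)                               # normalizing constant
  (nom - log(denom)) * weight                               # the fix: multiply, not shift
}

dwnorm_weighted(x = 0.3, mean = 0, sd = 0.15, omega = c(1, 0.5),
                crit_x = 0.25, weight = 0.5)
```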
