From 3a2995c8a9544d2e0ab52a047ef5e70b8005be39 Mon Sep 17 00:00:00 2001
From: Frantisek Bartos
Date: Sun, 17 Jul 2022 00:09:54 +0200
Subject: [PATCH] Fix down-weighting

* fixing the weights parameterization
---
 DESCRIPTION                |  2 +-
 NEWS.md                    |  6 +++++-
 R/fit-and-marglik.R        | 10 +++++-----
 R/utilities.R              |  1 +
 src/distributions/DWN.cc   |  2 +-
 src/distributions/DWWN1.cc |  2 +-
 src/distributions/DWWN2.cc |  2 +-
 7 files changed, 15 insertions(+), 10 deletions(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index be0a3d3a..2dcf1f42 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: RoBMA
 Title: Robust Bayesian Meta-Analyses
-Version: 2.3.0
+Version: 2.3.1
 Maintainer: František Bartoš
 Authors@R: c(
     person("František", "Bartoš", role = c("aut", "cre"),
diff --git a/NEWS.md b/NEWS.md
index 24bd059c..c8869eb4 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -1,6 +1,10 @@
+## version 2.3.1
+### Fixes
+- fixing weighted meta-analysis parameterization
+
 ## version 2.3
 ### Features
-- weighted meta-analysis by specifying `study_ids` argument in `RoBMA` and setting `weighted = TRUE`. The likelihood contribution of estimates from each study is down-weighted proportionally to the number of estimates in that study. Note that this experimental feature is supposed to provide a conservative alternative for estimating RoBMA in cases with multiple estimates from a study where the multivariate option is not computationally feasible.
+- weighted meta-analysis by specifying `study_ids` argument in `RoBMA()` and setting `weighted = TRUE`. The likelihood contribution of estimates from each study is down-weighted proportionally to the number of estimates in that study. Note that this experimental feature is supposed to provide a conservative alternative for estimating RoBMA in cases with multiple estimates from a study where the multivariate option is not computationally feasible.
 
 ## version 2.2.3
 ### Fixes
diff --git a/R/fit-and-marglik.R b/R/fit-and-marglik.R
index c3bea6cb..8067223f 100644
--- a/R/fit-and-marglik.R
+++ b/R/fit-and-marglik.R
@@ -133,7 +133,7 @@
 
     marglik <- list()
     if(attr(model, "weighted")){
-      marglik$logml <- sum(stats::dnorm(fit_data[["y"]], priors$mu$parameters[["location"]], fit_data[["se"]], log = TRUE) + log(fit_data[["weight"]]))
+      marglik$logml <- sum(stats::dnorm(fit_data[["y"]], priors$mu$parameters[["location"]], fit_data[["se"]], log = TRUE) * fit_data[["weight"]])
     }else{
       marglik$logml <- sum(stats::dnorm(fit_data[["y"]], priors$mu$parameters[["location"]], fit_data[["se"]], log = TRUE))
     }
@@ -445,11 +445,11 @@
   # the individual studies
   if(!is.null(data[["weight"]])){
     if(is.null(priors[["omega"]])){
-      log_lik <- log_lik + sum(stats::dnorm(data[["y"]], mean = eff, sd = pop_sd, log = TRUE) + log(data[["weight"]]))
+      log_lik <- log_lik + sum(stats::dnorm(data[["y"]], mean = eff, sd = pop_sd, log = TRUE) * data[["weight"]])
    }else if(priors[["omega"]]$distribution == "one.sided"){
-      log_lik <- log_lik + sum(.dwnorm_fast(data[["y"]], mean = eff, sd = pop_sd, omega = omega, crit_x = t(data[["crit_y"]]), type = "one.sided", log = TRUE) + log(data[["weight"]]))
+      log_lik <- log_lik + sum(.dwnorm_fast(data[["y"]], mean = eff, sd = pop_sd, omega = omega, crit_x = t(data[["crit_y"]]), type = "one.sided", log = TRUE) * data[["weight"]])
    }else if(priors[["omega"]]$distribution == "two.sided"){
-      log_lik <- log_lik + sum(.dwnorm_fast(data[["y"]], mean = eff, sd = pop_sd, omega = omega, crit_x = t(data[["crit_y"]]), type = "two.sided", log = TRUE) + log(data[["weight"]]))
+      log_lik <- log_lik + sum(.dwnorm_fast(data[["y"]], mean = eff, sd = pop_sd, omega = omega, crit_x = t(data[["crit_y"]]), type = "two.sided", log = TRUE) * data[["weight"]])
    }
  }else{
    if(is.null(priors[["omega"]])){
@@ -654,7 +654,7 @@
 
  # fill their weights
  for(i in seq_along(ids_weights$id)){
-    weights[!is.na(data[,"study_ids"]) & data[,"study_ids"] == ids_weights$id[i]] <- 1/ids_weights$weight[ids_weights$id == ids_weights$id[i]]
+    weights[!is.na(data[,"study_ids"]) & data[,"study_ids"] == ids_weights$id[i]] <- ids_weights$weight[ids_weights$id == ids_weights$id[i]]
  }
 
  # assign all remaining studies weight 1
diff --git a/R/utilities.R b/R/utilities.R
index cd8b452c..09cbf964 100644
--- a/R/utilities.R
+++ b/R/utilities.R
@@ -85,6 +85,7 @@ assign("max_cores", parallel::detectCores(logical = TRUE) - 1, envir = Ro
   "2.2.2" = c("0.2.3", "999.999.999"),
   "2.2.3" = c("0.2.3", "999.999.999"),
   "2.3.0" = c("0.2.3", "999.999.999"),
+  "2.3.1" = c("0.2.3", "999.999.999"),
   stop("New RoBMA version needs to be defined in '.check_BayesTools' function!")
   )
 
diff --git a/src/distributions/DWN.cc b/src/distributions/DWN.cc
index 4aa3d38f..99ae6f27 100644
--- a/src/distributions/DWN.cc
+++ b/src/distributions/DWN.cc
@@ -47,7 +47,7 @@ double DWN::logDensity(double const *x, unsigned int length, PDFType type,
   double weight = *par[2];
 
   // compute the nominator
-  double log_lik = dnorm(*x, mu, std::sqrt(var), true) + std::log(weight);
+  double log_lik = dnorm(*x, mu, std::sqrt(var), true) * weight;
 
   return log_lik;
 }
diff --git a/src/distributions/DWWN1.cc b/src/distributions/DWWN1.cc
index 7fd888d1..f3566620 100644
--- a/src/distributions/DWWN1.cc
+++ b/src/distributions/DWWN1.cc
@@ -108,7 +108,7 @@ double DWWN1::logDensity(double const *x, unsigned int length, PDFType type,
   }
 
   // compute the log likelihood
-  double log_lik = nom-std::log(denom)+std::log(weight);
+  double log_lik = (nom-std::log(denom)) * weight;
 
   return log_lik;
 }
diff --git a/src/distributions/DWWN2.cc b/src/distributions/DWWN2.cc
index b9d1b514..a27c52ec 100644
--- a/src/distributions/DWWN2.cc
+++ b/src/distributions/DWWN2.cc
@@ -120,7 +120,7 @@ double DWWN2::logDensity(double const *x, unsigned int length, PDFType type,
   }
 
   // compute the log likelihood
-  double log_lik = nom-std::log(denom)+std::log(weight);
+  double log_lik = (nom-std::log(denom)) * weight;
 
   return log_lik;
 }
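
The substance of the fix, common to every changed file above, is the switch from adding `log(weight)` to multiplying the log-density by `weight`. Below is a minimal R sketch of the difference; it is not part of the patch, and the toy data, `mu`, and object names (`y`, `se`, `ids`, `w`) are illustrative only.

```r
y   <- c(0.30, 0.25, 0.40, 0.10)   # effect size estimates
se  <- c(0.10, 0.12, 0.15, 0.08)   # standard errors
ids <- c("A", "A", "A", "B")       # study_ids: three estimates from study A, one from B

# per-estimate weight = 1 / number of estimates in that study
w <- 1 / as.numeric(table(ids)[ids])

mu <- 0.2                          # some value of the mean parameter

# previous parameterization: adds log(weight), i.e. scales each density by the
# weight, which only shifts the summed log-likelihood by the constant sum(log(w))
logml_old <- sum(dnorm(y, mu, se, log = TRUE) + log(w))

# fixed parameterization: multiplies each log-density by the weight, i.e. raises
# each likelihood contribution to the power of its weight, so the three estimates
# from study A jointly carry the weight of a single observation
logml_new <- sum(dnorm(y, mu, se, log = TRUE) * w)
```

Because the added constant `sum(log(w))` does not depend on `mu`, the old parameterization left the relative support for different parameter values unchanged and therefore did not down-weight anything; the multiplicative form implements the intended power-likelihood down-weighting described in the NEWS entry.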