Essec\Faculty\Model\Contribution {#2216
#_index: "academ_contributions"
#_id: "15761"
#_source: array:26 [
"id" => "15761"
"slug" => "15761-how-good-is-your-laplace-approximation-of-the-bayesian-posterior-finite-sample-computable-error-bounds-for-a-variety-of-useful-divergences"
"yearMonth" => "2025-05"
"year" => "2025"
"title" => "How good is your Laplace approximation of the Bayesian posterior? Finite-sample computable error bounds for a variety of useful divergences"
"description" => "KASPRZAK, M., GIORDANO, R. et BRODERICK, T. (2025). How good is your Laplace approximation of the Bayesian posterior? Finite-sample computable error bounds for a variety of useful divergences. <i>Journal of Machine Learning Research</i>, 26(87), pp. 1−81."
"authors" => array:3 [
0 => array:3 [
"name" => "KASPRZAK Mikolaj"
"bid" => "B00820408"
"slug" => "kasprzak-mikolaj"
]
1 => array:1 [
"name" => "GIORDANO Ryan"
]
2 => array:1 [
"name" => "BRODERICK Tamara"
]
]
"ouvrage" => ""
"keywords" => array:1 [
0 => "HorsECOGestion"
]
"updatedAt" => "2025-10-10 12:10:51"
"publicationUrl" => "https://jmlr.org/papers/v26/24-0619.html"
"publicationInfo" => array:3 [
"pages" => "1−81"
"volume" => "26"
"number" => "87"
]
"type" => array:2 [
"fr" => "Articles"
"en" => "Journal articles"
]
"support_type" => array:2 [
"fr" => "Revue scientifique"
"en" => "Scientific journal"
]
"countries" => array:2 [
"fr" => "États-Unis"
"en" => "United States of America"
]
"abstract" => array:2 [
"fr" => "The Laplace approximation is a popular method for constructing a Gaussian approximation to the Bayesian posterior and thereby approximating the posterior mean and variance. But approximation quality is a concern. One might consider using rate-of-convergence bounds from certain versions of the Bayesian Central Limit Theorem (BCLT) to provide quality guarantees. But existing bounds require assumptions that are unrealistic even for relatively simple real-life Bayesian analyses; more specifically, existing bounds either (1) require knowing the true data-generating parameter, (2) are asymptotic in the number of samples, (3) do not control the Bayesian posterior mean, or (4) require strongly log concave models to compute. In this work, we provide the first computable bounds on quality that simultaneously (1) do not require knowing the true parameter, (2) apply to finite samples, (3) control posterior means and variances, and (4) apply generally to models that satisfy the conditions of the asymptotic BCLT. Moreover, we substantially improve the dimension dependence of existing bounds; in fact, we achieve the lowest-order dimension dependence possible in the general case. We compute exact constants in our bounds for a variety of standard models, including logistic regression, and numerically demonstrate their utility. We provide a framework for analysis of more complex models."
"en" => "The Laplace approximation is a popular method for constructing a Gaussian approximation to the Bayesian posterior and thereby approximating the posterior mean and variance. But approximation quality is a concern. One might consider using rate-of-convergence bounds from certain versions of the Bayesian Central Limit Theorem (BCLT) to provide quality guarantees. But existing bounds require assumptions that are unrealistic even for relatively simple real-life Bayesian analyses; more specifically, existing bounds either (1) require knowing the true data-generating parameter, (2) are asymptotic in the number of samples, (3) do not control the Bayesian posterior mean, or (4) require strongly log concave models to compute. In this work, we provide the first computable bounds on quality that simultaneously (1) do not require knowing the true parameter, (2) apply to finite samples, (3) control posterior means and variances, and (4) apply generally to models that satisfy the conditions of the asymptotic BCLT. Moreover, we substantially improve the dimension dependence of existing bounds; in fact, we achieve the lowest-order dimension dependence possible in the general case. We compute exact constants in our bounds for a variety of standard models, including logistic regression, and numerically demonstrate their utility. We provide a framework for analysis of more complex models."
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-12-06T05:21:43.000Z"
"docTitle" => "How good is your Laplace approximation of the Bayesian posterior? Finite-sample computable error bounds for a variety of useful divergences"
"docSurtitle" => "Journal articles"
"authorNames" => "<a href="/cv/kasprzak-mikolaj">KASPRZAK Mikolaj</a>, GIORDANO Ryan, BRODERICK Tamara"
"docDescription" => "<span class="document-property-authors">KASPRZAK Mikolaj, GIORDANO Ryan, BRODERICK Tamara</span><br><span class="document-property-authors_fields">Information Systems, Data Analytics and Operations</span> | <span class="document-property-year">2025</span>"
"keywordList" => "<a href="#">HorsECOGestion</a>"
"docPreview" => "<b>How good is your Laplace approximation of the Bayesian posterior? Finite-sample computable error bounds for a variety of useful divergences</b><br><span>2025-05 | Journal articles </span>"
"docType" => "research"
"publicationLink" => "<a href="https://jmlr.org/papers/v26/24-0619.html" target="_blank">How good is your Laplace approximation of the Bayesian posterior? Finite-sample computable error bounds for a variety of useful divergences</a>"
]
+lang: "en"
+"_score": 8.714403
+"_ignored": array:2 [
0 => "abstract.en.keyword"
1 => "abstract.fr.keyword"
]
+"parent": null
}
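
The record's abstract describes the Laplace approximation: approximate the Bayesian posterior by a Gaussian centred at the posterior mode (the MAP estimate), with covariance equal to the inverse Hessian of the negative log posterior at that mode. Below is a minimal Python sketch for the logistic-regression case mentioned in the abstract; the model, prior, and synthetic data are illustrative assumptions, not taken from the paper.

```python
import numpy as np
from scipy.optimize import minimize

def neg_log_posterior(theta, X, y):
    """Negative log posterior: logistic likelihood plus a N(0, I) prior (up to a constant)."""
    logits = X @ theta
    log_lik = np.sum(y * logits - np.logaddexp(0.0, logits))  # Bernoulli log-likelihood, numerically stable
    log_prior = -0.5 * theta @ theta                          # standard-normal prior (illustrative assumption)
    return -(log_lik + log_prior)

def neg_log_posterior_hessian(theta, X, y):
    """Exact Hessian for this model: X^T diag(p(1-p)) X + I."""
    p = 1.0 / (1.0 + np.exp(-(X @ theta)))
    return X.T @ ((p * (1.0 - p))[:, None] * X) + np.eye(theta.size)

# Illustrative synthetic data (hypothetical, not from the paper).
rng = np.random.default_rng(0)
X = rng.normal(size=(200, 3))
theta_true = np.array([1.0, -0.5, 0.25])
y = (rng.uniform(size=200) < 1.0 / (1.0 + np.exp(-(X @ theta_true)))).astype(float)

# Laplace approximation: Gaussian N(theta_map, H^{-1}) at the posterior mode.
theta_map = minimize(neg_log_posterior, np.zeros(3), args=(X, y), method="BFGS").x
cov = np.linalg.inv(neg_log_posterior_hessian(theta_map, X, y))

print("approximate posterior mean:", theta_map)
print("approximate posterior sds :", np.sqrt(np.diag(cov)))
```

This sketch only constructs the Gaussian approximation itself; the paper's contribution is the finite-sample, computable bounds on how far such an approximation can be from the exact posterior, without requiring knowledge of the true data-generating parameter.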