Essec\Faculty\Model\Profile {#2216
#_id: "B00812202"
#_source: array:40 [
"bid" => "B00812202"
"academId" => "33210"
"slug" => "daudel-kamelia"
"fullName" => "Kamélia DAUDEL"
"lastName" => "DAUDEL"
"firstName" => "Kamélia"
"title" => array:2 [
"fr" => "Professeur assistant"
"en" => "Assistant Professor"
]
"email" => "kamelia.daudel@essec.edu"
"status" => "ACTIF"
"campus" => "Campus de Cergy"
"departments" => []
"phone" => ""
"sites" => []
"facNumber" => "33210"
"externalCvUrl" => "https://faculty.essec.edu/en/cv/daudel-kamelia/pdf"
"googleScholarUrl" => "https://scholar.google.com/citations?hl=en&user=q1xj2FgAAAAJ"
"facOrcId" => "https://orcid.org/0009-0006-3232-9240"
"career" => array:2 [
0 => Essec\Faculty\Model\CareerItem {#2219
#_index: null
#_id: null
#_source: array:7 [
"startDate" => "2023-09-01"
"endDate" => null
"isInternalPosition" => true
"type" => array:2 [
"fr" => "Positions académiques principales"
"en" => "Full-time academic appointments"
]
"label" => array:2 [
"fr" => "Professeur assistant"
"en" => "Assistant Professor"
]
"institution" => array:2 [
"fr" => "ESSEC Business School"
"en" => "ESSEC Business School"
]
"country" => array:2 [
"fr" => "France"
"en" => "France"
]
]
+lang: "en"
+"parent": Essec\Faculty\Model\Profile {#2216}
}
1 => Essec\Faculty\Model\CareerItem {#2222
#_index: null
#_id: null
#_source: array:7 [
"startDate" => "2021-09-01"
"endDate" => "2022-12-31"
"isInternalPosition" => true
"type" => array:2 [
"fr" => "Positions académiques principales"
"en" => "Full-time academic appointments"
]
"label" => array:2 [
"fr" => "Post-Doctorante"
"en" => "Post-Doctorate"
]
"institution" => array:2 [
"fr" => "University of Oxford"
"en" => "University of Oxford"
]
"country" => array:2 [
"fr" => "Royaume-Uni"
"en" => "United Kingdom"
]
]
+lang: "en"
+"parent": Essec\Faculty\Model\Profile {#2216}
}
]
"diplomes" => array:3 [
0 => Essec\Faculty\Model\Diplome {#2218
#_index: null
#_id: null
#_source: array:6 [
"diplome" => "DIPLOMA"
"type" => array:2 [
"fr" => "Diplômes"
"en" => "Diplomas"
]
"year" => "2021"
"label" => array:2 [
"en" => "PhD in Applied Mathematics"
"fr" => "Doctorat en Mathématiques Appliquées"
]
"institution" => array:2 [
"fr" => "Télécom Paris"
"en" => "Télécom Paris"
]
"country" => array:2 [
"fr" => "France"
"en" => "France"
]
]
+lang: "en"
+"parent": Essec\Faculty\Model\Profile {#2216}
}
1 => Essec\Faculty\Model\Diplome {#2220
#_index: null
#_id: null
#_source: array:6 [
"diplome" => "DIPLOMA"
"type" => array:2 [
"fr" => "Diplômes"
"en" => "Diplomas"
]
"year" => "2018"
"label" => array:2 [
"en" => "MSc in Mathematical and Computational Finance"
"fr" => "MSc in Mathematical and Computational Finance"
]
"institution" => array:2 [
"fr" => "University of Oxford"
"en" => "University of Oxford"
]
"country" => array:2 [
"fr" => "Royaume-Uni"
"en" => "United Kingdom"
]
]
+lang: "en"
+"parent": Essec\Faculty\Model\Profile {#2216}
}
2 => Essec\Faculty\Model\Diplome {#2217
#_index: null
#_id: null
#_source: array:6 [
"diplome" => "DIPLOMA"
"type" => array:2 [
"fr" => "Diplômes"
"en" => "Diplomas"
]
"year" => "2018"
"label" => array:2 [
"en" => "Diplôme d’Ingénieur"
"fr" => "Diplôme d’Ingénieur"
]
"institution" => array:2 [
"fr" => "Télécom Paris"
"en" => "Télécom Paris"
]
"country" => array:2 [
"fr" => "France"
"en" => "France"
]
]
+lang: "en"
+"parent": Essec\Faculty\Model\Profile {#2216}
}
]
"bio" => array:2 [
"fr" => null
"en" => null
]
"department" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"industrrySectors" => array:2 [
"fr" => null
"en" => null
]
"researchFields" => array:2 [
"fr" => "Méthodes d'inférence variationnelle - Inférence approximative"
"en" => "Variational inference methods - Approximate inference"
]
"teachingFields" => array:2 [
"fr" => null
"en" => null
]
"distinctions" => array:1 [
0 => Essec\Faculty\Model\Distinction {#2223
#_index: null
#_id: null
#_source: array:6 [
"date" => "2022-07-01"
"label" => array:2 [
"fr" => "Premier Prix de Thèse 2022 de l'Institut Polytechnique de Paris"
"en" => "First prize of Institut Polytechnique de Paris Best Thesis Award 2022"
]
"type" => array:2 [
"fr" => "Prix"
"en" => "Awards"
]
"tri" => " 1 "
"institution" => array:2 [
"fr" => null
"en" => null
]
"country" => array:2 [
"fr" => null
"en" => null
]
]
+lang: "en"
+"parent": Essec\Faculty\Model\Profile {#2216}
}
]
"teaching" => array:2 [
0 => Essec\Faculty\Model\TeachingItem {#2221
#_index: null
#_id: null
#_source: array:7 [
"startDate" => "2023"
"endDate" => null
"program" => "Master in Data science and Business analytics"
"label" => array:2 [
"fr" => "Statistical Inference"
"en" => "Statistical Inference"
]
"type" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"institution" => array:2 [
"fr" => "ESSEC Business School"
"en" => "ESSEC Business School"
]
"country" => array:2 [
"fr" => "France"
"en" => "France"
]
]
+lang: "en"
}
1 => Essec\Faculty\Model\TeachingItem {#2215
#_index: null
#_id: null
#_source: array:7 [
"startDate" => "2023"
"endDate" => null
"program" => "Grande Ecole - Master in Management"
"label" => array:2 [
"fr" => "Business Statistics & Introduction to Analytics"
"en" => "Business Statistics & Introduction to Analytics"
]
"type" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"institution" => array:2 [
"fr" => "ESSEC Business School"
"en" => "ESSEC Business School"
]
"country" => array:2 [
"fr" => "France"
"en" => "France"
]
]
+lang: "en"
}
]
"otherActivities" => []
"theses" => []
"sitePerso" => "https://kdaudel.github.io/"
"indexedAt" => "2025-06-15T20:21:22.000Z"
"contributions" => array:8 [
0 => Essec\Faculty\Model\Contribution {#2225
#_index: "academ_contributions"
#_id: "14316"
#_source: array:18 [
"id" => "14316"
"slug" => "14316-infinite-dimensional-gradient-based-descent-for-alpha-divergence-minimisation"
"yearMonth" => "2021-08"
"year" => "2021"
"title" => "Infinite-dimensional gradient-based descent for alpha-divergence minimisation"
"description" => "DAUDEL, K., DOUC, R. et PORTIER, F. (2021). Infinite-dimensional gradient-based descent for alpha-divergence minimisation. <i>Annals of Statistics</i>, 49(4), pp. 2250 - 2270."
"authors" => array:3 [
0 => array:3 [
"name" => "DAUDEL Kamélia"
"bid" => "B00812202"
"slug" => "daudel-kamelia"
]
1 => array:1 [
"name" => "DOUC Randal"
]
2 => array:1 [
"name" => "PORTIER François"
]
]
"ouvrage" => ""
"keywords" => []
"updatedAt" => "2023-09-12 15:34:27"
"publicationUrl" => "https://projecteuclid.org/journals/annals-of-statistics/volume-49/issue-4/Infinite-dimensional-gradient-based-descent-for-alpha-divergence-minimisation/10.1214/20-AOS2035.short"
"publicationInfo" => array:3 [
"pages" => "2250 - 2270"
"volume" => "49"
"number" => "4"
]
"type" => array:2 [
"fr" => "Articles"
"en" => "Journal articles"
]
"support_type" => array:2 [
"fr" => "Revue scientifique"
"en" => "Scientific journal"
]
"countries" => array:2 [
"fr" => null
"en" => null
]
"abstract" => array:2 [
"fr" => "We demonstrate empirically on both toy and real-world examples the benefit of using the Power Descent and going beyond the Entropic Mirror Descent framework, which fails as the dimension grows."
"en" => "We demonstrate empirically on both toy and real-world examples the benefit of using the Power Descent and going beyond the Entropic Mirror Descent framework, which fails as the dimension grows."
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-06-15T20:21:41.000Z"
]
+lang: "en"
+"_type": "_doc"
+"_score": 7.0262403
+"parent": null
}
1 => Essec\Faculty\Model\Contribution {#2227
#_index: "academ_contributions"
#_id: "14317"
#_source: array:18 [
"id" => "14317"
"slug" => "14317-monotonic-alpha-divergence-minimisation-for-variational-inference"
"yearMonth" => "2023-01"
"year" => "2023"
"title" => "Monotonic Alpha-divergence Minimisation for Variational Inference"
"description" => "DAUDEL, K., DOUC, R. et ROUEFF, F. (2023). Monotonic Alpha-divergence Minimisation for Variational Inference. <i>Journal of Machine Learning Research</i>, 24(62), pp. 1-76."
"authors" => array:3 [
0 => array:3 [
"name" => "DAUDEL Kamélia"
"bid" => "B00812202"
"slug" => "daudel-kamelia"
]
1 => array:1 [
"name" => "DOUC Randal"
]
2 => array:1 [
"name" => "ROUEFF François"
]
]
"ouvrage" => ""
"keywords" => []
"updatedAt" => "2024-10-31 13:51:19"
"publicationUrl" => "http://jmlr.org/papers/v24/21-0249.html"
"publicationInfo" => array:3 [
"pages" => "1-76"
"volume" => "24"
"number" => "62"
]
"type" => array:2 [
"fr" => "Articles"
"en" => "Journal articles"
]
"support_type" => array:2 [
"fr" => "Revue scientifique"
"en" => "Scientific journal"
]
"countries" => array:2 [
"fr" => "États-Unis"
"en" => "United States of America"
]
"abstract" => array:2 [
"fr" => """
In this paper, we introduce a novel family of iterative algorithms which carry out α\n
-divergence minimisation in a Variational Inference context. They do so by ensuring a systematic decrease at each step in the α\n
-divergence between the variational and the posterior distributions. In its most general form, the variational distribution is a mixture model and our framework allows us to simultaneously optimise the weights and components parameters of this mixture model. Our approach permits us to build on various methods previously proposed for α\n
-divergence minimisation such as Gradient or Power Descent schemes and we also shed a new light on an integrated Expectation Maximization algorithm. Lastly, we provide empirical evidence that our methodology yields improved results on several multimodal target distributions and on a real data example.
"""
"en" => """
In this paper, we introduce a novel family of iterative algorithms which carry out α\n
-divergence minimisation in a Variational Inference context. They do so by ensuring a systematic decrease at each step in the α\n
-divergence between the variational and the posterior distributions. In its most general form, the variational distribution is a mixture model and our framework allows us to simultaneously optimise the weights and components parameters of this mixture model. Our approach permits us to build on various methods previously proposed for α\n
-divergence minimisation such as Gradient or Power Descent schemes and we also shed a new light on an integrated Expectation Maximization algorithm. Lastly, we provide empirical evidence that our methodology yields improved results on several multimodal target distributions and on a real data example.
"""
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-06-15T20:21:41.000Z"
]
+lang: "en"
+"_type": "_doc"
+"_score": 7.0262403
+"parent": null
}
2 => Essec\Faculty\Model\Contribution {#2229
#_index: "academ_contributions"
#_id: "14318"
#_source: array:18 [
"id" => "14318"
"slug" => "14318-alpha-divergence-variational-inference-meets-importance-weighted-auto-encoders-methodology-and-asymptotics"
"yearMonth" => "2023-08"
"year" => "2023"
"title" => "Alpha-divergence Variational Inference Meets Importance Weighted Auto-Encoders: Methodology and Asymptotics"
"description" => "DAUDEL, K., BENTON, J., SHI, Y. et DOUCET, A. (2023). Alpha-divergence Variational Inference Meets Importance Weighted Auto-Encoders: Methodology and Asymptotics. <i>Journal of Machine Learning Research</i>, 24(243), pp. 1-83."
"authors" => array:4 [
0 => array:3 [
"name" => "DAUDEL Kamélia"
"bid" => "B00812202"
"slug" => "daudel-kamelia"
]
1 => array:1 [
"name" => "BENTON Joe"
]
2 => array:1 [
"name" => "SHI Yuyang"
]
3 => array:1 [
"name" => "DOUCET Arnaud"
]
]
"ouvrage" => ""
"keywords" => []
"updatedAt" => "2023-09-12 15:58:22"
"publicationUrl" => "https://www.jmlr.org/papers/volume24/22-1160/22-1160.pdf"
"publicationInfo" => array:3 [
"pages" => "1-83"
"volume" => "24"
"number" => "243"
]
"type" => array:2 [
"fr" => "Articles"
"en" => "Journal articles"
]
"support_type" => array:2 [
"fr" => "Revue scientifique"
"en" => "Scientific journal"
]
"countries" => array:2 [
"fr" => "États-Unis"
"en" => "United States of America"
]
"abstract" => array:2 [
"fr" => ""
"en" => ""
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-06-15T20:21:41.000Z"
]
+lang: "en"
+"_type": "_doc"
+"_score": 7.0262403
+"parent": null
}
3 => Essec\Faculty\Model\Contribution {#2226
#_index: "academ_contributions"
#_id: "14319"
#_source: array:18 [
"id" => "14319"
"slug" => "14319-mixture-weights-optimisation-for-alpha-divergence-variational-inference"
"yearMonth" => "2021-12"
"year" => "2021"
"title" => "Mixture weights optimisation for Alpha-Divergence Variational Inference"
"description" => "DAUDEL, K. et DOUC, R. (2021). Mixture weights optimisation for Alpha-Divergence Variational Inference. Dans: <i>35th Conference on Neural Information Processing Systems (NeurIPS 2021)</i>. Curran Associates, Inc. pp. 4397–4408."
"authors" => array:2 [
0 => array:3 [
"name" => "DAUDEL Kamélia"
"bid" => "B00812202"
"slug" => "daudel-kamelia"
]
1 => array:1 [
"name" => "DOUC Randal"
]
]
"ouvrage" => "35th Conference on Neural Information Processing Systems (NeurIPS 2021)"
"keywords" => []
"updatedAt" => "2023-09-12 01:00:39"
"publicationUrl" => "https://proceedings.neurips.cc/paper_files/paper/2021/file/233f1dd0f3f537bcb7a338ea74d63483-Paper.pdf"
"publicationInfo" => array:3 [
"pages" => "4397–4408"
"volume" => ""
"number" => ""
]
"type" => array:2 [
"fr" => "Actes d'une conférence"
"en" => "Conference Proceedings"
]
"support_type" => array:2 [
"fr" => "Editeur"
"en" => "Publisher"
]
"countries" => array:2 [
"fr" => "États-Unis"
"en" => "United States of America"
]
"abstract" => array:2 [
"fr" => ""
"en" => ""
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-06-15T20:21:41.000Z"
]
+lang: "en"
+"_type": "_doc"
+"_score": 7.0262403
+"parent": null
}
4 => Essec\Faculty\Model\Contribution {#2230
#_index: "academ_contributions"
#_id: "14350"
#_source: array:18 [
"id" => "14350"
"slug" => "14350-alpha-divergence-variational-inference-meets-importance-weighted-auto-encoders-methodology-and-asymptotics"
"yearMonth" => "2023-12"
"year" => "2023"
"title" => "Alpha-divergence Variational Inference Meets Importance Weighted Auto-Encoders: Methodology and Asymptotics"
"description" => "DAUDEL, K., BENTON, J., SHI, Y. et DOUCET, A. (2023). Alpha-divergence Variational Inference Meets Importance Weighted Auto-Encoders: Methodology and Asymptotics. Dans: 37th Conference on Neural Information Processing Systems 2023 (NeurIPS 2023). New-Orleans."
"authors" => array:4 [
0 => array:3 [
"name" => "DAUDEL Kamélia"
"bid" => "B00812202"
"slug" => "daudel-kamelia"
]
1 => array:1 [
"name" => "BENTON Joe"
]
2 => array:1 [
"name" => "SHI Yuyang"
]
3 => array:1 [
"name" => "DOUCET Arnaud"
]
]
"ouvrage" => "37th Conference on Neural Information Processing Systems 2023 (NeurIPS 2023)"
"keywords" => []
"updatedAt" => "2023-09-27 01:00:43"
"publicationUrl" => null
"publicationInfo" => array:3 [
"pages" => ""
"volume" => ""
"number" => ""
]
"type" => array:2 [
"fr" => "Communications dans une conférence"
"en" => "Presentations at an Academic or Professional conference"
]
"support_type" => array:2 [
"fr" => null
"en" => null
]
"countries" => array:2 [
"fr" => null
"en" => null
]
"abstract" => array:2 [
"fr" => ""
"en" => ""
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-06-15T20:21:41.000Z"
]
+lang: "en"
+"_type": "_doc"
+"_score": 7.0262403
+"parent": null
}
5 => Essec\Faculty\Model\Contribution {#2224
#_index: "academ_contributions"
#_id: "15303"
#_source: array:18 [
"id" => "15303"
"slug" => "15303-learning-with-importance-weighted-variational-inference-asymptotics-for-gradient-estimators-of-the-vr-iwae-bound"
"yearMonth" => "2024-10"
"year" => "2024"
"title" => "Learning with Importance Weighted Variational Inference: Asymptotics for Gradient Estimators of the VR-IWAE Bound"
"description" => "DAUDEL, K. et ROUEFF, F. (2024). <i>Learning with Importance Weighted Variational Inference: Asymptotics for Gradient Estimators of the VR-IWAE Bound</i>. arXiv."
"authors" => array:2 [
0 => array:3 [
"name" => "DAUDEL Kamélia"
"bid" => "B00812202"
"slug" => "daudel-kamelia"
]
1 => array:1 [
"name" => "ROUEFF François"
]
]
"ouvrage" => ""
"keywords" => []
"updatedAt" => "2025-04-04 09:49:16"
"publicationUrl" => "https://doi.org/10.48550/arXiv.2410.12035"
"publicationInfo" => array:3 [
"pages" => ""
"volume" => ""
"number" => ""
]
"type" => array:2 [
"fr" => "Documents de travail"
"en" => "Working Papers"
]
"support_type" => array:2 [
"fr" => "Cahier de Recherche"
"en" => "Working Papers"
]
"countries" => array:2 [
"fr" => "États-Unis"
"en" => "United States of America"
]
"abstract" => array:2 [
"fr" => "Several popular variational bounds involving importance weighting ideas have been proposed to generalize and improve on the Evidence Lower BOund (ELBO) in the context of maximum likelihood optimization, such as the Importance Weighted Auto-Encoder (IWAE) and the Variational Rényi (VR) bounds. The methodology to learn the parameters of interest using these bounds typically amounts to running gradient-based variational inference algorithms that incorporate the reparameterization trick. However, the way the choice of the variational bound impacts the outcome of variational inference algorithms can be unclear. Recently, the VR-IWAE bound was introduced as a variational bound that unifies the ELBO, IWAE and VR bounds methodologies. In this paper, we provide two analyses for the reparameterized and doubly-reparameterized gradient estimators of the VR-IWAE bound, which reveal the advantages and limitations of these gradient estimators while enabling us to compare of the ELBO, IWAE and VR bounds methodologies. Our work advances the understanding of importance weighted variational inference methods and we illustrate our theoretical findings empirically."
"en" => "Several popular variational bounds involving importance weighting ideas have been proposed to generalize and improve on the Evidence Lower BOund (ELBO) in the context of maximum likelihood optimization, such as the Importance Weighted Auto-Encoder (IWAE) and the Variational Rényi (VR) bounds. The methodology to learn the parameters of interest using these bounds typically amounts to running gradient-based variational inference algorithms that incorporate the reparameterization trick. However, the way the choice of the variational bound impacts the outcome of variational inference algorithms can be unclear. Recently, the VR-IWAE bound was introduced as a variational bound that unifies the ELBO, IWAE and VR bounds methodologies. In this paper, we provide two analyses for the reparameterized and doubly-reparameterized gradient estimators of the VR-IWAE bound, which reveal the advantages and limitations of these gradient estimators while enabling us to compare of the ELBO, IWAE and VR bounds methodologies. Our work advances the understanding of importance weighted variational inference methods and we illustrate our theoretical findings empirically."
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-06-15T20:21:41.000Z"
]
+lang: "en"
+"_type": "_doc"
+"_score": 7.0262403
+"parent": null
}
6 => Essec\Faculty\Model\Contribution {#2228
#_index: "academ_contributions"
#_id: "15592"
#_source: array:18 [
"id" => "15592"
"slug" => "15592-learning-with-importance-weighted-variational-inference-asymptotics-for-gradient-estimators-of-the-vr-iwae-bound"
"yearMonth" => "2025-03"
"year" => "2025"
"title" => "Learning with Importance Weighted Variational Inference: Asymptotics for Gradient Estimators of the VR-IWAE Bound"
"description" => "DAUDEL, K. et ROUEFF, F. (2025). Learning with Importance Weighted Variational Inference: Asymptotics for Gradient Estimators of the VR-IWAE Bound. Dans: 2025 BIRS Workshop "Efficient Approximate Bayesian Inference". Banff."
"authors" => array:2 [
0 => array:3 [
"name" => "DAUDEL Kamélia"
"bid" => "B00812202"
"slug" => "daudel-kamelia"
]
1 => array:1 [
"name" => "ROUEFF François"
]
]
"ouvrage" => "2025 BIRS Workshop "Efficient Approximate Bayesian Inference""
"keywords" => []
"updatedAt" => "2025-04-24 17:22:53"
"publicationUrl" => null
"publicationInfo" => array:3 [
"pages" => ""
"volume" => ""
"number" => ""
]
"type" => array:2 [
"fr" => "Communications dans une conférence"
"en" => "Presentations at an Academic or Professional conference"
]
"support_type" => array:2 [
"fr" => null
"en" => null
]
"countries" => array:2 [
"fr" => null
"en" => null
]
"abstract" => array:2 [
"fr" => ""
"en" => ""
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-06-15T20:21:41.000Z"
]
+lang: "en"
+"_type": "_doc"
+"_score": 7.0262403
+"parent": null
}
7 => Essec\Faculty\Model\Contribution {#2231
#_index: "academ_contributions"
#_id: "15648"
#_source: array:18 [
"id" => "15648"
"slug" => "15648-learning-with-importance-weighted-variational-inference-asymptotics-for-gradient-estimators-of-the-vr-iwae-bound"
"yearMonth" => "2024-11"
"year" => "2024"
"title" => "Learning with Importance Weighted Variational Inference: Asymptotics for Gradient Estimators of the VR-IWAE Bound"
"description" => "DAUDEL, K. et ROUEFF, F. (2024). Learning with Importance Weighted Variational Inference: Asymptotics for Gradient Estimators of the VR-IWAE Bound. Dans: 2024 Rethinking the Role of Bayesianism in the Age of Modern AI. Saarbrücken."
"authors" => array:2 [
0 => array:3 [
"name" => "DAUDEL Kamélia"
"bid" => "B00812202"
"slug" => "daudel-kamelia"
]
1 => array:1 [
"name" => "ROUEFF François"
]
]
"ouvrage" => "2024 Rethinking the Role of Bayesianism in the Age of Modern AI"
"keywords" => []
"updatedAt" => "2025-05-02 10:09:57"
"publicationUrl" => null
"publicationInfo" => array:3 [
"pages" => ""
"volume" => ""
"number" => ""
]
"type" => array:2 [
"fr" => "Communications dans une conférence"
"en" => "Presentations at an Academic or Professional conference"
]
"support_type" => array:2 [
"fr" => null
"en" => null
]
"countries" => array:2 [
"fr" => null
"en" => null
]
"abstract" => array:2 [
"fr" => ""
"en" => ""
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2025-06-15T20:21:41.000Z"
]
+lang: "en"
+"_type": "_doc"
+"_score": 7.0262403
+"parent": null
}
]
"avatar" => "https://faculty.essec.edu/wp-content/uploads/avatars/B00812202.jpg"
"contributionCounts" => 8
"personalLinks" => array:3 [
0 => "<a href="https://orcid.org/0009-0006-3232-9240" target="_blank">ORCID</a>"
1 => "<a href="https://scholar.google.com/citations?hl=en&user=q1xj2FgAAAAJ" target="_blank">Google scholar</a>"
2 => "<a href="https://kdaudel.github.io/" target="_blank">Personal site</a>"
]
"docTitle" => "Kamélia DAUDEL"
"docSubtitle" => "Assistant Professor"
"docDescription" => "Department: Information Systems, Data Analytics and Operations<br>Campus de Cergy"
"docType" => "cv"
"docPreview" => "<img src="https://faculty.essec.edu/wp-content/uploads/avatars/B00812202.jpg"><span><span>Kamélia DAUDEL</span><span>B00812202</span></span>"
"academ_cv_info" => ""
]
#_index: "academ_cv"
+lang: "en"
+"_type": "_doc"
+"_score": 5.028257
+"parent": null
}
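For reference, below is a minimal sketch of how the contribution data shown in the dump above could be post-processed. It assumes the data is available as plain associative arrays with the keys visible in the dump ("year", "title", "type", "publicationUrl"); the function name formatPublications and the $source variable are illustrative assumptions, not part of the Essec\Faculty\Model\Profile API.

<?php

// Hypothetical helper: group contributions by year, keeping only the title,
// the English type label and the publication URL. Operates on plain arrays
// shaped like the "_source" payloads in the dump, not on the model objects.
function formatPublications(array $source): array
{
    $byYear = [];

    foreach ($source['contributions'] ?? [] as $contribution) {
        $year = $contribution['year'] ?? 'n.d.';

        $byYear[$year][] = [
            'title' => $contribution['title'] ?? '',
            'type'  => $contribution['type']['en'] ?? '',
            'url'   => $contribution['publicationUrl'] ?? null,
        ];
    }

    // Most recent years first.
    krsort($byYear);

    return $byYear;
}

// Usage (assuming $source holds the decoded contribution data):
// $publications = formatPublications($source);
// foreach ($publications as $year => $items) {
//     printf("%s: %d contribution(s)\n", $year, count($items));
// }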