Essec\Faculty\Model\Contribution {#2233
#_index: "academ_contributions"
#_id: "15863"
#_source: array:26 [
"id" => "15863"
"slug" => "15863-a-geometrical-analysis-of-kernel-ridge-regression-and-its-applications"
"yearMonth" => "2025-12"
"year" => "2025"
"title" => "A Geometrical Analysis of Kernel Ridge Regression and its Applications"
"description" => "GAVRILOPOULOS, G., LECUE, G. et SHANG, Z. (2025). A Geometrical Analysis of Kernel Ridge Regression and its Applications. <i>Annals of Statistics</i>, 53(6), pp. 2592-2616."
"authors" => array:3 [
0 => array:3 [
"name" => "LECUE Guillaume"
"bid" => "B00806953"
"slug" => "lecue-guillaume"
]
1 => array:1 [
"name" => "GAVRILOPOULOS Georgios"
]
2 => array:1 [
"name" => "SHANG Zong"
]
]
"ouvrage" => ""
"keywords" => array:5 [
0 => "benign overfitting "
1 => "deep learning models "
2 => "Dvoretsky–Milman theorem "
3 => "kernel ridge regression "
4 => "restricted isomorphy property"
]
"updatedAt" => "2026-01-14 08:41:19"
"publicationUrl" => "https://doi.org/10.1214/25-AOS2556"
"publicationInfo" => array:3 [
"pages" => "2592-2616"
"volume" => "53"
"number" => "6"
]
"type" => array:2 [
"fr" => "Articles"
"en" => "Journal articles"
]
"support_type" => array:2 [
"fr" => "Revue scientifique"
"en" => "Scientific journal"
]
"countries" => array:2 [
"fr" => null
"en" => null
]
"abstract" => array:2 [
"fr" => "We obtain upper bounds for the estimation error of Kernel Ridge Regression (KRR) for all non-negative regularization parameters, offering a geometric perspective on various phenomena in KRR. As applications: 1. We address the Multiple Descents problem, unifying the proofs of [47] and [33] for polynomial kernels in non-asymptotic regime and we establish Multiple Descents for the generalization error of KRR for polynomial kernel under sub-Gaussian design in asymptotic regimes. 2. In the non-asymptotic regime, we have established a one-sided isomorphic version of the Gaussian Equivalent Conjecture for sub-Gaussian design vectors. 3. We offer a novel perspective on the linearization of kernel matrices of non-linear kernel, extending it to the power regime for polynomial kernels. 4. Our theory is applicable to data-dependent kernels, providing a convenient and accurate tool for the feature learning regime in deep learning theory. 5. Our theory extends the results in [72] under weak moment assumption. Ourproofis basedonthreemathematical tools developed in this paper that can be of independent interest: 1. Dvoretzky-Milman theorem for ellipsoids under (very) weak moment assumptions. 2. Restricted Isomorphic Property in Reproducing Kernel Hilbert Spaces with embedding index conditions. 3. Aconcentration inequality for finite-degree polynomial kernel functions."
"en" => "We obtain upper bounds for the estimation error of Kernel Ridge Regression (KRR) for all non-negative regularization parameters, offering a geometric perspective on various phenomena in KRR. As applications: 1. We address the Multiple Descents problem, unifying the proofs of [47] and [33] for polynomial kernels in non-asymptotic regime and we establish Multiple Descents for the generalization error of KRR for polynomial kernel under sub-Gaussian design in asymptotic regimes. 2. In the non-asymptotic regime, we have established a one-sided isomorphic version of the Gaussian Equivalent Conjecture for sub-Gaussian design vectors. 3. We offer a novel perspective on the linearization of kernel matrices of non-linear kernel, extending it to the power regime for polynomial kernels. 4. Our theory is applicable to data-dependent kernels, providing a convenient and accurate tool for the feature learning regime in deep learning theory. 5. Our theory extends the results in [72] under weak moment assumption. Ourproofis basedonthreemathematical tools developed in this paper that can be of independent interest: 1. Dvoretzky-Milman theorem for ellipsoids under (very) weak moment assumptions. 2. Restricted Isomorphic Property in Reproducing Kernel Hilbert Spaces with embedding index conditions. 3. Aconcentration inequality for finite-degree polynomial kernel functions."
]
"authors_fields" => array:2 [
"fr" => "Systèmes d'Information, Data Analytics et Opérations"
"en" => "Information Systems, Data Analytics and Operations"
]
"indexedAt" => "2026-02-02T14:21:48.000Z"
"docTitle" => "A Geometrical Analysis of Kernel Ridge Regression and its Applications"
"docSurtitle" => "Articles"
"authorNames" => "<a href="/cv/lecue-guillaume">LECUE Guillaume</a>, GAVRILOPOULOS Georgios, SHANG Zong"
"docDescription" => "<span class="document-property-authors">LECUE Guillaume, GAVRILOPOULOS Georgios, SHANG Zong</span><br><span class="document-property-authors_fields">Systèmes d'Information, Data Analytics et Opérations</span> | <span class="document-property-year">2025</span>"
"keywordList" => "<a href="#">benign overfitting </a>, <a href="#">deep learning models </a>, <a href="#">Dvoretsky–Milman theorem </a>, <a href="#">kernel ridge regression </a>, <a href="#">restricted isomorphy property</a>"
"docPreview" => "<b>A Geometrical Analysis of Kernel Ridge Regression and its Applications</b><br><span>2025-12 | Articles </span>"
"docType" => "research"
"publicationLink" => "<a href="https://doi.org/10.1214/25-AOS2556" target="_blank">A Geometrical Analysis of Kernel Ridge Regression and its Applications</a>"
]
+lang: "fr"
+"_score": 8.672143
+"_ignored": array:2 [
0 => "abstract.en.keyword"
1 => "abstract.fr.keyword"
]
+"parent": null
}
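The abstract in this record concerns bounds on the estimation error of Kernel Ridge Regression for every regularization level λ ≥ 0. As a minimal sketch of the standard textbook definition (generic ingredients, not taken from the paper itself): given data (x_1, y_1), ..., (x_n, y_n) and a reproducing kernel Hilbert space \mathcal{H} with kernel k, KRR solves

\[
\hat f_\lambda = \operatorname*{arg\,min}_{f \in \mathcal{H}} \; \frac{1}{n} \sum_{i=1}^{n} \bigl( y_i - f(x_i) \bigr)^2 + \lambda \| f \|_{\mathcal{H}}^2 ,
\]

which by the representer theorem has the closed form

\[
\hat f_\lambda(x) = k(x, X)^\top \bigl( K + n \lambda I_n \bigr)^{-1} y , \qquad K_{ij} = k(x_i, x_j) .
\]

The ridgeless limit λ → 0 recovers the minimum-norm interpolant, which is the setting in which the "benign overfitting" keyword above is usually discussed.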