@techreport{66e92bcf7187442e8de466fdc709d2cb,
  title       = {{Gaussian} Regression and Optimal Finite Dimensional Linear Models},
  abstract    = {The problem of regression under Gaussian assumptions is treated generally. The relationship between Bayesian prediction, regularization and smoothing is elucidated. The ideal regression is the posterior mean and its computation scales as {$O(n^3)$}, where n is the sample size. We show that the optimal m-dimensional linear model under a given prior is spanned by the first m eigenfunctions of a covariance operator, which is a trace-class operator. This is an infinite dimensional analogue of principal component analysis. The importance of Hilbert space methods to practical statistics is also discussed.},
  keywords    = {regression, Gaussian assumptions, Bayesian prediction, regularization, smoothing, posterior mean, linear model, infinite dimensional analogue, principal component analysis, Hilbert space methods, practical statistics},
  author      = {Zhu, Huaiyu and Williams, Christopher K. I. and Rohwer, Richard and Morciniec, Michal},
  note        = {Copyright {\textcopyright} 1997, The Authors. This work is licensed under a Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License (https://creativecommons.org/licenses/by-nc-nd/4.0/).},
  year        = {1997},
  month       = jul,
  day         = {3},
  language    = {English},
  series      = {NCRG},
  number      = {97/011},
  type        = {Working Paper},
  institution = {Aston University},
  publisher   = {Aston University},
}