The entropy regularization information criterion
Effective methods of capacity control via uniform convergence bounds for function expansions have been largely limited to Support Vector Machines, where good bounds are obtainable by the entropy number approach. We extend these methods to systems with expansions in terms of arbitrary (parametrized) basis functions and a wide range of regularization methods, covering the whole range of general linear additive models. This is achieved by a data-dependent analysis of the eigenvalues of the corresponding design matrix.
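The sketch below illustrates the kind of quantity the abstract refers to: for a linear additive model f(x) = sum_j w_j phi_j(x), one forms the design matrix Phi with entries phi_j(x_i) on the training sample and examines its eigenvalue spectrum, whose decay governs entropy-number-based capacity terms. This is a minimal illustration, not the paper's actual procedure; the Gaussian basis functions, the one-dimensional synthetic data, and the 1/n scaling are assumptions made only for the example.

import numpy as np

def design_matrix(X, basis_fns):
    # Design matrix Phi with Phi[i, j] = phi_j(x_i).
    return np.column_stack([phi(X) for phi in basis_fns])

# Illustrative parametrized basis: Gaussian bumps at fixed centers (assumption,
# not taken from the paper).
centers = np.linspace(-3.0, 3.0, 10)
basis_fns = [lambda x, c=c: np.exp(-0.5 * (x - c) ** 2) for c in centers]

# Synthetic one-dimensional training sample (illustrative only).
rng = np.random.default_rng(0)
X = rng.normal(size=200)

Phi = design_matrix(X, basis_fns)  # shape (n_samples, n_basis)

# Data-dependent spectrum: eigenvalues of the scaled Gram matrix Phi^T Phi / n.
# The rate at which these eigenvalues decay controls covering/entropy numbers
# of the regularized function class, and hence the capacity term in the bound.
eigvals = np.linalg.eigvalsh(Phi.T @ Phi / len(X))
print(np.sort(eigvals)[::-1])

Running the snippet prints the sorted eigenvalues of the empirical Gram matrix; swapping in a different basis or regularizer changes this spectrum, which is precisely why the analysis is data dependent.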
@inproceedings{816,
  title         = {The entropy regularization information criterion},
  journal       = {Advances in Neural Information Processing Systems},
  booktitle     = {Advances in Neural Information Processing Systems 12},
  abstract      = {Effective methods of capacity control via uniform convergence bounds for function expansions have been largely limited to Support Vector machines, where good bounds are obtainable by the entropy number approach. We extend these methods to systems with expansions in terms of arbitrary (parametrized) basis functions and a wide range of regularization methods covering the whole range of general linear additive models. This is achieved by a data dependent analysis of the eigenvalues of the corresponding design matrix.},
  pages         = {342-348},
  editors       = {SA Solla and TK Leen and K-R M{\"u}ller},
  publisher     = {MIT Press},
  organization  = {Max-Planck-Gesellschaft},
  school        = {Biologische Kybernetik},
  address       = {Cambridge, MA, USA},
  month         = jun,
  year          = {2000},
  slug          = {816},
  author        = {Smola, AJ. and Shawe-Taylor, J. and Sch{\"o}lkopf, B. and Williamson, RC.},
  month_numeric = {6}
}