Output kernel learning techniques make it possible to simultaneously learn a vector-valued function and a positive semidefinite matrix that describes the relationships between the outputs. In this paper, we introduce a new formulation that imposes a low-rank constraint on the output kernel and operates directly on a factor of the kernel matrix. First, we investigate the connection between output kernel learning and a regularization problem for an architecture with two layers. Then, we show that a variety of methods such as nuclear norm regularized regression, reduced-rank regression, principal component analysis, and low-rank matrix approximation can be seen as special cases of the output kernel learning framework. Finally, we introduce a block coordinate descent strategy for learning low-rank output kernels.
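For intuition, here is a minimal numerical sketch of the core idea: parameterize the output kernel as L = B Bᵀ with a rank-p factor B, and alternate block coordinate updates between the coefficient matrix and the factor. The simplified squared-loss objective in the comments, the function name `okl_low_rank`, and the parameters `p` and `lam` are illustrative assumptions, not the authors' exact formulation.

```python
# Illustrative sketch of low-rank output kernel learning via block
# coordinate descent. Assumed (not the paper's exact) objective:
#   J(A, B) = 1/2 ||Y - K A B^T||_F^2 + lam/2 * tr(A^T K A),
# with K (n x n) the input kernel matrix, Y (n x m) the targets, and
# B (m x p) a factor of the low-rank output kernel L = B B^T.
import numpy as np

def okl_low_rank(K, Y, p, lam=0.1, n_iter=50, seed=0):
    n, m = Y.shape
    B = np.random.default_rng(seed).standard_normal((m, p))
    s, U = np.linalg.eigh(K)   # eigendecompose K once, reuse every iteration
    for _ in range(n_iter):
        # A-step: minimize J over A, i.e. solve the Sylvester-type system
        #   K A (B^T B) + lam * A = Y B
        # elementwise in the eigenbases of K and M = B^T B.
        d, V = np.linalg.eigh(B.T @ B)
        G = U.T @ Y @ B @ V                        # rotated right-hand side
        A = U @ (G / (np.outer(s, d) + lam)) @ V.T
        # B-step: ordinary least squares given Z = K A,
        #   min_B ||Y - Z B^T||_F^2  (tiny ridge for numerical safety).
        Z = K @ A
        B = np.linalg.solve(Z.T @ Z + 1e-12 * np.eye(p), Z.T @ Y).T
    return A, B, B @ B.T   # coefficients, factor, output kernel L

# Toy usage with a Gaussian input kernel (all data here is synthetic).
if __name__ == "__main__":
    rng = np.random.default_rng(1)
    X, Y = rng.standard_normal((40, 3)), rng.standard_normal((40, 5))
    K = np.exp(-0.5 * ((X[:, None, :] - X[None, :, :]) ** 2).sum(-1))
    A, B, L = okl_low_rank(K, Y, p=2)
    print("residual:", np.linalg.norm(Y - K @ A @ B.T))
```

Each block update has a closed form, which is what makes the coordinate strategy attractive: the A-step costs one p × p eigendecomposition per iteration (the n × n one is done once), and the B-step is a small least-squares solve.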
| Author(s): | Dinuzzo, F. and Fukumizu, K. |
| Book Title: | JMLR Workshop and Conference Proceedings Volume 20 |
| Pages: | 181–196 |
| Year: | 2011 |
| Month: | November |
| Editors: | Hsu, C.-N. and Lee, W.S. |
| Publisher: | JMLR |
| BibTeX Type: | Conference Paper (inproceedings) |
| Address: | Cambridge, MA, USA |
| Event Name: | 3rd Asian Conference on Machine Learning (ACML 2011) |
| Event Place: | Taoyuan, Taiwan |
BibTeX
@inproceedings{DinuzzoF2011,
title = {Learning low-rank output kernels},
booktitle = {JMLR Workshop and Conference Proceedings Volume 20},
abstract = {Output kernel learning techniques make it possible to simultaneously learn a vector-valued function and a positive semidefinite matrix that describes the relationships between the outputs. In this paper, we introduce a new formulation that imposes a low-rank constraint on the output kernel and operates directly on a factor of the kernel matrix. First, we investigate the connection between output kernel learning and a regularization problem for an architecture with two layers. Then, we show that a variety of methods such as nuclear norm regularized regression, reduced-rank regression, principal component analysis, and low-rank matrix approximation can be seen as special cases of the output kernel learning framework. Finally, we introduce a block coordinate descent strategy for learning low-rank output kernels.},
pages = {181--196},
editor = {Hsu, C.-N. and Lee, W.S.},
publisher = {JMLR},
address = {Cambridge, MA, USA},
month = nov,
year = {2011},
author = {Dinuzzo, F. and Fukumizu, K.},
}
