@inproceedings{ef0064ec49e24d3691abeb4ca3a07cdc,
title = "Focused multi-task learning using Gaussian processes",
abstract = "Given a learning task for a data set, learning it together with related tasks (data sets) can improve performance. Gaussian process models have been applied to such multi-task learning scenarios, based on joint priors for functions underlying the tasks. In previous Gaussian process approaches, all tasks have been assumed to be of equal importance, whereas in transfer learning the goal is asymmetric: to enhance performance on a target task given all other tasks. In both settings, transfer learning and joint modelling, negative transfer is a key problem: performance may actually decrease if the tasks are not related closely enough. In this paper, we propose a Gaussian process model for the asymmetric setting, which learns to ``explain away'' non-related variation in the additional tasks, in order to focus on improving performance on the target task. In experiments, our model improves performance compared to single-task learning, symmetric multi-task learning using hierarchical Dirichlet processes, and transfer learning based on predictive structure learning.",
keywords = "Gaussian processes, asymmetric setting, multi-task learning, negative transfer",
author = "Gayle Leen and Jaakko Peltonen and Samuel Kaski",
year = "2011",
doi = "10.1007/978-3-642-23783-6_20",
language = "English",
isbn = "978-3-642-23782-9",
volume = "6912",
series = "Lecture Notes in Computer Science",
number = "Part 2",
pages = "310--325",
booktitle = "Machine Learning and Knowledge Discovery in Databases - European Conference, ECML PKDD 2011, Proceedings",
note = "European Conference on Machine Learning and Principles and Practice of Knowledge Discovery in Databases, ECML PKDD 2011; Conference date: 5-9 September 2011",
}