@inproceedings{d29975524aa14bd1861b99d47e12d927,
title = "DiSCO: Distributed optimization for self-concordant empirical loss",
abstract = "We propose a new distributed algorithm for empirical risk minimization in machine learning. The algorithm is based on an inexact damped Newton method, where the inexact Newton steps are computed by a distributed preconditioned conjugate gradient method. We analyze its iteration complexity and communication efficiency for minimizing self-concordant empirical loss functions, and discuss the results for distributed ridge regression, logistic regression and binary classification with a smoothed hinge loss. In a standard setting for supervised learning, where the n data points are i.i.d. sampled and when the regularization parameter scales as 1√n, we show that the proposed algorithm is communication efficient: the required round of communication does not increase with the sample size n, and only grows slowly with the number of machines.",
author = "Yuchen Zhang and Lin Xiao",
year = "2015",
language = "English",
series = "32nd International Conference on Machine Learning, ICML 2015",
publisher = "International Machine Learning Society (IMLS)",
pages = "362--370",
editor = "Francis Bach and David Blei",
booktitle = "32nd International Conference on Machine Learning, ICML 2015",
note = "32nd International Conference on Machine Learning, ICML 2015 ; Conference date: 06-07-2015 Through 11-07-2015",
}