@inproceedings{ahn2014distributed,
title = "Distributed stochastic gradient MCMC",
abstract = "Probabilistic inference on a big data scale is becoming increasingly relevant to both the machine learning and statistics communities. Here we introduce the first fully distributed MCMC algorithm based on stochastic gradients. We argue that stochastic gradient MCMC algorithms are particularly suited for distributed inference because individual chains can draw mini-batches from their local pool of data for a flexible amount of time before jumping to or syncing with other chains. This greatly reduces communication overhead and allows adaptive load balancing. Our experiments for LDA on Wikipedia and Pubmed show that relative to the state of the art in distributed MCMC we reduce compute time from 27 hours to half an hour in order to reach the same perplexity level.",
author = "Sungjin Ahn and Babak Shahbaba and Max Welling",
note = "Publisher Copyright: Copyright {\textcopyright} (2014) by the International Machine Learning Society (IMLS) All rights reserved.; 31st International Conference on Machine Learning, ICML 2014 ; Conference date: 21-06-2014 Through 26-06-2014",
year = "2014",
language = "English (US)",
series = "31st International Conference on Machine Learning, ICML 2014",
publisher = "International Machine Learning Society (IMLS)",
pages = "2735--2745",
booktitle = "31st International Conference on Machine Learning, ICML 2014",
}
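
The abstract describes the core algorithmic idea: independent SGLD chains run on local data shards for a flexible stretch of steps before jumping to or syncing with other workers. Below is a minimal single-process sketch of that idea for a toy 1-D Gaussian-mean model; the round-robin hop schedule, the step size eps, the steps-per-hop count tau, and all other names are illustrative assumptions, not the paper's actual D-SGLD algorithm or its tuning.

import numpy as np

rng = np.random.default_rng(0)

# Synthetic data for a 1-D Gaussian-mean model, split into local shards,
# one per (simulated) worker.
N, n_workers = 10_000, 4
data = rng.normal(loc=2.0, scale=1.0, size=N)
shards = np.array_split(data, n_workers)

def grad_log_post(theta, batch, N):
    # Stochastic gradient of the log posterior: N(0, 10) prior on theta,
    # N(theta, 1) likelihood; the mini-batch term is rescaled by N/|batch|.
    grad_prior = -theta / 10.0
    grad_lik = (N / len(batch)) * np.sum(batch - theta)
    return grad_prior + grad_lik

theta, eps, tau, batch_size = 0.0, 1e-5, 50, 32
samples = []
for sweep in range(20):              # the chain hops between workers round-robin
    shard = shards[sweep % n_workers]
    for _ in range(tau):             # tau local SGLD steps before the next hop
        batch = rng.choice(shard, size=batch_size, replace=False)
        noise = rng.normal(scale=np.sqrt(eps))
        theta += 0.5 * eps * grad_log_post(theta, batch, N) + noise
        samples.append(theta)

print("posterior mean estimate:", np.mean(samples[len(samples) // 2 :]))

In the paper's actual setting the chains run in parallel across machines and the time spent on each worker is adapted for load balancing; the sequential loop here only mimics the hop structure while keeping the SGLD update (half-step rescaled stochastic gradient plus N(0, eps) noise) intact.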