@inproceedings{430b25b980684145992aaed2cb11f67a,
title = "Accelerated stochastic block coordinate gradient descent for sparsity constrained nonconvex optimization",
abstract = "We propose an accelerated stochastic block coordinate descent algorithm for nonconvex optimization under a sparsity constraint in the high-dimensional regime. The core of our algorithm is to leverage both the stochastic partial gradient and the full partial gradient restricted to each coordinate block to accelerate convergence. We prove that the algorithm converges to the unknown true parameter at a linear rate, up to the statistical error of the underlying model. Experiments on both synthetic and real datasets back up our theory.",
author = "Jinghui Chen and Quanquan Gu",
note = "Funding Information: We would like to thank the anonymous reviewers for their helpful comments. Research was sponsored by Quanquan Gu's startup funding at the Department of Systems and Information Engineering, University of Virginia. 32nd Conference on Uncertainty in Artificial Intelligence 2016, UAI 2016; Conference date: 25-06-2016 through 29-06-2016",
year = "2016",
language = "English (US)",
series = "32nd Conference on Uncertainty in Artificial Intelligence 2016, UAI 2016",
publisher = "Association for Uncertainty in Artificial Intelligence (AUAI)",
pages = "132--141",
editor = "Dominik Janzing and Alexander Ihler",
booktitle = "32nd Conference on Uncertainty in Artificial Intelligence 2016, UAI 2016",
}
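
The abstract sketches the core idea: combining stochastic partial gradients with periodically computed full partial gradients on each coordinate block, under a hard sparsity constraint. Below is a minimal, hypothetical Python sketch of one way such a scheme could look for sparse linear regression, using SVRG-style variance reduction per block followed by hard thresholding. All names (hard_threshold, asbcd_ht), hyperparameters, and the squared-loss choice are illustrative assumptions, not the paper's exact algorithm.

# Illustrative sketch only: variance-reduced block coordinate gradient
# descent with hard thresholding for sparse linear regression.
# Names and hyperparameters are hypothetical, not taken from the paper.
import numpy as np

def hard_threshold(w, s):
    """Keep the s largest-magnitude entries of w, zero out the rest."""
    out = np.zeros_like(w)
    if s > 0:
        idx = np.argsort(np.abs(w))[-s:]
        out[idx] = w[idx]
    return out

def asbcd_ht(X, y, s, n_blocks=4, n_outer=20, n_inner=50, eta=0.05, seed=0):
    """Sketch: SVRG-style partial gradients restricted to coordinate
    blocks, followed by hard thresholding to enforce ||w||_0 <= s."""
    rng = np.random.default_rng(seed)
    n, d = X.shape
    blocks = np.array_split(np.arange(d), n_blocks)
    w = np.zeros(d)
    for _ in range(n_outer):
        w_snap = w.copy()
        # Full gradient at the snapshot (squared loss: (1/n) X^T (Xw - y)).
        full_grad = X.T @ (X @ w_snap - y) / n
        for _ in range(n_inner):
            j = rng.integers(n_blocks)   # random coordinate block
            i = rng.integers(n)          # random sample
            blk = blocks[j]
            xi = X[i]
            # Variance-reduced partial gradient on the chosen block.
            g = (xi @ w - y[i]) * xi[blk] \
                - (xi @ w_snap - y[i]) * xi[blk] + full_grad[blk]
            w[blk] -= eta * g
            # Project back onto the sparsity constraint.
            w = hard_threshold(w, s)
    return w

if __name__ == "__main__":
    rng = np.random.default_rng(1)
    n, d, s = 200, 50, 5
    w_true = np.zeros(d)
    w_true[:s] = rng.normal(size=s)
    X = rng.normal(size=(n, d))
    y = X @ w_true + 0.01 * rng.normal(size=n)
    w_hat = asbcd_ht(X, y, s)
    print("support recovered:", set(np.flatnonzero(w_hat)) == set(range(s)))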