@inproceedings{kong_consensus_2021,
  title = {Consensus Control for Decentralized Deep Learning},
  author = {Kong, Lingjing and Lin, Tao and Koloskova, Anastasia and Jaggi, Martin and Stich, Sebastian U.},
  booktitle = {Proceedings of the 38th International Conference on Machine Learning},
  eventtitle = {38th International Conference on Machine Learning ({ICML})},
  series = {Proceedings of Machine Learning Research},
  volume = {139},
  pages = {5686--5696},
  publisher = {{PMLR}},
  date = {2021},
  url = {https://proceedings.mlr.press/v139/kong21a.html},
  eprinttype = {arxiv},
  eprint = {2102.04828},
  langid = {english},
  abstract = {Decentralized training of deep learning models enables on-device learning over networks, as well as efficient scaling to large compute clusters. Experiments in earlier works reveal that, even in a data-center setup, decentralized training often suffers from the degradation in the quality of the model: the training and test performance of models trained in a decentralized fashion is in general worse than that of models trained in a centralized fashion, and this performance drop is impacted by parameters such as network size, communication topology and data partitioning.},
}