@inproceedings{1119,
  author    = {Zhou, Shuang and Huo, Yuankai and Bao, Shunxing and Landman, Bennett and Gokhale, Aniruddha},
  title     = {{FedACA}: An Adaptive Communication-Efficient Asynchronous Framework for Federated Learning},
  booktitle = {{IEEE} International Conference on Autonomic Computing and Self-Organizing Systems ({ACSOS})},
  year      = {2022},
  month     = sep,
  pages     = {71--80},
  publisher = {IEEE},
  address   = {CA, USA},
  isbn      = {978-1-6654-7137-4},
  doi       = {10.1109/ACSOS55765.2022.00025},
  url       = {https://ieeexplore.ieee.org/document/9935015},
  abstract  = {Federated Learning (FL) is a type of distributed machine learning, which avoids sharing privacy and sensitive data with a central server. Despite the advances in FL, current approaches cannot provide satisfactory performance when dealing with heterogeneity in data and unpredictability of system devices. First, straggler devices can adversely impact convergence speed of the global model training. Second, for model aggregation in traditional FL, edge devices communicate frequently with a central server using their local updates. However, this process may encounter communication bottleneck caused by substantial bandwidth usage. To address these challenges, this paper presents an adaptive, communication-efficient and asynchronous FL technique called FedACA comprising feedback loops at two levels. Our approach contains a self-adjusting local training step with active participant selection to accelerate the convergence of the global model. To reduce the communication overhead, FedACA supports an adaptive uploading policy at the edge devices, which leverages the model similarity and L2-norm differences between the current and previous local gradient. It also utilizes contrastive learning to tackle data heterogeneity by regularizing the local training if the local model has deviated from the global model and helps with the model similarity measurement in the uploading policy. Extensive experiments on a benchmark comprising three image datasets with non-independent and identically distributed (non-i.i.d) data show that FedACA adapts well to the straggler effect in asynchronous environments and also provides significant reductions in communication costs compared to other state-of-the-art FL algorithms.},
}