% NOTE(review): cleaned auto-exported entry — bare DOI (removed spurious "\_"),
% mis-encoded language/address values ("英语"/"德国") translated, names normalized
% to "Last, First", Unicode en-dash replaced with "--" for classic-BibTeX safety.
% Citation key kept unchanged so existing \cite commands still resolve.
% TODO confirm: consider adding the LNCS volume number for this proceedings part.
@inproceedings{340a12784bf3442f80c0a140d3eb35ae,
  title     = {Neural Network Compression via Learnable Wavelet Transforms},
  abstract  = {Wavelets are well known for data compression, yet have rarely been applied to the compression of neural networks. This paper shows how the fast wavelet transform can be used to compress linear layers in neural networks. Linear layers still occupy a significant portion of the parameters in recurrent neural networks (RNNs). Through our method, we can learn both the wavelet bases and corresponding coefficients to efficiently represent the linear layers of RNNs. Our wavelet compressed RNNs have significantly fewer parameters yet still perform competitively with the state-of-the-art on synthetic and real-world RNN benchmarks (Source code is available at https://github.com/v0lta/Wavelet-network-compression). Wavelet optimization adds basis flexibility, without large numbers of extra weights.},
  keywords  = {Network compression, Wavelets},
  author    = {Wolter, Moritz and Lin, Shaohui and Yao, Angela},
  editor    = {Farka{\v s}, Igor and Masulli, Paolo and Wermter, Stefan},
  booktitle = {Artificial Neural Networks and Machine Learning -- ICANN 2020 - 29th International Conference on Artificial Neural Networks, Proceedings},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer Science and Business Media Deutschland GmbH},
  address   = {Germany},
  year      = {2020},
  pages     = {39--51},
  doi       = {10.1007/978-3-030-61616-8_4},
  isbn      = {9783030616151},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2020, Springer Nature Switzerland AG.; 29th International Conference on Artificial Neural Networks, ICANN 2020 ; Conference date: 15-09-2020 Through 18-09-2020},
}