@inproceedings{eed8cd2d97ec4c1b9ae7b4deb7612875,
title = "MMReLU: A Simple and Smooth Activation Function with High Convergence Speed",
abstract = "Activation functions have a major effect on the performance of deep networks. In the past few years, there has been increasing interest in the construction of novel activation functions. In this paper, we introduce a novel non-monotonic activation function, named the Moreau Mish Rectified Linear Unit (MMReLU). It is simple, efficient, and robust compared with Mish, ReLU, and other common activations. Experimental results on several classical datasets demonstrate that MMReLU outperforms its counterparts in both convergence speed and accuracy. We show that MMReLU can enhance the capacity of neural networks, and especially their convergence speed, without changing the network structure.",
keywords = "activation function, simple, smooth",
author = "Longda Wu and Shuai Wang and Liping Fang and Huiqian Du",
note = "Publisher Copyright: {\textcopyright} 2021 IEEE.; 7th International Conference on Computer and Communications, ICCC 2021; Conference date: 10-12-2021 through 13-12-2021",
year = "2021",
doi = "10.1109/ICCC54389.2021.9674529",
language = "English",
series = "2021 7th International Conference on Computer and Communications, ICCC 2021",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "1444--1448",
booktitle = "2021 7th International Conference on Computer and Communications, ICCC 2021",
address = "United States",
}
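
For reference, the following is a minimal sketch of the two baseline activations the abstract names, ReLU and Mish. The exact MMReLU formula is defined only in the paper itself and is not reproduced here; NumPy and the function names below are illustrative assumptions, not the authors' implementation.

    import numpy as np

    def relu(x):
        # ReLU: max(0, x). Piecewise linear and non-smooth at 0,
        # which is one motivation for smooth alternatives.
        return np.maximum(0.0, x)

    def mish(x):
        # Mish: x * tanh(softplus(x)), where softplus(x) = log(1 + e^x).
        # np.logaddexp(0, x) computes softplus in a numerically stable way.
        # Mish is smooth and non-monotonic, the family of activations
        # that MMReLU is compared against in the paper.
        return x * np.tanh(np.logaddexp(0.0, x))

    # Quick check on a few sample inputs.
    x = np.linspace(-3.0, 3.0, 7)
    print(relu(x))
    print(mish(x))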