@inproceedings{4c8ab9c2925e4395b259c38b6cf33bcb,
title = "BCKD: Block-Correlation Knowledge Distillation",
abstract = "In this paper, we propose Block-Correlation Knowledge Distillation (BCKD), a novel and efficient knowledge distillation method that differs from the classical method, using the simple multilayer-perceptron (MLP) and the classifier of the pre-trained teacher to train the correlations between adjacent blocks of the model. Over the past few years, the performance of some methods has been restricted by the feature map size or the lack of samples in small-scale datasets. By our proposed BCKD, the above problem is satisfactorily solved and has a superior performance without introducing additional overhead. Our method is validated on CIFAR100 and CI-FAR10 datasets, and experimental results demonstrate the effectiveness and superiority of our method.",
keywords = "Block, Classifier, Correlation, Knowledge Distillation, Multilayer Perceptron",
author = "Qi Wang and Lu Liu and Wenxin Yu and Shiyu Chen and Jun Gong and Peng Chen",
note = "Publisher Copyright: {\textcopyright} 2023 IEEE.; 30th IEEE International Conference on Image Processing, ICIP 2023 ; Conference date: 08-10-2023 Through 11-10-2023",
year = "2023",
doi = "10.1109/ICIP49359.2023.10222195",
language = "English",
series = "Proceedings - International Conference on Image Processing, ICIP",
publisher = "IEEE Computer Society",
pages = "3225--3229",
booktitle = "2023 IEEE International Conference on Image Processing, ICIP 2023 - Proceedings",
address = "United States",
}