@inproceedings{3a08183e36af4abc81d1a8d26d400ee3,
  title     = {Efficient and Low-Cost Deep-Learning Based Gaze Estimator for Surgical Robot Control},
  abstract  = {Surgical robots are playing more and more important role in modern operating room. However, operations by using surgical robot are not easy to handle by doctors. Vision based human-computer interaction (HCI) is a way to ease the difficulty to control surgical robots. While the problem of this method is that eyes tracking devices are expensive. In this paper, a low cost and robust deep-learning based on gaze estimator is proposed to control surgical robots. By this method, doctors can easily control the robot by specifying the starting point and ending point of the surgical robot using eye gazing. Surgical robots can also be controlled to move in 9 directions using controllers' eyes gazing information. A Densely Connected convolutional Neural Networks (Dense CNN) model for 9-direction/36-direction gaze estimation is built. The Dense CNN architecture has much more less trainable parameters compared to traditional CNN network architecture (AlexNet like/VGG like) which is more feasible to deploy on the Field-Programmable Gate Array (FPGA) and other hardware with limited memories.},
  keywords  = {Convolutional Neural Networks, Deep Learning, Gaze estimation, Minimally Invasive Surgery, Surgical robot},
  author    = {Li, Peng and Hou, Xuebin and Wei, Le and Song, Guoli and Duan, Xingguang},
  note      = {Publisher Copyright: {\textcopyright} 2018 IEEE.; 2018 IEEE International Conference on Real-Time Computing and Robotics, RCAR 2018 ; Conference date: 01-08-2018 Through 05-08-2018},
  year      = {2018},
  month     = jul,
  day       = {2},
  doi       = {10.1109/RCAR.2018.8621810},
  language  = {English},
  series    = {2018 {IEEE} International Conference on Real-Time Computing and Robotics, {RCAR} 2018},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {58--63},
  booktitle = {2018 {IEEE} International Conference on Real-Time Computing and Robotics, {RCAR} 2018},
  address   = {United States},
}