@inproceedings{cf8c19d2f5364d60baebbb27ff1c8ee3,
title = "Toward the flexible automation for robot learning from human demonstration using multimodal perception approach",
abstract = "This study proposes a multi-modal perception approach that enables a robotic arm to perform flexible automation and further simplifies the complicated coding process of controlling a robotic arm. A depth camera is utilized to detect faces and hand gestures in order to recognize the operator's identity and commands. In addition, the kinematics of the robotic arm associated with the positions of the manipulated objects can be derived from the information obtained through human demonstrations and detected objects. In the experiments, the proposed multi-modal perception system first recognizes the operator. Then, the operator can demonstrate a task to generate learning data with the assistance of gestures. Afterward, the robotic arm can perform the same task as the human demonstration. While imitating the task, the robotic arm can also be guided by the operator's gesture commands.",
keywords = "face recognition, gesture recognition, human demonstration, multi-modal perception, object recognition",
author = "Chen, {Jing Hao} and Lu, {Guan Yi} and Chien, {Yi Hsing} and Chiang, {Hsin Han} and Wang, {Wei Yen} and Hsu, {Chen Chien}",
note = "Publisher Copyright: {\textcopyright} 2019 IEEE; 2019 International Conference on System Science and Engineering, ICSSE 2019; Conference date: 20-07-2019 through 21-07-2019",
year = "2019",
month = jul,
doi = "10.1109/ICSSE.2019.8823444",
language = "English",
series = "Proceedings of 2019 International Conference on System Science and Engineering, ICSSE 2019",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "148--153",
booktitle = "Proceedings of 2019 International Conference on System Science and Engineering, ICSSE 2019",
}