@inproceedings{5542504a73074ece91e9b9aa6de95aa5,
title = "A Kinect based vibrotactile feedback system to assist the visually impaired",
abstract = "This paper presents a Microsoft Kinect based vibrotactile feedback system to aid in navigation for the visually impaired. The lightweight wearable system interprets the visual scene and presents obstacle distance and characteristic information to the user. The scene is converted into a distance map using the Kinect, then processed and interpreted using an Intel Next Unit of Computing (NUC). That information is then converted via a microcontroller into vibrotactile feedback, presented to the user through two four-by-four vibration motor arrays woven into gloves. The system is shown to successfully identify, track, and present the closest object, the closest human, and multiple humans, and to perform distance measurements.",
keywords = "blind, kinect sensor, navigation assistance, tactile feedback, visually impaired",
author = "Yelamarthi, Kumar and DeJong, {Brian P.} and Laubhan, Kevin",
note = "Publisher Copyright: {\textcopyright} 2014 IEEE.; Conference date: 03-08-2014 through 06-08-2014",
year = "2014",
month = sep,
day = "23",
doi = "10.1109/MWSCAS.2014.6908495",
language = "English",
series = "Midwest Symposium on Circuits and Systems",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "635--638",
booktitle = "2014 IEEE 57th International Midwest Symposium on Circuits and Systems, MWSCAS 2014",
}