@inproceedings{d3f33f2343a341cc9dcb727287e03d89,
  title     = {{Touch \& Activate}: Adding Interactivity to Existing Objects Using Active Acoustic Sensing},
  abstract  = {In this paper, we present a novel acoustic touch sensing technique called Touch \& Activate. It recognizes a rich context of touches including grasp on existing objects by attaching only a vibration speaker and a piezo-electric microphone paired as a sensor. It provides easy hardware configuration for prototyping interactive objects that have touch input capability. We conducted a controlled experiment to measure the accuracy and trade-off between the accuracy and number of training rounds for our technique. From its results, per-user recognition accuracies with five touch gestures for a plastic toy as a simple example and six hand postures for the posture recognition as a complex example were 99.6\% and 86.3\%, respectively. Walk up user recognition accuracies for the two applications were 97.8\% and 71.2\%, respectively. Since the results of our experiment showed a promising accuracy for the recognition of touch gestures and hand postures, Touch \& Activate should be feasible for prototyping interactive objects that have touch input capability.},
  keywords  = {Acoustic classification, Gestures, Grasp, Machine learning, Piezo-electric sensor, Prototyping, Sensors, Support vector machine, Tangibles, Touch},
  author    = {Ono, Makoto and Shizuki, Buntarou and Tanaka, Jiro},
  year      = {2013},
  doi       = {10.1145/2501988.2501989},
  language  = {English},
  isbn      = {9781450322683},
  pages     = {31--40},
  booktitle = {{UIST} 2013 - Proceedings of the 26th Annual {ACM} Symposium on User Interface Software and Technology},
  publisher = {ACM},
  note      = {26th Annual ACM Symposium on User Interface Software and Technology, UIST 2013 ; Conference date: 08-10-2013 Through 11-10-2013},
}