@inproceedings{9930b826bbc9432b92fdf35a4bcb2676,
title = "Audiovisual speech perception: Time for a paradigm shift",
abstract = "Most adults cannot lipread very well and, when viewing only the visual speech signal, they recognize very little of its content. Even so, the visual speech signal greatly enhances a degraded auditory speech signal, as when listening in the presence of background noise or with hearing loss. Although previous researchers have suggested that this super-additive effect is due to a distinct audiovisual integration ability, recent findings indicate that it can be accounted for solely by unimodal performance.",
keywords = "Audiovisual, Integration, Lipreading",
author = "Nancy Tye-Murray",
note = "Publisher Copyright: {\textcopyright} 2019 Proceedings of the International Congress on Acoustics. All rights reserved.; 23rd International Congress on Acoustics: Integrating 4th EAA Euroregio, ICA 2019; Conference date: 09-09-2019 through 23-09-2019",
year = "2019",
doi = "10.18154/RWTH-CONV-238895",
language = "English",
series = "Proceedings of the International Congress on Acoustics",
publisher = "International Commission for Acoustics (ICA)",
pages = "3870--3874",
editor = "Martin Ochmann and Michael Vorl{\"a}nder and Janina Fels",
booktitle = "Proceedings of the 23rd International Congress on Acoustics",
}