@inproceedings{cf681b3a1c3e4d1986a6b11f65cab7bb,
  author        = {Kim, Jeesun and Davis, Chris},
  title         = {How far out? {The} effect of peripheral visual speech on speech perception},
  booktitle     = {Proceedings of the 12th International Conference on Auditory-Visual Speech Processing, 29 August - 1 September 2013, Annecy, France},
  publisher     = {Inria},
  year          = {2013},
  pages         = {123--128},
  language      = {English},
  note          = {International Conference on Auditory-Visual Speech Processing; Conference date: 29-08-2013},
  abstract      = {Seeing the talker's moving face (visual speech) can facilitate or distort auditory speech perception. Our previous study showed that these effects occur even when visual speech was presented in the periphery and participants performed a central visual task. The current study examined the extent to which these effects were modulated by the eccentricity of visual speech: Visual speech presented at a visual angle of 10.4{\textdegree} (Exp 1) and 23.6{\textdegree} (Exp 2). In both experiments spoken /aba/ stimuli were presented in noise (-6 dB) with congruent or incongruent visual speech in full-face or upper-face (baseline) conditions. Other AV vCv syllables were also presented as filler items. Participants were to identify what they heard while performing a central visual task with their eye movements monitored. Congruent visual speech facilitated speech perception; incongruent interfered. The sizes of the visual speech effects were smaller for the more eccentric presentation but were still significant. We discuss these results in terms of the form and timing cues that visual speech provides for incoming auditory speech and the robustness of the speech processes that use these cues.},
  internal-note = {Cleaned auto-export: removed stray XML CDATA wrapper around abstract; degree signs in "10.4"/"23.6" reconstructed from garbled "10.40"/"23.60" -- verify against the published paper},
}