@inproceedings{f12eb425bb3249ecb15f08a28b2addc5,
title = "Temporal relationship between auditory and visual prosodic cues",
abstract = "It has been reported that non-articulatory visual cues to prosody tend to align with auditory cues, emphasizing the auditory events they accompany (the visual alignment hypothesis). We investigated the temporal relationship between visual and auditory prosodic cues in a large corpus of utterances to determine the extent to which non-articulatory visual prosodic cues align with auditory ones. Six speakers were recorded in a dialogue exchange task saying 30 sentences in three prosodic conditions (two repetitions each), to measure how often eyebrow movements and rigid head tilts aligned with auditory prosodic cues, the temporal distribution of such movements, and the variation across prosodic conditions. The timing of brow raises and head tilts was not aligned with auditory cues, and the occurrence of visual cues was inconsistent, lending little support to the visual alignment hypothesis. Different types of visual cues may combine with auditory cues in different ways to signal prosody.",
author = "Erin Cvejic and Jeesun Kim and Chris Davis",
year = "2011",
language = "English",
publisher = "Causal Productions",
pages = "981--984",
booktitle = "Proceedings of the 12th Annual Conference of the International Speech Communication Association (INTERSPEECH 2011), Florence, Italy, 27 - 31 August 2011",
note = "International Speech Communication Association Conference; Conference date: 27-08-2011 through 31-08-2011",
}