@inproceedings{Choi:2004:N3F,
  author    = {Choi, Soo-Mi and Kim, Yong-Guk and Lee, Don-Soo and
               Lee, Sung-Oh and Park, Gwi-Tae},
  title     = {Non-photorealistic {3-D} Facial Animation on the {PDA} Based
               on Facial Expression Recognition},
  booktitle = {Proceedings of 4th International Symposium on Smart Graphics},
  editor    = {Butz, Andreas and Kr{\"u}ger, Antonio and Olivier, Patrick},
  series    = LNICS,
  volume    = {3031},
  pages     = {11--20},
  publisher = SpringerPub,
  address   = SpringerAdr,
  year      = {2004},
  doi       = {10.1007/b97744},
  url       = {http://springerlink.metapress.com/link.asp?id=dcq99vu63b9mqycu},
  localfile = {papers/Choi.2004.N3F.pdf},
  abstract  = {This paper presents a facial expression recognition-synthesis
               system. In the front, it detects a facial area within the given
               image and then classifies its facial expression into 7 emotional
               weightings. Such weighting information, transmitted to the PDA via
               a mobile network, is used for non-photorealistic facial expression
               animation. The cartoon-like shading method, adopted as a
               non-photorealistic 3-D technique, is developed to render a 3-D
               avatar that conveys a familiar and yet unique facial character,
               even without employing extensive polygons. We found that facial
               expression animation using emotional curves is more effective in
               expressing the timing of an expression comparing to the linear
               interpolation method. The present emotional avatar embedded on a
               mobile platform can be used in the cyberspace.},
}