Publications
Search
Jonsdottir, Gudny Ragna; Gratch, Jonathan; Fast, Edward; Thórisson, Kristinn R.
Fluid Semantic Back-Channel Feedback in Dialogue: Challenges & Progress Proceedings Article
In: Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA), Paris, France, 2007.
@inproceedings{jonsdottir_fluid_2007,
title = {Fluid Semantic Back-Channel Feedback in Dialogue: Challenges \& Progress},
author = {Gudny Ragna Jonsdottir and Jonathan Gratch and Edward Fast and Kristinn R. Th{\'o}risson},
url = {http://ict.usc.edu/pubs/Fluid%20Semantic%20Back-Channel%20Feedback%20in%20Dialogue-%20Challenges%20&%20Progress.pdf},
year = {2007},
date = {2007-09-01},
booktitle = {Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA)},
address = {Paris, France},
abstract = {Participation in natural, real-time dialogue calls for behaviors supported by perception-action cycles from around 100 msec and up. Generating certain kinds of such behaviors, namely envelope feedback, has been possible since the early 90s. Real-time backchannel feedback related to the content of a dialogue has been more difficult to achieve. In this paper we describe our progress in allowing virtual humans to give rapid within-utterance content-specific feedback in real-time dialogue. We present results from human-subject studies of content feedback, where results show that content feedback to a particular phrase or word in human-human dialogue comes 560-2500 msec from the phrase's onset, 1 second on average. We also describe a system that produces such feedback with an autonomous agent in limited topic domains, present performance data of this agent in human-agent interactions experiments and discuss technical challenges in light of the observed human-subject data.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Traum, David; Roque, Antonio; Leuski, Anton; Georgiou, Panayiotis G.; Gerten, Jillian; Martinovski, Bilyana; Narayanan, Shrikanth; Robinson, Susan; Vaswani, Ashish
Hassan: A Virtual Human for Tactical Questioning Proceedings Article
In: 8th SIGdial Workshop on Discourse and Dialogue, Antwerp, Belgium, 2007.
@inproceedings{traum_hassan_2007,
title = {Hassan: A Virtual Human for Tactical Questioning},
author = {David Traum and Antonio Roque and Anton Leuski and Panayiotis G. Georgiou and Jillian Gerten and Bilyana Martinovski and Shrikanth Narayanan and Susan Robinson and Ashish Vaswani},
url = {http://ict.usc.edu/pubs/Hassan-%20A%20Virtual%20Human%20for%20Tactical%20Questioning%20.pdf},
year = {2007},
date = {2007-09-01},
booktitle = {8th SIGdial Workshop on Discourse and Dialogue},
address = {Antwerp, Belgium},
abstract = {We present Hassan, a virtual human who engages in Tactical Questioning dialogues. We describe the tactical questioning domain, the motivation for this character, the specific architecture and present brief examples and an evaluation.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Oh, Sejin; Gratch, Jonathan; Woo, Woontack
Explanatory Style for Socially Interactive Agents Proceedings Article
In: Lecture Notes in Computer Science, Lisbon, Portugal, 2007.
@inproceedings{oh_explanatory_2007,
title = {Explanatory Style for Socially Interactive Agents},
author = {Sejin Oh and Jonathan Gratch and Woontack Woo},
url = {http://ict.usc.edu/pubs/Explanatory%20Style%20for%20Socially%20Interactive%20Agents.pdf},
year = {2007},
date = {2007-09-01},
booktitle = {Lecture Notes in Computer Science},
address = {Lisbon, Portugal},
abstract = {Recent years have seen an explosion of interest in computational models of socio-emotional processes, both as a mean to deepen understanding of human behavior and as a mechanism to drive a variety of training and entertainment applications. In contrast with work on emotion, where research groups have developed detailed models of emotional processes, models of personality have emphasized shallow surface behavior. Here, we build on computational appraisal models of emotion to better characterize dispositional differences in how people come to understand social situations. Known as explanatory style, this dispositional factor plays a key role in social interactions and certain socio-emotional disorders, such as depression. Building on appraisal and attribution theories, we model key conceptual variables underlying the explanatory style, and enable agents to exhibit different explanatory tendencies according to their personalities. We describe an interactive virtual environment that uses the model to allow participants to explore individual differences in the explanation of social events, with the goal of encouraging the development of perspective taking and emotion-regulatory skills.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Roque, Antonio; Traum, David
A Model of Compliance and Emotion for Potentially Adversarial Dialogue Agents Proceedings Article
In: 8th SIGdial Workshop on Discourse and Dialogue, Antwerp, Belgium, 2007.
@inproceedings{roque_model_2007,
  author    = {Antonio Roque and David Traum},
  title     = {A Model of Compliance and Emotion for Potentially Adversarial Dialogue Agents},
  booktitle = {8th SIGdial Workshop on Discourse and Dialogue},
  address   = {Antwerp, Belgium},
  year      = {2007},
  date      = {2007-09-01},
  url       = {http://ict.usc.edu/pubs/A%20Model%20of%20Compliance%20and%20Emotion%20for%20Potentially%20Adversarial%20Dialogue%20%20Agents.pdf},
  abstract  = {We present a model of compliance, for domains in which a dialogue agent may become adversarial. This model includes a set of emotions and a set of levels of compliance, and strategies for changing these.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Ai, Hua; Roque, Antonio; Leuski, Anton; Traum, David
Using Information State to Improve Dialogue Move Identification in a Spoken Dialogue System Proceedings Article
In: Proceedings of the 10th Interspeech Conference, Antwerp, Belgium, 2007.
@inproceedings{ai_using_2007,
title = {Using Information State to Improve Dialogue Move Identification in a Spoken Dialogue System},
author = {Hua Ai and Antonio Roque and Anton Leuski and David Traum},
url = {http://ict.usc.edu/pubs/Using%20Information%20State%20to%20Improve%20Dialogue%20Move%20Identification%20in%20a%20Spoken%20Dialogue%20System.pdf},
year = {2007},
date = {2007-08-01},
booktitle = {Proceedings of the 10th Interspeech Conference},
address = {Antwerp, Belgium},
abstract = {In this paper we investigate how to improve the performance of a dialogue move and parameter tagger for a task-oriented dialogue system using the information-state approach. We use a corpus of utterances and information states from an implemented system to train and evaluate a tagger, and then evaluate the tagger in an on-line system. Use of information state context is shown to improve performance of the system.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Gandhe, Sudeep; Traum, David
Creating Spoken Dialogue Characters from Corpora without Annotations Proceedings Article
In: Interspeech 2007, Antwerp, Belgium, 2007.
@inproceedings{gandhe_creating_2007,
  author    = {Sudeep Gandhe and David Traum},
  title     = {Creating Spoken Dialogue Characters from Corpora without Annotations},
  booktitle = {Interspeech 2007},
  address   = {Antwerp, Belgium},
  year      = {2007},
  date      = {2007-08-01},
  url       = {http://ict.usc.edu/pubs/Creating%20Spoken%20Dialogue%20Characters%20from%20Corpora%20without%20Annotations%20.pdf},
  abstract  = {Virtual humans are being used in a number of applications, including simulation-based training, multi-player games, and museum kiosks. Natural language dialogue capabilities are an essential part of their human-like persona. These dialogue systems have a goal of being believable and generally have to operate within the bounds of their restricted domains. Most dialogue systems operate on a dialogue-act level and require extensive annotation efforts. Semantic annotation and rule authoring have long been known as bottlenecks for developing dialogue systems for new domains. In this paper, we investigate several dialogue models for virtual humans that are trained on an unannotated human-human corpus. These are inspired by information retrieval and work on the surface text level. We evaluate these in text-based and spoken interactions and also against the upper baseline of human-human dialogues.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Robinson, Susan; Roque, Antonio; Vaswani, Ashish; Traum, David; Hernandez, Charles; Millspaugh, Bill
Evaluation of a Spoken Dialogue System for Virtual Reality Call for Fire Training Proceedings Article
In: 10th International Pragmatics Conference, Gothenburg, Sweden, 2007.
@inproceedings{robinson_evaluation_2007,
title = {Evaluation of a Spoken Dialogue System for Virtual Reality Call for Fire Training},
author = {Susan Robinson and Antonio Roque and Ashish Vaswani and David Traum and Charles Hernandez and Bill Millspaugh},
url = {http://ict.usc.edu/pubs/Evaluation%20of%20a%20Spoken%20Dialogue%20System%20for%20Virtual%20Reality%20Call%20for%20Fire%20Training.pdf},
year = {2007},
date = {2007-07-01},
booktitle = {10th International Pragmatics Conference},
address = {Gothenburg, Sweden},
abstract = {We present an evaluation of a spoken dialogue system that engages in dialogues with soldiers training in an immersive Call for Fire (CFF) simulation. We briefly describe aspects of the Joint Fires and Effects Trainer System, and the Radiobot-CFF dialogue system, which can engage in voice communications with a trainee in call for fire dialogues. An experiment is described to judge performance of the Radiobot CFF system compared with human radio operators. Results show that while the current version of the system is not quite at human performance levels, it is already viable for training interaction and as an operator-controller aid.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Kenny, Patrick G.; Hartholt, Arno; Gratch, Jonathan; Traum, David; Marsella, Stacy C.; Swartout, William
The More the Merrier: Multi-Party Negotiation with Virtual Humans Proceedings Article
In: AAAI Conference On Artificial Intelligence; Proceedings of the 22nd National Conference on Artificial Intelligence, pp. 1970–1971, 2007.
@inproceedings{kenny_more_2007,
title = {The More the Merrier: Multi-Party Negotiation with Virtual Humans},
author = {Patrick G. Kenny and Arno Hartholt and Jonathan Gratch and David Traum and Stacy C. Marsella and William Swartout},
url = {http://ict.usc.edu/pubs/The%20More%20the%20Merrier-%20Multi-Party%20Negotiation%20with%20Virtual%20Humans.pdf},
year = {2007},
date = {2007-07-01},
booktitle = {AAAI Conference On Artificial Intelligence; Proceedings of the 22nd National Conference on Artificial Intelligence},
volume = {2},
pages = {1970--1971},
abstract = {The goal of the Virtual Humans Project at the University of Southern California's Institute for Creative Technologies is to enrich virtual training environments with virtual humans -- autonomous agents that support face-to-face interaction with trainees in a variety of roles -- through bringing together many different areas of research including speech recognition, natural language understanding, dialogue management, cognitive modeling, emotion modeling, non-verbal behavior and speech and knowledge management. The demo at AAAI will focus on our work using virtual humans to train negotiation skills. Conference attendees will negotiate with a virtual human doctor and elder to try to move a clinic out of harm's way in single and multi-party negotiation scenarios using the latest iteration of our Virtual Humans framework. The user will use natural speech to talk to the embodied agents, who will respond in accordance with their internal task model and state. The characters will carry out a multi-party dialogue with verbal and non-verbal behavior. A video of a single-party version of the scenario was shown at AAAI-06. This new interactive demo introduces several new features, including multi-party negotiation, dynamically generated non-verbal behavior and a central ontology.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Jan, Dusan; Traum, David
Dynamic Movement and Positioning of Embodied Agents in Multiparty Conversations Proceedings Article
In: ACL 2007 Workshop on Embodied Language Processing, Prague, Czech Republic, 2007.
@inproceedings{jan_dynamic_2007,
  author    = {Dusan Jan and David Traum},
  title     = {Dynamic Movement and Positioning of Embodied Agents in Multiparty Conversations},
  booktitle = {ACL 2007 Workshop on Embodied Language Processing},
  address   = {Prague, Czech Republic},
  year      = {2007},
  date      = {2007-06-01},
  url       = {http://ict.usc.edu/pubs/Dynamic%20Movement%20and%20Positioning%20of%20Embodied%20Agents%20in%20Multiparty%20%20Conversations.pdf},
  abstract  = {For embodied agents to engage in realistic multiparty conversation, they must stand in appropriate places with respect to other agents and the environment. When these factors change, for example when an agent joins a conversation, the agents must dynamically move to a new location and/or orientation to accommodate. This paper presents an algorithm for simulating the movement of agents based on observed human behavior using techniques developed for pedestrian movement in crowd simulations. We extend a previous group conversation simulation to include an agent motion algorithm. We examine several test cases and show how the simulation generates results that mirror real-life conversation settings.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Kenny, Patrick G.; Rizzo, Albert; Parsons, Thomas D.; Gratch, Jonathan; Swartout, William
A Virtual Human Agent for Training Novice Therapist Clinical Interviewing Skills Proceedings Article
In: Annual Review of CyberTherapy and Telemedicine, Washington D.C., 2007.
@inproceedings{kenny_virtual_2007,
title = {A Virtual Human Agent for Training Novice Therapist Clinical Interviewing Skills},
author = {Patrick G. Kenny and Albert Rizzo and Thomas D. Parsons and Jonathan Gratch and William Swartout},
url = {http://ict.usc.edu/pubs/A%20Virtual%20Human%20Agent%20for%20Training%20Novice%20Therapist%20Clinical%20Interviewing%20Skills.pdf},
year = {2007},
date = {2007-06-01},
booktitle = {Annual Review of CyberTherapy and Telemedicine},
address = {Washington D.C.},
abstract = {Virtual Reality (VR) is rapidly evolving into a pragmatically usable technology for mental health (MH) applications. Over the last five years, the technology for creating virtual humans (VHs) has evolved to the point where they are no longer regarded as simple background characters, but rather can serve a functional interactional role. Our current project involves the construction of a natural language-capable virtual client named ``Justin,'' which derived from a military negotiation training tool into a virtual therapy patient for training novice clinicians the art of clinical interviewing with a resistant client. Justin portrays a 16-year old male with a conduct disorder who is being forced to participate in therapy by his family. The system uses a sophisticated natural language interface that allows novice clinicians to practice asking interview questions in an effort to create a positive therapeutic alliance with this very challenging virtual client. Herein we proffer a description of our iterative design process and outline our long term vision.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Thagard, Paul; Ditto, Peter; Gratch, Jonathan; Marsella, Stacy C.; Westen, Drew
Emotional Cognition in the Real World Proceedings Article
In: Proceedings of the Twenty-Ninth Annual Meeting of the Cognitive Science Society, Nashville, TN, 2007.
@inproceedings{thagard_emotional_2007,
  author    = {Paul Thagard and Peter Ditto and Jonathan Gratch and Stacy C. Marsella and Drew Westen},
  title     = {Emotional Cognition in the Real World},
  booktitle = {Proceedings of the Twenty-Ninth Annual Meeting of the Cognitive Science Society},
  address   = {Nashville, TN},
  year      = {2007},
  date      = {2007-06-01},
  url       = {http://ict.usc.edu/pubs/Emotional%20Cognition%20in%20the%20Real%20World.pdf},
  abstract  = {There is increasing appreciation in cognitive science of the impact of emotions on many kinds of thinking, from decision making to scientific discovery. This appreciation has developed in all the fields of cognitive science, including, psychology, philosophy, artificial intelligence, and linguistics, and anthropology. The purpose of the proposed symposium is to report and discuss new investigations of the impact of emotion on cognitive processes, in particular ones that are important in real life situations. We will approach the practical importance of emotional cognition from a variety of disciplinary perspectives: social psychology (Ditto), clinical psychology (Westen), computer science (Gratch and Marsella), and philosophy and neuroscience (Thagard). In order to provide integration across these approaches, we will try to address a fundamental set of questions, including: 1. How do emotions interact with basic cognitive processes? 2. What are the positive contributions of emotions to various kinds of thinking in real world situations? 3. How do emotions sometimes bias thinking in real world situations? 4. How can understanding of the psychology and neuroscience of emotional cognition be used to improve the effectiveness of real world thinking?},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Gratch, Jonathan; Marsella, Stacy C.
The Architectural Role of Emotion in Cognitive Systems Book Section
In: Integrated Models of Cognitive Systems, Oxford University Press, New York, 2007.
@incollection{gratch_architectural_2007,
title = {The Architectural Role of Emotion in Cognitive Systems},
author = {Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/The%20Architectural%20Role%20of%20Emotion%20in%20Cognitive%20Systems.pdf},
year = {2007},
date = {2007-03-01},
booktitle = {Integrated Models of Cognitive Systems},
publisher = {Oxford University Press},
address = {New York},
abstract = {In this chapter, we will revive an old argument that theories of human emotion can give insight into the design and control of complex cognitive systems. In particular, we claim that appraisal theories of emotion provide essential insight into the influences of emotion over cognition and can help translate such findings into concrete guidance for the design of cognitive systems. Appraisal theory claims that emotion plays a central and functional role in sensing external events, characterizing them as opportunity or threats and recruiting the cognitive, physical and social resources needed to adaptively respond. Further, because it argues for a close association between emotion and cognition, the theoretical claims of appraisal theory can be recast as a requirement specification for how to build a cognitive system. This specification asserts a set of judgments that must be supported in order to correctly interpret and respond to stimuli and provides a unifying framework for integrating these judgments into a coherent physical or social response. This chapter elaborates argument in some detail based on our joint experience in building complex cognitive systems and computational models of emotion.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
Martinovski, Bilyana; Traum, David; Marsella, Stacy C.
Rejection of empathy in negotiation Journal Article
In: Group Decision and Negotiation, vol. 16, pp. 61–76, 2007, ISSN: 0926-2644.
@article{martinovski_rejection_2007,
title = {Rejection of empathy in negotiation},
author = {Bilyana Martinovski and David Traum and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Rejection%20of%20empathy%20in%20negotiation.pdf},
issn = {0926-2644},
year = {2007},
date = {2007-01-01},
journal = {Group Decision and Negotiation},
volume = {16},
pages = {61--76},
abstract = {Trust is a crucial quality in the development of individuals and societies and empathy plays a key role in the formation of trust. Trust and empathy have growing importance in studies of negotiation. However, empathy can be rejected which complicates its role in negotiation. This paper presents a linguistic analysis of empathy by focusing on rejection of empathy in negotiation. Some of the rejections are due to failed recognition of the rejector's needs and desires whereas others have mainly strategic functions gaining momentum in the negotiation. In both cases, rejection of empathy is a phase in the negotiation not a breakdown.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Jan, Dusan; Herrera, David; Martinovski, Bilyana; Novick, David; Traum, David
A Computational Model of Culture-Specific Conversational Behavior Proceedings Article
In: Proceedings of the Intelligent Virtual Humans Conference, Paris, France, 2007.
@inproceedings{jan_computational_2007,
title = {A Computational Model of Culture-Specific Conversational Behavior},
author = {Dusan Jan and David Herrera and Bilyana Martinovski and David Novick and David Traum},
url = {http://ict.usc.edu/pubs/A%20Computational%20Model%20of%20Culture-Specific%20Conversational%20Behavior.pdf},
year = {2007},
date = {2007-01-01},
booktitle = {Proceedings of the Intelligent Virtual Humans Conference},
address = {Paris, France},
abstract = {This paper presents a model for simulating cultural differences in the conversational behavior of virtual agents. The model provides parameters for differences in proxemics, gaze and overlap in turn taking. We present a review of literature on these factors and show results of a study where native speakers of North American English, Mexican Spanish and Arabic were asked to rate the realism of the simulations generated based on different cultural parameters with respect to their culture.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Lee, Jina; Marsella, Stacy C.; Traum, David; Gratch, Jonathan; Lance, Brent
The Rickel Gaze Model: A Window on the Mind of a Virtual Human Proceedings Article
In: Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA), pp. 296–303, Paris, France, 2007.
@inproceedings{lee_rickel_2007,
title = {The Rickel Gaze Model: A Window on the Mind of a Virtual Human},
author = {Jina Lee and Stacy C. Marsella and David Traum and Jonathan Gratch and Brent Lance},
url = {http://ict.usc.edu/pubs/The%20Rickel%20Gaze%20Model-%20A%20Window%20on%20the%20Mind%20of%20a%20Virtual%20Human.pdf},
year = {2007},
date = {2007-01-01},
booktitle = {Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA)},
volume = {4722},
pages = {296--303},
address = {Paris, France},
abstract = {Gaze plays a large number of cognitive, communicative and affective roles in face-to-face human interaction. To build a believable virtual human, it is imperative to construct a gaze model that generates realistic gaze behaviors. However, it is not enough to merely imitate a person's eye movements. The gaze behaviors should reflect the internal states of the virtual human and users should be able to derive them by observing the behaviors. In this paper, we present a gaze model driven by the cognitive operations; the model processes the virtual human's reasoning, dialog management, and goals to generate behaviors that reflect the agent's inner thoughts. It has been implemented in our virtual human system and operates in real-time. The gaze model introduced in this paper was originally designed and developed by Jeff Rickel but has since been extended by the authors.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Kenny, Patrick G.; Parsons, Thomas D.; Gratch, Jonathan; Leuski, Anton; Rizzo, Albert
Virtual Patients for Clinical Therapist Skills Training Proceedings Article
In: Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA), pp. 197–210, Paris, France, 2007.
@inproceedings{kenny_virtual_2007-1,
title = {Virtual Patients for Clinical Therapist Skills Training},
author = {Patrick G. Kenny and Thomas D. Parsons and Jonathan Gratch and Anton Leuski and Albert Rizzo},
url = {http://ict.usc.edu/pubs/Virtual%20Patients%20for%20Clinical%20Therapist%20Skills%20Training.pdf},
year = {2007},
date = {2007-01-01},
booktitle = {Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA)},
volume = {4722},
pages = {197--210},
address = {Paris, France},
abstract = {Virtual humans offer an exciting and powerful potential for rich interactive experiences. Fully embodied virtual humans are growing in capability, ease, and utility. As a result, they present an opportunity for expanding research into burgeoning virtual patient medical applications. In this paper we consider the ways in which one may go about building and applying virtual human technology to the virtual patient domain. Specifically we aim to show that virtual human technology may be used to help develop the interviewing and diagnostics skills of developing clinicians. Herein we proffer a description of our iterative design process and preliminary results to show that virtual patients may be a useful adjunct to psychotherapy education.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
de Melo, Celso M.; Gratch, Jonathan
Evolving Expression of Emotions through Color in Virtual Humans using Genetic Algorithms Proceedings Article
In: Proceedings of the 1st International Conference on Computational Creativity (ICCC-X), pp. 248–257, 2007, ISBN: 978-989-96001-2-6.
@inproceedings{de_melo_evolving_2007,
title = {Evolving Expression of Emotions through Color in Virtual Humans using Genetic Algorithms},
author = {Celso M. de Melo and Jonathan Gratch},
url = {http://ict.usc.edu/pubs/Evolving%20Expression%20of%20Emotions%20through%20Color%20in%20Virtual%20Humans%20using%20Genetic%20Algorithms.pdf},
isbn = {978-989-96001-2-6},
year = {2007},
date = {2007-01-01},
booktitle = {Proceedings of the 1st International Conference on Computational Creativity (ICCC-X)},
pages = {248--257},
abstract = {For centuries artists have been exploring the formal elements of art (lines, space, mass, light, color, sound, etc.) to express emotions. This paper takes this insight to explore new forms of expression for virtual humans which go beyond the usual bodily, facial and vocal expression channels. In particular, the paper focuses on how to use color to influence the perception of emotions in virtual humans. First, a lighting model and filters are used to manipulate color. Next, an evolutionary model, based on genetic algorithms, is developed to learn novel associations between emotions and color. An experiment is then conducted where non-experts evolve mappings for joy and sadness, without being aware that genetic algorithms are used. In a second experiment, the mappings are analyzed with respect to its features and how general they are. Results indicate that the average fitness increases with each new generation, thus suggesting that people are succeeding in creating novel and useful mappings for the emotions. Moreover, the results show consistent differences between the evolved images of joy and the evolved images of sadness.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Gratch, Jonathan; Wang, Ning; Okhmatovskaia, Anna; Lamothe, Francois; Morales, Mathieu; Werf, R. J.; Morency, Louis-Philippe
Can virtual humans be more engaging than real ones? Proceedings Article
In: Proceedings of the International Conference on Human-Computer Interaction, HCI Intelligent Multimodal Interaction Environments, pp. 286–297, Beijing, China, 2007.
@inproceedings{gratch_can_2007,
title = {Can virtual humans be more engaging than real ones?},
author = {Jonathan Gratch and Ning Wang and Anna Okhmatovskaia and Francois Lamothe and Mathieu Morales and R. J. Werf and Louis-Philippe Morency},
url = {http://ict.usc.edu/pubs/Can%20virtual%20humans%20be%20more%20engaging%20than%20real%20ones.pdf},
year = {2007},
date = {2007-01-01},
booktitle = {Proceedings of the International Conference on Human-Computer Interaction, HCI Intelligent Multimodal Interaction Environments},
pages = {286--297},
address = {Beijing, China},
abstract = {Emotional bonds don't arise from a simple exchange of facial displays, but often emerge through the dynamic give and take of face-to-face interactions. This article explores the phenomenon of rapport, a feeling of connectedness that seems to arise from rapid and contingent positive feedback between partners and is often associated with socio-emotional processes. Rapport has been argued to lead to communicative efficiency, better learning outcomes, improved acceptance of medical advice and successful negotiations. We provide experimental evidence that a simple virtual character that provides positive listening feedback can induce stronger rapport-like effects than face-to-face communication between human partners. Specifically, this interaction can be more engaging to storytellers than speaking to a human audience, as measured by the length and content of their stories.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Rizzo, Albert; Graap, Ken; McLay, Robert N.; Perlman, Karen; Rothbaum, Barbara O.; Reger, Greg; Parsons, Thomas D.; Difede, JoAnn; Pair, Jarrell
Virtual Iraq: Initial Case Reports from a VR Exposure Therapy Application for Combat-Related Post Traumatic Stress Disorder Journal Article
In: Virtual Rehabilitation, vol. 27, pp. 124–130, 2007.
@article{rizzo_virtual_2007,
title = {Virtual Iraq: Initial Case Reports from a VR Exposure Therapy Application for Combat-Related Post Traumatic Stress Disorder},
author = {Albert Rizzo and Ken Graap and Robert N. McLay and Karen Perlman and Barbara O. Rothbaum and Greg Reger and Thomas D. Parsons and JoAnn Difede and Jarrell Pair},
url = {http://ict.usc.edu/pubs/Virtual%20Iraq-%20Initial%20Case%20Reports%20from%20a%20VR%20Exposure%20Therapy%20Application%20for%20Combat-Related%20Post%20Traumatic%20Stress%20Disorder.pdf},
year = {2007},
date = {2007-01-01},
journal = {Virtual Rehabilitation},
volume = {27},
pages = {124--130},
abstract = {Post Traumatic Stress Disorder (PTSD) is reported to be caused by traumatic events that are outside the range of usual human experience including (but not limited to) military combat, violent personal assault, being kidnapped or taken hostage and terrorist attacks. Initial data suggests that at least 1 out of 6 Iraq War veterans are exhibiting symptoms of depression, anxiety and PTSD. Virtual Reality (VR) delivered exposure therapy for PTSD has been used with reports of positive outcomes. The aim of the current paper is to present the rationale and brief description of a Virtual Iraq PTSD VR therapy application and present initial findings from two successfully treated patients. The VR treatment environment was created via the recycling of virtual graphic assets that were initially built for the U.S. Army-funded combat tactical simulation scenario and commercially successful X-Box game, Full Spectrum Warrior, in addition to other available and newly created assets. Thus far, Virtual Iraq consists of a series of customizable virtual scenarios designed to represent relevant Middle Eastern VR contexts for exposure therapy, including a city and desert road convoy environment. User-centered design feedback needed to iteratively evolve the system was gathered from returning Iraq War veterans in the USA and from a system deployed in Iraq and tested by an Army Combat Stress Control Team. Clinical trials are currently underway at Camp Pendleton and at the San Diego Naval Medical Center and the results from two successfully treated patients are presented along with a delineation of our future plans for research and clinical care using this application.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gratch, Jonathan; Wang, Ning; Gerten, Jillian; Fast, Edward; Duffy, Robin
Creating Rapport with Virtual Agents Proceedings Article
In: Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA), pp. 125–128, Paris, France, 2007.
@inproceedings{gratch_creating_2007,
title = {Creating Rapport with Virtual Agents},
author = {Jonathan Gratch and Ning Wang and Jillian Gerten and Edward Fast and Robin Duffy},
url = {http://ict.usc.edu/pubs/Creating%20Rapport%20with%20Virtual%20Agents.pdf},
year = {2007},
date = {2007-01-01},
booktitle = {Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA)},
volume = {4722},
pages = {125--128},
address = {Paris, France},
abstract = {Recent research has established the potential for virtual characters to establish rapport with humans through simple contingent nonverbal behaviors. We hypothesized that the contingency, not just the frequency of positive feedback is crucial when it comes to creating rapport. The primary goal in this study was evaluative: can an agent generate behavior that engenders feelings of rapport in human speakers and how does this compare to human generated feedback? A secondary goal was to answer the question: Is contingency (as opposed to frequency) of agent feedback crucial when it comes to creating feelings of rapport? Results suggest that contingency matters when it comes to creating rapport and that agent generated behavior was as good as human listeners in creating rapport. A "virtual human listener" condition performed worse than other conditions.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Filter
Sorry, no publications matched your criteria.