Publications
Search
Wang, Ning; Pynadath, David V.; Hill, Susan G.
Trust Calibration within a Human-Robot Team: Comparing Automatically Generated Explanations Proceedings Article
In: 2016 11th ACM/IEEE International Conference on Human-Robot Interaction (HRI), pp. 109–116, IEEE, New Zealand, 2016.
@inproceedings{wang_trust_2016,
  title     = {Trust Calibration within a Human-Robot Team: Comparing Automatically Generated Explanations},
  author    = {Wang, Ning and Pynadath, David V. and Hill, Susan G.},
  url       = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=7451741},
  doi       = {10.1109/HRI.2016.7451741},
  year      = {2016},
  date      = {2016-03-01},
  booktitle = {2016 11th ACM/IEEE International Conference on Human-Robot Interaction (HRI)},
  pages     = {109--116},
  publisher = {IEEE},
  address   = {New Zealand},
  abstract  = {Trust is a critical factor for achieving the full potential of human-robot teams. Researchers have theorized that people will more accurately trust an autonomous system, such as a robot, if they have a more accurate understanding of its decision-making process. Studies have shown that hand-crafted explanations can help maintain trust when the system is less than 100% reliable. In this work, we leverage existing agent algorithms to provide a domain-independent mechanism for robots to automatically generate such explanations. To measure the explanation mechanism's impact on trust, we collected self-reported survey data and behavioral data in an agent-based online testbed that simulates a human-robot team task. The results demonstrate that the added explanation capability led to improvement in transparency, trust, and team performance. Furthermore, by observing the different outcomes due to variations in the robot's explanation content, we gain valuable insight that can help lead to refinement of explanation algorithms to further improve human-robot trust calibration.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Wang, Ning; Pynadath, David V.; Hill, Susan G.
Building Trust in a Human-Robot Team with Automatically Generated Explanations Proceedings Article
In: Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2015, National Training and Simulation Association, Orlando, Florida, 2015.
@inproceedings{wang_building_2015,
  title     = {Building Trust in a Human-Robot Team with Automatically Generated Explanations},
  author    = {Wang, Ning and Pynadath, David V. and Hill, Susan G.},
  url       = {http://www.iitsecdocs.com/search},
  year      = {2015},
  date      = {2015-12-01},
  booktitle = {Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2015},
  publisher = {National Training and Simulation Association},
  address   = {Orlando, Florida},
  abstract  = {Technological advances offer the promise of robotic systems that work with people to form human-robot teams that are more capable than their individual members. Unfortunately, the increasing capability of such autonomous systems has often failed to increase the capability of the human-robot team. Studies have identified many causes underlying these failures, but one critical aspect of a successful human-machine interaction is trust. When robots are more suited than humans for a certain task, we want the humans to trust the robots to perform that task. When the robots are less suited, we want the humans to appropriately gauge the robots’ ability and have people perform the task manually. Failure to do so results in disuse of robots in the former case and misuse in the latter. Real-world case studies and laboratory experiments show that failures in both cases are common. Researchers have theorized that people will more accurately trust an autonomous system, such as a robot, if they have a more accurate understanding of its decision-making process. Studies show that explanations offered by an automated system can help maintain trust with the humans in case the system makes an error, indicating that the robot’s communication transparency can be an important factor in earning an appropriate level of trust. To study how robots can communicate their decisionmaking process to humans, we have designed an agent-based online test-bed that supports virtual simulation of domain-independent human-robot interaction. In the simulation, humans work together with virtual robots as a team. The test-bed allows researchers to conduct online human-subject studies and gain better understanding of how robot communication can improve human-robot team performance by fostering better trust relationships between humans and their robot teammates. In this paper, we describe the details of our design, and illustrate its operation with an example human-robot team reconnaissance task.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Pynadath, David V.; Wang, Ning; Merchant, Chirag
Toward Acquiring a Human Behavior Model of Competition vs. Cooperation Proceedings Article
In: Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2015, National Training and Simulation Association, Orlando, Florida, 2015.
@inproceedings{pynadath_toward_2015,
  title     = {Toward Acquiring a Human Behavior Model of Competition vs. Cooperation},
  author    = {Pynadath, David V. and Wang, Ning and Merchant, Chirag},
  url       = {http://www.iitsecdocs.com/search},
  year      = {2015},
  date      = {2015-12-01},
  booktitle = {Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2015},
  publisher = {National Training and Simulation Association},
  address   = {Orlando, Florida},
  abstract  = {One of the challenges in modeling human behavior is accurately capturing the conditions under which people will behave selfishly or selflessly. Researchers have been unable to craft purely cooperative (or competitive) scenarios without significant numbers of subjects displaying unintended selfish (or selfless) behavior (e.g., Rapoport & Chammah, 1965). In this work, rather than try to further isolate competitive vs. cooperative behavior, we instead construct an experimental setting that deliberately includes both, in a way that fits within an operational simulation model. Using PsychSim, a multiagent social simulation framework with both Theory of Mind and decision theory, we have implemented an online resource allocation game called “Team of Rivals”, where four players seek to defeat a common enemy. The players have individual pools of resources which they can allocate toward that common goal. In addition to their progress toward this common goal, the players also receive individual feedback, in terms of the number of resources they own and have won from the enemy. By giving the players both an explicit cooperative goal and implicit feedback on potential competitive goals, we give them room to behave anywhere on the spectrum between these two extremes. Furthermore, by moving away from the more common two-player laboratory settings (e.g., Prisoner’s Dilemma), we can observe differential behavior across the richer space of possible interpersonal relationships. We discuss the design of the game that allows us to observe and analyze these relationships from human behavior data acquired through this game. We then describe decision-theoretic agents that can simulate hypothesized variations on human behavior. Finally, we present results of a preliminary playtest of the testbed and discuss the gathered data.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Alfonso, Bexy; Pynadath, David V.; Lhommet, Margot; Marsella, Stacy
Emotional Perception for Updating Agents’ Beliefs Proceedings Article
In: Proceedings of ACII 2015, IEEE, Xi'an, China, 2015.
@inproceedings{alfonso_emotional_2015,
  title     = {Emotional Perception for Updating Agents’ Beliefs},
  author    = {Alfonso, Bexy and Pynadath, David V. and Lhommet, Margot and Marsella, Stacy},
  url       = {http://ict.usc.edu/pubs/Emotional%20Perception%20for%20Updating%20Agents%e2%80%99%20Beliefs.pdf},
  year      = {2015},
  date      = {2015-09-01},
  booktitle = {Proceedings of ACII 2015},
  publisher = {IEEE},
  address   = {Xi'an, China},
  abstract  = {The relative influence of perception and situation in emotional judgments has been extensively debated in psychology. A main issue in this debate concerns how these sources of information are integrated. This work proposes a method able to make probabilistic predictions of appraisals of other agents, using mental models of those agents. From these appraisal predictions, predictions about another agent’s expressions are made, integrated with observations of the other agent’s ambiguous emotional expressions using Bayesian techniques, resulting in updates to the agent’s mental models. Our method is inspired by psychological work on human interpretation of emotional expressions. We demonstrate how these appraisals of others’ emotions and observations of their expressions can be an integral part of an agent capable of Theory of Mind reasoning.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Gratch, Jonathan; Cheng, Lin; Marsella, Stacy
The Appraisal Equivalence Hypothesis: Verifying the domain-independence of a computational model of emotion dynamics Proceedings Article
In: Proceedings of ACII 2015, IEEE, Xi'an, China, 2015.
@inproceedings{gratch_appraisal_2015,
  title     = {The Appraisal Equivalence Hypothesis: Verifying the domain-independence of a computational model of emotion dynamics},
  author    = {Gratch, Jonathan and Cheng, Lin and Marsella, Stacy},
  url       = {http://ict.usc.edu/pubs/The%20Appraisal%20Equivalence%20Hypothesis-Verifying%20the%20domain-independence%20of%20a%20computational%20model%20of%20emotion%20dynamics.pdf},
  year      = {2015},
  date      = {2015-09-01},
  booktitle = {Proceedings of ACII 2015},
  publisher = {IEEE},
  address   = {Xi'an, China},
  abstract  = {Appraisal theory is the most influential theory within affective computing, and serves as the basis for several computational models of emotion. The theory makes strong claims of domain-independence: seemingly different situations, both within and across domains are claimed to produce the identical emotional responses if and only if they are appraised the same way. This article tests this claim, and the predictions of a computational model that embodies it, in two very different interactive games. The results extend prior empirical evidence for appraisal theory to situations where emotions unfold and change over time.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Stratou, Giota; Morency, Louis-Philippe; DeVault, David; Hartholt, Arno; Fast, Edward; Lhommet, Margaux; Lucas, Gale; Morbini, Fabrizio; Georgila, Kallirroi; Scherer, Stefan; Gratch, Jonathan; Marsella, Stacy; Traum, David; Rizzo, Albert
A Demonstration of the Perception System in SimSensei, a Virtual Human Application for Healthcare Interviews Proceedings Article
In: Affective Computing and Intelligent Interaction (ACII), 2015 International Conference on, pp. 787–789, IEEE, Xi'an, China, 2015.
@inproceedings{stratou_demonstration_2015,
  title     = {A Demonstration of the Perception System in {SimSensei}, a Virtual Human Application for Healthcare Interviews},
  author    = {Stratou, Giota and Morency, Louis-Philippe and DeVault, David and Hartholt, Arno and Fast, Edward and Lhommet, Margaux and Lucas, Gale and Morbini, Fabrizio and Georgila, Kallirroi and Scherer, Stefan and Gratch, Jonathan and Marsella, Stacy and Traum, David and Rizzo, Albert},
  url       = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=7344661},
  doi       = {10.1109/ACII.2015.7344661},
  year      = {2015},
  date      = {2015-09-01},
  booktitle = {Affective Computing and Intelligent Interaction (ACII), 2015 International Conference on},
  pages     = {787--789},
  publisher = {IEEE},
  address   = {Xi'an, China},
  abstract  = {We present the SimSensei system, a fully automatic virtual agent that conducts interviews to assess indicators of psychological distress. With this demo, we focus our attention on the perception part of the system, a multimodal framework which captures and analyzes user state behavior for both behavioral understanding and interactional purposes. We will demonstrate real-time user state sensing as a part of the SimSensei architecture and discuss how this technology enabled automatic analysis of behaviors related to psychological distress.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Feng, Andrew; Leuski, Anton; Marsella, Stacy; Casas, Dan; Kang, Sin-Hwa; Shapiro, Ari
A Platform for Building Mobile Virtual Humans Proceedings Article
In: Proceedings of the 15th International Conference on Intelligent Virtual Agents (IVA), pp. 310–319, Springer, Delft, Netherlands, 2015.
@inproceedings{feng_platform_2015,
  title     = {A Platform for Building Mobile Virtual Humans},
  author    = {Feng, Andrew and Leuski, Anton and Marsella, Stacy and Casas, Dan and Kang, Sin-Hwa and Shapiro, Ari},
  url       = {http://ict.usc.edu/pubs/A%20Platform%20for%20Building%20Mobile%20Virtual%20Humans.pdf},
  doi       = {10.1007/978-3-319-21996-7},
  year      = {2015},
  date      = {2015-08-01},
  booktitle = {Proceedings of the 15th International Conference on Intelligent Virtual Agents (IVA)},
  pages     = {310--319},
  publisher = {Springer},
  address   = {Delft, Netherlands},
  abstract  = {We describe an authoring framework for developing virtual humans on mobile applications. The framework abstracts many elements needed for virtual human generation and interaction, such as the rapid development of nonverbal behavior, lip syncing to speech, dialogue management, access to speech transcription services, and access to mobile sensors such as the microphone, gyroscope and location components.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Lucas, Gale M.; Gratch, Jonathan; Cheng, Lin; Marsella, Stacy
When the going gets tough: Grit predicts costly perseverance Journal Article
In: Journal of Research in Personality, vol. 59, pp. 15–22, 2015, ISSN: 0092-6566.
@article{lucas_when_2015,
  title     = {When the going gets tough: Grit predicts costly perseverance},
  author    = {Lucas, Gale M. and Gratch, Jonathan and Cheng, Lin and Marsella, Stacy},
  url       = {http://ict.usc.edu/pubs/When%20the%20going%20gets%20tough-Grit%20predicts%20costly%20perseverance.pdf},
  doi       = {10.1016/j.jrp.2015.08.004},
  issn      = {0092-6566},
  year      = {2015},
  date      = {2015-08-01},
  journal   = {Journal of Research in Personality},
  volume    = {59},
  pages     = {15--22},
  abstract  = {In this research, we investigate how grittier individuals might incur some costs by persisting when they could move on. Grittier participants were found to be less willing to give up when failing even though they were likely to incur a cost for their persistence. First, grittier participants are more willing to risk failing to complete a task by persisting on individual items. Second, when they are losing, they expend more effort and persist longer in a game rather than quit. Gritty participants have more positive emotions and expectations toward the task, which mediates the relationship between grit and staying to persist when they are losing. Results show gritty individuals are more willing to risk suffering monetary loss to persist.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Kang, Sin-Hwa; Feng, Andrew; Leuski, Anton; Casas, Dan; Shapiro, Ari
Smart Mobile Virtual Humans: “Chat with Me!” Proceedings Article
In: Proceedings of the 15th International Conference on Intelligent Virtual Agents (IVA), pp. 475–478, Springer, Delft, Netherlands, 2015.
@inproceedings{kang_smart_2015,
  title     = {Smart Mobile Virtual Humans: “Chat with Me!”},
  author    = {Kang, Sin-Hwa and Feng, Andrew and Leuski, Anton and Casas, Dan and Shapiro, Ari},
  url       = {http://ict.usc.edu/pubs/Smart%20Mobile%20Virtual%20Humans%20-%20Chat%20with%20Me.pdf},
  doi       = {10.1007/978-3-319-21996-7},
  year      = {2015},
  date      = {2015-08-01},
  booktitle = {Proceedings of the 15th International Conference on Intelligent Virtual Agents (IVA)},
  pages     = {475--478},
  publisher = {Springer},
  address   = {Delft, Netherlands},
  abstract  = {In this study, we are interested in exploring whether people would talk with 3D animated virtual humans using a smartphone for a longer amount of time as a sign of feeling rapport [5], compared to non-animated or audio-only characters in everyday life. Based on previous studies [2, 7, 10], users prefer animated characters in emotionally engaged interactions when the characters were displayed on mobile devices, yet in a lab setting. We aimed to reach a broad range of users outside of the lab in natural settings to investigate the potential of our virtual human on smartphones to facilitate casual, yet emotionally engaging conversation. We also found that the literature has not reached a consensus regarding the ideal gaze patterns for a virtual human, one thing researchers agree on is that inappropriate gaze could negatively impact conversations at times, even worse than receiving no visual feedback at all [1, 4]. Everyday life may bring the experience of awkwardness or uncomfortable sentiments in reaction to continuous mutual gaze. On the other hand, gaze aversion could also make a speaker think their partner is not listening. Our work further aims to address this question of what constitutes appropriate eye gaze in emotionally engaged interactions. We developed a 3D animated and chat-based virtual human which presented emotionally expressive nonverbal behaviors such as facial expressions, head gestures, gaze, and other upper body movements (see Figure 1). The virtual human displayed appropriate gaze that was either consisted of constant mutual gaze or gaze aversion based on a statistical model of saccadic eye movement [8] while listening. Both gaze patterns were accompanied by other forms of appropriate nonverbal feedback. To explore the question of optimal communicative medium, we distributed our virtual human application to users via an app store for Android-powered phones (i.e. Google Play Store) in order to target users who owned a smartphone and could use our application in various natural settings.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Wang, Ning; Pynadath, David V.; Unnikrishnan, K. V.; Shankar, Santosh; Merchant, Chirag
Intelligent Agents for Virtual Simulation of Human-Robot Interaction Proceedings Article
In: Virtual, Augmented and Mixed Reality, pp. 228 – 239, Springer International Publishing, Los Angeles, CA, 2015, ISBN: 978-3-319-21066-7 978-3-319-21067-4.
@inproceedings{wang_intelligent_2015,
  title     = {Intelligent Agents for Virtual Simulation of Human-Robot Interaction},
  author    = {Wang, Ning and Pynadath, David V. and Unnikrishnan, K. V. and Shankar, Santosh and Merchant, Chirag},
  url       = {http://ict.usc.edu/pubs/Intelligent%20Agents%20for%20Virtual%20Simulation%20of%20Human-Robot%20Interaction.pdf},
  doi       = {10.1007/978-3-319-21067-4_24},
  isbn      = {978-3-319-21066-7 978-3-319-21067-4},
  year      = {2015},
  date      = {2015-08-01},
  booktitle = {Virtual, Augmented and Mixed Reality},
  volume    = {9179},
  pages     = {228--239},
  publisher = {Springer International Publishing},
  address   = {Los Angeles, CA},
  series    = {Lecture Notes in Computer Science},
  abstract  = {To study how robots can work better with humans as a team, we have designed an agent-based online testbed that supports virtual simulation of domain-independent human-robot interaction. The simulation is implemented as an online game where humans and virtual robots work together in simulated scenarios. This testbed allows researchers to carry out human-robot interaction studies and gain better understanding of, for example, how a robot’s communication can improve human-robot team performance by fostering better trust relationships among humans and their robot teammates. In this paper, we discuss the requirements, challenges and the design of such human-robot simulation. We illustrate its operation with an example human-robot joint reconnaissance task.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Wang, Ning; Pynadath, David V.; Marsella, Stacy C.
Subjective Perceptions in Wartime Negotiation Journal Article
In: IEEE Transactions on Affective Computing, vol. 6, no. 2, pp. 118–126, 2015, ISSN: 1949-3045.
@article{wang_subjective_2015,
  title     = {Subjective Perceptions in Wartime Negotiation},
  author    = {Wang, Ning and Pynadath, David V. and Marsella, Stacy C.},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6975149},
  doi       = {10.1109/TAFFC.2014.2378312},
  issn      = {1949-3045},
  year      = {2015},
  date      = {2015-04-01},
  journal   = {IEEE Transactions on Affective Computing},
  volume    = {6},
  number    = {2},
  pages     = {118--126},
  abstract  = {The prevalence of negotiation in social interaction has motivated researchers to develop virtual agents that can understand, facilitate, teach and even carry out negotiations. While much of this research has analyzed how to maximize the objective outcome, there is a growing body of work demonstrating that subjective perceptions of the outcome also play a critical role in human negotiation behavior. People derive subjective value from not only the outcome, but also from the process by which they achieve that outcome, from their relationship with their negotiation partner, etc. The affective responses evoked by these subjective valuations can be very different from what would be evoked by the objective outcome alone. We investigate such subjective valuations within human-agent negotiation in four variations of a wartime negotiation game. We observe that the objective outcomes of these negotiations are not strongly correlated with the human negotiators’ subjective perceptions, as measured by the Subjective Value Index. We examine the game dynamics and agent behaviors to identify features that induce different subjective values in the participants. We thus are able to identify characteristics of the negotiation process and the agents’ behavior that most impact people’s subjective valuations in our wartime negotiation games.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Rizzo, Albert; Scherer, Stefan; DeVault, David; Gratch, Jonathan; Artstein, Ron; Hartholt, Arno; Lucas, Gale; Marsella, Stacy; Morbini, Fabrizio; Nazarian, Angela; Stratou, Giota; Traum, David; Wood, Rachel; Boberg, Jill; Morency, Louis-Philippe
Detection and Computational Analysis of Psychological Signals Using a Virtual Human Interviewing Agent Proceedings Article
In: Proceedings of ICDVRAT 2014, International Journal of Disability and Human Development, Gothenburg, Sweden, 2014.
@inproceedings{rizzo_detection_2014,
  title     = {Detection and Computational Analysis of Psychological Signals Using a Virtual Human Interviewing Agent},
  author    = {Rizzo, Albert and Scherer, Stefan and DeVault, David and Gratch, Jonathan and Artstein, Ron and Hartholt, Arno and Lucas, Gale and Marsella, Stacy and Morbini, Fabrizio and Nazarian, Angela and Stratou, Giota and Traum, David and Wood, Rachel and Boberg, Jill and Morency, Louis-Philippe},
  url       = {http://ict.usc.edu/pubs/Detection%20and%20Computational%20Analysis%20of%20Psychological%20Signals%20Using%20a%20Virtual%20Human%20Interviewing%20Agent.pdf},
  year      = {2014},
  date      = {2014-12-01},
  booktitle = {Proceedings of ICDVRAT 2014},
  publisher = {International Journal of Disability and Human Development},
  address   = {Gothenburg, Sweden},
  abstract  = {It has long been recognized that facial expressions, body posture/gestures and vocal parameters play an important role in human communication and the implicit signalling of emotion. Recent advances in low cost computer vision and behavioral sensing technologies can now be applied to the process of making meaningful inferences as to user state when a person interacts with a computational device. Effective use of this additive information could serve to promote human interaction with virtual human (VH) agents that may enhance diagnostic assessment. This paper will focus on our current research in these areas within the DARPA-funded “Detection and Computational Analysis of Psychological Signals” project, with specific attention to the SimSensei application use case. SimSensei is a virtual human interaction platform that is able to sense and interpret real-time audiovisual behavioral signals from users interacting with the system. It is specifically designed for health care support and leverages years of virtual human research and development at USC-ICT. The platform enables an engaging face-to-face interaction where the virtual human automatically reacts to the state and inferred intent of the user through analysis of behavioral signals gleaned from facial expressions, body gestures and vocal parameters. Akin to how non-verbal behavioral signals have an impact on human to human interaction and communication, SimSensei aims to capture and infer from user non-verbal communication to improve engagement between a VH and a user. The system can also quantify and interpret sensed behavioral signals.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Feng, Andrew; Lucas, Gale; Marsella, Stacy; Suma, Evan; Chiu, Chung-Cheng; Casas, Dan; Shapiro, Ari
Acting the Part: The Role of Gesture on Avatar Identity Proceedings Article
In: Proceedings of the Seventh International Conference on Motion in Games (MIG 2014), pp. 49–54, ACM Press, Playa Vista, CA, 2014, ISBN: 978-1-4503-2623-0.
@inproceedings{feng_acting_2014,
  title     = {Acting the Part: The Role of Gesture on Avatar Identity},
  author    = {Feng, Andrew and Lucas, Gale and Marsella, Stacy and Suma, Evan and Chiu, Chung-Cheng and Casas, Dan and Shapiro, Ari},
  url       = {http://dl.acm.org/citation.cfm?doid=2668064.2668102},
  doi       = {10.1145/2668064.2668102},
  isbn      = {978-1-4503-2623-0},
  year      = {2014},
  date      = {2014-11-01},
  booktitle = {Proceedings of the Seventh International Conference on Motion in Games (MIG 2014)},
  pages     = {49--54},
  publisher = {ACM Press},
  address   = {Playa Vista, CA},
  abstract  = {Recent advances in scanning technology have enabled the widespread capture of 3D character models based on human subjects. However, in order to generate a recognizable 3D avatar, the movement and behavior of the human subject should be captured and replicated as well. We present a method of generating a 3D model from a scan, as well as a method to incorporate a subjects style of gesturing into a 3D character. We present a study which shows that 3D characters that used the gestural style as their original human subjects were more recognizable as the original subject than those that don’t.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Feng, Andrew; Shapiro, Ari; Lhommet, Margaux; Marsella, Stacy
Embodied Autonomous Agents Book Section
In: Handbook of Virtual Environments: Design, Implementation, and Applications, pp. 335–352, 2014.
@incollection{feng_embodied_2014,
  title     = {Embodied Autonomous Agents},
  author    = {Feng, Andrew and Shapiro, Ari and Lhommet, Margaux and Marsella, Stacy},
  url       = {http://books.google.com/books?hl=en&lr=&id=7zzSBQAAQBAJ&oi=fnd&pg=PP1&dq=+Handbook+of+Virtual+Environments&ots=Vx3ia0S2Uu&sig=LaVbSdoG3FahlbVYbuCxLmKgFIA#v=onepage&q=Handbook%20of%20Virtual%20Environments&f=false},
  year      = {2014},
  date      = {2014-09-01},
  booktitle = {Handbook of Virtual Environments: Design, Implementation, and Applications},
  pages     = {335--352},
  abstract  = {Since the last decade, virtual environments have been extensively used for a wide range of application, from training systems to video games. Virtual humans are animated characters that are designed to populate these environments and to interact with the objects of the world as well as with the user. A virtual agent must perceive the world in which it exists, reason about those perceptions, and decide on how to act on them in pursuit of its own agenda.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {incollection}
}
Xu, Yuyu; Pelachaud, Catherine; Marsella, Stacy
Compound Gesture Generation: A Model Based on Ideational Units Proceedings Article
In: Intelligent Virtual Agents, pp. 477–491, Springer, Boston, MA, 2014.
@inproceedings{xu_compound_2014,
  title     = {Compound Gesture Generation: A Model Based on Ideational Units},
  author    = {Xu, Yuyu and Pelachaud, Catherine and Marsella, Stacy},
  url       = {http://ict.usc.edu/pubs/Compound%20Gesture%20Generation%20-%20A%20Model%20Based%20on%20Ideational%20Units.pdf},
  year      = {2014},
  date      = {2014-08-01},
  booktitle = {Intelligent Virtual Agents},
  pages     = {477--491},
  publisher = {Springer},
  address   = {Boston, MA},
  abstract  = {This work presents a hierarchical framework that generates continuous gesture animation performance for virtual characters. As opposed to approaches that focus more on realizing individual gesture, the focus of this work is on the relation between gestures as part of an overall gesture performance. Following Calbris’ work [3], our approach is to structure the performance around ideational units and determine gestural features within and across these ideational units. Furthermore, we use Calbris’ work on the relation between form and meaning in gesture to help inform how individual gesture’s expressivity is manipulated. Our framework takes in high level communicative function descriptions, generates behavior descriptions and realizes them using our character animation engine. We define the specifications for these different levels of descriptions. Finally, we show the general results as well as experiments illustrating the impacts of the key features.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Lhommet, Margot; Marsella, Stacy
Metaphoric Gestures: Towards Grounded Mental Spaces Proceedings Article
In: Intelligent Virtual Agents, pp. 264–274, Springer, Boston, MA, 2014.
@inproceedings{lhommet_metaphoric_2014,
  title     = {Metaphoric Gestures: Towards Grounded Mental Spaces},
  author    = {Lhommet, Margot and Marsella, Stacy},
  url       = {http://ict.usc.edu/pubs/Metaphoric%20Gestures%20-%20Towards%20Grounded%20Mental%20Spaces.pdf},
  year      = {2014},
  date      = {2014-08-01},
  booktitle = {Intelligent Virtual Agents},
  pages     = {264--274},
  publisher = {Springer},
  address   = {Boston, MA},
  abstract  = {Gestures are related to the mental states and unfolding processes of thought, reasoning and verbal language production. This is especially apparent in the case of metaphors and metaphoric gestures. For example, talking about the importance of an idea by calling it a big idea and gesturing to indicate that large size is a manifestation of the use of metaphors in language and gesture. We propose a computational model of the influence of conceptual metaphors on gestures that maps from mental state representations of ideas to their expression in concrete, physical metaphoric gestures. This model relies on conceptual primary metaphors to map the abstract elements of the mental space to concrete physical elements that can be conveyed with gestures.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Pynadath, David V.; Rosenbloom, Paul S.; Marsella, Stacy C.
Reinforcement Learning for Adaptive Theory of Mind in the Sigma Cognitive Architecture Proceedings Article
In: Proceedings of the 7th Annual Conference on Artificial General Intelligence, pp. 143 – 154, Springer International Publishing, Quebec City, Canada, 2014, ISBN: 978-3-319-09273-7.
@inproceedings{pynadath_reinforcement_2014,
  title     = {Reinforcement Learning for Adaptive Theory of Mind in the {Sigma} Cognitive Architecture},
  author    = {Pynadath, David V. and Rosenbloom, Paul S. and Marsella, Stacy C.},
  url       = {http://ict.usc.edu/pubs/Reinforcement%20learning%20for%20adaptive%20Theory%20of%20Mind%20in%20the%20Sigma%20cognitive%20architecture.pdf},
  doi       = {10.1007/978-3-319-09274-4_14},
  isbn      = {978-3-319-09273-7},
  year      = {2014},
  date      = {2014-08-01},
  booktitle = {Proceedings of the 7th Annual Conference on Artificial General Intelligence},
  pages     = {143--154},
  publisher = {Springer International Publishing},
  address   = {Quebec City, Canada},
  abstract  = {One of the most common applications of human intelligence is social interaction, where people must make effective decisions despite uncertainty about the potential behavior of others around them. Reinforcement learning (RL) provides one method for agents to acquire knowledge about such interactions. We investigate different methods of multiagent reinforcement learning within the Sigma cognitive architecture. We leverage Sigma’s architectural mechanism for gradient descent to realize four different approaches to multiagent learning: (1) with no explicit model of the other agent, (2) with a model of the other agent as following an unknown stationary policy, (3) with prior knowledge of the other agent’s possible reward functions, and (4) through inverse reinforcement learn- ing (IRL) of the other agent’s reward function. While the first three variations re-create existing approaches from the literature, the fourth represents a novel combination of RL and IRL for social decision-making. We show how all four styles of adaptive Theory of Mind are realized through Sigma’s same gradient descent algorithm, and we illustrate their behavior within an abstract negotiation task.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
McAlinden, Ryan; Pynadath, David V.; Hill, Randall W.
UrbanSim: Using Social Simulation to Train for Stability Operations Book Section
In: Understanding Megacities with the Reconnaissance, Surveillance, and Intelligence Paradigm, 2014.
% NOTE(review): braced {UrbanSim} so styles keep its casing; normalized Unicode
% hyphens (U+2010) and curly quotes in the abstract for classic-BibTeX safety.
@incollection{mcalinden_urbansim_2014,
title = {{UrbanSim}: Using Social Simulation to Train for Stability Operations},
author = {Ryan McAlinden and David V. Pynadath and Randall W. Hill},
url = {http://ict.usc.edu/pubs/UrbanSim%20-%20Using%20Social%20Simulation%20to%20Train%20for%20Stability%20Operations.pdf},
year = {2014},
date = {2014-04-01},
booktitle = {Understanding Megacities with the Reconnaissance, Surveillance, and Intelligence Paradigm},
abstract = {As the United States reorients itself towards to a period of reduced military capacity and away from large-footprint military engagements, there is an imperative to keep commanders and decision-makers mentally sharp and prepared for the next `hot spot.' One potential hot spot, megacities, presents a unique set of challenges due to their expansive, often interwoven ethnographic landscapes, and their overall lack of understanding by many western experts. Social simulation using agent-based models is one approach for furthering our understanding of distant societies and their security implications, and for preparing leaders to engage these populations if and when the need arises. Over the past ten years, the field of social simulation has become decidedly cross-discipline, including academics and practitioners from the fields of sociology, anthropology, psychology, artificial intelligence and engineering. This has led to an unparalleled advancement in social simulation theory and practice, and as new threats evolve to operate within dense but expansive urban environments, social simulation has a unique opportunity to shape our perspectives and develop knowledge that may otherwise be difficult to obtain. This article presents a social simulation-based training application (UrbanSim) developed by the University of Southern California's Institute for Creative Technologies (USC-ICT) in partnership with the US Army's School for Command Preparation (SCP). UrbanSim has been in-use since 2009 to help Army commanders understand and train for missions in complex, uncertain environments. The discussion describes how the social simulation-based training application was designed to develop and hone commanders' skills for conducting missions in environs with multifaceted social, ethnic and political fabrics.
We present a few considerations when attempting to recreate dense, rapidly growing population centers, and how the integration of real-world data into social simulation frameworks can add a level of realism and understanding not possible even a few years ago.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
Sukthankar, Gita; Goldman, Robert P.; Geib, Christopher; Pynadath, David V.; Bui, Hung
Plan, Activity, and Intent Recognition: Theory and Practice Book
Morgan Kaufmann, 2014, ISBN: 0-12-398532-3.
@book{sukthankar_plan_2014,
  author    = {Gita Sukthankar and Robert P. Goldman and Christopher Geib and David V. Pynadath and Hung Bui},
  title     = {Plan, Activity, and Intent Recognition: Theory and Practice},
  publisher = {Morgan Kaufmann},
  year      = {2014},
  date      = {2014-03-01},
  isbn      = {0-12-398532-3},
  url       = {http://www.amazon.com/Plan-Activity-Intent-Recognition-Practice/dp/0123985323/ref=sr_1_1?s=books&ie=UTF8&qid=1408747877&sr=1-1&keywords=Plan%2C+Activity%2C+and+Intent+Recognition%3A+Theory+and+Practice},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {book},
}
Hill, Randall W.
Virtual Reality and Leadership Development Book Section
In: Using Experience to Develop Leadership Talent: How Organizations Leverage On-The-Job Development, pp. 286–312, John Wiley & Sons, Inc., 2014, ISBN: 978-1-118-76783-2.
% NOTE(review): page range now uses "--" (single hyphen/en-dash is a typographical
% error in BibTeX); escaped the bare "&" in publisher as "\&" (LaTeX special).
@incollection{hill_virtual_2014,
title = {Virtual Reality and Leadership Development},
author = {Randall W. Hill},
url = {http://www.amazon.com/dp/1118767837/ref=cm_sw_su_dp},
isbn = {978-1-118-76783-2},
year = {2014},
date = {2014-03-01},
booktitle = {Using Experience to Develop Leadership Talent: How Organizations Leverage On-The-Job Development},
pages = {286--312},
publisher = {John Wiley \& Sons, Inc.},
series = {J-B SIOP Professional Practice Series (Book 1)},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
Filter
2009
Thiebaux, Marcus; Lance, Brent; Marsella, Stacy C.
Real-Time Expressive Gaze Animation for Virtual Humans Proceedings Article
In: International Conference on Autonomous Agents and Multiagent Systems (AAMAS), Budapest, Hungary, 2009.
Abstract | Links | BibTeX | Tags: Social Simulation
@inproceedings{thiebaux_real-time_2009,
  author    = {Marcus Thiebaux and Brent Lance and Stacy C. Marsella},
  title     = {Real-Time Expressive Gaze Animation for Virtual Humans},
  booktitle = {International Conference on Autonomous Agents and Multiagent Systems (AAMAS)},
  address   = {Budapest, Hungary},
  year      = {2009},
  date      = {2009-05-01},
  url       = {http://ict.usc.edu/pubs/Real-Time%20Expressive%20Gaze%20Animation%20for%20Virtual%20Humans.pdf},
  abstract  = {Gaze is an extremely important aspect of human face to face interaction. Over the course of an interaction, a single individual's gaze can perform many different functions, such as regulating communication, expressing emotion, and attending to task performance. When gaze shifts occur, where they are directed, and how they are performed all provide critical information to an observer of the gaze shift. The goal of this work is to allow virtual humans to mimic the gaze capabilities of humans in face to face interaction. This paper introduces the SmartBody Gaze Controller (SBGC), a highly versatile framework for realizing various manners of gaze through a rich set of input parameters. Using these parameters, the SBCG controls aspects of movement such as velocity, postural bias, and the selection of joints committed to a particular gaze task. We provide a preliminary implementation that demonstrates how related work on the Expressive Gaze Model (EGM) can be used to inform management of these input parameters. The EGM is a model for manipulating the style of gaze shifts for the purpose of expressing emotion [11]. The SBGC is fully compatible with all aspects of the SmartBody system [23].},
  keywords  = {Social Simulation},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Ito, Jonathan Y.; Pynadath, David V.; Marsella, Stacy C.
Self-Deceptive Decision Making: Normative and Descriptive Insights Proceedings Article
In: International Conference on Autonomous Agents and Multiagent Systems (AAMAS), Budapest, Hungary, 2009.
Abstract | Links | BibTeX | Tags: Social Simulation
% NOTE(review): removed PDF copy-paste hyphenation artifacts ("impor- tant",
% "frame- work", etc.) from the abstract; no other data changed.
@inproceedings{ito_self-deceptive_2009,
title = {Self-Deceptive Decision Making: Normative and Descriptive Insights},
author = {Jonathan Y. Ito and David V. Pynadath and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Self-Deceptive%20Decision%20Making-%20Normative%20and%20Descriptive%20Insights.pdf},
year = {2009},
date = {2009-05-01},
booktitle = {International Conference on Autonomous Agents and Multiagent Systems (AAMAS)},
address = {Budapest, Hungary},
abstract = {Computational modeling of human belief maintenance and decision-making processes has become increasingly important for a wide range of applications. We present a framework for modeling the psychological phenomenon of self-deception in a decision-theoretic framework. Specifically, we model the self-deceptive behavior of wishful thinking as a psychological bias towards the belief in a particularly desirable situation or state. By leveraging the structures and axioms of Expected Utility (EU) Theory we are able to operationalize both the determination and the application of the desired belief state with respect to the decision-making process of expected utility maximization. While we categorize our framework as a descriptive model of human decision making, we show that in certain circumstances the realized expected utility of an action biased by wishful thinking can exceed that of an action motivated purely by the maximization of perceived expected utility. Finally, we show that our framework of self-deception and wishful thinking has the descriptive flexibility to account for the inconsistencies highlighted by the Common Ratio Effect and the Allais Paradox.},
keywords = {Social Simulation},
pubstate = {published},
tppubtype = {inproceedings}
}
Marsella, Stacy C.; Gratch, Jonathan
EMA: A process model of appraisal dynamics Journal Article
In: Journal of Cognitive Systems Research, vol. 10, no. 1, pp. 70–90, 2009.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
% NOTE(review): braced {EMA} so sentence-casing styles keep the acronym;
% page range now uses "--" instead of a Unicode en-dash.
@article{marsella_ema_2009,
title = {{EMA}: A process model of appraisal dynamics},
author = {Stacy C. Marsella and Jonathan Gratch},
url = {http://ict.usc.edu/pubs/EMA-%20A%20process%20model%20of%20appraisal%20dynamics.pdf},
year = {2009},
date = {2009-03-01},
journal = {Journal of Cognitive Systems Research},
volume = {10},
number = {1},
pages = {70--90},
abstract = {A computational model of emotion must explain both the rapid dynamics of some emotional reactions as well as the slower responses that follow deliberation. This is often addressed by positing multiple levels of appraisal processes such as fast pattern directed vs. slower deliberative appraisals. In our view, this confuses appraisal with inference. Rather, we argue for a single and automatic appraisal process that operates over a person's interpretation of their relationship to the environment. Dynamics arise from perceptual and inferential processes operating on this interpretation (including deliberative and reactive processes). This article discusses current developments in a computational model of emotion processes and illustrates how a single-level model of appraisal obviates a multi-level approach within the context of modeling a naturalistic emotional situation.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
Gratch, Jonathan; Marsella, Stacy C.; Wang, Ning; Stankovic, Brooke
Assessing the validity of appraisal-based models of emotion Proceedings Article
In: Proceedings of the International Conference on Affective Computing and Intelligent Interaction (ACII), Amsterdam, The Netherlands, 2009.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
% NOTE(review): repaired garbled "modelss" in the abstract to the evident
% possessive "models'".
@inproceedings{gratch_assessing_2009,
title = {Assessing the validity of appraisal-based models of emotion},
author = {Jonathan Gratch and Stacy C. Marsella and Ning Wang and Brooke Stankovic},
url = {http://ict.usc.edu/pubs/Assessing%20the%20validity%20of%20appraisal-based%20models%20of%20emotion.pdf},
year = {2009},
date = {2009-01-01},
booktitle = {Proceedings of the International Conference on Affective Computing and Intelligent Interaction (ACII)},
address = {Amsterdam, The Netherlands},
abstract = {We describe an empirical study comparing the accuracy of competing computational models of emotion in predicting human emotional responses in naturalistic emotion-eliciting situations. The results find clear differences in models' ability to forecast human emotional responses, and provide guidance on how to develop more accurate models of human emotion.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Hartholt, Arno; Gratch, Jonathan; Leuski, Anton; Morency, Louis-Philippe; Marsella, Stacy C.; Liewer, Matt; Doraiswamy, Prathibha; Weiss, Lori; LeMasters, Kim; Fast, Edward; Sadek, Ramy; Marshall, Andrew; Lee, Jina; Thiebaux, Marcus; Tsiartas, Andreas
At the Virtual Frontier: Introducing Gunslinger, a Multi-Character, Mixed-Reality, Story-Driven Experience Proceedings Article
In: Proceedings of the 9th International Conference on Intelligent Virtual Agents (IVA), Amsterdam, The Netherlands, 2009.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
% NOTE(review): fixed hyphenation artifact "Multi- Character" in the title.
@inproceedings{hartholt_at_2009,
title = {At the Virtual Frontier: Introducing Gunslinger, a Multi-Character, Mixed-Reality, Story-Driven Experience},
author = {Arno Hartholt and Jonathan Gratch and Anton Leuski and Louis-Philippe Morency and Stacy C. Marsella and Matt Liewer and Prathibha Doraiswamy and Lori Weiss and Kim LeMasters and Edward Fast and Ramy Sadek and Andrew Marshall and Jina Lee and Marcus Thiebaux and Andreas Tsiartas},
url = {http://ict.usc.edu/pubs/At%20the%20Virtual%20Frontier-%20Introducing%20Gunslinger%20a%20Multi-%20Character%20Mixed-Reality%20Story-Driven%20Experience.pdf},
year = {2009},
date = {2009-01-01},
booktitle = {Proceedings of the 9th International Conference on Intelligent Virtual Agents (IVA)},
address = {Amsterdam, The Netherlands},
abstract = {We describe an application of intelligent virtual agents to the domain of mixed-reality interactive entertainment. Gunslinger allows users to interact with life-sized virtual humans within the context of a wild west story world. The application incorporates a novel integration of capabilities including gesture and spoken language recognition, story and dialogue reasoning, and multi-character, multi-modal behavior generation and synthesis. The article describes our design process, technological innovations, and initial feedback from user interactions with the system.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Gratch, Jonathan; Marsella, Stacy C.; Petta, Paola
Modeling the Cognitive Antecedents and Consequences of Emotion Journal Article
In: Journal of Cognitive Systems Research, vol. 10, no. 1, pp. 1–5, 2009.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
% NOTE(review): page range now uses "--" instead of a Unicode en-dash.
@article{gratch_modeling_2009,
title = {Modeling the Cognitive Antecedents and Consequences of Emotion},
author = {Jonathan Gratch and Stacy C. Marsella and Paola Petta},
url = {http://ict.usc.edu/pubs/Modeling%20the%20Cognitive%20Antecedents%20and%20Consequences%20of%20Emotion.pdf},
year = {2009},
date = {2009-01-01},
journal = {Journal of Cognitive Systems Research},
volume = {10},
number = {1},
pages = {1--5},
abstract = {The last decade has seen an explosion of interest in emotion in both the social and computational sciences. Emotions arise from interactions with both people and technology. They color human perception and decision making and shape a person's moment-to-moment responses to their social and physical environment. Emotions are expressed through changes in speech, facial expression, posture and physiological processes, and these changes provide essential clues to a person's beliefs, desires, intentions and likely future behavior. Recognizing and exploiting such influences can have broad impact across a variety of disciplines: Incorporating the influence of emotion greatly increases explanatory power of models of human decision making (Loewenstein & Lerner, 2003); Responding to a student's emotions can enhance the effectiveness of human or computer tutors (Conati & MacLaren, 2004; Graesser et al., 2008; Lepper, 1988); And modeling emotional influences can enhance the fidelity of social simulations, including how crowds react in disasters (Lyell, Flo, & Mejia-Tellez, 2006; Silverman, Johns, O'Brien, Weaver, & Cornwell, 2002), how military units respond to the stress of battle (Gratch & Marsella, 2003), and even large social situations as when modeling the economic impact of traumatic events such as 9/11 or modeling inter-group conflicts (Marsella, Pynadath, & Read, 2004). More generally, an understanding of the cognitive and social function of human emotion complements the rational, individualistic and disembodied view of cognition that underlies most artificial intelligence and cognitive system research. Emotional influences that seem irrational on the surface may have important social and cognitive functions that would be required by any intelligent system. For example, Herb Simon (1967) theorized that emotions serve to interrupt normal cognition when unattended goals require servicing. 
Robert Frank argues that social emotions such as anger and guilt reflect a mechanism that improves group utility by minimizing social conflicts, and thereby explains people's "irrational" choices to cooperate in social games such as the prisoner's dilemma (Frank, 1988). Similarly, Alfred Mele (2001) claims that "emotional biases" such as wishful thinking reflect a rational mechanism that more accurately accounts for social costs, such as the cost of betrayal when a parent defends a child despite strong evidence of their guilt in a crime (see also Ito, Pynadath, & Marsella, 2008). At the same time, findings on non-conscious judgments (e.g., Barrett, Ochsner, & Gross, 2007; Moors, De Houwer, Hermans, & Eelen, 2005) have enriched our understanding of how cognitive style is shaped by the socio-emotional context, often in adaptive ways. More broadly, appraisal theorists such as Lazarus (1991), Frijda (1987) and Scherer (2001) have argued that emotions are intimately connected with how organisms sense events, relate them to internal needs (e.g., is this an opportunity or a threat?), characterize appropriate responses (e.g., fight, flight or plan) and recruit the cognitive, physical and social resources needed to adaptively respond. Thus, an understanding of emotion's function can inform the design of cognitive systems that must survive in a dynamic, semi-predictable and social world. This special issue of the Journal of Cognitive Systems Research gives a cross-section of contemporary psychological and computational research on the interplay of cognition and emotion. The articles arise from a recent interdisciplinary symposium on Modeling the Cognitive Antecedents and Consequences of Emotion that brought together leaders in psychological and computational approaches to emotion for three days of intense discussion. 
The articles represent the current state of an ongoing discussing to bridge the divide between computational and psychological perspectives on emotion, illustrating both that theories on the function of emotion in human cognition can yield key insights into the design and control of intelligent entities in general, and that computational models of human mental processes can inform psychological theories through the exercise of concretizing them into working and testable systems.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
Kim, Julia; Hill, Randall W.; Durlach, Paula; Lane, H. Chad; Forbell, Eric; Core, Mark; Marsella, Stacy C.; Pynadath, David V.; Hart, John
BiLAT: A Game-Based Environment for Practicing Negotiation in a Cultural Context Journal Article
In: International Journal of Artificial Intelligence in Education, vol. 19, no. Issue on Ill-Defined Domains, pp. 289–308, 2009.
Abstract | Links | BibTeX | Tags: Learning Sciences, Social Simulation
% NOTE(review): braced {BiLAT} to preserve its camel-case under sentence-casing
% styles; page range now uses "--" instead of a Unicode en-dash.
@article{kim_bilat_2009,
title = {{BiLAT}: A Game-Based Environment for Practicing Negotiation in a Cultural Context},
author = {Julia Kim and Randall W. Hill and Paula Durlach and H. Chad Lane and Eric Forbell and Mark Core and Stacy C. Marsella and David V. Pynadath and John Hart},
url = {http://ict.usc.edu/pubs/BiLAT-%20A%20Game-Based%20Environment%20for%20Practicing%20Negotiation%20in%20a%20Cultural%20Context.pdf},
year = {2009},
date = {2009-01-01},
journal = {International Journal of Artificial Intelligence in Education},
volume = {19},
number = {Issue on Ill-Defined Domains},
pages = {289--308},
abstract = {Negotiation skills are essential in everyday life, whether in a professional or personal context. Negotiation enables two parties to address misunderstandings and avoid conflicts through an exchange that depends as much on the interpersonal skills of the negotiators as the tactics employed. Acquiring these skills requires not only sound conceptual knowledge but also practice and mentoring. This paper describes the BiLAT game-based simulation and tutoring system developed to provide students, initially United States Army soldiers, with an environment to practice preparing for and conducting bilateral negotiations. We describe the models that were created to implement BiLAT, with a particular focus on the challenge of designing for and tutoring in the ill-defined domain of negotiation. An initial assessment of the training effectiveness of the system indicates significant situation-judgment gains by novices.},
keywords = {Learning Sciences, Social Simulation},
pubstate = {published},
tppubtype = {article}
}
2008
Traum, David; Gratch, Jonathan; Hartholt, Arno; Marsella, Stacy C.; Lee, Jina
Multi-party, Multi-issue, Multi-strategy Negotiation for Multi-modal Virtual Agents Proceedings Article
In: Proceedings of the 8th International Conference on Intelligent Virtual Agents, pp. 117–130, Tokyo, Japan, 2008.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
% NOTE(review): page range now uses "--" instead of a Unicode en-dash; restored
% missing space after "negotiation." in the abstract.
@inproceedings{traum_multi-party_2008,
title = {Multi-party, Multi-issue, Multi-strategy Negotiation for Multi-modal Virtual Agents},
author = {David Traum and Jonathan Gratch and Arno Hartholt and Stacy C. Marsella and Jina Lee},
url = {http://ict.usc.edu/pubs/Multi-party,%20Multi-issue,%20Multi-strategy%20Negotiation.pdf},
year = {2008},
date = {2008-09-01},
booktitle = {Proceedings of the 8th International Conference on Intelligent Virtual Agents},
pages = {117--130},
address = {Tokyo, Japan},
abstract = {We present a model of negotiation for virtual agents that extends previous work to be more human-like and applicable to a broader range of situations, including more than two negotiators with different goals, and negotiating over multiple options. The agents can dynamically change their negotiating strategies based on the current values of several parameters and factors that can be updated in the course of the negotiation. We have implemented this model and done preliminary evaluation within a prototype training system and a three-party negotiation with two virtual humans and one human.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Kenny, Patrick G.; Parsons, Thomas D.; Gratch, Jonathan; Rizzo, Albert
Virtual Humans for Assisted Health Care Proceedings Article
In: Pervasive Technologies for Assistive Environments (PETRA) Conference Proceedings, ACM, Athens, Greece, 2008.
Abstract | Links | BibTeX | Tags: MedVR, Social Simulation, Virtual Humans
@inproceedings{kenny_virtual_2008-1,
  author    = {Patrick G. Kenny and Thomas D. Parsons and Jonathan Gratch and Albert Rizzo},
  title     = {Virtual Humans for Assisted Health Care},
  booktitle = {Pervasive Technologies for Assistive Environments (PETRA) Conference Proceedings},
  publisher = {ACM},
  address   = {Athens, Greece},
  year      = {2008},
  date      = {2008-07-01},
  url       = {http://ict.usc.edu/pubs/Virtual%20Humans%20for%20Assisted%20Health%20Care.pdf},
  abstract  = {There is a growing need for applications that can dynamically interact with aging populations to gather information, monitor their health care, provide information, or even act as companions. Virtual human agents or virtual characters offer a technology that can enable human users to overcome the confusing interfaces found in current human-computer interactions. These artificially intelligent virtual characters have speech recognition, natural language and vision that will allow human users to interact with their computers in a more natural way. Additionally, sensors may be used to monitor the environment for specific behaviors that can be fused into a virtual human system. As a result, the virtual human may respond to a patient or elderly person in a manner that will have a powerful affect on their living situation. This paper will describe the virtual human technology developed and some current applications that apply the technology to virtual patients for mental health diagnosis and clinician training. Additionally the paper will discuss possible ways in which the virtual humans may be utilized for assisted health care and for the integration of multi-modal input to enhance the virtual human system.},
  keywords  = {MedVR, Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Wang, Ning; Marsella, Stacy C.; Hawkins, Tim
Individual Differences in Expressive Response: A Challenge for ECA Design Proceedings Article
In: 7th International Conference on Autonomous Agents and Multiagent Systems, Estoril, Portugal, 2008.
Abstract | Links | BibTeX | Tags: Social Simulation
% NOTE(review): braced {ECA} so sentence-casing styles keep the acronym.
@inproceedings{wang_individual_2008,
title = {Individual Differences in Expressive Response: A Challenge for {ECA} Design},
author = {Ning Wang and Stacy C. Marsella and Tim Hawkins},
url = {http://ict.usc.edu/pubs/Individual%20Differences%20in%20Expressive%20Response.pdf},
year = {2008},
date = {2008-05-01},
booktitle = {7th International Conference on Autonomous Agents and Multiagent Systems},
address = {Estoril, Portugal},
abstract = {To create realistic and expressive virtual humans, we need to develop better models of the processes and dynamics of human emotions and expressions. A first step in this effort is to develop means to systematically induce and capture realistic expressions in real humans. We conducted a series of studies on human emotions and facial expression using the Emotion Evoking Game (EVG) and a high-speed video camera. In this paper, we discuss a detailed analysis of facial expressions in response to a surprise situation. We provide details on the rich dynamics of facial expressions, along with data useful for animation of virtual human. The analysis of the data also revealed considerable individual differences in whether surprise was evoked and how it was expressed.},
keywords = {Social Simulation},
pubstate = {published},
tppubtype = {inproceedings}
}
Lee, Jina; DeVault, David; Marsella, Stacy C.; Traum, David
Thoughts on FML: Behavior Generation in the Virtual Human Communication Architecture Proceedings Article
In: International Conference on Autonomous Agents and Multiagent Systems (AAMAS) First Functional Markup Language Workshop, Estoril, Portugal, 2008.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
% NOTE(review): braced {FML} in the title; removed PDF hyphenation artifacts
% ("archi- tecture", "dis- plays") from the abstract.
@inproceedings{lee_thoughts_2008,
title = {Thoughts on {FML}: Behavior Generation in the Virtual Human Communication Architecture},
author = {Jina Lee and David DeVault and Stacy C. Marsella and David Traum},
url = {http://ict.usc.edu/pubs/Thoughts%20on%20FML.pdf},
year = {2008},
date = {2008-05-01},
booktitle = {International Conference on Autonomous Agents and Multiagent Systems (AAMAS) First Functional Markup Language Workshop},
address = {Estoril, Portugal},
abstract = {We discuss our current architecture for the generation of natural language and non-verbal behavior in ICT virtual humans. We draw on our experience developing this architecture to present our current perspective on several issues related to the standardization of FML and to the SAIBA framework more generally. In particular, we discuss our current use, and non-use, of FML-inspired representations in generating natural language, eye gaze, and emotional displays. We also comment on some of the shortcomings of our design as currently implemented.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Marsella, Stacy C.
Experiences Authoring Interactive Pedagogical Dramas Journal Article
In: Int. J. Cont. Engineering Education and Life-Long Learning, vol. 18, no. 2, pp. 159–180, 2008.
Abstract | Links | BibTeX | Tags: Social Simulation
% NOTE(review): page range now uses "--" instead of a Unicode en-dash.
@article{marsella_experiences_2008,
title = {Experiences Authoring Interactive Pedagogical Dramas},
author = {Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Experiences%20Authoring%20Interactive%20Pedagogical%20Dramas.pdf},
year = {2008},
date = {2008-03-01},
journal = {Int. J. Cont. Engineering Education and Life-Long Learning},
volume = {18},
number = {2},
pages = {159--180},
abstract = {The focus of this article is the design of Interactive Pedagogical Dramas (IPD). An IPD is a computer-generated environment that immerses the learner as an active participant in an engaging, evocative story, populated with animated characters. The story unfolds based on the learner's decisions. The various design issues that are faced in crafting IPDs will be covered. In particular, how pedagogy can be incorporated into the learner's interaction with the narrative will be discussed. The discussion will be illustrated using several existing IPDs. Finally, we will conclude with several observations on the design process for IPDs and possible new directions for this design process.},
keywords = {Social Simulation},
pubstate = {published},
tppubtype = {article}
}
Traum, David; Swartout, William; Gratch, Jonathan; Marsella, Stacy C.
A Virtual Human Dialogue Model for Non-team Interaction Book Section
In: Recent Trends in Discourse and Dialogue, vol. 39, pp. 45–67, Springer, Dordrecht, The Netherlands, 2008.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
% NOTE(review): corrected misspelled publisher city "Dordecht" -> "Dordrecht";
% page range now uses "--" instead of a Unicode en-dash.
@incollection{traum_virtual_2008,
title = {A Virtual Human Dialogue Model for Non-team Interaction},
author = {David Traum and William Swartout and Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/A%20Virtual%20Human%20Dialogue%20Model%20for%20Non-team%20Interaction.pdf},
year = {2008},
date = {2008-01-01},
booktitle = {Recent Trends in Discourse and Dialogue},
volume = {39},
pages = {45--67},
publisher = {Springer},
address = {Dordrecht, The Netherlands},
series = {Text, Speech and Language Technology},
abstract = {We describe the dialogue model for the virtual humans developed at the Institute for Creative Technologies at the University of Southern California. The dialogue model contains a rich set of information state and dialogue moves to allow a wide range of behaviour in multimodal, multiparty interaction. We extend this model to enable non-team negotiation, using ideas from social science literature on negotiation and implemented strategies and dialogue moves for this area. We present a virtual human doctor who uses this model to engage in multimodal negotiation dialogue with people from other organisations. The doctor is part of the SASO-ST system, used for training for non-team interactions.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {incollection}
}
2007
Kenny, Patrick G.; Hartholt, Arno; Gratch, Jonathan; Swartout, William; Traum, David; Marsella, Stacy C.; Piepol, Diane
Building Interactive Virtual Humans for Training Environments Proceedings Article
In: Interservice/Industry Training, Simulation and Education Conference (I/ITSEC), Orlando, FL, 2007.
Abstract | Links | BibTeX | Tags: MedVR, Social Simulation, Virtual Humans
@inproceedings{kenny_building_2007,
  author    = {Patrick G. Kenny and Arno Hartholt and Jonathan Gratch and William Swartout and David Traum and Stacy C. Marsella and Diane Piepol},
  title     = {Building Interactive Virtual Humans for Training Environments},
  booktitle = {Interservice/Industry Training, Simulation and Education Conference (I/ITSEC)},
  address   = {Orlando, FL},
  year      = {2007},
  date      = {2007-11-01},
  url       = {http://ict.usc.edu/pubs/Building%20Interactive%20Virtual%20Humans%20for%20Training%20Environments.pdf},
  abstract  = {There is a great need in the Joint Forces to have human to human interpersonal training for skills such as negotiation, leadership, interviewing and cultural training. Virtual environments can be incredible training tools if used properly and used for the correct training application. Virtual environments have already been very successful in training Warfighters how to operate vehicles and weapons systems. At the Institute for Creative Technologies (ICT) we have been exploring a new question: can virtual environments be used to train Warfighters in interpersonal skills such as negotiation, tactical questioning and leadership that are so critical for success in the contemporary operating environment? Using embodied conversational agents to create this type of training system has been one of the goals of the Virtual Humans project at the institute. ICT has a great deal of experience building complex, integrated and immersive training systems that address the human factor needs for training experiences. This paper will address the research, technology and value of developing virtual humans for training environments. This research includes speech recognition, natural language understanding & generation, dialogue management, cognitive agents, emotion modeling, question response managers, speech generation and non-verbal behavior. Also addressed will be the diverse set of training environments we have developed for the system, from single computer laptops to multi-computer immersive displays to real and virtual integrated environments. This paper will also discuss the problems, issues and solutions we encountered while building these systems. The paper will recount subject testing we have performed in these environments and results we have obtained from users. Finally the future of this type of Virtual Humans technology and training applications will be discussed.},
  keywords  = {MedVR, Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Lance, Brent; Marsella, Stacy C.
Emotionally Expressive Head and Body Movement During Gaze Shifts Proceedings Article
In: 7th International Conference on Intelligent Virtual Agents (IVA 2007), Paris, France, 2007.
Abstract | Links | BibTeX | Tags: Social Simulation
@inproceedings{lance_emotionally_2007,
  title     = {Emotionally Expressive Head and Body Movement During Gaze Shifts},
  author    = {Brent Lance and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/Emotionally%20Expressive%20Head%20and%20Body%20Movement%20During%20Gaze%20Shifts.pdf},
  year      = {2007},
  date      = {2007-09-01},
  booktitle = {7th International Conference on Intelligent Virtual Agents (IVA 2007)},
  address   = {Paris, France},
  abstract  = {The current state of the art virtual characters fall far short of characters produced by skilled animators. One reason for this is that the physical behaviors of virtual characters do not express the emotions and attitudes of the character adequately. A key deficiency possessed by virtual characters is that their gaze behavior is not emotionally expressive. This paper describes work on expressing emotion through head movement and body posture during gaze shifts, with intent to integrate a model of emotionally expressive eye movement into this work in the future. The paper further describes an evaluation showing that users can recognize the emotional states generated by the model.},
  keywords  = {Social Simulation},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Kenny, Patrick G.; Hartholt, Arno; Gratch, Jonathan; Traum, David; Marsella, Stacy C.; Swartout, William
The More the Merrier: Multi-Party Negotiation with Virtual Humans Proceedings Article
In: AAAI Conference On Artificial Intelligence; Proceedings of the 22nd National Conference on Artificial Intelligence, pp. 1970–1971, 2007.
Abstract | Links | BibTeX | Tags: MedVR, Social Simulation, Virtual Humans
@inproceedings{kenny_more_2007,
  title     = {The More the Merrier: Multi-Party Negotiation with Virtual Humans},
  author    = {Patrick G. Kenny and Arno Hartholt and Jonathan Gratch and David Traum and Stacy C. Marsella and William Swartout},
  url       = {http://ict.usc.edu/pubs/The%20More%20the%20Merrier-%20Multi-Party%20Negotiation%20with%20Virtual%20Humans.pdf},
  year      = {2007},
  date      = {2007-07-01},
  booktitle = {AAAI Conference On Artificial Intelligence; Proceedings of the 22nd National Conference on Artificial Intelligence},
  volume    = {2},
  pages     = {1970--1971},
  abstract  = {The goal of the Virtual Humans Project at the University of Southern California's Institute for Creative Technologies is to enrich virtual training environments with virtual humans -- autonomous agents that support face-to-face interaction with trainees in a variety of roles -- through bringing together many different areas of research including speech recognition, natural language understanding, dialogue management, cognitive modeling, emotion modeling, non-verbal behavior and speech and knowledge management. The demo at AAAI will focus on our work using virtual humans to train negotiation skills. Conference attendees will negotiate with a virtual human doctor and elder to try to move a clinic out of harm's way in single and multi-party negotiation scenarios using the latest iteration of our Virtual Humans framework. The user will use natural speech to talk to the embodied agents, who will respond in accordance with their internal task model and state. The characters will carry out a multi-party dialogue with verbal and non-verbal behavior. A video of a single-party version of the scenario was shown at AAAI-06. This new interactive demo introduces several new features, including multi-party negotiation, dynamically generated non-verbal behavior and a central ontology.},
  keywords  = {MedVR, Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Thagard, Paul; Ditto, Peter; Gratch, Jonathan; Marsella, Stacy C.; Westen, Drew
Emotional Cognition in the Real World Proceedings Article
In: Proceedings of the Twenty-Ninth Annual Meeting of the Cognitive Science Society, Nashville, TN, 2007.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{thagard_emotional_2007,
  title     = {Emotional Cognition in the Real World},
  author    = {Paul Thagard and Peter Ditto and Jonathan Gratch and Stacy C. Marsella and Drew Westen},
  url       = {http://ict.usc.edu/pubs/Emotional%20Cognition%20in%20the%20Real%20World.pdf},
  year      = {2007},
  date      = {2007-06-01},
  booktitle = {Proceedings of the Twenty-Ninth Annual Meeting of the Cognitive Science Society},
  address   = {Nashville, TN},
  abstract  = {There is increasing appreciation in cognitive science of the impact of emotions on many kinds of thinking, from decision making to scientific discovery. This appreciation has developed in all the fields of cognitive science, including, psychology, philosophy, artificial intelligence, and linguistics, and anthropology. The purpose of the proposed symposium is to report and discuss new investigations of the impact of emotion on cognitive processes, in particular ones that are important in real life situations. We will approach the practical importance of emotional cognition from a variety of disciplinary perspectives: social psychology (Ditto), clinical psychology (Westen), computer science (Gratch and Marsella), and philosophy and neuroscience (Thagard). In order to provide integration across these approaches, we will try to address a fundamental set of questions, including: 1. How do emotions interact with basic cognitive processes? 2. What are the positive contributions of emotions to various kinds of thinking in real world situations? 3. How do emotions sometimes bias thinking in real world situations? 4. How can understanding of the psychology and neuroscience of emotional cognition be used to improve the effectiveness of real world thinking?},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Gratch, Jonathan; Marsella, Stacy C.
The Architectural Role of Emotion in Cognitive Systems Book Section
In: Integrated Models of Cognitive Systems, Oxford University Press, New York, 2007.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@incollection{gratch_architectural_2007,
  title     = {The Architectural Role of Emotion in Cognitive Systems},
  author    = {Jonathan Gratch and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/The%20Architectural%20Role%20of%20Emotion%20in%20Cognitive%20Systems.pdf},
  year      = {2007},
  date      = {2007-03-01},
  booktitle = {Integrated Models of Cognitive Systems},
  publisher = {Oxford University Press},
  address   = {New York},
  abstract  = {In this chapter, we will revive an old argument that theories of human emotion can give insight into the design and control of complex cognitive systems. In particular, we claim that appraisal theories of emotion provide essential insight into the influences of emotion over cognition and can help translate such findings into concrete guidance for the design of cognitive systems. Appraisal theory claims that emotion plays a central and functional role in sensing external events, characterizing them as opportunity or threats and recruiting the cognitive, physical and social resources needed to adaptively respond. Further, because it argues for a close association between emotion and cognition, the theoretical claims of appraisal theory can be recast as a requirement specification for how to build a cognitive system. This specification asserts a set of judgments that must be supported in order to correctly interpret and respond to stimuli and provides a unifying framework for integrating these judgments into a coherent physical or social response. This chapter elaborates argument in some detail based on our joint experience in building complex cognitive systems and computational models of emotion.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {incollection},
}
Martinovski, Bilyana; Traum, David; Marsella, Stacy C.
Rejection of empathy in negotiation Journal Article
In: Group Decision and Negotiation, vol. 16, pp. 61–76, 2007, ISSN: 0926-2644.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@article{martinovski_rejection_2007,
  title     = {Rejection of empathy in negotiation},
  author    = {Bilyana Martinovski and David Traum and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/Rejection%20of%20empathy%20in%20negotiation.pdf},
  issn      = {0926-2644},
  year      = {2007},
  date      = {2007-01-01},
  journal   = {Group Decision and Negotiation},
  volume    = {16},
  pages     = {61--76},
  abstract  = {Trust is a crucial quality in the development of individuals and societies and empathy plays a key role in the formation of trust. Trust and empathy have growing importance in studies of negotiation. However, empathy can be rejected which complicates its role in negotiation. This paper presents a linguistic analysis of empathy by focusing on rejection of empathy in negotiation. Some of the rejections are due to failed recognition of the rejector's needs and desires whereas others have mainly strategic functions gaining momentum in the negotiation. In both cases, rejection of empathy is a phase in the negotiation not a breakdown.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {article},
}
Lee, Jina; Marsella, Stacy C.; Traum, David; Gratch, Jonathan; Lance, Brent
The Rickel Gaze Model: A Window on the Mind of a Virtual Human Proceedings Article
In: Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA), pp. 296–303, Paris, France, 2007.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{lee_rickel_2007,
  title     = {The Rickel Gaze Model: A Window on the Mind of a Virtual Human},
  author    = {Jina Lee and Stacy C. Marsella and David Traum and Jonathan Gratch and Brent Lance},
  url       = {http://ict.usc.edu/pubs/The%20Rickel%20Gaze%20Model-%20A%20Window%20on%20the%20Mind%20of%20a%20Virtual%20Human.pdf},
  year      = {2007},
  date      = {2007-01-01},
  booktitle = {Lecture Notes in Artificial Intelligence; Proceedings of the 7th International Conference on Intelligent Virtual Agents (IVA)},
  volume    = {4722},
  pages     = {296--303},
  address   = {Paris, France},
  abstract  = {Gaze plays a large number of cognitive, communicative and affective roles in face-to-face human interaction. To build a believable virtual human, it is imperative to construct a gaze model that generates realistic gaze behaviors. However, it is not enough to merely imitate a person's eye movements. The gaze behaviors should reflect the internal states of the virtual human and users should be able to derive them by observing the behaviors. In this paper, we present a gaze model driven by the cognitive operations; the model processes the virtual human's reasoning, dialog management, and goals to generate behaviors that reflect the agent's inner thoughts. It has been implemented in our virtual human system and operates in real-time. The gaze model introduced in this paper was originally designed and developed by Jeff Rickel but has since been extended by the authors.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
2006
Core, Mark; Traum, David; Lane, H. Chad; Swartout, William; Marsella, Stacy C.; Gratch, Jonathan; Lent, Michael
Teaching Negotiation Skills through Practice and Reflection with Virtual Humans Journal Article
In: Simulation: Transactions of the Society for Modeling and Simulation, vol. 82, no. 11, pp. 685–701, 2006.
Abstract | Links | BibTeX | Tags: Learning Sciences, Social Simulation, Virtual Humans
@article{core_teaching_2006,
  title     = {Teaching Negotiation Skills through Practice and Reflection with Virtual Humans},
  author    = {Mark Core and David Traum and H. Chad Lane and William Swartout and Stacy C. Marsella and Jonathan Gratch and Michael Lent},
  url       = {http://ict.usc.edu/pubs/Teaching%20Negotiation%20Skills.pdf},
  year      = {2006},
  date      = {2006-11-01},
  journal   = {Simulation: Transactions of the Society for Modeling and Simulation},
  volume    = {82},
  number    = {11},
  pages     = {685--701},
  abstract  = {Although the representation of physical environments and behaviors will continue to play an important role in simulation-based training, an emerging challenge is the representation of virtual humans with rich mental models (e.g., including emotions, trust) that interact through conversational as well as physical behaviors. The motivation for such simulations is training soft skills such as leadership, cultural awareness, and negotiation, where the majority of actions are conversational, and the problem solving involves consideration of the emotions, attitudes, and desires of others. The educational power of such simulations can be enhanced by the integration of an intelligent tutoring system to support learners' understanding of the effect of their actions on virtual humans and how they might improve their performance. In this paper, we discuss our efforts to build such virtual humans, along with an accompanying intelligent tutor, for the domain of negotiation and cultural awareness.},
  keywords  = {Learning Sciences, Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {article},
}
Lee, Jina; Marsella, Stacy C.
Nonverbal Behavior Generator for Embodied Conversational Agents Proceedings Article
In: 6th International Conference on Intelligent Virtual Agents, Marina del Rey, CA, 2006.
Abstract | Links | BibTeX | Tags: Social Simulation
@inproceedings{lee_nonverbal_2006,
  title     = {Nonverbal Behavior Generator for Embodied Conversational Agents},
  author    = {Jina Lee and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/Nonverbal%20Behavior%20Generator%20for%20Embodied%20Conversational%20Agents.pdf},
  year      = {2006},
  date      = {2006-08-01},
  booktitle = {6th International Conference on Intelligent Virtual Agents},
  address   = {Marina del Rey, CA},
  abstract  = {Believable nonverbal behaviors for embodied conversational agents (ECA) can create a more immersive experience for users and improve the effectiveness of communication. This paper describes a nonverbal behavior generator that analyzes the syntactic and semantic structure of the surface text as well as the affective state of the ECA and annotates the surface text with appropriate nonverbal behaviors. A number of video clips of people conversing were analyzed to extract the nonverbal behavior generation rules. The system works in real-time and is user-extensible so that users can easily modify or extend the current behavior generation rules.},
  keywords  = {Social Simulation},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Gluck, Kevin A.; Gunzelmann, Glenn; Gratch, Jonathan; Hudlicka, Eva; Ritter, Frank E.
Modeling the Impact of Cognitive Moderators on Human Cognition and Performance Proceedings Article
In: Proceedings of the 2006 Conference of the Cognitive Society, pp. 2658, Vancouver, CA, 2006.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{gluck_modeling_2006,
  title     = {Modeling the Impact of Cognitive Moderators on Human Cognition and Performance},
  author    = {Kevin A. Gluck and Glenn Gunzelmann and Jonathan Gratch and Eva Hudlicka and Frank E. Ritter},
  url       = {http://ict.usc.edu/pubs/Modeling%20the%20Impact%20of%20Cognitive%20Moderators%20on%20Human%20Cognition%20and%20Performance.pdf},
  year      = {2006},
  date      = {2006-08-01},
  booktitle = {Proceedings of the 2006 Conference of the Cognitive Society},
  pages     = {2658},
  address   = {Vancouver, CA},
  abstract  = {Cognitive moderators, such as emotions, personality, stress, and fatigue, represent an emerging area of research within the cognitive science community and are increasingly acknowledged as important and ubiquitous influences on cognitive processes. This symposium brings together scientists engaged in research to develop models that help us better understand the mechanisms through which these factors impact human cognition and performance. There are two unifying themes across the presentations. One theme is a commitment to developing computational models useful for simulating the processes that produce the effects and phenomena of interest. The second theme is a commitment to assessing the validity of the models by comparing their performance against empirical human data.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Marsella, Stacy C.; Carnicke, Sharon Marie; Gratch, Jonathan; Okhmatovskaia, Anna; Rizzo, Albert
An Exploration of Delsarte's Structural Acting System Proceedings Article
In: Proceedings of the 6th International Conference on Intelligent Virtual Agents (IVA), pp. 80–92, Marina del Rey, CA, 2006.
Abstract | Links | BibTeX | Tags: MedVR, Social Simulation, Virtual Humans
@inproceedings{marsella_exploration_2006,
  title     = {An Exploration of {Delsarte's} Structural Acting System},
  author    = {Stacy C. Marsella and Sharon Marie Carnicke and Jonathan Gratch and Anna Okhmatovskaia and Albert Rizzo},
  url       = {http://ict.usc.edu/pubs/An%20Exploration%20of%20Delsarte%E2%80%99s%20Structural%20Acting%20System.pdf},
  year      = {2006},
  date      = {2006-08-01},
  booktitle = {Proceedings of the 6th International Conference on Intelligent Virtual Agents (IVA)},
  pages     = {80--92},
  address   = {Marina del Rey, CA},
  abstract  = {The designers of virtual agents often draw on a large research literature in psychology, linguistics and human ethology to design embodied agents that can interact with people. In this paper, we consider a structural acting system developed by Francois Delsarte as a possible resource in designing the nonverbal behavior of embodied agents. Using human subjects, we evaluate one component of the system, Delsarte's Cube, that addresses the meaning of differing attitudes of the hand in gestures.},
  keywords  = {MedVR, Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Gratch, Jonathan; Okhmatovskaia, Anna; Lamothe, Francois; Marsella, Stacy C.; Morales, Mathieu; Werf, R. J.; Morency, Louis-Philippe
Virtual Rapport Proceedings Article
In: Lecture Notes in Computer Science, pp. 14–27, Marina del Rey, CA, 2006.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{gratch_virtual_2006-1,
  title     = {Virtual Rapport},
  author    = {Jonathan Gratch and Anna Okhmatovskaia and Francois Lamothe and Stacy C. Marsella and Mathieu Morales and R. J. Werf and Louis-Philippe Morency},
  url       = {http://ict.usc.edu/pubs/Virtual%20Rapport.pdf},
  year      = {2006},
  date      = {2006-08-01},
  booktitle = {Lecture Notes in Computer Science},
  volume    = {4311},
  pages     = {14--27},
  address   = {Marina del Rey, CA},
  abstract  = {Effective face-to-face conversations are highly interactive. Participants respond to each other, engaging in nonconscious behavioral mimicry and backchanneling feedback. Such behaviors produce a subjective sense of rapport and are correlated with effective communication, greater liking and trust, and greater influence between participants. Creating rapport requires a tight sense-act loop that has been traditionally lacking in embodied conversational agents. Here we describe a system, based on psycholinguistic theory, designed to create a sense of rapport between a human speaker and virtual human listener. We provide empirical evidence that it increases speaker fluency and engagement.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Kopp, Stefan; Krenn, Brigitte; Marsella, Stacy C.; Marshall, Andrew; Pelachaud, Catherine; Pirker, Hannes; Thórisson, Kristinn R.; Vilhjálmsson, Hannes
Towards a Common Framework for Multimodal Generation: The Behavior Markup Language Proceedings Article
In: Proceedings of the Intelligent Virtual Humans Conference, Marina del Rey, CA, 2006.
Abstract | Links | BibTeX | Tags: Social Simulation
@inproceedings{kopp_towards_2006,
  title     = {Towards a Common Framework for Multimodal Generation: The Behavior Markup Language},
  author    = {Stefan Kopp and Brigitte Krenn and Stacy C. Marsella and Andrew Marshall and Catherine Pelachaud and Hannes Pirker and Kristinn R. Thórisson and Hannes Vilhjálmsson},
  url       = {http://ict.usc.edu/pubs/Towards%20a%20Common%20Framework%20for%20Multimodal%20Generation-%20The%20Behavior%20Markup%20Language.pdf},
  year      = {2006},
  date      = {2006-08-01},
  booktitle = {Proceedings of the Intelligent Virtual Humans Conference},
  address   = {Marina del Rey, CA},
  abstract  = {This paper describes an international effort to unify a multimodal behavior generation framework for Embodied Conversational Agents (ECAs). We propose a three stage model we call SAIBA where the stages represent intent planning, behavior planning and behavior realization. A Function Markup Language (FML), describing intent without referring to physical behavior, mediates between the first two stages and a Behavior Markup Language (BML) describing desired physical realization, mediates between the last two stages. In this paper we will focus on BML. The hope is that this abstraction and modularization will help ECA researchers pool their resources to build more sophisticated virtual humans.},
  keywords  = {Social Simulation},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Gratch, Jonathan; Marsella, Stacy C.; Egges, Arjan; Eliëns, Anton; Isbister, Katherine; Paiva, Ana; Rist, Thomas; Hagen, Paul
Design criteria, techniques and case studies for creating and evaluating interactive experiences for virtual humans Proceedings Article
In: Dagstuhl Seminar Proceedings, 2006.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{gratch_design_2006,
  title     = {Design criteria, techniques and case studies for creating and evaluating interactive experiences for virtual humans},
  author    = {Jonathan Gratch and Stacy C. Marsella and Arjan Egges and Anton Eliëns and Katherine Isbister and Ana Paiva and Thomas Rist and Paul Hagen},
  url       = {http://ict.usc.edu/pubs/Design%20criteria%20techniques%20and%20case%20studies%20for%20creating%20and%20evaluating%20interactive%20experiences%20for%20virtual%20humans.pdf},
  year      = {2006},
  date      = {2006-06-01},
  booktitle = {Dagstuhl Seminar Proceedings},
  abstract  = {How does one go about designing a human? With the rise in recent years of virtual humans this is no longer purely a philosophical question. Virtual humans are intelligent agents with a body, often a human-like graphical body, that interact verbally and non-verbally with human users on a variety of tasks and applications. At a recent meeting on this subject, the above authors participated in a several day discussion on the question of virtual human design. Our working group approached this question from the perspective of interactivity. Specifically, how can one design effective interactive experiences involving a virtual human, and what constraints does this goal place on the form and function of an embodied conversational agent. Our group grappled with several related questions: What ideals should designers aspire to, what sources of theory and data will best lead to this goal and what methodologies can inform and validate the design process? This article summarizes our output and suggests a specific framework, borrowed from interactive media design, as a vehicle for advancing the state of interactive experiences with virtual humans.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Gratch, Jonathan; Marsella, Stacy C.; Mao, Wenji
Towards a Validated Model of "Emotional Intelligence" Proceedings Article
In: Proceedings of the 21st National Conference on Artificial Intelligence, pp. 1613–1616, Boston, MA, 2006.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{gratch_towards_2006,
  title     = {Towards a Validated Model of "Emotional Intelligence"},
  author    = {Jonathan Gratch and Stacy C. Marsella and Wenji Mao},
  url       = {http://ict.usc.edu/pubs/Towards%20a%20Validated%20Model%20of%20Emotional%20Intelligence.pdf},
  year      = {2006},
  date      = {2006-01-01},
  booktitle = {Proceedings of the 21st National Conference on Artificial Intelligence},
  volume    = {2},
  pages     = {1613--1616},
  address   = {Boston, MA},
  abstract  = {This article summarizes recent progress in developing a validated computational account of the cognitive antecedents and consequences of emotion. We describe the potential of this work to impact a variety of AI problem domains.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Swartout, William; Gratch, Jonathan; Hill, Randall W.; Hovy, Eduard; Lindheim, Richard; Marsella, Stacy C.; Rickel, Jeff; Traum, David
Simulation Meets Hollywood: Integrating Graphics, Sound, Story and Character for Immersive Simulation Book Section
In: Multimodal Intelligent Information Presentation, vol. 27, pp. 305–321, Springer, Netherlands, 2006.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@incollection{swartout_simulation_2006,
  title     = {Simulation Meets Hollywood: Integrating Graphics, Sound, Story and Character for Immersive Simulation},
  author    = {William Swartout and Jonathan Gratch and Randall W. Hill and Eduard Hovy and Richard Lindheim and Stacy C. Marsella and Jeff Rickel and David Traum},
  url       = {http://ict.usc.edu/pubs/SIMULATION%20MEETS%20HOLLYWOOD-%20Integrating%20Graphics,%20Sound,%20Story%20and%20Character%20for%20Immersive%20Simulation.pdf},
  year      = {2006},
  date      = {2006-01-01},
  booktitle = {Multimodal Intelligent Information Presentation},
  volume    = {27},
  pages     = {305--321},
  publisher = {Springer},
  address   = {Netherlands},
  abstract  = {The Institute for Creative Technologies was created at the University of Southern California with the goal of bringing together researchers in simulation technology to collaborate with people from the entertainment industry. The idea was that much more compelling simulations could be developed if researchers who understood state-of-the-art simulation technology worked together with writers and directors who knew how to create compelling stories and characters. This paper presents our first major effort to realize that vision, the Mission Rehearsal Exercise Project, which confronts a soldier trainee with the kinds of dilemmas he might reasonably encounter in a peacekeeping operation. The trainee is immersed in a synthetic world and interacts with virtual humans: artificially intelligent and graphically embodied conversational agents that understand and generate natural language, reason about world events and respond appropriately to the trainee's actions or commands. This project is an ambitious exercise in integration, both in the sense of integrating technology with entertainment industry content, but also in that we have also joined a number of component technologies that have not been integrated before. This integration has not only raised new research issues, but it has also suggested some new approaches to difficult problems. In this paper we describe the Mission Rehearsal Exercise system and the insights gained through this large-scale integration.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {incollection},
}
Gratch, Jonathan; Mao, Wenji; Marsella, Stacy C.
Modeling Social Emotions and Social Attributions Book Section
In: Sun, R. (Ed.): Cognition and Multi-Agent Interaction: Extending Cognitive Modeling to Social Simulation, Cambridge University Press, 2006.
Links | BibTeX | Tags: Social Simulation, Virtual Humans
@incollection{gratch_modeling_2006,
  title     = {Modeling Social Emotions and Social Attributions},
  author    = {Jonathan Gratch and Wenji Mao and Stacy C. Marsella},
  editor    = {R. Sun},
  url       = {http://ict.usc.edu/pubs/Modeling%20Social%20Emotions%20and%20Social%20Attributions.pdf},
  year      = {2006},
  date      = {2006-01-01},
  booktitle = {Cognition and Multi-Agent Interaction: Extending Cognitive Modeling to Social Simulation},
  publisher = {Cambridge University Press},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {incollection},
}
Swartout, William; Gratch, Jonathan; Hill, Randall W.; Hovy, Eduard; Marsella, Stacy C.; Rickel, Jeff; Traum, David
Toward Virtual Humans Journal Article
In: AI Magazine, 2006.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@article{swartout_toward_2006,
  title     = {Toward Virtual Humans},
  author    = {William Swartout and Jonathan Gratch and Randall W. Hill and Eduard Hovy and Stacy C. Marsella and Jeff Rickel and David Traum},
  url       = {http://ict.usc.edu/pubs/Toward%20Virtual%20Humans.pdf},
  year      = {2006},
  date      = {2006-01-01},
  journal   = {AI Magazine},
  abstract  = {This paper describes the virtual humans developed as part of the Mission Rehearsal Exercise project, a virtual reality-based training system. This project is an ambitious exercise in integration, both in the sense of integrating technology with entertainment industry content, but also in that we have joined a number of component technologies that have not been integrated before. This integration has not only raised new research issues, but it has also suggested some new approaches to difficult problems. We describe the key capabilities of the virtual humans, including task representation and reasoning, natural language dialogue, and emotion reasoning, and show how these capabilities are integrated to provide more human-level intelligence than would otherwise be possible.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {article},
}
2005
Traum, David; Swartout, William; Marsella, Stacy C.; Gratch, Jonathan
Fight, Flight, or Negotiate: Believable Strategies for Conversing under Crisis Proceedings Article
In: 5th International Working Conference on Intelligent Virtual Agents, Kos, Greece, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{traum_fight_2005,
  title     = {Fight, Flight, or Negotiate: Believable Strategies for Conversing under Crisis},
  author    = {David Traum and William Swartout and Stacy C. Marsella and Jonathan Gratch},
  url       = {http://ict.usc.edu/pubs/Fight,%20Flight,%20or%20Negotiate-%20Believable%20Strategies%20for%20Conversing%20under%20Crisis.pdf},
  year      = {2005},
  date      = {2005-09-01},
  booktitle = {5th International Working Conference on Intelligent Virtual Agents},
  address   = {Kos, Greece},
  abstract  = {This paper describes a model of conversation strategies implemented in virtual humans designed to help people learn negotiation skills. We motivate and discuss these strategies and their use to allow a virtual human to engage in complex adversarial negotiation with a human trainee. Choice of strategy depends on both the personality of the agent and assessment of the likelihood that the negotiation can be beneficial. Execution of strategies can be performed by choosing specific dialogue behaviors such as whether and how to respond to a proposal. Current assessment of the value of the topic, the utility of the strategy, and affiliation toward the other conversants can be used to dynamically change strategies throughout the course of a conversation. Examples will be given from the SASO-ST project, in which a trainee learns to negotiate by interacting with virtual humans who employ these strategies.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Traum, David; Swartout, William; Gratch, Jonathan; Marsella, Stacy C.; Kenny, Patrick G.; Hovy, Eduard; Narayanan, Shrikanth; Fast, Edward; Martinovski, Bilyana; Baghat, Rahul; Robinson, Susan; Marshall, Andrew; Wang, Dagen; Gandhe, Sudeep; Leuski, Anton
Dealing with Doctors: A Virtual Human for Non-team Interaction Proceedings Article
In: 6th SIGdial Conference on Discourse and Dialogue, Lisbon, Portugal, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{traum_dealing_2005,
title = {Dealing with Doctors: A Virtual Human for Non-team Interaction},
author = {David Traum and William Swartout and Jonathan Gratch and Stacy C. Marsella and Patrick G. Kenny and Eduard Hovy and Shrikanth Narayanan and Edward Fast and Bilyana Martinovski and Rahul Baghat and Susan Robinson and Andrew Marshall and Dagen Wang and Sudeep Gandhe and Anton Leuski},
url = {http://ict.usc.edu/pubs/Dealing%20with%20Doctors.pdf},
year = {2005},
date = {2005-09-01},
booktitle = {6th SIGdial Conference on Discourse and Dialogue},
address = {Lisbon, Portugal},
abstract = {We present a virtual human doctor who can engage in multi-modal negotiation dialogue with people from other organizations. The doctor is part of the SASO-ST system, used for training for non-team interactions},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Traum, David; Swartout, William; Gratch, Jonathan; Marsella, Stacy C.
Virtual Humans for non-team interaction training Proceedings Article
In: International Conference on Autonomous Agents and Multiagent Systems (AAMAS) Workshop on Creating Bonds with Humanoids, Utrecht, Netherlands, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{traum_virtual_2005,
  title     = {Virtual Humans for non-team interaction training},
  author    = {David Traum and William Swartout and Jonathan Gratch and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/Virtual%20Humans%20for%20non-team%20interaction%20training.pdf},
  year      = {2005},
  date      = {2005-07-01},
  booktitle = {International Conference on Autonomous Agents and Multiagent Systems (AAMAS) Workshop on Creating Bonds with Humanoids},
  address   = {Utrecht, Netherlands},
  abstract  = {We describe a model of virtual humans to be used in training for non-team interactions, such as negotiating with people from other organizations. The virtual humans build on existing task, dialogue, and emotion models, with an added model of trust, which are used to understand and produce interactional moves. The model has been implemented within an agent in the SASO-ST system, and some example dialogues are given, illustrating the necessity for building social bonds.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Gratch, Jonathan; Marsella, Stacy C.
Evaluating a computational model of emotion Journal Article
In: Journal Autonomous Agents and Multi-Agent Systems. Special Issue on the Best of AAMAS 2004, vol. 11, no. 1, pp. 23–43, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@article{gratch_evaluating_2005,
title = {Evaluating a computational model of emotion},
author = {Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Evaluating%20a%20computational%20model%20of%20emotion.pdf},
year = {2005},
date = {2005-07-01},
journal = {Journal Autonomous Agents and Multi-Agent Systems. Special Issue on the Best of AAMAS 2004},
volume = {11},
number = {1},
pages = {23--43},
abstract = {Spurred by a range of potential applications, there has been a growing body of research in computational models of human emotion. To advance the development of these models, it is critical that we evaluate them against the phenomena they purport to model. In this paper, we present one method to evaluate an emotion model that compares the behavior of the model against human behavior using a standard clinical instrument for assessing human emotion and coping. We use this method to evaluate the Emotion and Adaptation (EMA) model of emotion Gratch and Marsella. The evaluation highlights strengths of the approach and identifies where the model needs further development.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
Martinovski, Bilyana; Mao, Wenji; Gratch, Jonathan; Marsella, Stacy C.
Mitigation Theory: An Integrated Approach Proceedings Article
In: Proceedings of the 27th Annual Conference of the Cognitive Science Society (CogSci), Stresa, Italy, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{martinovski_mitigation_2005,
  title     = {Mitigation Theory: An Integrated Approach},
  author    = {Bilyana Martinovski and Wenji Mao and Jonathan Gratch and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/Mitigation%20Theory-%20An%20Integrated%20Approach.pdf},
  year      = {2005},
  date      = {2005-01-01},
  booktitle = {Proceedings of the 27th Annual Conference of the Cognitive Science Society (CogSci)},
  address   = {Stresa, Italy},
  abstract  = {The purpose of this paper is to develop a theoretical model of mitigation by integrating cognitive and discourse approaches to appraisal and coping. Mitigation involves strategic, emotional, linguistic, and Theory of Mind processes on different levels of consciousness. We emphasize that discourse analysis can assist our understanding of these processes.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Maatman, R. M.; Gratch, Jonathan; Marsella, Stacy C.
Natural Behavior of a Listening Agent Proceedings Article
In: Lecture Notes in Computer Science; Proceedings of the 5th International Working Conference on Intelligent Virtual Agents (IVA), pp. 25–36, Kos, Greece, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{maatman_natural_2005,
title = {Natural Behavior of a Listening Agent},
author = {R. M. Maatman and Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Natural%20Behavior%20of%20a%20Listening%20Agent.pdf},
year = {2005},
date = {2005-01-01},
booktitle = {Lecture Notes in Computer Science; Proceedings of the 5th International Working Conference on Intelligent Virtual Agents (IVA)},
pages = {25--36},
address = {Kos, Greece},
abstract = {In contrast to the variety of listening behaviors produced in human-to-human interaction, most virtual agents sit or stand passively when a user speaks. This is a reflection of the fact that although the correct responsive behavior of a listener during a conversation is often related to the semantics, the state of current speech understanding technology is such that semantic information is unavailable until after an utterance is complete. This paper will illustrate that appropriate listening behavior can also be generated by other features of a speaker's behavior that are available in real time such as speech quality, posture shifts and head movements. This paper presents a mapping from these real-time obtainable features of a human speaker to agent listening behaviors.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Maatman, R. M.; Gratch, Jonathan; Marsella, Stacy C.
Responsive Behavior of a Listening Agent Technical Report
University of Southern California Institute for Creative Technologies no. ICT TR 02 2005, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@techreport{maatman_responsive_2005,
title = {Responsive Behavior of a Listening Agent},
author = {R. M. Maatman and Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/ICT-TR.02.2005.pdf},
year = {2005},
date = {2005-01-01},
number = {ICT TR 02 2005},
institution = {University of Southern California Institute for Creative Technologies},
abstract = {The purpose of this assignment is twofold. First the possibility of generating real time responsive behavior is evaluated in order to create a more human-like agent. Second, the effect of the behavior of the agent on the human interactor is evaluated. The main motivation for the focus on responsive gestures is because much research has been done already on gestures that accompany the speaker, and nothing on gesture that accompany the listener, although responsiveness is a crucial part of a conversation. The responsive behavior of a virtual agent consists of performing gestures during the time a human is speaking to the agent. To generate the correct gestures, first a literature research is carried out, from which is concluded that with the current of the current Natural Language Understanding technology, it is not possible to extract semantic features of the human speech in real time. Thus, other features have to be considered. The result of the literature research is a basic mapping between real time obtainable features and their correct responsive behavior: - if the speech contains a relatively long period of low pitch then perform a head nod. - if the speech contains relatively high intensity then perform a head nod - if the speech contains disfluency then perform a posture shift, gazing behavior or a frown - if the human performs a posture shift then mirror this posture shift - if the human performs a head shake then mirror this head shake - if the human performs major gazing behavior then mimic this behavior A design has been made to implement this mapping into the behavior of a virtual agent and this design has been implemented which results in two programs. One to mirror the physical features of the human and one to extract the speech features from the voice of the human. The two programs are combined and the effect of the resulting behavior on the human interactor has been tested.
The results of these tests are that the performing of responsive behavior has a positive effect on the natural behavior of a virtual agent and thus looks promising for future research. However, the gestures proposed by this mapping are not always context-independent. Thus, much refinement is still to be done and more functionality can be added to improve the responsive behavior. The conclusion of this research is twofold. First the performing of responsive behaviors in real time is possible with the presented mapping and this results in a more natural behaving agent. Second, some responsive behavior is still dependent of semantic information. This leaves open the further enhancement of the presented mapping in order to increase the responsive behavior.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {techreport}
}
Gratch, Jonathan; Marsella, Stacy C.
Lessons from Emotion Psychology for the Design of Lifelike Characters Journal Article
In: Applied Artificial Intelligence Journal, vol. 19, pp. 215–233, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@article{gratch_lessons_2005,
title = {Lessons from Emotion Psychology for the Design of Lifelike Characters},
author = {Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Lessons%20from%20Emotion%20Psychology%20for%20the%20Design%20of%20Lifelike%20Characters.pdf},
year = {2005},
date = {2005-01-01},
journal = {Applied Artificial Intelligence Journal},
volume = {19},
pages = {215--233},
abstract = {This special issue describes a number of applications that utilize lifelike characters that teach indirectly, by playing some role in a social interaction with a user. The design of such systems reflects a compromise between competing, sometimes unarticulated demands: they must realistically exhibit the behaviors and characteristics of their role, they must facilitate the desired learning, and they must work within the limitations of current technology, and there is little theoretical or empirical guidance on the impact of these compromises on learning. Our perspective on this problem is shaped by our interest in the role of emotion and emotional behaviors in such forms of learning. In recent years, there has been an explosion of interest in the role of emotion in the design of virtual humans. The techniques and motivations underlying these various efforts can seem, from an outsider's perspective, as bewildering and multifaceted as the concept of emotion itself is generally accused of being. Drawing on insights from emotion psychology, this article attempts to clarify for the designers of educational agents the various theoretical perspectives on the concept of emotion with the aim of giving guidance to designers of educational agents.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
Martinovski, Bilyana; Traum, David; Marsella, Stacy C.
Rejection of empathy and its linguistic manifestations Proceedings Article
In: Proceedings of Conference on Formal and Informal Negotiation (FINEXIN), Ottawa, Canada, 2005.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{martinovski_rejection_2005,
title = {Rejection of empathy and its linguistic manifestations},
author = {Bilyana Martinovski and David Traum and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Rejection%20of%20empathy%20and%20its%20linguistic%20manifestations.pdf},
year = {2005},
date = {2005-01-01},
booktitle = {Proceedings of Conference on Formal and Informal Negotiation (FINEXIN)},
address = {Ottawa, Canada},
abstract = {Trust is a crucial quality in the development of individuals and societies and empathy plays a key role in the formation of trust. Trust and empathy have growing importance in studies of negotiation. However, empathy can be rejected which complicates its role in negotiation. This paper presents a linguistic analysis of empathy by focusing on rejection of empathy in negotiation. Some of the rejections are due to failed recognition of the rejector's needs and desires whereas others have mainly strategic functions gaining momentum in the negotiation. In both cases, rejection of empathy is a phase in the negotiation not a breakdown.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
2004
Gratch, Jonathan; Marsella, Stacy C.
Towards a Validated Model of the Influence of Emotion on Human Performance Proceedings Article
In: Proceedings of the 24th Army Science Conference, 2004.
Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{gratch_towards_2004,
  title     = {Towards a Validated Model of the Influence of Emotion on Human Performance},
  author    = {Jonathan Gratch and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/TOWARDS%20A%20VALIDATED%20MODEL%20OF%20THE%20INFLUENCE%20OF%20EMOTION%20ON%20HUMAN%20PERFORMANCE.pdf},
  year      = {2004},
  date      = {2004-12-01},
  booktitle = {Proceedings of the 24th Army Science Conference},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Gratch, Jonathan; Marsella, Stacy C.
Evaluating the modeling and use of emotion in virtual humans Proceedings Article
In: International Conference on Autonomous Agents and Multiagent Systems (AAMAS), New York, NY, 2004.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{gratch_evaluating_2004,
  title     = {Evaluating the modeling and use of emotion in virtual humans},
  author    = {Jonathan Gratch and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/Evaluating%20the%20modeling%20and%20use%20of%20emotion%20in%20virtual%20humans.pdf},
  year      = {2004},
  date      = {2004-08-01},
  booktitle = {International Conference on Autonomous Agents and Multiagent Systems (AAMAS)},
  address   = {New York, NY},
  abstract  = {Spurred by a range of potential applications, there has been a growing body of research in computational models of human emotion. To advance the development of these models, it is critical that we begin to evaluate them against the phenomena they purport to model. In this paper, we present one methodology to evaluate an emotion model. The methodology is based on comparing the behavior of the computational model against human behavior, using a standard clinical instrument for assessing human emotion and coping. We use this methodology to evaluate the EMA model of emotion. The model did quite well. And, as expected, the comparison helped identify where the model needs further development.},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Muller, T. J.; Hartholt, Arno; Marsella, Stacy C.; Gratch, Jonathan; Traum, David
Do You Want To Talk About It? A First Step Towards Emotion Integrated Dialogue Proceedings Article
In: International Conference on Autonomous Agents and Multiagent Systems (AAMAS), Kloster Irsee, Germany, 2004.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{muller_you_2004,
title = {Do You Want To Talk About It? A First Step Towards Emotion Integrated Dialogue},
author = {T. J. Muller and Arno Hartholt and Stacy C. Marsella and Jonathan Gratch and David Traum},
url = {http://ict.usc.edu/pubs/Do%20you%20want%20to%20talk%20about%20it.pdf},
year = {2004},
date = {2004-08-01},
booktitle = {International Conference on Autonomous Agents and Multiagent Systems (AAMAS)},
address = {Kloster Irsee, Germany},
abstract = {In this paper, we describe an implemented system for emotion-referring dialogue. An agent can engage in emotion-referring dialogue if it first has a model of its own emotions, and secondly has a way of talking about them. We create this facility in MRE Project's virtual humans, building upon the existing emotion and dialogue facilities of these agents.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Marsella, Stacy C.; Gratch, Jonathan; Rickel, Jeff
Expressive Behaviors for Virtual Worlds Book Section
In: Life-Like Characters: Tools, Affective Functions, and Applications, 2004.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@incollection{marsella_expressive_2004,
title = {Expressive Behaviors for Virtual Worlds},
author = {Stacy C. Marsella and Jonathan Gratch and Jeff Rickel},
url = {http://ict.usc.edu/pubs/Expressive%20Behaviors%20for%20Virtual%20Worlds.pdf},
year = {2004},
date = {2004-06-01},
booktitle = {Life-Like Characters: Tools, Affective Functions, and Applications},
abstract = {A person's behavior provides significant information about their emotional state, attitudes, and attention. Our goal is to create virtual humans that convey such information to people while interacting with them in virtual worlds. The virtual humans must respond dynamically to the events surrounding them, which are fundamentally influenced by users' actions, while providing an illusion of human-like behavior. A user must be able to interpret the dynamic cognitive and emotional state of the virtual humans using the same nonverbal cues that people use to understand one another. Towards these goals, we are integrating and extending components from three prior systems: a virtual human architecture with a wide range of cognitive and motor capabilities, a model of task-oriented emotional appraisal and socially situated planning, and a model of how emotions and coping impact physical behavior. We describe the key research issues and approach in each of these prior systems, as well as our integration and its initial implementation in a leadership training system.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {incollection}
}
Gratch, Jonathan; Marsella, Stacy C.
Evaluating a General Model of Emotional Appraisal and Coping Proceedings Article
In: AAAI Spring Symposium on Architectures for Modeling Emotion: Cross-disciplinary Foundations, Palo Alto, CA, 2004.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{gratch_evaluating_2004-1,
  title     = {Evaluating a General Model of Emotional Appraisal and Coping},
  author    = {Jonathan Gratch and Stacy C. Marsella},
  url       = {http://ict.usc.edu/pubs/Evaluating%20a%20General%20Model%20of%20Emotional%20Appraisal%20and%20Coping.pdf},
  year      = {2004},
  date      = {2004-06-01},
  booktitle = {AAAI Spring Symposium on Architectures for Modeling Emotion: Cross-disciplinary Foundations},
  address   = {Palo Alto, CA},
  abstract  = {Introduction: In our research, we have developed a general computational model of human emotion. The model attempts to account for both the factors that give rise to emotions as well as the wide-ranging impact emotions have on cognitive and behavioral responses. Emotions influence our beliefs, our decision-making and how we adapt our behavior to the world around us. While most apparent in moments of great stress, emotions sway even the mundane decisions we face in everyday life. Emotions also infuse our social relationships. Our interactions with each other are a source of many emotions and we have developed a range of behaviors that can communicate emotional information as well as an ability to recognize and be influenced by the emotional arousal of others. By virtue of their central role and wide influence, emotion arguably provides the means to coordinate the diverse mental and physical components required to respond to the world in a coherent fashion. (1st Paragraph)},
  keywords  = {Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Traum, David; Marsella, Stacy C.; Gratch, Jonathan
Emotion and Dialogue in the MRE Virtual Humans Proceedings Article
In: Lecture Notes in Computer Science, pp. 117–127, Kloster Irsee, Germany, 2004.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{traum_emotion_2004,
title = {Emotion and Dialogue in the MRE Virtual Humans},
author = {David Traum and Stacy C. Marsella and Jonathan Gratch},
url = {http://ict.usc.edu/pubs/Emotion%20and%20Dialogue%20in%20the%20MRE%20Virtual%20Humans.pdf},
year = {2004},
date = {2004-01-01},
booktitle = {Lecture Notes in Computer Science},
volume = {3068},
pages = {117--127},
address = {Kloster Irsee, Germany},
abstract = {We describe the emotion and dialogue aspects of the virtual agents used in the MRE project at USC. The models of emotion and dialogue started independently, though each makes crucial use of a central task model. In this paper we describe the task model, dialogue model, and emotion model, and the interactions between them.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Gratch, Jonathan; Marsella, Stacy C.
Technical Details of a Domain-independent Framework for Modeling Emotion Technical Report
University of Southern California Institute for Creative Technologies Marina del Rey, CA, no. ICT TR 04.2004, 2004.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@techreport{gratch_technical_2004,
title = {Technical Details of a Domain-independent Framework for Modeling Emotion},
author = {Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Technical%20Details%20of%20a%20Domain-independent%20Framework%20for%20Modeling%20Emotion.pdf},
year = {2004},
date = {2004-01-01},
number = {ICT TR 04.2004},
address = {Marina del Rey, CA},
institution = {University of Southern California Institute for Creative Technologies},
abstract = {This technical report elaborates on the technical details of the EMA model of emotional appraisal and coping. It should be seen as an appendix to the journal article on this topic (Gratch \& Marsella, to appear)},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {techreport}
}
Gratch, Jonathan; Marsella, Stacy C.
A Domain-independent Framework for Modeling Emotion Journal Article
In: Journal of Cognitive Systems Research, vol. 5, no. 4, pp. 269–306, 2004.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@article{gratch_domain-independent_2004,
title = {A Domain-independent Framework for Modeling Emotion},
author = {Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/A%20Domain-independent%20Framework%20for%20Modeling%20Emotion.pdf},
year = {2004},
date = {2004-01-01},
journal = {Journal of Cognitive Systems Research},
volume = {5},
number = {4},
pages = {269--306},
abstract = {In this article, we show how psychological theories of emotion shed light on the interaction between emotion and cognition, and thus can inform the design of human-like autonomous agents that must convey these core aspects of human behavior. We lay out a general computational framework of appraisal and coping as a central organizing principle for such systems. We then discuss a detailed domain-independent model based on this framework, illustrating how it has been applied to the problem of generating behavior for a significant social training application. The model is useful not only for deriving emotional state, but also for informing a number of the behaviors that must be modeled by virtual humans such as facial expressions, dialogue management, planning, reacting, and social understanding. Thus, the work is of potential interest to models of strategic decision-making, action selection, facial animation, and social intelligence.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
2003
Hill, Randall W.; Gratch, Jonathan; Marsella, Stacy C.; Swartout, William; Traum, David
Virtual Humans in the Mission Rehearsal Exercise System Proceedings Article
In: Künstliche Intelligenz (KI) (special issue on Embodied Conversational Agents), 2003.
Abstract | Links | BibTeX | Tags: Social Simulation, Virtual Humans
@inproceedings{hill_virtual_2003,
title = {Virtual Humans in the Mission Rehearsal Exercise System},
author = {Randall W. Hill and Jonathan Gratch and Stacy C. Marsella and William Swartout and David Traum},
url = {http://ict.usc.edu/pubs/Virtual%20Humans%20in%20the%20Mission%20Rehearsal%20Exercise%20System.pdf},
year = {2003},
date = {2003-06-01},
booktitle = {K{\"u}nstliche Intelligenz (KI) (special issue on Embodied Conversational Agents)},
abstract = {How can simulation be made more compelling and effective as a tool for learning? This is the question that the Institute for Creative Technologies (ICT) set out to answer when it was formed at the University of Southern California in 1999, to serve as a nexus between the simulation and entertainment communities. The ultimate goal of the ICT is to create the Experience Learning System (ELS), which will advance the state of the art in virtual reality immersion through use of high-resolution graphics, immersive audio, virtual humans and story-based scenarios. Once fully realized, ELS will make it possible for participants to enter places in time and space where they can interact with believable characters capable of conversation and action, and where they can observe and participate in events that are accessible only through simulation.},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Gratch, Jonathan; Marsella, Stacy C.
Fight the Way You Train: The Role and Limits of Emotions in Training for Combat Journal Article
In: Brown Journal of World Affairs, vol. X, pp. 63–76, 2003.
Links | BibTeX | Tags: Social Simulation, Virtual Humans
@article{gratch_fight_2003,
title = {Fight the Way You Train: The Role and Limits of Emotions in Training for Combat},
author = {Jonathan Gratch and Stacy C. Marsella},
url = {http://ict.usc.edu/pubs/Fight%20the%20Way%20You%20Train-The%20Role%20and%20Limits%20of%20Emotions%20in%20Training%20for%20Combat.pdf},
year = {2003},
date = {2003-06-01},
journal = {Brown Journal of World Affairs},
volume = {X},
pages = {63--76},
keywords = {Social Simulation, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}