Publications
Search
Reger, Greg M.; Rizzo, Albert A.; Gahm, Gregory A.
Initial Development and Dissemination of Virtual Reality Exposure Therapy for Combat-Related PTSD Book Section
In: Safir, Marilyn P.; Wallach, Helene S.; Rizzo, Albert "Skip" (Ed.): Future Directions in Post-Traumatic Stress Disorder, pp. 289–302, Springer US, Boston, MA, 2014, ISBN: 978-1-4899-7521-8 978-1-4899-7522-5.
@incollection{reger_initial_2014,
  title     = {Initial Development and Dissemination of Virtual Reality Exposure Therapy for Combat-Related {PTSD}},
  author    = {Reger, Greg M. and Rizzo, Albert A. and Gahm, Gregory A.},
  editor    = {Safir, Marilyn P. and Wallach, Helene S. and Rizzo, Albert ``Skip''},
  url       = {http://link.springer.com/10.1007/978-1-4899-7522-5_15},
  doi       = {10.1007/978-1-4899-7522-5_15},
  isbn      = {978-1-4899-7521-8, 978-1-4899-7522-5},
  year      = {2014},
  date      = {2014-11-01},
  booktitle = {Future Directions in Post-Traumatic Stress Disorder},
  pages     = {289--302},
  publisher = {Springer US},
  address   = {Boston, MA},
  abstract  = {Military personnel are at risk for the development of posttraumatic stress disorder. Although effective treatments are available, the need for improved treatment efficacy and less stigmatizing approaches to treatment have resulted in the evolution of virtual reality exposure therapy. This chapter reviews the development and dissemination efforts of a virtual reality system supporting exposure therapy for deployment-related posttraumatic stress disorder. Specifically, the chapter will review the work done to incorporate the feedback of military personnel into the early development of a Virtual Iraq/Afghanistan system and also reviews efforts to disseminate this treatment to military and Veteran behavioral health researchers and providers.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {incollection}
}
Scherer, Stefan; Stratou, Giota; Lucas, Gale; Mahmoud, Marwa; Boberg, Jill; Gratch, Jonathan; Rizzo, Albert (Skip); Morency, Louis-Philippe
Automatic audiovisual behavior descriptors for psychological disorder analysis Journal Article
In: Image and Vision Computing Journal, vol. 32, no. 10, pp. 648–658, 2014, ISSN: 02628856.
@article{scherer_automatic_2014,
  title     = {Automatic audiovisual behavior descriptors for psychological disorder analysis},
  author    = {Scherer, Stefan and Stratou, Giota and Lucas, Gale and Mahmoud, Marwa and Boberg, Jill and Gratch, Jonathan and Rizzo, Albert (Skip) and Morency, Louis-Philippe},
  url       = {http://linkinghub.elsevier.com/retrieve/pii/S0262885614001000},
  doi       = {10.1016/j.imavis.2014.06.001},
  issn      = {0262-8856},
  year      = {2014},
  date      = {2014-10-01},
  journal   = {Image and Vision Computing},
  volume    = {32},
  number    = {10},
  pages     = {648--658},
  abstract  = {We investigate the capabilities of automatic audiovisual nonverbal behavior descriptors to identify indicators of psychological disorders such as depression, anxiety, and post-traumatic stress disorder. Due to strong correlations between these disorders as measured with standard self-assessment questionnaires in this study, we focus our investigations in particular on a generic distress measure as identified using factor analysis. Within this work, we seek to confirm and enrich present state of the art, predominantly based on qualitative manual annotations, with automatic quantitative behavior descriptors. We propose a number of nonverbal behavior descriptors that can be automatically estimated from audiovisual signals. Such automatic behavior descriptors could be used to support healthcare providers with quantified and objective observations that could ultimately improve clinical assessment. We evaluate our work on the dataset called the Distress Assessment Interview Corpus (DAIC) which comprises dyadic interactions between a confederate interviewer and a paid participant. Our evaluation on this dataset shows correlation of our automatic behavior descriptors with the derived general distress measure. Our analysis also includes a deeper study of self-adaptor and fidgeting behaviors based on detailed annotations of where these behaviors occur.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Yeh, Shih-Ching; Wang, Jin-Liang; Wang, Chin-Yeh; Lin, Po-Han; Chen, Gwo-Dong; Rizzo, Albert
Motion controllers for learners to manipulate and interact with 3D objects for mental rotation training: Motion controllers for mental rotation training Journal Article
In: British Journal of Educational Technology, vol. 45, no. 4, pp. 666–675, 2014, ISSN: 00071013.
@article{yeh_motion_2014,
  title     = {Motion controllers for learners to manipulate and interact with {3D} objects for mental rotation training: Motion controllers for mental rotation training},
  author    = {Yeh, Shih-Ching and Wang, Jin-Liang and Wang, Chin-Yeh and Lin, Po-Han and Chen, Gwo-Dong and Rizzo, Albert},
  url       = {http://ict.usc.edu/pubs/Motion%20controllers%20for%20learners%20to%20manipulate%20and%20interact%20with%203D%20objects%20for%20mental%20rotation%20training.pdf},
  doi       = {10.1111/bjet.12059},
  issn      = {0007-1013},
  year      = {2014},
  date      = {2014-07-01},
  journal   = {British Journal of Educational Technology},
  volume    = {45},
  number    = {4},
  pages     = {666--675},
  abstract  = {Mental rotation is an important spatial processing ability and an important element in intelligence tests. However, the majority of past attempts at training mental rotation have used paper-and-pencil tests or digital images. This study proposes an innovative mental rotation training approach using magnetic motion controllers to allow learners to manipulate and interact with three-dimensional (3D) objects. Stereovision allows learners to perceive the spatial geometric form of a 3D object. This approach allows learners to perceive 3D objects in space through stereovision and make mental rotation visible from each intrinsic and invisible mental rotation step using motion-interaction methods. This study examines the effects of user training performance and perceptions. The results indicate that the proposed approach can improve user mental rotation ability effectively. Learners expressed high degrees of concentration toward the mechanism that included direct control and immediate feedback. The results also suggest that female testers perceive greater degrees of playfulness toward the mechanism and improve more through training than male testers.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Rizzo, Albert; Hartholt, Arno; Grimani, Mario; Leeds, Andrew; Liewer, Matt
Virtual Reality Exposure Therapy for Combat-Related Posttraumatic Stress Disorder Journal Article
In: Computer, vol. 47, no. 7, pp. 31–37, 2014.
@article{rizzo_virtual_2014,
  title     = {Virtual Reality Exposure Therapy for Combat-Related Posttraumatic Stress Disorder},
  author    = {Rizzo, Albert and Hartholt, Arno and Grimani, Mario and Leeds, Andrew and Liewer, Matt},
  url       = {http://ict.usc.edu/pubs/Virtual%20Reality%20Exposure%20Therapy%20for%20Treating%20Combat-Related%20PTSD.pdf},
  year      = {2014},
  date      = {2014-07-01},
  journal   = {Computer},
  volume    = {47},
  number    = {7},
  pages     = {31--37},
  abstract  = {Virtual reality (VR) technology is rapidly evolving to support prolonged exposure (PE) therapy, a proven treatment for combat-related posttraumatic stress disorder. Building on the successful 2007 Virtual Iraq/Afghanistan VRET system, a team of behavioral scientists, software engineers, and virtual artists has created Bravemind, a flexible VR system that offers significantly enhanced PE treatment possibilities. The first Web extra at http://youtu.be/EiYg-kMNMtQ is a video demonstration of an original early virtual reality exposure therapy (VRET) prototype that shows a small section of an Iraqi city with a landing helicopter (2004). The second Web extra at http://youtu.be/_cS-ynWZmeQ is a video demonstration of virtual reality exposure therapy (VRET) that simulates driving a Humvee in a rural part of Iraq, showcasing several encounters, including IED and road-side attacks (2007). The third Web extra at http://youtu.be/78QXX_F4mc8 is a video demonstration of virtual reality exposure therapy (VRET) that simulates an overview of several Iraqi city areas (2007). The fourth Web extra at http://youtu.be/_AnixslkVLU is a video demonstration of virtual reality exposure therapy (VRET) that simulates a patrol entering interior buildings in Iraq (2007). The fifth Web extra at http://youtu.be/S22aQ-DqKKU is a video demonstration of an original virtual reality exposure therapy (VRET) tablet interface that allows the clinician to change virtual reality settings and trigger encounters (2007). The sixth Web extra at http://youtu.be/C-fspuLo4vw is a video demonstration of the Bravemind virtual reality exposure therapy (VRET) prototype showing a variety of driving and dismounted scenarios with encounters in Iraq and Afghanistan (2013). The seventh Web extra at http://youtu.be/HSPDomDAigg is a video collection of Iraqi and Afghanistan virtual reality exposure therapy (VRET) scenarios within the Bravemind prototype (2013).},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Gordon, Andrew; Core, Mark; Kang, Sin-Hwa; Wang, Catherine; Wienberg, Christopher
Civilian Analogs of Army Tasks: Supporting Pedagogical Storytelling Across Domains Journal Article
In: Proceedings of the 11th International Conference of the Learning Sciences, 2014.
@inproceedings{gordon_civilian_2014,
  title     = {Civilian Analogs of {Army} Tasks: Supporting Pedagogical Storytelling Across Domains},
  author    = {Gordon, Andrew and Core, Mark and Kang, Sin-Hwa and Wang, Catherine and Wienberg, Christopher},
  url       = {http://ict.usc.edu/pubs/Civilian%20Analogs%20of%20Army%20Tasks%20-%20Supporting%20Pedagogical%20Storytelling%20Across%20Domains.pdf},
  year      = {2014},
  date      = {2014-06-01},
  booktitle = {Proceedings of the 11th International Conference of the Learning Sciences},
  abstract  = {Storytelling is the most basic means by which people learn from the experiences of others. Advances in educational technologies offer new opportunities and experiences for learners, but risk losing the natural forms of pedagogical storytelling afforded by face-to-face teacher-student discussion. In this paper, we present a technology-supported solution to the problem of curating and algorithmically delivering relevant stories to learners in computer-based learning environments. Our approach is to mine public weblogs for textual narratives related to specific activity contexts, both inside and outside the domain of the target skillset. These stories are then linked directly to task representations in the learner model of an intelligent tutoring system, and delivered to learners along with other tutoring guidance. We demonstrate our approach to curating stories by creating collections of narratives that are analogous to tactical tasks of the U.S. Army, and evaluate the difficulty of incorporating these stories into intelligent tutoring systems.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Krum, David M.; Kang, Sin-Hwa; Bolas, Mark
Virtual Coaches over Mobile Video Proceedings Article
In: Proceedings of International Conference on Computer Animation and Social Agents (CASA), 2014.
@inproceedings{krum_virtual_2014,
  title     = {Virtual Coaches over Mobile Video},
  author    = {Krum, David M. and Kang, Sin-Hwa and Bolas, Mark},
  url       = {http://ict.usc.edu/pubs/Virtual%20Coaches%20over%20Mobile%20Video.pdf},
  year      = {2014},
  date      = {2014-05-01},
  booktitle = {Proceedings of International Conference on Computer Animation and Social Agents ({CASA})},
  abstract  = {We hypothesize that the context of a smartphone, how a virtual human is presented within a smartphone app, and indeed, the nature of that app, can profoundly affect how the virtual human is perceived by a real human. We believe that virtual humans, presented over video chat services (such as Skype) and delivered using mobile phones, can be an effective way to deliver coaching applications. We propose to build a prototype system that allows virtual humans to initiate and receive Skype calls. This hardware will enable broadcast of the audio and video imagery of a character. Using this platform and a virtual human, we will conduct two user studies. The first study will examine factors involved in making a mobile video based character seem engaging and “real”. This study will examine how character appearance and the artifacts of the communication channel, such as video and audio quality, can affect rapport with a virtual human. The second study will examine ways to maintain a long-term relationship with a character, leveraging the character’s ability to call and interact with a real human over a longer period of time. These studies will help develop design guidelines for presenting virtual humans over mobile video.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Talbot, Thomas
Balancing Physiology, Anatomy and Immersion: How Much Biological Fidelity Is Necessary in a Medical Simulation? Journal Article
In: Military Medicine, vol. 178, pp. 28–36, 2013.
@article{talbot_balancing_2013,
  title     = {Balancing Physiology, Anatomy and Immersion: How Much Biological Fidelity Is Necessary in a Medical Simulation?},
  author    = {Talbot, Thomas},
  url       = {http://ict.usc.edu/pubs/Balancing%20Physiology,%20Anatomy%20&%20Immersion%20How%20Much%20Biological%20Fidelity%20is%20Necessary%20in%20a%20Medical%20Simulation.pdf},
  year      = {2013},
  date      = {2013-10-01},
  journal   = {Military Medicine},
  volume    = {178},
  pages     = {28--36},
  abstract  = {Physiology and anatomy can be depicted at varying levels of fidelity in a medical simulation or training encounter. Another factor in a medical simulation concerns design features intended to engage the learner through a sense of immersion. Physiology can be simulated by various means including physiology engines, complex state machines, simple state machines, kinetic models, and static readouts. Each approach has advantages in terms of complexity of development and impact on the learner. Such factors are detailed within the article. Various other biological, hardware-based, and virtual models are used in medical training with varying levels of fidelity. For many medical simulation-based educational experiences, low-fidelity approaches are often adequate if not preferable.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Krch, D.; Nikelshpur, O.; Lavrador, S.; Chiaravalloti, N. D.; Koenig, Sebastian; Rizzo, Albert
Pilot results from a virtual reality executive function task Proceedings Article
In: International Conference on Virtual Rehabilitation, pp. 15–21, Philadelphia, PA, 2013.
@inproceedings{krch_pilot_2013,
  title     = {Pilot results from a virtual reality executive function task},
  author    = {Krch, D. and Nikelshpur, O. and Lavrador, S. and Chiaravalloti, N. D. and Koenig, Sebastian and Rizzo, Albert},
  url       = {http://ict.usc.edu/pubs/Pilot%20results%20from%20a%20virtual%20reality%20executive%20function%20task.pdf},
  year      = {2013},
  date      = {2013-08-01},
  booktitle = {International Conference on Virtual Rehabilitation},
  pages     = {15--21},
  address   = {Philadelphia, PA},
  abstract  = {Impairments in executive functions (EF) negatively impact the capacity for independent living, damaging personal autonomy, and diminishing quality of life. Virtual environments offer an ecologically valid way to evaluate a person’s ability to carry out tasks that depend on EFs. The purpose of this pilot study was to evaluate the feasibility of a virtual reality office task, the Assessim Office (AO), in persons with Multiple Sclerosis and Traumatic Brain Injury, to evaluate performance of patient groups relative to each other and to healthy controls on the AO, and to explore the relationship between patient’s performance on AO tasks and neuropsychological measures of EF.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Koenig, Sebastian; Ardanza, Aitor; Cortes, Camilo; Alessandro, De Mauro; Lange, Belinda
Introduction to Low-Cost Motion-Tracking for Virtual Rehabilitation Book Section
In: Pons, Jose L.; Torricelli, Diego (Ed.): Emerging Therapies in Neurorehabilitation, vol. 4, pp. 287–303, 2013, ISBN: 978-3-642-38555-1.
@incollection{koenig_introduction_2013,
  title     = {Introduction to Low-Cost Motion-Tracking for Virtual Rehabilitation},
  author    = {Koenig, Sebastian and Ardanza, Aitor and Cortes, Camilo and De Mauro, Alessandro and Lange, Belinda},
  editor    = {Pons, Jose L. and Torricelli, Diego},
  url       = {http://ict.usc.edu/pubs/Introduction%20to%20Low-Cost%20Motion-Tracking%20for%20Virtual%20Rehabilitation.pdf},
  isbn      = {978-3-642-38555-1},
  year      = {2013},
  date      = {2013-08-01},
  booktitle = {Emerging Therapies in Neurorehabilitation},
  volume    = {4},
  pages     = {287--303},
  series    = {Biosystems \& Biorobotics},
  abstract  = {Low-cost motion sensors have seen tremendous increase in popularity in the past few years. Accelerometers, gyroscopes or cameras can be found in most available smart phones and gaming controllers. The Apple iPhone, Nintendo Wii and the PlayStation EyeToy are just a few examples where such technology is used to provide a more natural interaction for the user. Depth-sensing cameras by companies such as Microsoft, PrimeSense and Asus can enhance the user experience even further by enabling full-body interaction. This chapter will specifically discuss the use of the Microsoft Kinect depth-sensing camera (Kinect) for rehabilitation of patients with motor disabilities. In addition, examples will be provided of how the Kinect can be used with off-the-shelf computer games or utilized in conjunction with modern game development tools such as the game engine Unity. The examples will outline concepts and required resources in order to enable the reader to use low-cost depth-sensing cameras for rehabilitation.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {incollection}
}
Ortiz-Catalan, Max; Nijenhuis, Sharon; Ambrosch, Kurt; Bovend'Eerdt, Thamar; Koenig, Sebastian; Lange, Belinda
Virtual Reality Book Section
In: Pons, Jose L.; Torricelli, Diego (Ed.): Emerging Therapies in Neurorehabilitation, vol. 4, pp. 287–303, 2013, ISBN: 978-3-642-38555-1.
@incollection{ortiz-catalan_virtual_2013,
  title     = {Virtual Reality},
  author    = {Ortiz-Catalan, Max and Nijenhuis, Sharon and Ambrosch, Kurt and Bovend'Eerdt, Thamar and Koenig, Sebastian and Lange, Belinda},
  editor    = {Pons, Jose L. and Torricelli, Diego},
  url       = {http://ict.usc.edu/pubs/Virtual%20Reality.pdf},
  isbn      = {978-3-642-38555-1},
  year      = {2013},
  date      = {2013-08-01},
  booktitle = {Emerging Therapies in Neurorehabilitation},
  volume    = {4},
  pages     = {287--303},
  series    = {Biosystems \& Biorobotics},
  abstract  = {This chapter provides an overview on the use of Virtual Reality (VR) in rehabilitation with respect to recent neuroscience and physical therapy reviews of individuals with motor impairments. A wide range of technologies have been employed to provide rehabilitation supported by VR. Several studies have found evidence of the benefits of VR rehabilitation technologies. However, support for their efficacy is still limited due to the lack of generalizable results and the uncoordinated effort of many individual, heterogeneous studies that have been conducted. Although VR has clear potential as a rehabilitation tool to improve treatment outcomes, future trials need to take into account the individual perspective of each patient group and consolidate research methodologies across trials to allow for stronger conclusions across the heterogeneous field of neurorehabilitation.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {incollection}
}
Forbell, Eric; Kalisch, Nicolai; Morbini, Fabrizio; Christofferson, Kelly; Sagae, Kenji; Traum, David; Rizzo, Albert
Roundtable: An Online Framework for Building Web-based Conversational Agents Proceedings Article
In: Annual SIGdial Meeting on Discourse and Dialogue, Metz, France, 2013.
@inproceedings{forbell_roundtable_2013,
  author    = {Eric Forbell and Nicolai Kalisch and Fabrizio Morbini and Kelly Christofferson and Kenji Sagae and David Traum and Albert Rizzo},
  title     = {Roundtable: An Online Framework for Building Web-based Conversational Agents},
  booktitle = {Annual SIGdial Meeting on Discourse and Dialogue},
  address   = {Metz, France},
  year      = {2013},
  date      = {2013-08-01},
  url       = {http://ict.usc.edu/pubs/Roundtable-%20An%20Online%20Framework%20for%20Building%20Web-based%20Conversational%20Agents.pdf},
  abstract  = {We present an online system that provides a complete web-based sandbox for creating, testing and publishing embodied conversational agents. The tool, called Roundtable, empowers many different types of authors and varying team sizes to create flexible interactions by automating many editing workflows while limiting complexity and hiding architectural concerns. Finished characters can be published directly to web servers, enabling highly interactive applications.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Roy, Michael; Costanzo, Michelle E.; Jovanovic, Tanja; Leaman, Suzanne; Norrholm, Seth D.; Rizzo, Albert
Heart Rate Response to Fear Conditioning and Virtual Reality in Subthreshold PTSD Journal Article
In: Studies in Health Technology and Informatics, vol. 191, pp. 115–119, 2013.
@article{roy_heart_2013,
  title     = {Heart Rate Response to Fear Conditioning and Virtual Reality in Subthreshold {PTSD}},
  author    = {Roy, Michael and Costanzo, Michelle E. and Jovanovic, Tanja and Leaman, Suzanne and Norrholm, Seth D. and Rizzo, Albert},
  url       = {http://ict.usc.edu/pubs/Heart%20Rate%20Response%20to%20Fear%20Conditioning%20and%20Virtual%20Reality%20in%20Subthreshold%20PTSD.pdf},
  year      = {2013},
  date      = {2013-07-01},
  journal   = {Studies in Health Technology and Informatics},
  volume    = {191},
  pages     = {115--119},
  abstract  = {Posttraumatic stress disorder (PTSD) is a significant health concern for U.S. military service members (SMs) returning from Afghanistan and Iraq. Early intervention to prevent chronic disability requires greater understanding of subthreshold PTSD symptoms, which are associated with impaired physical health, mental health, and risk for delayed onset PTSD. We report a comparison of physiologic responses for recently deployed SMs with high and low subthreshold PTSD symptoms, respectively, to a fear conditioning task and novel virtual reality paradigm (Virtual Iraq). The high symptom group demonstrated elevated heart rate (HR) response during fear conditioning. Virtual reality sequences evoked significant HR responses which predicted variance of the PTSD Checklist-Military Version self-report. Our results support the value of physiologic assessment during fear conditioning and combat-related virtual reality exposure as complementary tools in detecting subthreshold PTSD symptoms in Veterans.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Parsons, Thomas D.; Courtney, Chris; Dawson, Michael E.; Rizzo, Albert; Arizmendi, Brian
Visuospatial Processing and Learning Effects in Virtual Reality Based Mental Rotation and Navigational Tasks Proceedings Article
In: International Conference on Human-Computer Interaction, Las Vegas, NV, 2013.
@inproceedings{parsons_visuospatial_2013,
  author    = {Thomas D. Parsons and Chris Courtney and Michael E. Dawson and Albert Rizzo and Brian Arizmendi},
  title     = {Visuospatial Processing and Learning Effects in Virtual Reality Based Mental Rotation and Navigational Tasks},
  booktitle = {International Conference on Human-Computer Interaction},
  address   = {Las Vegas, NV},
  year      = {2013},
  date      = {2013-07-01},
  url       = {http://ict.usc.edu/pubs/Visuospatial%20Processing%20and%20Learning%20Effects%20in%20Virtual%20Reality%20Based%20Mental%20Rotation%20and%20Navigational%20Tasks.pdf},
  abstract  = {Visuospatial function and performance in interactions between humans and computers involve the human identification and manipulation of computer generated stimuli and their location. The impact of learning on mental rotation has been demonstrated in studies relating everyday spatial activities and spatial abilities. An aspect of visuospatial learning in virtual environments that has not been widely studied is the impact of threat on learning in a navigational task. In fact, to our knowledge, the combined assessment of learning during mental rotation trials and learning in an ecologically valid virtual reality-based navigational environment (that has both high and low threat zones) has not been adequately studied. Results followed expectation: 1) learning occurred in the virtual reality based mental rotation test. Although there was a relation between route learning and practice, a primacy effect was observed as participants performed more poorly when going from the first zone to the last.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Talbot, Thomas
Playing with Biology: Making medical games that appear lifelike Journal Article
In: International Journal of Gaming and Computer Mediated Simulations, vol. 5, no. 3, pp. 83–96, 2013.
@article{talbot_playing_2013,
  title     = {Playing with Biology: Making medical games that appear lifelike},
  author    = {Talbot, Thomas},
  url       = {http://www.igi-global.com/article/playing-with-biology/93030},
  year      = {2013},
  date      = {2013-07-01},
  journal   = {International Journal of Gaming and Computer Mediated Simulations},
  volume    = {5},
  number    = {3},
  pages     = {83--96},
  abstract  = {Game-based medical simulations differ from other training modalities in that life processes must be simulated as part of the experience. Biological fidelity is the degree to which character anatomical appearance and physiology behavior are represented within a game or simulation. Methods to achieve physiological fidelity include computational physiology engines, complex state machines, simple state machines and kinetic models. Traditional games also employ health scores that can also be employed for medical gaming. The selection of technique is dependent upon the goals of the simulation, the types of input expected of the user, the amount of development work possible and the level of fidelity required. Apparent biological fidelity, responsiveness to user inputs and the ability to correct mistakes is often more important than actual biological fidelity.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Suma, Evan; Krum, David M.; Lange, Belinda; Koenig, Sebastian; Rizzo, Albert; Bolas, Mark
Adapting user interfaces for gestural interaction with the flexible action and articulated skeleton toolkit Journal Article
In: Computers & Graphics, vol. 37, no. 3, pp. 193–201, 2013.
@article{suma_adapting_2013,
  title     = {Adapting user interfaces for gestural interaction with the flexible action and articulated skeleton toolkit},
  author    = {Suma, Evan and Krum, David M. and Lange, Belinda and Koenig, Sebastian and Rizzo, Albert and Bolas, Mark},
  url       = {http://ict.usc.edu/pubs/Adapting%20user%20interfaces%20for%20gestural%20interaction%20with%20the%20%EF%AC%82exible%20action%20and%20articulated%20skeleton%20toolkit.pdf},
  year      = {2013},
  date      = {2013-05-01},
  journal   = {Computers \& Graphics},
  volume    = {37},
  number    = {3},
  pages     = {193--201},
  abstract  = {We present the Flexible Action and Articulated Skeleton Toolkit (FAAST), a middleware software framework for integrating full-body interaction with virtual environments, video games, and other user interfaces. This toolkit provides a complete end-to-end solution that includes a graphical user interface for custom gesture creation, sensor configuration, skeletal tracking, action recognition, and a variety of output mechanisms to control third party applications, allowing virtually any PC application to be repurposed for gestural control even if it does not explicitly support input from motion sensors. To facilitate intuitive and transparent gesture design, we define a syntax for representing human gestures using rule sets that correspond to the basic spatial and temporal components of an action. These individual rules form primitives that, although conceptually simple on their own, can be combined both simultaneously and in sequence to form sophisticated gestural interactions. In addition to presenting the system architecture and our approach for representing and designing gestural interactions, we also describe two case studies that evaluated the use of FAAST for controlling first-person video games and improving the accessibility of computing interfaces for individuals with motor impairments. Thus, this work represents an important step toward making gestural interaction more accessible for practitioners, researchers, and hobbyists alike.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Rizzo, Albert; Buckwalter, John Galen; Forbell, Eric; Reist, Chris; Difede, JoAnn; Rothbaum, Barbara O.; Lange, Belinda; Koenig, Sebastian; Talbot, Thomas
Virtual Reality Applications to Address the Wounds of War Journal Article
In: Psychiatric Annals, vol. 43, no. 3, pp. 123–138, 2013.
@article{rizzo_virtual_2013-2,
  title     = {Virtual Reality Applications to Address the Wounds of War},
  author    = {Rizzo, Albert and Buckwalter, John Galen and Forbell, Eric and Reist, Chris and Difede, JoAnn and Rothbaum, Barbara O. and Lange, Belinda and Koenig, Sebastian and Talbot, Thomas},
  url       = {http://ict.usc.edu/pubs/Virtual%20Reality%20Applications%20to%20Address%20the%20Wounds%20of%20War.pdf},
  year      = {2013},
  date      = {2013-03-01},
  journal   = {Psychiatric Annals},
  volume    = {43},
  number    = {3},
  pages     = {123--138},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Proffitt, Rachel; Lange, Belinda
User Centered Design and Development of a Game for Exercise in Older Adults Journal Article
In: The International Journal of Technology, Knowledge, and Society, vol. 8, no. 6, pp. 95–112, 2013.
@article{proffitt_user_2013,
  title     = {User Centered Design and Development of a Game for Exercise in Older Adults},
  author    = {Proffitt, Rachel and Lange, Belinda},
  url       = {http://ict.usc.edu/pubs/User%20Centered%20Design%20and%20Development%20of%20a%20Game%20for%20Exercise%20in%20Older%20Adults.pdf},
  year      = {2013},
  date      = {2013-03-01},
  journal   = {The International Journal of Technology, Knowledge, and Society},
  volume    = {8},
  number    = {6},
  pages     = {95--112},
  abstract  = {Thirty percent of older adults fall every year. One of the most effective methods to help prevent falls is participation in a regular fitness or exercise program to build and maintain capacities. Few exercise programs for older adults utilize the advances in technology and gaming for fitness. Fifteen older adults currently involved in a fitness program for seniors at California State University–Fullerton participated in focus groups. The focus groups explored perceptions of health and wellness, the use of and access to technology, and ideas for the development of a game for fitness. The data were analyzed using open coding and the themes that emerged from the data were used in the design of a prototype game. Nineteen older adults participated in an iterative user testing process of the prototype game. The iterative user testing process involved several cycles of user testing and changes to the prototype. The feedback from the user testing process as well as the focus groups will be summarized and explored in this paper. Details of a preliminary game will be presented with a focus on access to technology for older adults, participation as means for prevention and building capacities.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Gratch, Jonathan; Morency, Louis-Philippe; Scherer, Stefan; Stratou, Giota; Boberg, Jill; Koenig, Sebastian; Adamson, Todd; Rizzo, Albert
User-State Sensing for Virtual Health Agents and TeleHealth Applications Proceedings Article
In: Medicine Meets Virtual Reality, San Diego, CA, 2013.
@inproceedings{gratch_user-state_2013,
title = {User-State Sensing for Virtual Health Agents and TeleHealth Applications},
author = {Jonathan Gratch and Louis-Philippe Morency and Stefan Scherer and Giota Stratou and Jill Boberg and Sebastian Koenig and Todd Adamson and Albert Rizzo},
url = {http://ict.usc.edu/pubs/User-State%20Sensing%20for%20Virtual%20Health%20Agents%20and%20TeleHealth%20Applications.pdf},
year = {2013},
date = {2013-02-01},
booktitle = {Medicine Meets Virtual Reality},
address = {San Diego, CA},
abstract = {Nonverbal behaviors play a crucial role in shaping outcomes in face-to-face clinical interactions. Experienced clinicians use nonverbals to foster rapport and “read” their clients to inform diagnoses. The rise of telemedicine and virtual health agents creates new opportunities, but it also strips away much of this nonverbal channel. Recent advances in low-cost computer vision and sensing technologies have the potential to address this challenge by learning to recognize nonverbal cues from large datasets of clinical interactions. These techniques can enhance both telemedicine and the emerging technology of virtual health agents. This article describes our current research in addressing these challenges in the domain of PTSD and depression screening for U.S. Veterans. We describe our general approach and report on our initial contribution: the creation of a large dataset of clinical interview data that facilitates the training of user-state sensing technology.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Rizzo, Albert; Difede, JoAnn; Rothbaum, Barbara O.; Daughtry, J. Martin; Reger, Greg
Virtual Reality as a Tool for Delivering PTSD Exposure Therapy Book Section
In: Post-Traumatic Stress Disorder: Future Directions in Prevention, Diagnosis, and Treatment, Springer, 2013.
@incollection{rizzo_virtual_2013,
  title     = {Virtual Reality as a Tool for Delivering PTSD Exposure Therapy},
  author    = {Albert Rizzo and JoAnn Difede and Barbara O. Rothbaum and J. Martin Daughtry and Greg Reger},
  url       = {http://ict.usc.edu/pubs/Virtual%20Reality%20as%20a%20Tool%20for%20Delivering%20PTSD%20Exposure%20Therapy.pdf},
  year      = {2013},
  date      = {2013-01-01},
  booktitle = {Post-Traumatic Stress Disorder: Future Directions in Prevention, Diagnosis, and Treatment},
  publisher = {Springer},
  abstract  = {Virtual Reality (VR) technology offers new opportunities for the development of innovative assessment and intervention tools. VR-based testing, training, and treatment approaches that would be difficult, if not impossible, to deliver using traditional methods are now being developed that take advantage of the assets available with VR technology. If empirical studies continue to demonstrate effectiveness, VR applications could provide new options for targeting the cognitive, psychological, motor and functional impairments that result from various psychological and physical disorders and conditions. VR allows for the precise presentation and control of stimuli within dynamic multi-sensory 3D computer generated environments, as well as providing advanced methods for capturing and quantifying behavioral responses. These characteristics serve as the basis for the rationale for VR applications in the clinical assessment, intervention and training domains. This chapter will begin with a brief review of the history and rationale for the use of VR with clinical populations followed by a description of the technology for creating and using VR clinically. The chapter will then focus on reviewing the rationale for VR Exposure Therapy (VRET) applied to Anxiety Disorders. The use of VRET for the treatment of PTSD will then be detailed followed by a description of the Virtual Iraq/Afghanistan VRET system and the results from its use with OEF/OIF Service Members and Veterans.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {incollection}
}
Rizzo, Albert; John, Bruce Sheffield; Newman, Brad; Williams, Josh; Hartholt, Arno; Lethin, Clarke; Buckwalter, John Galen
Virtual Reality as a Tool for Delivering PTSD Exposure Therapy and Stress Resilience Training Journal Article
In: Military Behavioral Health, vol. 1, pp. 48–54, 2013.
@article{rizzo_virtual_2013-1,
title = {Virtual Reality as a Tool for Delivering PTSD Exposure Therapy and Stress Resilience Training},
author = {Albert Rizzo and Bruce Sheffield John and Brad Newman and Josh Williams and Arno Hartholt and Clarke Lethin and John Galen Buckwalter},
url = {http://ict.usc.edu/pubs/Virtual%20Reality%20as%20a%20Tool%20for%20Delivering%20PTSD%20Exposure%20Therapy%20and%20Stress%20Resilience%20Training.pdf},
doi = {10.1080/21635781.2012.721064},
year = {2013},
date = {2013-01-01},
journal = {Military Behavioral Health},
volume = {1},
pages = {48--54},
abstract = {The incidence of post-traumatic stress disorder (PTSD) in returning Operation Enduring Freedom and Operation Iraqi Freedom military personnel has created a significant behavioral health care challenge. One emerging form of treatment for combat-related PTSD that has shown promise involves the delivery of exposure therapy using immersive virtual reality (VR). Initial outcomes from open clinical trials have been positive, and fully randomized controlled trials are currently in progress. Inspired by the initial success of our research using VR to emotionally engage and successfully treat persons undergoing exposure therapy for PTSD, we have developed a similar VR-based approach to deliver resilience training prior to an initial deployment. The STress Resilience In Virtual Environments (STRIVE) project aims to create a set of combat simulations (derived from our existing virtual Iraq/Afghanistan PTSD exposure therapy system) that are part of a multiepisode interactive narrative experience. Users can be immersed within challenging virtual combat contexts and interact with virtual characters as part of an experiential approach for learning psychoeducational material, stress management techniques, emotional coping strategies believed to enhance stress resilience. This article describes the development and evaluation of the virtual Iraq/Afghanistan exposure therapy system and then details its current transition into the STRIVE tool for predeployment stress resilience training.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Filter
Sorry, no publications matched your criteria.