Publications
Search
Mozgai, Sharon; Hartholt, Arno; Rizzo, Albert "Skip"
An Adaptive Agent-Based Interface for Personalized Health Interventions Proceedings Article
In: Proceedings of the 25th International Conference on Intelligent User Interfaces Companion, pp. 118–119, ACM, Cagliari, Italy, 2020, ISBN: 978-1-4503-7513-9.
@inproceedings{mozgai_adaptive_2020,
  title     = {An Adaptive Agent-Based Interface for Personalized Health Interventions},
  author    = {Mozgai, Sharon and Hartholt, Arno and Rizzo, Albert "Skip"},
  url       = {https://dl.acm.org/doi/10.1145/3379336.3381467},
  doi       = {10.1145/3379336.3381467},
  isbn      = {978-1-4503-7513-9},
  year      = {2020},
  date      = {2020-03-01},
  booktitle = {Proceedings of the 25th International Conference on Intelligent User Interfaces Companion},
  pages     = {118--119},
  publisher = {ACM},
  address   = {Cagliari, Italy},
  abstract  = {This demo introduces a novel mHealth application with an agent-based interface designed to collect multimodal data with passive sensors native to popular wearables (e.g., Apple Watch, FitBit, and Garmin) as well as through user self-report. This mHealth application delivers personalized and adaptive multimedia content via smartphone application specifically tailored to the user in the interdependent domains of physical, cognitive, and emotional health via novel adaptive logic-based algorithms while employing behavior change techniques (e.g., goal-setting, barrier identification, etc.). A virtual human coach leads all interactions to improve adherence.},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Saredakis, Dimitrios; Szpak, Ancret; Birckhead, Brandon; Keage, Hannah A. D.; Rizzo, Albert; Loetscher, Tobias
Factors Associated With Virtual Reality Sickness in Head-Mounted Displays: A Systematic Review and Meta-Analysis Journal Article
In: Frontiers in Human Neuroscience, vol. 14, pp. 96, 2020, ISSN: 1662-5161.
@article{saredakis_factors_2020,
  title     = {Factors Associated With Virtual Reality Sickness in Head-Mounted Displays: A Systematic Review and Meta-Analysis},
  author    = {Saredakis, Dimitrios and Szpak, Ancret and Birckhead, Brandon and Keage, Hannah A. D. and Rizzo, Albert and Loetscher, Tobias},
  url       = {https://www.frontiersin.org/article/10.3389/fnhum.2020.00096/full},
  doi       = {10.3389/fnhum.2020.00096},
  issn      = {1662-5161},
  year      = {2020},
  date      = {2020-03-01},
  journal   = {Frontiers in Human Neuroscience},
  volume    = {14},
  pages     = {96},
  abstract  = {The use of head-mounted displays (HMD) for virtual reality (VR) application-based purposes including therapy, rehabilitation, and training is increasing. Despite advancements in VR technologies, many users still experience sickness symptoms. VR sickness may be influenced by technological differences within HMDs such as resolution and refresh rate, however, VR content also plays a significant role. The primary objective of this systematic review and meta-analysis was to examine the literature on HMDs that report Simulator Sickness Questionnaire (SSQ) scores to determine the impact of content. User factors associated with VR sickness were also examined. A systematic search was conducted according to PRISMA guidelines. Fifty-five articles met inclusion criteria, representing 3,016 participants (mean age range 19.5–80; 41% female). Findings show gaming content recorded the highest total SSQ mean 34.26 (95%CI 29.57–38.95). VR sickness profiles were also influenced by visual stimulation, locomotion and exposure times. Older samples (mean age ≥35 years) scored significantly lower total SSQ means than younger samples, however, these findings are based on a small evidence base as a limited number of studies included older users. No sex differences were found. Across all types of content, the pooled total SSQ mean was relatively high 28.00 (95%CI 24.66–31.35) compared with recommended SSQ cut-off scores. These findings are of relevance for informing future research and the application of VR in different contexts.},
  pubstate  = {published},
  tppubtype = {article}
}
Mozgai, Sharon; Hartholt, Arno; Rizzo, Albert
The Passive Sensing Agent: A Multimodal Adaptive mHealth Application Proceedings Article
In: Proceedings of the 2020 IEEE International Conference on Pervasive Computing and Communications Workshops (PerCom Workshops), pp. 1–3, IEEE, Austin, TX, USA, 2020, ISBN: 978-1-72814-716-1.
@inproceedings{mozgai_passive_2020,
  title     = {The Passive Sensing Agent: A Multimodal Adaptive {mHealth} Application},
  author    = {Mozgai, Sharon and Hartholt, Arno and Rizzo, Albert},
  url       = {https://ieeexplore.ieee.org/document/9156177/},
  doi       = {10.1109/PerComWorkshops48775.2020.9156177},
  isbn      = {978-1-72814-716-1},
  year      = {2020},
  date      = {2020-03-01},
  booktitle = {Proceedings of the 2020 {IEEE} International Conference on Pervasive Computing and Communications Workshops ({PerCom} Workshops)},
  pages     = {1--3},
  publisher = {IEEE},
  address   = {Austin, TX, USA},
  abstract  = {We are demoing the Passive Sensing Agent (PSA), an mHealth virtual human coach, that collects multimodal data through passive sensors native to popular wearables (e.g., Apple Watch, FitBit, and Garmin). This virtual human interface delivers adaptive multi-media content via smartphone application that is specifically tailored to the user in the interdependent domains of physical, cognitive, and emotional health. Initially developed for the military, the PSA delivers health interventions (e.g., educational exercises, physical challenges, and performance feedback) matched to the individual user via novel adaptive logic-based algorithms while employing various behavior change techniques (e.g., goal-setting, barrier identification, rewards, modeling, etc.). A virtual human coach leads all interactions including the first-time user experience and the brief daily sessions. All interactions were specifically designed to engage and motivate the user while continuously collecting data on their cognitive, emotional, and physical fitness. This multi-component application is integrated and deployed on an iPhone and Apple Watch prototype; a civilian version is currently in-development.},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
An, Capt Eric; Nolty, Anne A T; Amano, Stacy S; Rizzo, Albert A; Buckwalter, J Galen; Rensberger, Jared
Heart Rate Variability as an Index of Resilience Journal Article
In: Military Medicine, 2019, ISSN: 0026-4075, 1930-613X.
@article{an_heart_2019,
  title     = {Heart Rate Variability as an Index of Resilience},
  author    = {An, Capt Eric and Nolty, Anne A. T. and Amano, Stacy S. and Rizzo, Albert A. and Buckwalter, J. Galen and Rensberger, Jared},
  url       = {https://academic.oup.com/milmed/advance-article/doi/10.1093/milmed/usz325/5586497},
  doi       = {10.1093/milmed/usz325},
  issn      = {0026-4075},
  year      = {2019},
  date      = {2019-10-01},
  journal   = {Military Medicine},
  abstract  = {Introduction: Resilience is the ability to maintain or quickly return to a stable physical and psychological equilibrium despite experiencing stressful events. Flexibility of the autonomic nervous system is particularly important for adaptive stress responses and may contribute to individual differences in resilience. Power spectrum analysis of heart rate variability (HRV) allows measurement of sympathovagal balance, which helps to evaluate autonomic flexibility. The present study investigated HRV as a broad index of resilience. Materials and Methods: Twenty-four male participants from the Army National Guard Special Forces completed psychological measures known to relate to resilience and had HRV measured while undergoing stressful virtual environment scenarios. Pearson product-moment correlations were used to explore the relationships between HRV and resilience factors. All research was conducted with the oversight of the Human Subjects Review Committee of Fuller Theological Seminary. Results: Trends toward significance were reported in order to provide results that would reasonably be expected in a study of higher power. Trends between resilience factors and HRV were found only during specific stress-inducing simulations (see Tables III). Conclusion: Greater resilience to stress was associated with HRV during nonstress periods. Higher levels of resilience to traumatic events were associated with HRV during circumstances that were more stressful and emotionally distressing. Post hoc analysis revealed that specific factors including flexibility, emotional control, and spirituality were driving the relationship between general resilience and HRV following emotionally laden stressors. Less stress vulnerability was associated with HRV following intermittent brief stressors. In sum, HRV appears to represent some aspects of an individual’s overall resilience profile. Although resilience remains a complex, multidimensional construct, HRV shows promise as a global psychophysiological index of resilience. This study also offers important perspectives concerning ways to optimize both physical and psychological health.},
  pubstate  = {published},
  tppubtype = {article}
}
Difede, JoAnn; Rothbaum, Barbara O.; Rizzo, Albert A.; Wyka, Katarzyna; Spielman, Lisa; Jovanovic, Tanja; Reist, Christopher; Roy, Michael J.; Norrholm, Seth D.; Glatt, Charles; Lee, Francis
Enhanced exposure therapy for combat-related Posttraumatic Stress Disorder (PTSD): Study protocol for a randomized controlled trial Journal Article
In: Contemporary Clinical Trials, pp. 105857, 2019, ISSN: 15517144.
@article{difede_enhanced_2019,
  title     = {Enhanced exposure therapy for combat-related {Posttraumatic Stress Disorder} ({PTSD}): Study protocol for a randomized controlled trial},
  author    = {Difede, JoAnn and Rothbaum, Barbara O. and Rizzo, Albert A. and Wyka, Katarzyna and Spielman, Lisa and Jovanovic, Tanja and Reist, Christopher and Roy, Michael J. and Norrholm, Seth D. and Glatt, Charles and Lee, Francis},
  url       = {https://linkinghub.elsevier.com/retrieve/pii/S1551714419305725},
  doi       = {10.1016/j.cct.2019.105857},
  issn      = {1551-7144},
  year      = {2019},
  date      = {2019-10-01},
  journal   = {Contemporary Clinical Trials},
  pages     = {105857},
  abstract  = {Background: PTSD, which has been identified in up to 23% of post-9-11 veterans, often results in a chronic, pernicious course. Thus, effective treatments are imperative. The Institute of Medicine (IOM) concluded that the only intervention for PTSD with sufficient evidence to conclude efficacy is exposure therapy. This Phase III trial compares the efficacy of exposure therapy for combat-related PTSD delivered in two different formats- via virtual reality exposure therapy (VRE) or prolonged exposure therapy (PE)- combined with D-Cycloserine (DCS), a cognitive enhancer shown to facilitate the extinction of fear.},
  pubstate  = {published},
  tppubtype = {article}
}
Mozgai, Sharon; Hartholt, Arno; Rizzo, Albert “Skip”
Systematic Representative Design and Clinical Virtual Reality Journal Article
In: Psychological Inquiry, vol. 30, no. 4, pp. 231–245, 2019, ISSN: 1047-840X, 1532-7965.
@article{mozgai_systematic_2019,
  title     = {Systematic Representative Design and Clinical Virtual Reality},
  author    = {Mozgai, Sharon and Hartholt, Arno and Rizzo, Albert "Skip"},
  url       = {https://www.tandfonline.com/doi/full/10.1080/1047840X.2019.1693873},
  doi       = {10.1080/1047840X.2019.1693873},
  issn      = {1047-840X},
  year      = {2019},
  date      = {2019-10-01},
  journal   = {Psychological Inquiry},
  volume    = {30},
  number    = {4},
  pages     = {231--245},
  abstract  = {The authors of the article, “Causal Inference in Generalizable Environments: Systematic Representative Design”, boldly announce their core point in the opening line of the abstract stating that, “Causal inference and generalizability both matter.” While a surface glance might suggest this to be a simple notion, a closer examination reveals the complexity of what they are proposing. This complexity is apparent when one considers that the bulk of human experimental research has always been challenged in its inability to concurrently deliver on both of these aims. This is no slight on the tens of 1000’s of human researchers and behavioral scientists who have devoted long careers to highly controlled human psychological and social science laboratory research. Rather, it reflects the sheer enormity of the challenges for conducting human studies designed to specify human function with physics-informed lab methods, while at the same time producing results that lead to enhanced understanding and prediction of how people will operate in the complex and ever-changing contexts that make up everyday life. At the core of this issue is a methodological and philosophical challenge that is relevant to all areas of human subjects’ research, beyond the social science focus of the Miller et al. (this issue) article. It is our aim to discuss the central topics in their article through the lens of our own work using Virtual/Augmented Reality and Virtual Human simulation technologies for clinical and training applications},
  pubstate  = {published},
  tppubtype = {article}
}
Hartholt, Arno; Mozgai, Sharon; Fast, Ed; Liewer, Matt; Reilly, Adam; Whitcup, Wendy; Rizzo, Albert "Skip"
Virtual Humans in Augmented Reality: A First Step towards Real-World Embedded Virtual Roleplayers Proceedings Article
In: Proceedings of the 7th International Conference on Human-Agent Interaction - HAI '19, pp. 205–207, ACM Press, Kyoto, Japan, 2019, ISBN: 978-1-4503-6922-0.
@inproceedings{hartholt_virtual_2019-1,
  title     = {Virtual Humans in Augmented Reality: A First Step towards Real-World Embedded Virtual Roleplayers},
  author    = {Hartholt, Arno and Mozgai, Sharon and Fast, Ed and Liewer, Matt and Reilly, Adam and Whitcup, Wendy and Rizzo, Albert "Skip"},
  url       = {http://dl.acm.org/citation.cfm?doid=3349537.3352766},
  doi       = {10.1145/3349537.3352766},
  isbn      = {978-1-4503-6922-0},
  year      = {2019},
  date      = {2019-10-01},
  booktitle = {Proceedings of the 7th International Conference on Human-Agent Interaction - {HAI} '19},
  pages     = {205--207},
  publisher = {ACM Press},
  address   = {Kyoto, Japan},
  abstract  = {We present one of the first applications of virtual humans in Augmented Reality (AR), which allows young adults with Autism Spectrum Disorder (ASD) the opportunity to practice job interviews. It uses the Magic Leap’s AR hardware sensors to provide users with immediate feedback on six different metrics, including eye gaze, blink rate and head orientation. The system provides two characters, with three conversational modes each. Ported from an existing desktop application, the main development lessons learned were: 1) provide users with navigation instructions in the user interface, 2) avoid dark colors as they are rendered transparently, 3) use dynamic gaze so characters maintain eye contact with the user, 4) use hardware sensors like eye gaze to provide user feedback, and 5) use surface detection to place characters dynamically in the world.},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Pynadath, David V.; Wang, Ning; Kamireddy, Sreekar
A Markovian Method for Predicting Trust Behavior in Human-Agent Interaction Proceedings Article
In: Proceedings of the 7th International Conference on Human-Agent Interaction - HAI '19, pp. 171–178, ACM Press, Kyoto, Japan, 2019, ISBN: 978-1-4503-6922-0.
@inproceedings{pynadath_markovian_2019,
  title     = {A {Markovian} Method for Predicting Trust Behavior in Human-Agent Interaction},
  author    = {Pynadath, David V. and Wang, Ning and Kamireddy, Sreekar},
  url       = {http://dl.acm.org/citation.cfm?doid=3349537.3351905},
  doi       = {10.1145/3349537.3351905},
  isbn      = {978-1-4503-6922-0},
  year      = {2019},
  date      = {2019-10-01},
  booktitle = {Proceedings of the 7th International Conference on Human-Agent Interaction - {HAI} '19},
  pages     = {171--178},
  publisher = {ACM Press},
  address   = {Kyoto, Japan},
  abstract  = {Trust calibration is critical to the success of human-agent interaction (HAI). However, individual differences are ubiquitous in people’s trust relationships with autonomous systems. To assist its heterogeneous human teammates calibrate their trust in it, an agent must first dynamically model them as individuals, rather than communicating with them all in the same manner. It can then generate expectations of its teammates’ behavior and optimize its own communication based on the current state of the trust relationship it has with them. In this work, we examine how an agent can generate accurate expectations given observations of only the teammate’s trust-related behaviors (e.g., did the person follow or ignore its advice?). In addition to this limited input, we also seek a specific output: accurately predicting its human teammate’s future trust behavior (e.g., will the person follow or ignore my next suggestion?). In this investigation, we construct a model capable of generating such expectations using data gathered in a human-subject study of behavior in a simulated human-robot interaction (HRI) scenario. We first analyze the ability of measures from a presurvey on trust-related traits to accurately predict subsequent trust behaviors. However, as the interaction progresses, this effect is dwarfed by the direct experience. We therefore analyze the ability of sequences of prior behavior by the teammate to accurately predict subsequent trust behaviors. Such behavioral sequences have shown to be indicative of the subjective beliefs of other teammates, and we show here that they have a predictive power as well.},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Lucas, Gale M.; Rizzo, Albert; Gratch, Jonathan; Scherer, Stefan; Stratou, Giota; Boberg, Jill; Morency, Louis-Philippe
Reporting Mental Health Symptoms: Breaking Down Barriers to Care with Virtual Human Interviewers Book Section
In: The Impact of Virtual and Augmented Reality on Individuals and Society, pp. 256–264, Frontiers Media SA, 2019.
@incollection{lucas_reporting_2019,
  title     = {Reporting Mental Health Symptoms: Breaking Down Barriers to Care with Virtual Human Interviewers},
  author    = {Lucas, Gale M. and Rizzo, Albert and Gratch, Jonathan and Scherer, Stefan and Stratou, Giota and Boberg, Jill and Morency, Louis-Philippe},
  url       = {https://books.google.com/books?hl=en&lr=&id=N724DwAAQBAJ&oi=fnd&pg=PP1&dq=The+Impact+of+Virtual+and+Augmented+Reality+on+Individuals+and+Society&ots=ZMD1P9T-K5&sig=Qqh7iHZ4Xq2iRyYecrECHwNNE38#v=onepage&q=The%20Impact%20of%20Virtual%20and%20Augmented%20Reality%20on%20Individuals%20and%20Society&f=false},
  year      = {2019},
  date      = {2019-09-01},
  booktitle = {The Impact of Virtual and Augmented Reality on Individuals and Society},
  pages     = {256--264},
  publisher = {Frontiers Media SA},
  abstract  = {A common barrier to healthcare for psychiatric conditions is the stigma associated with these disorders. Perceived stigma prevents many from reporting their symptoms. Stigma is a particularly pervasive problem among military service members, preventing them from reporting symptoms of combat-related conditions like posttraumatic stress disorder (PTSD). However, research shows increased reporting by service members when anonymous assessments are used. For example, service members report more symptoms of PTSD when they anonymously answer the Post-Deployment Health Assessment (PDHA) symptom checklist compared to the official PDHA, which is identifiable and linked to their military records. To investigate the factors that influence reporting of psychological symptoms by service members, we used a transformative technology: automated virtual humans that interview people about their symptoms. Such virtual human interviewers allow simultaneous use of two techniques for eliciting disclosure that would otherwise be incompatible; they afford anonymity while also building rapport. We examined whether virtual human interviewers could increase disclosure of mental health symptoms among active-duty service members that just returned from a year-long deployment in Afghanistan. Service members reported more symptoms during a conversation with a virtual human interviewer than on the official PDHA. They also reported more to a virtual human interviewer than on an anonymized PDHA. A second, larger sample of active-duty and former service members found a similar effect that approached statistical significance. Because respondents in both studies shared more with virtual human interviewers than an anonymized PDHA—even though both conditions control for stigma and ramifications for service members’ military records—virtual human interviewers that build rapport may provide a superior option to encourage reporting.},
  pubstate  = {published},
  tppubtype = {incollection}
}
Bond, William F; Lynch, Theresa J.; Mischler, Matthew J.; Fish, Jessica L.; McGarvey, Jeremy S.; Taylor, Jason T.; Kumar, Dipen M.; Mou, Kyle M.; Ebert-Allen, Rebecca A.; Mahale, Dilip N.; Talbot, Thomas B.; Aiyer, Meenakshy
Virtual Standardized Patient Simulation Journal Article
In: Simulation in Healthcare, vol. 14, no. 4, pp. 10, 2019.
@article{bond_virtual_2019,
  title     = {Virtual Standardized Patient Simulation},
  author    = {Bond, William F. and Lynch, Theresa J. and Mischler, Matthew J. and Fish, Jessica L. and McGarvey, Jeremy S. and Taylor, Jason T. and Kumar, Dipen M. and Mou, Kyle M. and Ebert-Allen, Rebecca A. and Mahale, Dilip N. and Talbot, Thomas B. and Aiyer, Meenakshy},
  url       = {https://journals.lww.com/simulationinhealthcare/Fulltext/2019/08000/Virtual_Standardized_Patient_Simulation__Case.6.aspx#pdf-link},
  year      = {2019},
  date      = {2019-08-01},
  journal   = {Simulation in Healthcare},
  volume    = {14},
  number    = {4},
  pages     = {10},
  abstract  = {Introduction: High-value care (HVC) suggests that good history taking and physical examination should lead to risk stratification that drives the use or withholding of diagnostic testing. This study describes the development of a series of virtual standardized patient (VSP) cases and provides preliminary evidence that supports their ability to provide experiential learning in HVC. Methods: This pilot study used VSPs, or natural language processing–based patient avatars, within the USC Standard Patient platform. Faculty consensus was used to develop the cases, including the optimal diagnostic testing strategies, treatment options, and scored content areas. First-year resident physician learners experienced two 90-minute didactic sessions before completing the cases in a computer laboratory, using typed text to interview the avatar for history taking, then completing physical examination, differential diagnosis, diagnostic testing, and treatment modules for each case. Learners chose a primary and 2 alternative “possible” diagnoses from a list of 6 to 7 choices, diagnostic testing options from an extensive list, and treatments from a brief list ranging from 6 to 9 choices. For the history-taking module, both faculty and the platform scored the learners, and faculty assessed the appropriateness of avatar responses. Four randomly selected learner-avatar interview transcripts for each case were double rated by faculty for interrater reliability calculations. Intraclass correlations were calculated for interrater reliability, and Spearman ρ was used to determine the correlation between the platform and faculty ranking of learners' history-taking scores. Results: Eight VSP cases were experienced by 14 learners. Investigators reviewed 112 transcripts (4646 learner query-avatar responses). Interrater reliability means were 0.87 for learner query scoring and 0.83 for avatar response. Mean learner success for history taking was scored by the faculty at 57% and by the platform at 51% (ρ correlation of learner rankings = 0.80)},
  pubstate  = {published},
  tppubtype = {article}
}
Talbot, Thomas; Rizzo, Albert “Skip”
Virtual Human Standardized Patients for Clinical Training Book Section
In: Virtual Reality for Psychological and Neurocognitive Interventions, pp. 387–405, Springer New York, New York, NY, 2019, ISBN: 978-1-4939-9480-9 978-1-4939-9482-3.
@incollection{talbot_virtual_2019-1,
  title     = {Virtual Human Standardized Patients for Clinical Training},
  author    = {Talbot, Thomas and Rizzo, Albert "Skip"},
  url       = {http://link.springer.com/10.1007/978-1-4939-9482-3_17},
  doi       = {10.1007/978-1-4939-9482-3_17},
  isbn      = {978-1-4939-9482-3},
  year      = {2019},
  date      = {2019-08-01},
  booktitle = {Virtual Reality for Psychological and Neurocognitive Interventions},
  pages     = {387--405},
  publisher = {Springer New York},
  address   = {New York, NY},
  abstract  = {Since Dr. Howard Barrows (1964) introduced the human standardized patient in 1963, there have been attempts to game a computer-based simulacrum of a patient encounter; the first being a heart attack simulation using the online PLATO system (Bitzer M, Nursing Research 15:144–150, 1966). With the now ubiquitous use of computers in medicine, interest and effort have expended in the area of Virtual Patients (VPs). There are excellent summaries in the literature (Talbot TB, International Journal of Gaming and Computer Mediated Simulations 4:1–19, 2012) that explain the different types of virtual patients along with their best case applications, strengths and limitations.},
  pubstate  = {published},
  tppubtype = {incollection}
}
Lahav, Orly; Talis, Vadim; Cinamon, Rachel Gali; Rizzo, Albert
Virtual interactive consulting agent to support freshman students in transition to higher education Journal Article
In: Journal of Computing in Higher Education, pp. 1–35, 2019, ISSN: 1042-1726, 1867-1233.
@article{lahav_virtual_2019,
  title     = {Virtual interactive consulting agent to support freshman students in transition to higher education},
  author    = {Lahav, Orly and Talis, Vadim and Cinamon, Rachel Gali and Rizzo, Albert},
  url       = {http://link.springer.com/10.1007/s12528-019-09237-8},
  doi       = {10.1007/s12528-019-09237-8},
  issn      = {1042-1726},
  year      = {2019},
  date      = {2019-08-01},
  journal   = {Journal of Computing in Higher Education},
  pages     = {1--35},
  abstract  = {The virtual interactive consulting agent system is an online virtual career center that supports freshman students in transition to higher education. This virtual counseling system, based on accumulative empirical knowledge for working students and knowledge about effective career intervention, aims to guide first-year university students in combining study and work effectively. Three main aspects of career interventions are supplied by this virtual interactive consulting agent system: personal assessment, information, and personal encouragement and relatedness. The virtual interactive consulting agent is based on the SimCoach system. The current research includes two studies that examine acceptability and satisfaction from two perspectives: that of the counselors (the experts) and of the consultees (the target consumers). Both studies included 87 participants divided into two research groups: 45 counselors and 42 counseled freshman students. The data were collected through four data collection tools: acceptability and satisfaction questionnaire, an open-ended question, Google Docs, and screen recording applications. The participants’ answers were analyzed using quantitative software. The results show that the majority of the counselors were satisfied with the usability of the system but not with the process of counseling through the virtual agent, with some expressing concern about the impact on the profession. In contrast, most of the consultees were satisfied with the counseling process and some stated that the virtual agent helped them to determine how to integrate work and study more effectively.},
  pubstate  = {published},
  tppubtype = {article}
}
Parsons, Thomas D.; Rizzo, Albert “Skip”
A Review of Virtual Classroom Environments for Neuropsychological Assessment Book Section
In: Virtual Reality for Psychological and Neurocognitive Interventions, pp. 247–265, Springer New York, New York, NY, 2019, ISBN: 978-1-4939-9480-9 978-1-4939-9482-3.
@incollection{parsons_review_2019,
  title     = {A Review of Virtual Classroom Environments for Neuropsychological Assessment},
  author    = {Parsons, Thomas D. and Rizzo, Albert "Skip"},
  url       = {http://link.springer.com/10.1007/978-1-4939-9482-3_11},
  doi       = {10.1007/978-1-4939-9482-3_11},
  isbn      = {978-1-4939-9482-3},
  year      = {2019},
  date      = {2019-08-01},
  booktitle = {Virtual Reality for Psychological and Neurocognitive Interventions},
  pages     = {247--265},
  publisher = {Springer New York},
  address   = {New York, NY},
  abstract  = {Differential diagnosis and treatment of neuropsychological disorders require assessments that can differentiate overlapping symptoms. Previous research has most often relied on paper-and-pencil as well as computerized psychometric tests of cognitive functions. Although these approaches provide highly systematic control and delivery of performance challenges, they have also been criticized as limited in the area of ecological validity. A possible answer to the problems of ecological validity in assessment of cognitive functioning in neurological populations is to immerse the participant in a virtual environment. This chapter reviews the potential of various virtual classroom environments that have been developed for neuropsychological assessment.},
  pubstate  = {published},
  tppubtype = {incollection}
}
Rizzo, Albert "Skip"; Bouchard, Stéphane (Ed.)
Virtual Reality for Psychological and Neurocognitive Interventions Book
Springer New York, New York, NY, 2019, ISBN: 978-1-4939-9480-9 978-1-4939-9482-3.
@book{rizzo_virtual_2019,
  title     = {Virtual Reality for Psychological and Neurocognitive Interventions},
  editor    = {Rizzo, Albert "Skip" and Bouchard, Stéphane},
  url       = {http://link.springer.com/10.1007/978-1-4939-9482-3},
  doi       = {10.1007/978-1-4939-9482-3},
  isbn      = {978-1-4939-9480-9},
  year      = {2019},
  date      = {2019-08-01},
  publisher = {Springer New York},
  address   = {New York, NY},
  series    = {Virtual Reality Technologies for Health and Clinical Applications},
  abstract  = {This exciting collection tours virtual reality in both its current therapeutic forms and its potential to transform a wide range of medical and mental health-related fields. Extensive findings track the contributions of VR devices, systems, and methods to accurate assessment, evidence-based and client-centered treatment methods, and—as described in a stimulating discussion of virtual patient technologies—innovative clinical training. Immersive digital technologies are shown enhancing opportunities for patients to react to situations, therapists to process patients’ physiological responses, and scientists to have greater control over test conditions and access to results. Expert coverage details leading-edge applications of VR across a broad spectrum of psychological and neurocognitive conditions, including: Treating anxiety disorders and PTSD. Treating developmental and learning disorders, including Autism Spectrum Disorder, Assessment of and rehabilitation from stroke and traumatic brain injuries. Assessment and treatment of substance abuse. Assessment of deviant sexual interests. Treating obsessive-compulsive and related disorders. Augmenting learning skills for blind persons. Readable and relevant, Virtual Reality for Psychological and Neurocognitive Interventions is an essential idea book for neuropsychologists, rehabilitation specialists (including physical, speech, vocational, and occupational therapists), and neurologists. Researchers across the behavioral and social sciences will find it a roadmap toward new and emerging areas of study.},
  pubstate  = {published},
  tppubtype = {book}
}
Bouchard, Stéphane; Rizzo, Albert “Skip”
Applications of Virtual Reality in Clinical Psychology and Clinical Cognitive Neuroscience–An Introduction Book Section
In: Virtual Reality for Psychological and Neurocognitive Interventions, pp. 1–13, Springer New York, New York, NY, 2019, ISBN: 978-1-4939-9480-9 978-1-4939-9482-3.
@incollection{bouchard_applications_2019,
title = {Applications of Virtual Reality in Clinical Psychology and Clinical Cognitive Neuroscience–An Introduction},
author = {Stéphane Bouchard and Albert "Skip" Rizzo},
url = {http://link.springer.com/10.1007/978-1-4939-9482-3_1},
doi = {10.1007/978-1-4939-9482-3_1},
isbn = {978-1-4939-9480-9 978-1-4939-9482-3},
year = {2019},
date = {2019-08-01},
booktitle = {Virtual Reality for Psychological and Neurocognitive Interventions},
pages = {1--13},
publisher = {Springer New York},
address = {New York, NY},
abstract = {Simulation technology has a long history of adding value in aviation, military training, automotive/aircraft design, and surgical planning. In clinical psychology, Norcross et al. (2013) surveyed 70 therapy experts regarding interventions they predicted to increase in the next decade and virtual reality (VR) was ranked 4th out of 45 options, with other computer-supported methods occupying 4 out of the top 5 positions. The increased popularity of VR in the news, social media, conferences, and from innovative start-ups may give the impression that VR is something new. However, it is important to look back in time and recognize that as early as the 1960’s, Heilig proposed a multisensory immersive experienced called the Sensorama, and Sutherland and Sproull had created a stereoscopic head mounted display (HMD) (Berryman 2012; Srivastava et al. 2014). The term VR was coined more than 30 years ago by Jaron Lanier and commercial games were distributed to the public as early as 1989 by Mattel (in the US, and by PAX in Japan) for its PowerGlove™ and Nintendo’s failed Virtual Boy™ was released in 1995. Clinical VR applications were proposed as early as the mid 1990’s by Lamson, Pugnetti, Rothbaum, Riva, Rizzo, Weiss, and Wiederhold (named in alphabetical order), among others. Moreover, several scientific journals, conferences, and handbooks dedicated to the subject have been reporting scientific findings for decades.},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
Coleman, Benjamin; Marion, Sarah; Rizzo, Albert; Turnbull, Janiece; Nolty, Anne
Virtual Reality Assessment of Classroom-Related Attention: An Ecologically Relevant Approach to Evaluating the Effectiveness of Working Memory Training Journal Article
In: Frontiers in Psychology, vol. 10, 2019, ISSN: 1664-1078.
@article{coleman_virtual_2019,
title = {Virtual Reality Assessment of Classroom-Related Attention: An Ecologically Relevant Approach to Evaluating the Effectiveness of Working Memory Training},
author = {Benjamin Coleman and Sarah Marion and Albert Rizzo and Janiece Turnbull and Anne Nolty},
url = {https://www.frontiersin.org/article/10.3389/fpsyg.2019.01851/full},
doi = {10.3389/fpsyg.2019.01851},
issn = {1664-1078},
year = {2019},
date = {2019-08-01},
journal = {Frontiers in Psychology},
volume = {10},
abstract = {Computerized cognitive interventions to improve working memory also purport to improve ADHD-related inattention and off task behavior. Such interventions have been shown to improve working memory, executive functioning, and fluid reasoning on standardized neuropsychological measures. However, debate continues as to whether such programs lead to improvement on ecologically relevant outcomes, such as classroom behavior. This study sought to propose a novel, ecologically relevant approach to evaluate the effectiveness of working memory training on real-world attention performance. Participants included 15 children, aged 6–15, identified as having attention problems were assessed via the virtual classroom continuous performance task (VCCPT) before and after completing 5 weeks of Cogmed working memory training. The VCCPT is a validated measure of sustained and selective attention set within a virtual reality (VR) environment. Several key areas of attention performance were observed to improve, including omission errors, reaction time, reaction time variability, and hit variability. Results suggest that working memory training led to substantial improvements in sustained attention in a real-life scenario of classroom learning. Moreover, the use of psychometrically validated VR measurement provides incremental validity beyond that of teacher or parent report of behavior. Observing such improvements on ecologically relevant measures of attention adds to the discussion around how to evaluate the effectiveness of working memory training as it pertains to real-life improvements and serves to inform consumer awareness of such products and their claims.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Hartholt, Arno; Mozgai, Sharon; Rizzo, Albert "Skip"
Virtual Job Interviewing Practice for High-Anxiety Populations Proceedings Article
In: Proceedings of the 19th ACM International Conference on Intelligent Virtual Agents - IVA '19, pp. 238–240, ACM Press, Paris, France, 2019, ISBN: 978-1-4503-6672-4.
@inproceedings{hartholt_virtual_2019,
title = {Virtual Job Interviewing Practice for High-Anxiety Populations},
author = {Arno Hartholt and Sharon Mozgai and Albert "Skip" Rizzo},
url = {http://dl.acm.org/citation.cfm?doid=3308532.3329417},
doi = {10.1145/3308532.3329417},
isbn = {978-1-4503-6672-4},
year = {2019},
date = {2019-07-01},
booktitle = {Proceedings of the 19th ACM International Conference on Intelligent Virtual Agents - IVA '19},
pages = {238--240},
publisher = {ACM Press},
address = {Paris, France},
abstract = {We present a versatile system for training job interviewing skills that focuses specifically on segments of the population facing increased challenges during the job application process. In particular, we target those with Autism Spectrum Disorder (ADS), veterans transitioning to civilian life, and former convicts integrating back into society. The system itself follows the SAIBA framework and contains several interviewer characters, who each represent a different type of vocational field, (e.g. service industry, retail, office, etc.) Each interviewer can be set to one of three conversational modes, which not only affects what they say and how they say it, but also their supporting body language. This approach offers varying difficulties, allowing users to start practicing with interviewers who are more encouraging and accommodating before moving on to personalities that are more direct and indifferent. Finally, the user can place the interviewers in different environmental settings (e.g. conference room, restaurant, executive office, etc.), allowing for many different combinations in which to practice.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Rizzo, Albert S.
Clinical virtual reality in mental health and rehabilitation: a brief review of the future! Proceedings Article
In: Fulop, Gabor F.; Hanson, Charles M.; Andresen, Bjørn F. (Ed.): Infrared Technology and Applications XLV, pp. 51–74, SPIE, Baltimore, United States, 2019, ISBN: 978-1-5106-2669-0 978-1-5106-2670-6.
@inproceedings{rizzo_clinical_2019,
title = {Clinical virtual reality in mental health and rehabilitation: a brief review of the future!},
author = {Albert S. Rizzo},
editor = {Gabor F. Fulop and Charles M. Hanson and Bjørn F. Andresen},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/11002/2524302/Clinical-virtual-reality-in-mental-health-and-rehabilitation--a/10.1117/12.2524302.full},
doi = {10.1117/12.2524302},
isbn = {978-1-5106-2669-0 978-1-5106-2670-6},
year = {2019},
date = {2019-05-01},
booktitle = {Infrared Technology and Applications XLV},
volume = {37},
number = {1},
pages = {51--74},
publisher = {SPIE},
address = {Baltimore, United States},
internal-note = {volume 37 / number 1 look inconsistent with SPIE Proceedings vol. 11002 implied by the URL and DOI — verify against the publisher record},
abstract = {The paper details some of the history of Clinical Virtual Reality (VR) as it has evolved over the last 25 years and provides a brief overview of the key scientific findings for making a judgment regarding its value in the areas of mental health and rehabilitation. This write-up is designed be a companion piece to my SPIE keynote on the topic of, “Is Clinical Virtual Reality Ready for Primetime?” As such, the paper is packed with citations to key scientific research in this area that should provide readers who are interested in this topic with a roadmap for further exploration of the literature. After presenting a brief history of the area, a discussion follows as to the theory, research, and pragmatic issues that support the view that this VR use case is theoretically informed, has a large and convincing scientific literature to support its clinical application, and that recent technology advances and concomitant cost reductions have made clinical implementation feasible and pragmatically supported. The paper concludes with the perspective that Clinical VR applications will soon become indispensable tools in the toolbox of psychological researchers and practitioners and will only grow in relevance and popularity in the future.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Yeom, Dongwoo; Choi, Joon-Ho; Kang, Sin-Hwa
Investigation of the physiological differences in the immersive virtual reality environment and real indoor environment: Focused on skin temperature and thermal sensation Journal Article
In: Building and Environment, vol. 154, pp. 44–54, 2019, ISSN: 03601323.
@article{yeom_investigation_2019,
title = {Investigation of the physiological differences in the immersive virtual reality environment and real indoor environment: Focused on skin temperature and thermal sensation},
author = {Dongwoo Yeom and Joon-Ho Choi and Sin-Hwa Kang},
url = {https://linkinghub.elsevier.com/retrieve/pii/S0360132319301672},
doi = {10.1016/j.buildenv.2019.03.013},
issn = {03601323},
year = {2019},
date = {2019-05-01},
journal = {Building and Environment},
volume = {154},
pages = {44--54},
abstract = {The goal of this research is to investigate and determine whether the effect of an IVE condition on an occupant's environmental sensations and physiological responses is different from the effect of a real environmental condition in the indoor environment. The research included a series of human subject experiments, with 16 participants in an environmental chamber. A thermal quality condition was selected as a primary environmental parameter, based on current IEQ-relevant studies. While the ambient thermal condition was gradually changed from 20 °C to 30 °C, the participants were asked to report their overall thermal sensations. Their skin temperatures were also continuously measured to collect physiological signal information in real time. The results of this experimental study revealed that the participants mostly generated higher skin temperature at the selected seven skin areas. Their reported thermal sensations were significantly higher in the IVE condition, than in the real environment, showing a difference of 12%.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Koenig, Sebastian T.; Krch, Denise; Lange, Belinda S.; Rizzo, Albert
Virtual reality and rehabilitation. Book Section
In: Handbook of rehabilitation psychology (3rd ed.)., pp. 521–539, American Psychological Association, Washington, DC, 2019, ISBN: 978-1-4338-2985-7 978-1-4338-2984-0.
@incollection{koenig_virtual_2019,
title = {Virtual reality and rehabilitation},
author = {Sebastian T. Koenig and Denise Krch and Belinda S. Lange and Albert Rizzo},
url = {http://content.apa.org/books/16122-032},
isbn = {978-1-4338-2985-7 978-1-4338-2984-0},
year = {2019},
date = {2019-04-01},
booktitle = {Handbook of rehabilitation psychology (3rd ed.).},
pages = {521--539},
publisher = {American Psychological Association},
address = {Washington, DC},
abstract = {Virtual reality (VR) technology has seen a recent surge in market adoption for video games, movies, broadcasting of live events, and industries such as real estate and health care. This chapter focuses on evidence and technology released within the past 5 years to avoid discussing outdated or discontinued VR systems. It provides a pragmatic overview of VR technology, including its strengths and limitations, recent research and system development, and future perspectives. The widespread appeal of VR in society, untethered hardware, and integration of VR accessories, tracking devices, and psychophysiological monitoring are among the topics that have pushed VR into the spotlight, even in nontraditional VR markets such as rehabilitation psychology. The chapter provides an updated view of VR as a natural fit for clinical use, highlighting key features that can positively impact rehabilitation outcomes and address social and vocational aspects of cognitive rehabilitation. (PsycInfo Database Record (c) 2020 APA, all rights reserved)},
keywords = {},
pubstate = {published},
tppubtype = {incollection}
}
Filter
2017
Eini, Dalit Shefer; Ratzon, Navah Z.; Rizzo, Albert A.; Yeh, Shih-Ching; Lange, Belinda; Yaffe, Batia; Daich, Alexander; Weiss, Patrice L.; Kizony, Rachel
Camera-tracking gaming control device for evaluation of active wrist flexion and extension Journal Article
In: Journal of Hand Therapy, vol. 30, no. 1, pp. 89–96, 2017, ISSN: 08941130.
Abstract | Links | BibTeX | Tags: MedVR
@article{shefer_eini_camera-tracking_2017,
title = {Camera-tracking gaming control device for evaluation of active wrist flexion and extension},
author = {Dalit Shefer Eini and Navah Z. Ratzon and Albert A. Rizzo and Shih-Ching Yeh and Belinda Lange and Batia Yaffe and Alexander Daich and Patrice L. Weiss and Rachel Kizony},
url = {http://linkinghub.elsevier.com/retrieve/pii/S0894113016301132},
doi = {10.1016/j.jht.2016.07.002},
issn = {08941130},
year = {2017},
date = {2017-01-01},
journal = {Journal of Hand Therapy},
volume = {30},
number = {1},
pages = {89--96},
abstract = {Study Design: Cross sectional. Introduction: Measuring wrist range of motion (ROM) is an essential procedure in hand therapy clinics. Purpose of the Study: To test the reliability and validity of a dynamic ROM assessment, the Camera WristTracker (CWT). Methods: Wrist flexion and extension ROM of 15 patients with distal radius fractures and 15 matched controls were assessed with the CWT and with a universal goniometer. Results: One-way model intraclass correlation coefficient analysis indicated high test-retest reliability for extension (ICC = 0.92) and moderate reliability for flexion (ICC = 0.49). Standard error for extension was 2.45° and for flexion was 4.07°. Repeated-measures analysis revealed a significant main effect for group; ROM was greater in the control group (F[1, 28] = 47.35; P < .001). The concurrent validity of the CWT was partially supported. Conclusion: The results indicate that the CWT may provide highly reliable scores for dynamic wrist extension ROM, and moderately reliable scores for flexion, in people recovering from a distal radius fracture. Level of Evidence: N/A.},
keywords = {MedVR},
pubstate = {published},
tppubtype = {article}
}
2016
Talbot, Thomas B.; Lyon, Thomas D.; Rizzo, Albert; John, Bruce
Virtual Child Witness-Effects of single and multiple use on performance with Novice and Expert cohorts in a structured virtual human interview Proceedings Article
In: Proceedings of the Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2016, Orlando, Florida, 2016.
Abstract | Links | BibTeX | Tags: MedVR
@inproceedings{talbot_virtual_2016,
title = {Virtual Child Witness-Effects of single and multiple use on performance with Novice and Expert cohorts in a structured virtual human interview},
author = {Thomas B. Talbot and Thomas D. Lyon and Albert Rizzo and Bruce John},
url = {http://ict.usc.edu/pubs/Virtual%20Child%20Witness%20Effects%20of%20single%20and%20multiple%20use%20on%20performance%20with%20Novice%20and%20Expert%20cohorts%20in%20a%20structured%20virtual%20human%20interview.pdf},
year = {2016},
date = {2016-12-01},
booktitle = {Proceedings of the Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2016},
address = {Orlando, Florida},
internal-note = {abstract appears truncated mid-sentence after "Experts (M = .713" — restore the remainder from the source PDF},
abstract = {Virtual human avatars can be used to train and assess a myriad of complex skills, such as interviewing, interpersonal, and clinical skills, in a safe environment that provides consistency, reduced cost, greater accessibility, and objective feedback. We created a structured virtual human interview which consisted of a conversational avatar that interacts verbally in response to on screen question choices. Our prototype was a forensic interview simulation called Virtual Child Witness (VCW). VCW provides a content-rich interview in response to open-ended questions and is designed to assess user’s interviewing strategy. In a quasi-experimental design, we evaluated 222 subjects to determine if the system could discriminate between Experts (M = .713},
keywords = {MedVR},
pubstate = {published},
tppubtype = {inproceedings}
}
Rizzo, Albert; Scherer, Stefan; DeVault, David; Gratch, Jonathan; Artstein, Ronald; Hartholt, Arno; Lucas, Gale; Marsella, Stacy; Morbini, Fabrizio; Nazarian, Angela; Stratou, Giota; Traum, David; Wood, Rachel; Boberg, Jill; Morency, Louis Philippe
Detection and computational analysis of psychological signals using a virtual human interviewing agent Journal Article
In: Journal of Pain Management, pp. 311–321, 2016, ISSN: 1939-5914.
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@article{rizzo_detection_2016,
title = {Detection and computational analysis of psychological signals using a virtual human interviewing agent},
author = {Albert Rizzo and Stefan Scherer and David DeVault and Jonathan Gratch and Ronald Artstein and Arno Hartholt and Gale Lucas and Stacy Marsella and Fabrizio Morbini and Angela Nazarian and Giota Stratou and David Traum and Rachel Wood and Jill Boberg and Louis Philippe Morency},
url = {http://www.icdvrat.org/2014/papers/ICDVRAT2014_S03N3_Rizzo_etal.pdf},
issn = {1939-5914},
year = {2016},
date = {2016-11-01},
journal = {Journal of Pain Management},
pages = {311--321},
internal-note = {url points to an ICDVRAT 2014 conference PDF, not the 2016 Journal of Pain Management article; volume/number are also missing — verify against the journal record},
abstract = {It has long been recognized that facial expressions, body posture/gestures and vocal parameters play an important role in human communication and the implicit signalling of emotion. Recent advances in low cost computer vision and behavioral sensing technologies can now be applied to the process of making meaningful inferences as to user state when a person interacts with a computational device. Effective use of this additive information could serve to promote human interaction with virtual human (VH) agents that may enhance diagnostic assessment. This paper will focus on our current research in these areas within the DARPA-funded "Detection and Computational Analysis of Psychological Signals" project, with specific attention to the SimSensei application use case. SimSensei is a virtual human interaction platform that is able to sense and interpret real-time audiovisual behavioral signals from users interacting with the system. It is specifically designed for health care support and leverages years of virtual human research and development at USC-ICT. The platform enables an engaging face-to-face interaction where the virtual human automatically reacts to the state and inferred intent of the user through analysis of behavioral signals gleaned from facial expressions, body gestures and vocal parameters. Akin to how non-verbal behavioral signals have an impact on human to human interaction and communication, SimSensei aims to capture and infer from user non-verbal communication to improve engagement between a VH and a user. The system can also quantify and interpret sensed behavioral signals longitudinally that can be used to inform diagnostic assessment within a clinical context.},
keywords = {MedVR, UARC, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
McAlinden, Ryan; Kang, Sin-Hwa; Nye, Benjamin; Phillips, Artemisa; Campbell, Julia; Goldberg, Stephan L.
Cost-Effective Strategies for Producing Engaging Online Courseware Proceedings Article
In: Proceedings from the Interservice/Industry Training, Simulation and Education Conference (I/ITSEC) 2016, National Training and Simulation Association, Orlando, FL, 2016.
Abstract | Links | BibTeX | Tags: ARL, DoD, Learning Sciences, MedVR, MxR, STG, UARC
@inproceedings{mcalinden_cost-effective_2016,
title = {Cost-Effective Strategies for Producing Engaging Online Courseware},
author = {Ryan McAlinden and Sin-Hwa Kang and Benjamin Nye and Artemisa Phillips and Julia Campbell and Stephan L. Goldberg},
url = {http://www.iitsecdocs.com/search},
internal-note = {url is the generic I/ITSEC document-search page, not a direct link to the paper — replace with a stable direct link if one is available},
year = {2016},
date = {2016-11-01},
booktitle = {Proceedings from the Interservice/Industry Training, Simulation and Education Conference (I/ITSEC) 2016},
publisher = {National Training and Simulation Association},
address = {Orlando, FL},
abstract = {As distributed learning (dL) and computer-based training (CBT) continue to proliferate, the methods of delivery often remain unengaging and bland for participants. Though many of the leaders in commercial online learning have improved their delivery style and quality in recent years, they continue to fall short in terms of user engagement and satisfaction. PowerPoint regurgitation and video lectures are commonplace and leave end users uninspired and wanting more. This paper discusses results from an ongoing research project, Captivating Virtual Instruction for Training (CVIT), which is aimed at understanding and improving dL through a series of recommendations and best practices for promoting and enhancing student engagement online. Though the central focus is on engagement, and how that translates to learning potential, a third variable (cost) has been examined to understand the financial and resource impacts on making content more interesting (i.e. the return on investment, or ROI). The paper presents findings from a 3-year long experiment comparing existing dL methods and techniques both within and outside of the Army. The project developed two dL versions of an existing Army course (Advanced Situational Awareness-Basic (ASA-B)) – the first was designed around producing material that was as engaging and as immersive as possible within a target budget; the second was a scaled-down version using more traditional, yet contemporary dL techniques (PowerPoint recital, video lectures). The two were then compared along three dimensions– engagement, learning and cost. The findings show that improved engagement in distributed courseware is possible without breaking the bank, though the returns on learning with these progressive approaches remain inconclusive. More importantly, it was determined that the quality and experience of the designers, production staff, writers, animators, programmers, and others cannot be underestimated, and that the familiar phrase – ‘you get what you pay for’ is as true with online learning as it is with other areas of content design and software development.},
keywords = {ARL, DoD, Learning Sciences, MedVR, MxR, STG, UARC},
pubstate = {published},
tppubtype = {inproceedings}
}
Kang, Sin-Hwa; Feng, Andrew W.; Seymour, Mike; Shapiro, Ari
Study comparing video-based characters and 3D-based characters on mobile devices for chat Proceedings Article
In: Proceedings of the 9th International Conference on Motion in Games, pp. 181–186, ACM Press, Burlingame, California, 2016, ISBN: 978-1-4503-4592-7.
Abstract | Links | BibTeX | Tags: MedVR, Virtual Humans
@inproceedings{kang_study_2016,
title = {Study comparing video-based characters and 3D-based characters on mobile devices for chat},
author = {Sin-Hwa Kang and Andrew W. Feng and Mike Seymour and Ari Shapiro},
url = {http://dl.acm.org/citation.cfm?id=2994274},
doi = {10.1145/2994258.2994274},
isbn = {978-1-4503-4592-7},
year = {2016},
date = {2016-10-01},
booktitle = {Proceedings of the 9th International Conference on Motion in Games},
pages = {181--186},
publisher = {ACM Press},
address = {Burlingame, California},
abstract = {This study explores presentation techniques for a chat-based virtual human that communicates engagingly with users. Interactions with the virtual human occur via a smartphone outside of the lab in natural settings. Our work compares the responses of users who interact with an animated virtual character as opposed to a real human video character capable of displaying realistic backchannel behaviors. An audio-only interface is compared additionally with the two types of characters. The findings of our study suggest that people are socially attracted to a 3D animated character that does not display backchannel behaviors more than a real human video character that presents realistic backchannel behaviors. People engage in conversation more by talking for a longer amount of time when they interact with a 3D animated virtual human that exhibits realistic backchannel behaviors, compared to communicating with a real human video character that does not display backchannel behaviors.},
keywords = {MedVR, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Kang, Sin-Hwa; Feng, Andrew W.; Seymour, Mike; Shapiro, Ari
Smart Mobile Virtual Characters: Video Characters vs. Animated Characters Proceedings Article
In: Proceedings of the Fourth International Conference on Human Agent Interaction, pp. 371–374, ACM Press, Biopolis, Singapore, 2016, ISBN: 978-1-4503-4508-8.
Abstract | Links | BibTeX | Tags: MedVR, Virtual Humans
@inproceedings{kang_smart_2016,
title = {Smart Mobile Virtual Characters: Video Characters vs. Animated Characters},
author = {Sin-Hwa Kang and Andrew W. Feng and Mike Seymour and Ari Shapiro},
url = {http://dl.acm.org/citation.cfm?id=2980511},
doi = {10.1145/2974804.2980511},
isbn = {978-1-4503-4508-8},
year = {2016},
date = {2016-10-01},
booktitle = {Proceedings of the Fourth International Conference on Human Agent Interaction},
pages = {371--374},
publisher = {ACM Press},
address = {Biopolis, Singapore},
abstract = {This study investigates presentation techniques for a chatbased virtual human that communicates engagingly with users via a smartphone outside of the lab in natural settings. Our work compares the responses of users who interact with an animated 3D virtual character as opposed to a real human video character capable of displaying backchannel behaviors. The findings of our study demonstrate that people are socially attracted to a 3D animated character that does not display backchannel behaviors more than a real human video character that presents realistic backchannel behaviors. People engage in conversation more by talking for a longer amount of time when they interact with a 3D animated virtual human that exhibits backchannel behaviors, compared to communicating with a real human video character that does not display backchannel behaviors.},
keywords = {MedVR, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Bresnahan, T.; Rizzo, A.; Burke, S. L.; Partin, M.; Ahlness, R. M.; Trimmer, M.
Using Virtual Interactive Training Agents (VITA) with Adults with Autism and other Developmental Disabilities Proceedings Article
In: Proceedings of the 2016 International Conference on Disability, Virtual Reality, and Associated Technology, pp. 49–56, ICDVRAT and the University of Reading, Los Angeles, CA, 2016, ISBN: 978-0-7049-1547-3.
Abstract | Links | BibTeX | Tags: MedVR, Virtual Humans
@inproceedings{bresnahan_using_2016,
title = {Using Virtual Interactive Training Agents (VITA) with Adults with Autism and other Developmental Disabilities},
author = {T. Bresnahan and A. Rizzo and S. L. Burke and M. Partin and R. M. Ahlness and M. Trimmer},
url = {http://www.icdvrat.org/2016/papers/ICDVRAT2016_S02N2_Bresnahan_etal.pdf},
isbn = {978-0-7049-1547-3},
year = {2016},
date = {2016-09-01},
booktitle = {Proceedings of the 2016 International Conference on Disability, Virtual Reality, and Associated Technology},
pages = {49--56},
publisher = {ICDVRAT and the University of Reading},
address = {Los Angeles, CA},
abstract = {Conversational Virtual Human (VH) agents are increasingly being used to support role-play experiential learning across a range of use-cases and populations. This project examined whether use of the Virtual Interactive Training Agent (VITA) system would improve job interviewing skills in a sample of persons with autism or other developmental disability. The study examined performance differences between baseline and final interviews in face-to-face and virtual reality conditions, and whether statistically significant increases were demonstrated between interviewing conditions. Paired samples t-tests were utilized to examine mean changes in performance by interview stage and in the overall difference between baseline and final interview stages. The preliminary results indicated that VITA is a positive factor when preparing young adults with autism or other developmental disability for employment interviews. Statistically significant results were demonstrated across all pilot conditions and in all but one post-assessment condition.},
keywords = {MedVR, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Buckwalter, J. Galen; Castellani, Brian; McEwen, Bruce; Karlamangla, Arun S.; Rizzo, Albert A.; John, Bruce; O'Donnell, Kyle; Seeman, Teresa
Allostatic Load as a Complex Clinical Construct: A Case-Based Computational Modeling Approach Journal Article
In: Complexity, vol. 21, no. S1, pp. 291–306, 2016, ISSN: 10762787.
Abstract | Links | BibTeX | Tags: MedVR, UARC
@article{galen_buckwalter_allostatic_2016,
title = {Allostatic Load as a Complex Clinical Construct: A Case-Based Computational Modeling Approach},
author = {J. Galen Buckwalter and Brian Castellani and Bruce McEwen and Arun S. Karlamangla and Albert A. Rizzo and Bruce John and Kyle O'Donnell and Teresa Seeman},
url = {http://doi.wiley.com/10.1002/cplx.21743},
doi = {10.1002/cplx.21743},
issn = {10762787},
year = {2016},
date = {2016-09-01},
journal = {Complexity},
volume = {21},
number = {S1},
pages = {291--306},
abstract = {Allostatic load (AL) is a complex clinical construct, providing a unique window into the cumulative impact of stress. However, due to its inherent complexity, AL presents two major measurement challenges to conventional statistical modeling (the field’s dominant methodology): it is comprised of a complex causal network of bioallostatic systems, represented by an even larger set of dynamic biomarkers; and, it is situated within a web of antecedent socioecological systems, linking AL to differences in health outcomes and disparities. To address these challenges, we employed casebased computational modeling (CBM), which allowed us to make four advances: (1) we developed a multisystem, 7-factor (20 biomarker) model of AL’s network of allostatic systems; (2) used it to create a catalog of nine different clinical AL profiles (causal pathways); (3) linked each clinical profile to a typology of 23 health outcomes; and (4) explored our results (post hoc) as a function of gender, a key socioecological factor. In terms of highlights, (a) the Healthy clinical profile had few health risks; (b) the pro-inflammatory profile linked to high blood pressure and diabetes; (c) Low Stress Hormones linked to heart disease, TIA/Stroke, diabetes, and circulation problems; and (d) high stress hormones linked to heart disease and high blood pressure. Post hoc analyses also found that males were overrepresented on the High Blood Pressure (61.2%), Metabolic Syndrome (63.2%), High Stress Hormones (66.4%), and High Blood Sugar (57.1%); while females were overrepresented on the Healthy (81.9%), Low Stress Hormones (66.3%), and Low Stress Antagonists (stress buffers) (95.4%) profiles.},
keywords = {MedVR, UARC},
pubstate = {published},
tppubtype = {article}
}
Reger, Greg M.; Koenen-Woods, Patricia; Zetocha, Kimberlee; Smolenski, Derek J.; Holloway, Kevin M.; Rothbaum, Barbara O.; Difede, JoAnn; Rizzo, Albert A.; Edwards-Stewart, Amanda; Skopp, Nancy A.; Mishkind, Matthew; Reger, Mark A.; Gahm, Gregory A.
Randomized Controlled Trial of Prolonged Exposure Using Imaginal Exposure vs. Virtual Reality Exposure in Active Duty Soldiers With Deployment-Related Posttraumatic Stress Disorder (PTSD) Journal Article
In: Journal of Consulting and Clinical Psychology, 2016, ISSN: 1939-2117, 0022-006X.
Abstract | Links | BibTeX | Tags: MedVR, UARC
@article{reger_randomized_2016,
title = {Randomized Controlled Trial of Prolonged Exposure Using Imaginal Exposure vs. Virtual Reality Exposure in Active Duty Soldiers With Deployment-Related Posttraumatic Stress Disorder ({PTSD})},
author = {Greg M. Reger and Patricia Koenen-Woods and Kimberlee Zetocha and Derek J. Smolenski and Kevin M. Holloway and Barbara O. Rothbaum and JoAnn Difede and Albert A. Rizzo and Amanda Edwards-Stewart and Nancy A. Skopp and Matthew Mishkind and Mark A. Reger and Gregory A. Gahm},
url = {https://www.researchgate.net/profile/Amanda_Edwards-Stewart/publication/307950241_Randomized_Controlled_Trial_of_Prolonged_Exposure_Using_Imaginal_Exposure_vs_Virtual_Reality_Exposure_in_Active_Duty_Soldiers_With_Deployment-Related_Posttraumatic_Stress_Disorder_PTSD/links/57d6f13f08ae601b39ac25d9.pdf},
doi = {10.1037/ccp0000134},
issn = {1939-2117, 0022-006X},
year = {2016},
date = {2016-09-01},
journal = {Journal of Consulting and Clinical Psychology},
abstract = {Prolonged exposure (PE) is an evidence-based psychotherapy for posttraumatic stress disorder (PTSD) but there is limited research with active-duty military populations. Virtual reality exposure (VRE) has shown promise but randomized trials are needed to evaluate efficacy relative to existing standards of care. This study evaluated the efficacy of VRE and PE for active duty soldiers with PTSD from deployments to Iraq and Afghanistan. Active-duty soldiers (N = 162) were randomized to 10-sessions of PE, VRE, or a minimal attention waitlist (WL). Blinded assessors evaluated symptoms at baseline, halfway through treatment, at posttreatment, and at 3- and 6-month follow-ups using the Clinician Administered PTSD Scale (CAPS). Intent-to-treat analyses found that both PE and VRE resulted in significant reductions in PTSD symptoms relative to those in the WL. The majority of patients demonstrated reliable change in PTSD symptoms. There was no difference between PE and VRE regarding treatment drop out before completing 10 sessions (44 and 41% for VRE and PE, respectively). Contrary to hypotheses, analyses at posttreatment did not show that VRE was superior to PE. Post hoc analyses found that PE resulted in significantly greater symptom reductions than VRE at 3- and 6-month follow-up. Both treatments significantly reduced self-reported stigma. PE is an efficacious treatment for active-duty Army soldiers with PTSD from deployments to Iraq or Afghanistan. Results extend previous evidence supporting the efficacy of PE to active-duty military personnel and raise important questions for future research on VRE.},
keywords = {MedVR, UARC},
pubstate = {published},
tppubtype = {article}
}
Rizzo, AA; Lucas, G; Gratch, J; Stratou, G; Morency, L-P; Shilling, R; Hartholt, A; Scherer, S
Clinical interviewing by a virtual human agent with automatic behavior analysis Proceedings Article
In: Proceedings of The 2016 Proceedings of the International Conference on Disability, Virtual Reality and Associated Technologies, pp. 57–64, ICDVRAT and the University of Reading, Los Angeles, CA, 2016, ISBN: 978-0-7049-1547-3.
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@inproceedings{rizzo_clinical_2016,
title = {Clinical interviewing by a virtual human agent with automatic behavior analysis},
author = {Albert A. Rizzo and Gale Lucas and Jonathan Gratch and Giota Stratou and Louis-Philippe Morency and Russ Shilling and Arno Hartholt and Stefan Scherer},
url = {http://centaur.reading.ac.uk/66645/8/ICDVRAT2016_Full_Proceedings_11th%20_Conf.pdf},
isbn = {978-0-7049-1547-3},
year = {2016},
date = {2016-09-01},
booktitle = {Proceedings of The 2016 Proceedings of the International Conference on Disability, Virtual Reality and Associated Technologies},
pages = {57--64},
publisher = {ICDVRAT and the University of Reading},
address = {Los Angeles, CA},
abstract = {SimSensei is a Virtual Human (VH) interviewing platform that uses off-the-shelf sensors (i.e., webcams, Microsoft Kinect and a microphone) to capture and interpret real-time audiovisual behavioral signals from users interacting with the VH system. The system was specifically designed for clinical interviewing and health care support by providing a face-to-face interaction between a user and a VH that can automatically react to the inferred state of the user through analysis of behavioral signals gleaned from the user’s facial expressions, body gestures and vocal parameters. Akin to how non-verbal behavioral signals have an impact on human-to-human interaction and communication, SimSensei aims to capture and infer user state from signals generated from user non-verbal communication to improve engagement between a VH and a user and to quantify user state from the data captured across a 20 minute interview. As well, previous research with SimSensei indicates that users engaging with this automated system, have less fear of evaluation and self-disclose more personal information compared to when they believe the VH agent is actually an avatar being operated by a “wizard of oz” human-in-the-loop (Lucas et al., 2014). The current study presents results from a sample of military service members (SMs) who were interviewed within the SimSensei system before and after a deployment to Afghanistan. Results indicate that SMs reveal more PTSD symptoms to the SimSensei VH agent than they self-report on the Post Deployment Health Assessment. Pre/Post deployment facial expression analysis indicated more sad expressions and fewer happy expressions at post deployment.},
keywords = {MedVR, UARC, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Freed, Michael C.; Novak, Laura A.; Killgore, William D. S.; Rauch, Sheila A. M.; Koehlmoos, Tracey P.; Ginsberg, J. P.; Krupnick, Janice L.; Rizzo, Albert "Skip"; Andrews, Anne; Engel, Charles C.
IRB and Research Regulatory Delays Within the Military Health System: Do They Really Matter? And If So, Why and for Whom? Journal Article
In: The American Journal of Bioethics, vol. 16, no. 8, pp. 30–37, 2016, ISSN: 1526-5161, 1536-0075.
Abstract | Links | BibTeX | Tags: MedVR, UARC
@article{freed_irb_2016,
title = {{IRB} and Research Regulatory Delays Within the Military Health System: Do They Really Matter? And If So, Why and for Whom?},
author = {Michael C. Freed and Laura A. Novak and William D. S. Killgore and Sheila A. M. Rauch and Tracey P. Koehlmoos and J. P. Ginsberg and Janice L. Krupnick and Albert "Skip" Rizzo and Anne Andrews and Charles C. Engel},
url = {http://www.tandfonline.com/doi/full/10.1080/15265161.2016.1187212},
doi = {10.1080/15265161.2016.1187212},
issn = {1526-5161, 1536-0075},
year = {2016},
date = {2016-08-01},
journal = {The American Journal of Bioethics},
volume = {16},
number = {8},
pages = {30--37},
abstract = {Institutional review board (IRB) delays may hinder the successful completion of federally funded research in the U.S. military. When this happens, time-sensitive, mission-relevant questions go unanswered. Research participants face unnecessary burdens and risks if delays squeeze recruitment timelines, resulting in inadequate sample sizes for definitive analyses. More broadly, military members are exposed to untested or undertested interventions, implemented by well-intentioned leaders who bypass the research process altogether. To illustrate, we offer two case examples. We posit that IRB delays often appear in the service of managing institutional risk, rather than protecting research participants. Regulators may see more risk associated with moving quickly than risk related to delay, choosing to err on the side of bureaucracy. The authors of this article, all of whom are military-funded researchers, government stakeholders, and/or human subject protection experts, offer feasible recommendations to improve the IRB system and, ultimately, research within military, veteran, and civilian populations.},
keywords = {MedVR, UARC},
pubstate = {published},
tppubtype = {article}
}
Washburn, Micki; Bordnick, Patrick; Rizzo, Albert “Skip”
A pilot feasibility study of virtual patient simulation to enhance social work students’ brief mental health assessment skills Journal Article
In: Social Work in Health Care, pp. 1–19, 2016, ISSN: 0098-1389, 1541-034X.
Abstract | Links | BibTeX | Tags: MedVR, UARC
@article{washburn_pilot_2016,
title = {A pilot feasibility study of virtual patient simulation to enhance social work students’ brief mental health assessment skills},
author = {Micki Washburn and Patrick Bordnick and Albert "Skip" Rizzo},
url = {https://www.tandfonline.com/doi/full/10.1080/00981389.2016.1210715},
doi = {10.1080/00981389.2016.1210715},
issn = {0098-1389, 1541-034X},
year = {2016},
date = {2016-08-01},
journal = {Social Work in Health Care},
pages = {1--19},
abstract = {This study presents preliminary feasibility and acceptability data on the use of virtual patient (VP) simulations to develop brief assessment skills within an interdisciplinary care setting. Results support the acceptability of technology-enhanced simulations and offer preliminary evidence for an association between engagement in VP practice simulations and improvements in diagnostic accuracy and clinical interviewing skills. Recommendations and next steps for research on technology-enhanced simulations within social work are discussed.},
keywords = {MedVR, UARC},
pubstate = {published},
tppubtype = {article}
}
Norrholm, Seth Davin; Jovanovic, Tanja; Gerardi, Maryrose; Breazeale, Kathryn G.; Price, Matthew; Davis, Michael; Duncan, Erica; Ressler, Kerry J.; Bradley, Bekh; Rizzo, Albert; Tuerk, Peter W.; Rothbaum, Barbara O.
Baseline psychophysiological and cortisol reactivity as a predictor of PTSD treatment outcome in virtual reality exposure therapy Journal Article
In: Behaviour Research and Therapy, vol. 82, pp. 28–37, 2016, ISSN: 00057967.
Abstract | Links | BibTeX | Tags: MedVR
@article{norrholm_baseline_2016,
title = {Baseline psychophysiological and cortisol reactivity as a predictor of {PTSD} treatment outcome in virtual reality exposure therapy},
author = {Seth Davin Norrholm and Tanja Jovanovic and Maryrose Gerardi and Kathryn G. Breazeale and Matthew Price and Michael Davis and Erica Duncan and Kerry J. Ressler and Bekh Bradley and Albert Rizzo and Peter W. Tuerk and Barbara O. Rothbaum},
url = {http://linkinghub.elsevier.com/retrieve/pii/S0005796716300663},
doi = {10.1016/j.brat.2016.05.002},
issn = {0005-7967},
year = {2016},
date = {2016-07-01},
journal = {Behaviour Research and Therapy},
volume = {82},
pages = {28--37},
abstract = {Baseline cue-dependent physiological reactivity may serve as an objective measure of posttraumatic stress disorder (PTSD) symptoms. Additionally, prior animal model and psychological studies would suggest that subjects with greatest symptoms at baseline may have the greatest violation of expectancy to danger when undergoing exposure based psychotherapy; thus treatment approaches which enhanced the learning under these conditions would be optimal for those with maximal baseline cue-dependent reactivity. However methods to study this hypothesis objectively are lacking. Virtual reality (VR) methodologies have been successfully employed as an enhanced form of imaginal prolonged exposure therapy for the treatment of PTSD. Our goal was to examine the predictive nature of initial psychophysiological (e.g., startle, skin conductance, heart rate) and stress hormone responses (e.g., cortisol) during presentation of VR-based combat-related stimuli on PTSD treatment outcome. Combat veterans with PTSD underwent 6 weeks of VR exposure therapy combined with either D-cycloserine (DCS), alprazolam (ALP), or placebo (PBO). In the DCS group, startle response to VR scenes prior to initiation of treatment accounted for 76% of the variance in CAPS change scores, p $<$ 0.001, in that higher responses predicted greater changes in symptom severity over time. Additionally, baseline cortisol reactivity was inversely associated with treatment response in the ALP group.},
keywords = {MedVR},
pubstate = {published},
tppubtype = {article}
}
Kang, Sin-Hwa; Phan, Thai; Bolas, Mark; Krum, David M.
User Perceptions of a Virtual Human Over Mobile Video Chat Interactions Book Section
In: Human-Computer Interaction. Novel User Experiences, vol. 9733, pp. 107–118, Springer International Publishing, Cham, Switzerland, 2016, ISBN: 978-3-319-39512-8 978-3-319-39513-5.
Abstract | Links | BibTeX | Tags: MedVR, MxR, UARC
@incollection{kang_user_2016,
title = {User Perceptions of a Virtual Human Over Mobile Video Chat Interactions},
author = {Sin-Hwa Kang and Thai Phan and Mark Bolas and David M. Krum},
url = {https://link.springer.com/chapter/10.1007/978-3-319-39513-5_10},
doi = {10.1007/978-3-319-39513-5_10},
isbn = {978-3-319-39512-8, 978-3-319-39513-5},
year = {2016},
date = {2016-06-01},
booktitle = {Human-Computer Interaction. Novel User Experiences},
series = {Lecture Notes in Computer Science},
volume = {9733},
pages = {107--118},
publisher = {Springer International Publishing},
address = {Cham, Switzerland},
abstract = {We believe that virtual humans, presented over video chat services, such as Skype, and delivered using smartphones, can be an effective way to deliver innovative applications where social interactions are important, such as counseling and coaching. To explore this subject, we have built a hardware and software apparatus that allows virtual humans to initiate, receive, and interact over video calls using Skype or any similar service. With this platform, we conducted two experiments to investigate the applications and characteristics of virtual humans that interact over mobile video. In Experiment 1, we investigated user reactions to the physical realism of the background scene in which a virtual human was displayed. In Experiment 2, we examined how virtual characters can establish and maintain longer term relationships with users, using ideas from Social Exchange Theory to strengthen bonds between interactants. Experiment 2 involved repeated interactions with a virtual human over a period of time. Both studies used counseling-style interactions with users. The results demonstrated that males were more attracted socially to a virtual human that was presented over a realistic background than a featureless background while females were more socially attracted to a virtual human with a less realistic featureless background. The results further revealed that users felt the virtual human was a compassionate partner when they interacted with the virtual human over multiple calls, rather than just a single call.},
keywords = {MedVR, MxR, UARC},
pubstate = {published},
tppubtype = {incollection}
}
Nolin, Pierre; Stipanicic, Annie; Henry, Mylène; Lachapelle, Yves; Lussier-Desrochers, Dany; Rizzo, Albert “Skip”; Allain, Philippe
ClinicaVR: Classroom-CPT: A virtual reality tool for assessing attention and inhibition in children and adolescents Journal Article
In: Computers in Human Behavior, vol. 59, pp. 327–333, 2016, ISSN: 07475632.
Abstract | Links | BibTeX | Tags: MedVR
@article{nolin_clinicavr_2016,
title = {{ClinicaVR}: {Classroom-CPT}: A virtual reality tool for assessing attention and inhibition in children and adolescents},
author = {Pierre Nolin and Annie Stipanicic and Mylène Henry and Yves Lachapelle and Dany Lussier-Desrochers and Albert "Skip" Rizzo and Philippe Allain},
url = {http://linkinghub.elsevier.com/retrieve/pii/S0747563216300759},
doi = {10.1016/j.chb.2016.02.023},
issn = {0747-5632},
year = {2016},
date = {2016-06-01},
journal = {Computers in Human Behavior},
volume = {59},
pages = {327--333},
abstract = {Having garnered interest both in clinic and research areas, the Virtual Classroom (Rizzo et al., 2000) assesses children's attention in a virtual context. The Digital MediaWorks team (www.dmw.ca) has evolved the original basic classroom concept over a number of iterations to form the ClinicaVR Suite containing the Classroom-CPT as one of its components. The present study has three aims: investigate certain validity and reliability aspects of the tool; examine the relationship between performance in the virtual test and the attendant sense of presence and cybersickness experienced by participants; assess potential effects of gender and age on performance in the test. The study was conducted with 102 children and adolescents from Grade 2 to Grade 10. All participants were enrolled in a regular school program. Results support both concurrent and construct validity as well as temporal stability of ClinicaVR: Classroom-Continuous Performance Test (CPT). Gender exerted no effect on performance, while age did. The test did not cause much cybersickness. We recommend ClinicaVR: Classroom-CPT as an assessment tool for selective and sustained attention, and inhibition, in clinic and research domains.},
keywords = {MedVR},
pubstate = {published},
tppubtype = {article}
}
Mühlberger, Andreas; Jekel, K.; Probst, Thomas; Schecklmann, Martin; Conzelmann, A.; Andreatta, M.; Rizzo, A. A.; Pauli, P.; Romanos, M.
The Influence of Methylphenidate on Hyperactivity and Attention Deficits in Children With ADHD: A Virtual Classroom Test Journal Article
In: Journal of attention disorders, 2016.
Abstract | Links | BibTeX | Tags: MedVR
@article{muhlberger_influence_2016,
title = {The Influence of Methylphenidate on Hyperactivity and Attention Deficits in Children With {ADHD}: A Virtual Classroom Test},
author = {Andreas Mühlberger and K. Jekel and Thomas Probst and Martin Schecklmann and A. Conzelmann and M. Andreatta and A. A. Rizzo and P. Pauli and M. Romanos},
url = {http://journals.sagepub.com/doi/abs/10.1177/1087054716647480},
doi = {10.1177/1087054716647480},
year = {2016},
date = {2016-05-01},
journal = {Journal of Attention Disorders},
abstract = {This study compares the performance in a continuous performance test within a virtual reality classroom (CPT-VRC) between medicated children with ADHD, unmedicated children with ADHD, and healthy children. Method:N = 94 children with ADHD (n = 26 of them received methylphenidate and n = 68 were unmedicated) and n = 34 healthy children performed the CPT-VRC. Omission errors, reaction time/variability, commission errors, and body movements were assessed. Furthermore, ADHD questionnaires were administered and compared with the CPT-VRC measures. Results: The unmedicated ADHD group exhibited more omission errors and showed slower reaction times than the healthy group. Reaction time variability was higher in the unmedicated ADHD group compared with both the healthy and the medicated ADHD group. Omission errors and reaction time variability were associated with inattentiveness ratings of experimenters. Head movements were correlated with hyperactivity ratings of parents and experimenters. Conclusion: Virtual reality is a promising technology to assess ADHD symptoms in an ecologically valid environment.},
keywords = {MedVR},
pubstate = {published},
tppubtype = {article}
}
TALBOT, Thomas B.; KALISCH, Nicolai; CHRISTOFFERSEN, Kelly; LUCAS, Gale; FORBELL, Eric
Natural Language Understanding Performance & Use Considerations in Virtual Medical Encounters. Journal Article
In: Medicine Meets Virtual Reality 22: NextMed/MMVR22, vol. 220, pp. 407–413, 2016.
Abstract | Links | BibTeX | Tags: MedVR, UARC
@article{talbot_natural_2016,
title = {Natural Language Understanding Performance \& Use Considerations in Virtual Medical Encounters},
author = {Thomas B. Talbot and Nicolai Kalisch and Kelly Christoffersen and Gale Lucas and Eric Forbell},
url = {http://books.google.com/books?hl=en&lr=&id=sLgtDAAAQBAJ&oi=fnd&pg=PA407&dq=%22through+regular+web+browsers+and+is+capable+of+multiple+types+of%22+%22practice+targeting+diagnostic+interviews.+A+natural+language+interview%22+%22narrative+statement+based+upon+dialog+context.+The+dialog+manager%27s%22+&ots=Ej8L8hxLlb&sig=GMnqEb5n7CB9x1lWE4gfe5_4n8o},
doi = {10.3233/978-1-61499-625-5-407},
year = {2016},
date = {2016-04-01},
journal = {Medicine Meets Virtual Reality 22: NextMed/MMVR22},
volume = {220},
pages = {407--413},
abstract = {A virtual standardized patient (VSP) prototype was tested for natural language understanding (NLU) performance. The conversational VSP was evaluated in a controlled 61 subject study over four repetitions of a patient case. The prototype achieved more than 92% appropriate response rate from naïve users on their first attempt and results were stable by their fourth case repetition. This level of performance exceeds prior efforts and is at a level comparable of accuracy as seen in human conversational patient training, with caveats. This level of performance was possible due to the use of a unified medical taxonomy underpinning that allows virtual patient language training to be applied to all cases in our system as opposed to benefiting a single patient case.},
keywords = {MedVR, UARC},
pubstate = {published},
tppubtype = {article}
}
RIZZO, Albert; LUCAS, Gale; GRATCH, Jonathan; STRATOU, Giota; MORENCY, Louis-Philippe; CHAVEZ, Kenneth; SHILLING, Russ; SCHERER, Stefan
Automatic Behavior Analysis During a Clinical Interview with a Virtual Human. Journal Article
In: Medicine Meets Virtual Reality 22: NextMed/MMVR22, vol. 220, pp. 316–322, 2016.
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@article{rizzo_automatic_2016,
title = {Automatic Behavior Analysis During a Clinical Interview with a Virtual Human},
author = {Albert Rizzo and Gale Lucas and Jonathan Gratch and Giota Stratou and Louis-Philippe Morency and Kenneth Chavez and Russ Shilling and Stefan Scherer},
url = {http://books.google.com/books?hl=en&lr=&id=sLgtDAAAQBAJ&oi=fnd&pg=PA316&dq=%22captured+across+a+20+minute+interview.+Results+from+of+sample+of+service%22+%22technology+for+clinical+purposes.+Recent+shifts+in+the+social+and%22+%22needed+to+create+VH+systems+is+now+driving+application+development+across%22+&ots=Ej8M4iuPfb&sig=Ad6Z3DPSwN3qA2gMDKWPe1YTPhg},
year = {2016},
date = {2016-04-01},
journal = {Medicine Meets Virtual Reality 22: NextMed/MMVR22},
volume = {220},
pages = {316--322},
abstract = {SimSensei is a Virtual Human (VH) interviewing platform that uses off-the-shelf sensors (i.e., webcams, Microsoft Kinect and a microphone) to capture and interpret real-time audiovisual behavioral signals from users interacting with the VH system. The system was specifically designed for clinical interviewing and health care support by providing a face-to-face interaction between a user and a VH that can automatically react to the inferred state of the user through analysis of behavioral signals gleaned from the user’s facial expressions, body gestures and vocal parameters. Akin to how non-verbal behavioral signals have an impact on human-to-human interaction and communication, SimSensei aims to capture and infer user state from signals generated from user non-verbal communication to improve engagement between a VH and a user and to quantify user state from the data captured across a 20 minute interview. Results from a sample of service members (SMs) who were interviewed before and after a deployment to Afghanistan indicate that SMs reveal more PTSD symptoms to the VH than they report on the Post Deployment Health Assessment. Pre/Post deployment facial expression analysis indicated more sad expressions and fewer happy expressions at post deployment.},
keywords = {MedVR, UARC, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
Krum, David M.; Kang, Sin-Hwa; Phan, Thai; Dukes, Lauren Cairco; Bolas, Mark
Head Mounted Projection for Enhanced Gaze in Social Interactions Proceedings Article
In: 2016 IEEE Virtual Reality (VR), pp. 209–210, IEEE, Greenville, SC, 2016.
Abstract | Links | BibTeX | Tags: MedVR, MxR, UARC
@inproceedings{krum_head_2016,
title = {Head Mounted Projection for Enhanced Gaze in Social Interactions},
author = {David M. Krum and Sin-Hwa Kang and Thai Phan and Lauren Cairco Dukes and Mark Bolas},
url = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=7504727},
doi = {10.1109/VR.2016.7504727},
year = {2016},
date = {2016-03-01},
booktitle = {2016 IEEE Virtual Reality (VR)},
pages = {209--210},
publisher = {IEEE},
address = {Greenville, SC},
abstract = {Projected displays can present life-sized imagery of a virtual human character that can be seen by multiple observers. However, typical projected displays can only render that virtual human from a single viewpoint, regardless of whether head tracking is employed. This results in the virtual human being rendered from an incorrect perspective for most individuals. This could cause perceptual miscues, such as the “Mona Lisa” effect, causing the virtual human to appear as if it is simultaneously gazing and pointing at all observers regardless of their location. This may be detrimental to training scenarios in which all trainees must accurately assess where the virtual human is looking or pointing a weapon. We discuss our investigations into the presentation of eye gaze using REFLCT, a previously introduced head mounted projective display. REFLCT uses head tracked, head mounted projectors and retroreflective screens to present personalized, perspective correct imagery to multiple users without the occlusion of a traditional head mounted display. We examined how head mounted projection for enhanced presentation of eye gaze might facilitate or otherwise affect social interactions during a multi-person guessing game of “Twenty Questions.”},
keywords = {MedVR, MxR, UARC},
pubstate = {published},
tppubtype = {inproceedings}
}
Cukor, Judith; Gerardi, Maryrose; Alley, Stephanie; Reist, Christopher; Roy, Michael; Rothbaum, Barbara O.; Difede, JoAnn; Rizzo, Albert
Virtual Reality Exposure Therapy for Combat-Related PTSD Book Section
In: Posttraumatic Stress Disorder and Related Diseases in Combat Veterans, pp. 69–83, Springer International Publishing, Cham, Switzerland, 2016, ISBN: 978-3-319-22984-3 978-3-319-22985-0.
Abstract | Links | BibTeX | Tags: MedVR
@incollection{cukor_virtual_2016,
title = {Virtual Reality Exposure Therapy for Combat-Related {PTSD}},
author = {Judith Cukor and Maryrose Gerardi and Stephanie Alley and Christopher Reist and Michael Roy and Barbara O. Rothbaum and JoAnn Difede and Albert Rizzo},
url = {http://link.springer.com/10.1007/978-3-319-22985-0_7},
isbn = {978-3-319-22984-3, 978-3-319-22985-0},
year = {2016},
date = {2016-01-01},
booktitle = {Posttraumatic Stress Disorder and Related Diseases in Combat Veterans},
pages = {69--83},
publisher = {Springer International Publishing},
address = {Cham, Switzerland},
abstract = {War is perhaps one of the most challenging situations that a human being can experience. The physical, emotional, cognitive, and psychological demands of a combat environment place enormous stress on even the best-prepared military personnel. Numerous reports indicate that the incidence of posttraumatic stress disorder (PTSD) in returning Operation Enduring Freedom/Operation Iraqi Freedom (OEF/OIF) military personnel is significant. This has served to motivate research on how to better develop and disseminate evidence-based treatments for PTSD that leverage the unique features available with virtual reality (VR) technology. VR-delivered exposure therapy for PTSD is currently being used to treat combatand terrorist attack-related PTSD with initial reports of positive outcomes. This chapter presents a brief overview and rationale for the use of VR exposure for combat-related PTSD and describes the Virtual Iraq/Afghanistan exposure therapy system. This includes a short review of the previous literature, a description of the system components and the treatment protocol, and a case presentation. VR offers an alternative format for delivering exposure-based therapies for PTSD that may appeal to certain service members and veterans who grew up “digital” and who might be inclined to seek treatment in this fashion.},
keywords = {MedVR},
pubstate = {published},
tppubtype = {incollection}
}
Scherer, Stefan; Lucas, Gale M.; Gratch, Jonathan; Rizzo, Albert Skip; Morency, Louis-Philippe
Self-reported symptoms of depression and PTSD are associated with reduced vowel space in screening interviews Journal Article
In: IEEE Transactions on Affective Computing, vol. 7, no. 1, pp. 59–73, 2016, ISSN: 1949-3045.
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@article{scherer_self-reported_2016,
title = {Self-reported symptoms of depression and {PTSD} are associated with reduced vowel space in screening interviews},
author = {Stefan Scherer and Gale M. Lucas and Jonathan Gratch and Albert Skip Rizzo and Louis-Philippe Morency},
url = {http://ieeexplore.ieee.org/document/7117386/?arnumber=7117386},
doi = {10.1109/TAFFC.2015.2440264},
issn = {1949-3045},
year = {2016},
date = {2016-01-01},
journal = {IEEE Transactions on Affective Computing},
volume = {7},
number = {1},
pages = {59--73},
abstract = {Reduced frequency range in vowel production is a well documented speech characteristic of individuals with psychological and neurological disorders. Affective disorders such as depression and post-traumatic stress disorder (PTSD) are known to influence motor control and in particular speech production. The assessment and documentation of reduced vowel space and reduced expressivity often either rely on subjective assessments or on analysis of speech under constrained laboratory conditions (e.g., sustained vowel production, reading tasks). These constraints render the analysis of such measures expensive and impractical. Within this work, we investigate an automatic unsupervised machine learning based approach to assess a speaker’s vowel space. Our experiments are based on recordings of 253 individuals. Symptoms of depression and PTSD are assessed using standard self-assessment questionnaires and their cut-off scores. The experiments show a significantly reduced vowel space in subjects that scored positively on the questionnaires. We show the measure’s statistical robustness against varying demographics of individuals and articulation rate. The reduced vowel space for subjects with symptoms of depression can be explained by the common condition of psychomotor retardation influencing articulation and motor control. These findings could potentially support treatment of affective disorders, like depression and PTSD in the future.},
keywords = {MedVR, UARC, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
Venek, Verena; Scherer, Stefan; Morency, Louis-Philippe; Rizzo, Albert; Pestian, John
Adolescent Suicidal Risk Assessment in Clinician-Patient Interaction Journal Article
In: IEEE Transactions on Affective Computing, vol. PP, no. 99, 2016, ISSN: 1949-3045.
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@article{venek_adolescent_2016,
  title     = {Adolescent Suicidal Risk Assessment in Clinician-Patient Interaction},
  author    = {Verena Venek and Stefan Scherer and Louis-Philippe Morency and Albert Rizzo and John Pestian},
  url       = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=7384418},
  doi       = {10.1109/TAFFC.2016.2518665},
  issn      = {1949-3045},
  year      = {2016},
  date      = {2016-01-01},
  journal   = {IEEE Transactions on Affective Computing},
  volume    = {PP},
  number    = {99},
  abstract  = {Youth suicide is a major public health problem. It is the third leading cause of death in the United States for ages 13 through 18. Many adolescents that face suicidal thoughts or make a suicide plan never seek professional care or help. Within this work, we evaluate both verbal and nonverbal responses to a five-item ubiquitous questionnaire to identify and assess suicidal risk of adolescents. We utilize a machine learning approach to identify suicidal from non-suicidal speech as well as characterize adolescents that repeatedly attempted suicide in the past. Our findings investigate both verbal and nonverbal behavior information of the face-to-face clinician-patient interaction. We investigate 60 audio-recorded dyadic clinician-patient interviews of 30 suicidal (13 repeaters and 17 non-repeaters) and 30 non-suicidal adolescents. The interaction between clinician and adolescents is statistically analyzed to reveal differences between suicidal vs. non-suicidal adolescents and to investigate suicidal repeaters’ behaviors in comparison to suicidal non-repeaters. By using a hierarchical classifier we were able to show that the verbal responses to the ubiquitous questions sections of the interviews were useful to discriminate suicidal and non-suicidal patients. However, to additionally classify suicidal repeaters and suicidal non-repeaters more information especially nonverbal information is required.},
  keywords  = {MedVR, UARC, Virtual Humans},
  pubstate  = {published},
  tppubtype = {article}
}
Rizzo, Albert; Talbot, Thomas
Virtual Reality Standardized Patients for Clinical Training Book Section
In: The Digital Patient, pp. 255–272, John Wiley & Sons, Inc, Hoboken, NJ, 2016, ISBN: 978-1-118-95278-8 978-1-118-95275-7.
Abstract | Links | BibTeX | Tags: MedVR
@incollection{rizzo_virtual_2016,
  title     = {Virtual Reality Standardized Patients for Clinical Training},
  author    = {Albert Rizzo and Thomas Talbot},
  url       = {http://doi.wiley.com/10.1002/9781118952788.ch18},
  isbn      = {978-1-118-95278-8 978-1-118-95275-7},
  year      = {2016},
  date      = {2016-01-01},
  booktitle = {The Digital Patient},
  pages     = {255--272},
  publisher = {John Wiley \& Sons, Inc},
  address   = {Hoboken, NJ},
  abstract  = {There are several quite distinct educational approaches that are all called a virtual patient. It includes case presentations, interactive patient scenarios, virtual patient games, human standardized patients (HSPs), high-fidelity software simulations, high-fidelity manikins, and virtual human (VH) conversational agents. VH conversations are possible that include an avatar that responds to pre-selected choices; such an interview is called a structured encounter. Most VSPs attempted to date have been on traditional computers. With the increased prevalence of mobile devices, it is logical to consider the migration of VSP technology to phones and tablets. Future distant recognition (DSR) systems will require a high level of individual speaker discrimination and will likely adopt microphone array-based acoustic beam forming technology. Future success may no longer be rate-limited by the pace of technology, but by the creativity and innovation of educators who will create compelling VSP experiences and curricula.},
  keywords  = {MedVR},
  pubstate  = {published},
  tppubtype = {incollection}
}
2015
Pynadath, David V.; Wang, Ning; Merchant, Chirag
Toward Acquiring a Human Behavior Model of Competition vs. Cooperation Proceedings Article
In: Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2015, National Training and Simulation Association, Orlando, Florida, 2015.
Abstract | Links | BibTeX | Tags: MedVR, Social Simulation
@inproceedings{pynadath_toward_2015,
  author    = {David V. Pynadath and Ning Wang and Chirag Merchant},
  title     = {Toward Acquiring a Human Behavior Model of Competition vs. Cooperation},
  booktitle = {Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2015},
  publisher = {National Training and Simulation Association},
  address   = {Orlando, Florida},
  year      = {2015},
  date      = {2015-12-01},
  url       = {http://www.iitsecdocs.com/search},
  abstract  = {One of the challenges in modeling human behavior is accurately capturing the conditions under which people will behave selfishly or selflessly. Researchers have been unable to craft purely cooperative (or competitive) scenarios without significant numbers of subjects displaying unintended selfish (or selfless) behavior (e.g., Rapoport & Chammah, 1965). In this work, rather than try to further isolate competitive vs. cooperative behavior, we instead construct an experimental setting that deliberately includes both, in a way that fits within an operational simulation model. Using PsychSim, a multiagent social simulation framework with both Theory of Mind and decision theory, we have implemented an online resource allocation game called “Team of Rivals”, where four players seek to defeat a common enemy. The players have individual pools of resources which they can allocate toward that common goal. In addition to their progress toward this common goal, the players also receive individual feedback, in terms of the number of resources they own and have won from the enemy. By giving the players both an explicit cooperative goal and implicit feedback on potential competitive goals, we give them room to behave anywhere on the spectrum between these two extremes. Furthermore, by moving away from the more common two-player laboratory settings (e.g., Prisoner’s Dilemma), we can observe differential behavior across the richer space of possible interpersonal relationships. We discuss the design of the game that allows us to observe and analyze these relationships from human behavior data acquired through this game. We then describe decision-theoretic agents that can simulate hypothesized variations on human behavior. Finally, we present results of a preliminary playtest of the testbed and discuss the gathered data.},
  keywords  = {MedVR, Social Simulation},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Kang, Sin-Hwa; Krum, David M.; Phan, Thai; Bolas, Mark
"Hi, It's Me Again!": Virtual Coaches over Mobile Video Proceedings Article
In: Proceedings of the 3rd International Conference on Human-Agent Interaction, pp. 183–186, ACM, Daegu, Korea, 2015, ISBN: 978-1-4503-3527-0.
Abstract | Links | BibTeX | Tags: MedVR, MxR
@inproceedings{kang_hi_2015,
  title     = {``Hi, It's Me Again!'': Virtual Coaches over Mobile Video},
  author    = {Sin-Hwa Kang and David M. Krum and Thai Phan and Mark Bolas},
  url       = {http://dl.acm.org/citation.cfm?id=2814970},
  isbn      = {978-1-4503-3527-0},
  year      = {2015},
  date      = {2015-10-01},
  booktitle = {Proceedings of the 3rd International Conference on Human-Agent Interaction},
  pages     = {183--186},
  publisher = {ACM},
  address   = {Daegu, Korea},
  abstract  = {We believe that virtual humans presented over video chat services, such as Skype via smartphones, can be an effective way to deliver innovative applications where social interactions are important, such as counseling and coaching. We hypothesize that the context of a smartphone communication channel, i.e. how a virtual human is presented within a smartphone app, and indeed, the nature of that app, can profoundly affect how a real human perceives the virtual human. We have built an apparatus that allows virtual humans to initiate, receive, and interact over video calls using Skype or any similar service. With this platform, we are examining effective designs and social implications of virtual humans that interact over mobile video. The current study examines a relationship involving repeated counseling-style interactions with a virtual human, leveraging the virtual human’s ability to call and interact with a real human on multiple occasions over a period of time. The results and implications of this preliminary study suggest that repeated interactions may improve perceived social characteristics of the virtual human.},
  keywords  = {MedVR, MxR},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Kang, Sin-Hwa; Feng, Andrew W.; Leuski, Anton; Casas, Dan; Shapiro, Ari
The Effect of An Animated Virtual Character on Mobile Chat Interactions Proceedings Article
In: Proceedings of the 3rd International Conference on Human-Agent Interaction, pp. 105–112, ACM, Daegu, Korea, 2015, ISBN: 978-1-4503-3527-0.
Abstract | Links | BibTeX | Tags: MedVR, Virtual Humans
@inproceedings{kang_effect_2015,
  title     = {The Effect of An Animated Virtual Character on Mobile Chat Interactions},
  author    = {Sin-Hwa Kang and Andrew W. Feng and Anton Leuski and Dan Casas and Ari Shapiro},
  url       = {http://dl.acm.org/citation.cfm?id=2814957},
  isbn      = {978-1-4503-3527-0},
  year      = {2015},
  date      = {2015-10-01},
  booktitle = {Proceedings of the 3rd International Conference on Human-Agent Interaction},
  pages     = {105--112},
  publisher = {ACM},
  address   = {Daegu, Korea},
  abstract  = {This study explores presentation techniques for a 3D animated chat-based virtual human that communicates engagingly with users. Interactions with the virtual human occur via a smartphone outside of the lab in natural settings. Our work compares the responses of users who interact with no image or a static image of a virtual character as opposed to the animated visage of a virtual human capable of displaying appropriate nonverbal behavior. We further investigate users’ responses to the animated character’s gaze aversion which displayed the character’s act of looking away from users and was presented as a listening behavior. The findings of our study demonstrate that people tend to engage in conversation more by talking for a longer amount of time when they interact with a 3D animated virtual human that averts its gaze, compared to an animated virtual human that does not avert its gaze, a static image of a virtual character, or an audio-only interface.},
  keywords  = {MedVR, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Rizzo, Albert "Skip"; Shilling, Russell; Forbell, Eric; Scherer, Stefan; Gratch, Jonathan; Morency, Louis-Philippe
Autonomous Virtual Human Agents for Healthcare Information Support and Clinical Interviewing Book Section
In: Artificial Intelligence in Behavioral and Mental Health Care, pp. 53–79, Elsevier, Inc., Philadelphia, PA, 2015, ISBN: 978-0-12-420248-1.
Abstract | Links | BibTeX | Tags: MedVR, Virtual Humans
@incollection{rizzo_autonomous_2015,
  title     = {Autonomous Virtual Human Agents for Healthcare Information Support and Clinical Interviewing},
  author    = {Albert "Skip" Rizzo and Russell Shilling and Eric Forbell and Stefan Scherer and Jonathan Gratch and Louis-Philippe Morency},
  url       = {http://www.sciencedirect.com/science/article/pii/B9780124202481000039},
  isbn      = {978-0-12-420248-1},
  year      = {2015},
  date      = {2015-10-01},
  booktitle = {Artificial Intelligence in Behavioral and Mental Health Care},
  pages     = {53--79},
  publisher = {Elsevier, Inc.},
  address   = {Philadelphia, PA},
  abstract  = {Over the last 20 years, a virtual revolution has taken place in the use of Virtual Reality simulation technology for clinical purposes. Recent shifts in the social and scientific landscape have now set the stage for the next major movement in Clinical Virtual Reality with the “birth” of intelligent virtual human (VH) agents. Seminal research and development has appeared in the creation of highly interactive, artificially intelligent and natural language capable VHs that can engage real human users in a credible fashion. VHs can now be designed to perceive and act in a virtual world, engage in face-to-face spoken dialogues, and in some cases they are capable of exhibiting human-like emotional reactions. This chapter will detail our applications in this area where a virtual human can provide private online healthcare information and support (i.e., SimCoach) and where a VH can serve the role as a clinical interviewer (i.e., SimSensei).},
  keywords  = {MedVR, Virtual Humans},
  pubstate  = {published},
  tppubtype = {incollection}
}
Stratou, Giota; Morency, Louis-Philippe; DeVault, David; Hartholt, Arno; Fast, Edward; Lhommet, Margaux; Lucas, Gale; Morbini, Fabrizio; Georgila, Kallirroi; Scherer, Stefan; Gratch, Jonathan; Stacy, Marcella; Traum, David; Rizzo, Albert
A Demonstration of the Perception System in SimSensei, a Virtual Human Application for Healthcare Interviews Proceedings Article
In: Affective Computing and Intelligent Interaction (ACII), 2015 International Conference on, pp. 787–789, IEEE, Xi'an, China, 2015.
Abstract | Links | BibTeX | Tags: MedVR, Social Simulation, Virtual Humans
@inproceedings{stratou_demonstration_2015,
  title     = {A Demonstration of the Perception System in SimSensei, a Virtual Human Application for Healthcare Interviews},
  author    = {Giota Stratou and Louis-Philippe Morency and David DeVault and Arno Hartholt and Edward Fast and Margaux Lhommet and Gale Lucas and Fabrizio Morbini and Kallirroi Georgila and Stefan Scherer and Jonathan Gratch and Marcella Stacy and David Traum and Albert Rizzo},
  url       = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=7344661},
  doi       = {10.1109/ACII.2015.7344661},
  year      = {2015},
  date      = {2015-09-01},
  booktitle = {Affective Computing and Intelligent Interaction (ACII), 2015 International Conference on},
  pages     = {787--789},
  publisher = {IEEE},
  address   = {Xi'an, China},
  abstract  = {We present the SimSensei system, a fully automatic virtual agent that conducts interviews to assess indicators of psychological distress. With this demo, we focus our attention on the perception part of the system, a multimodal framework which captures and analyzes user state behavior for both behavioral understanding and interactional purposes. We will demonstrate real-time user state sensing as a part of the SimSensei architecture and discuss how this technology enabled automatic analysis of behaviors related to psychological distress.},
  keywords  = {MedVR, Social Simulation, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Wang, Ning; Pynadath, David V.; Unnikrishnan, K. V.; Shankar, Santosh; Merchant, Chirag
Intelligent Agents for Virtual Simulation of Human-Robot Interaction Proceedings Article
In: Virtual, Augmented and Mixed Reality, pp. 228–239, Springer International Publishing, Los Angeles, CA, 2015, ISBN: 978-3-319-21066-7 978-3-319-21067-4.
Abstract | Links | BibTeX | Tags: MedVR, Social Simulation
@inproceedings{wang_intelligent_2015,
  title     = {Intelligent Agents for Virtual Simulation of Human-Robot Interaction},
  author    = {Ning Wang and David V. Pynadath and K. V. Unnikrishnan and Santosh Shankar and Chirag Merchant},
  url       = {http://ict.usc.edu/pubs/Intelligent%20Agents%20for%20Virtual%20Simulation%20of%20Human-Robot%20Interaction.pdf},
  doi       = {10.1007/978-3-319-21067-4_24},
  isbn      = {978-3-319-21066-7 978-3-319-21067-4},
  year      = {2015},
  date      = {2015-08-01},
  booktitle = {Virtual, Augmented and Mixed Reality},
  volume    = {9179},
  pages     = {228--239},
  publisher = {Springer International Publishing},
  address   = {Los Angeles, CA},
  series    = {Lecture Notes in Computer Science},
  abstract  = {To study how robots can work better with humans as a team, we have designed an agent-based online testbed that supports virtual simulation of domain-independent human-robot interaction. The simulation is implemented as an online game where humans and virtual robots work together in simulated scenarios. This testbed allows researchers to carry out human-robot interaction studies and gain better understanding of, for example, how a robot’s communication can improve human-robot team performance by fostering better trust relationships among humans and their robot teammates. In this paper, we discuss the requirements, challenges and the design of such human-robot simulation. We illustrate its operation with an example human-robot joint reconnaissance task.},
  keywords  = {MedVR, Social Simulation},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Scherer, Stefan; Lucas, Gale; Gratch, Jonathan; Rizzo, Albert; Morency, Louis-Philippe
Self-reported symptoms of depression and PTSD are associated with reduced vowel space in screening interviews Journal Article
In: IEEE Transactions on Affective Computing, no. 99, 2015, ISSN: 1949-3045 (in press; doi: 10.1109/TAFFC.2015.2440264).
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@article{scherer_self-reported_2015,
  title     = {Self-reported symptoms of depression and PTSD are associated with reduced vowel space in screening interviews},
  author    = {Stefan Scherer and Gale Lucas and Jonathan Gratch and Albert Rizzo and Louis-Philippe Morency},
  url       = {http://ict.usc.edu/pubs/Self-reported%20symptoms%20of%20depression%20and%20PTSD%20are%20associated%20with%20reduced%20vowel%20space%20in%20screening%20interviews.pdf},
  doi       = {10.1109/TAFFC.2015.2440264},
  issn      = {1949-3045},
  year      = {2015},
  date      = {2015-06-01},
  journal   = {IEEE Transactions on Affective Computing},
  number    = {99},
  abstract  = {Reduced frequency range in vowel production is a well documented speech characteristic of individuals with psychological and neurological disorders. Affective disorders such as depression and post-traumatic stress disorder (PTSD) are known to influence motor control and in particular speech production. The assessment and documentation of reduced vowel space and reduced expressivity often either rely on subjective assessments or on analysis of speech under constrained laboratory conditions (e.g. sustained vowel production, reading tasks). These constraints render the analysis of such measures expensive and impractical. Within this work, we investigate an automatic unsupervised machine learning based approach to assess a speaker’s vowel space. Our experiments are based on recordings of 253 individuals. Symptoms of depression and PTSD are assessed using standard self-assessment questionnaires and their cut-off scores. The experiments show a significantly reduced vowel space in subjects that scored positively on the questionnaires. We show the measure’s statistical robustness against varying demographics of individuals and articulation rate. The reduced vowel space for subjects with symptoms of depression can be explained by the common condition of psychomotor retardation influencing articulation and motor control. These findings could potentially support treatment of affective disorders, like depression and PTSD in the future.},
  keywords  = {MedVR, UARC, Virtual Humans},
  pubstate  = {published},
  tppubtype = {article}
}
Rizzo, Albert; Cukor, Judith; Gerardi, Maryrose; Alley, Stephanie; Reist, Chris; Roy, Mike; Rothbaum, Barbara O.; Difede, JoAnn
Virtual Reality Exposure for PTSD Due to Military Combat and Terrorist Attacks Journal Article
In: Journal of Contemporary Psychotherapy, pp. 1–10, 2015, ISSN: 0022-0116, 1573-3564.
Abstract | Links | BibTeX | Tags: DoD, MedVR
@article{rizzo_virtual_2015,
  title     = {Virtual Reality Exposure for PTSD Due to Military Combat and Terrorist Attacks},
  author    = {Albert Rizzo and Judith Cukor and Maryrose Gerardi and Stephanie Alley and Chris Reist and Mike Roy and Barbara O. Rothbaum and JoAnn Difede},
  url       = {http://ict.usc.edu/pubs/Virtual%20Reality%20Exposure%20for%20PTSD%20Due%20to%20Military%20Combat%20and%20Terrorist%20Attacks.pdf},
  doi       = {10.1007/s10879-015-9306-3},
  issn      = {0022-0116, 1573-3564},
  year      = {2015},
  date      = {2015-05-01},
  journal   = {Journal of Contemporary Psychotherapy},
  pages     = {1--10},
  abstract  = {Humans exposed to war and terrorist attacks are at risk for the development of posttraumatic stress disorder (PTSD). Numerous reports indicate that the incidence of PTSD in both returning Operation Enduring Freedom/Operation Iraqi Freedom (OEF/OIF) military personnel and survivors of the 9/11 World Trade Center (WTC) attacks is significant. These situations have served to motivate research on how to better develop and disseminate evidence-based treatments for PTSD and other related psychosocial conditions. Virtual reality (VR) delivered exposure therapy for PTSD is currently being used to treat combat and terrorist attack related PTSD with initial reports of positive outcomes. This paper presents an overview and rationale for the use of VR exposure therapy with anxiety disorders and PTSD and describes the status of two systems (Virtual Iraq/Afghanistan and Virtual World Trade Center) developed for this purpose.},
  keywords  = {DoD, MedVR},
  pubstate  = {published},
  tppubtype = {article}
}
Andreatta, Pamela; Klotz, Jessica J.; Madsen, James M.; Hurst, Charles G.; Talbot, Thomas B.
Outcomes From Two Forms of Training for First-Responder Competency in Cholinergic Crisis Management Journal Article
In: Military Medicine, vol. 180, no. 4, pp. 468–474, 2015, ISSN: 0026-4075, 1930-613X.
Abstract | Links | BibTeX | Tags: DoD, MedVR, UARC
@article{andreatta_outcomes_2015,
  title     = {Outcomes From Two Forms of Training for First-Responder Competency in Cholinergic Crisis Management},
  author    = {Pamela Andreatta and Jessica J. Klotz and James M. Madsen and Charles G. Hurst and Thomas B. Talbot},
  url       = {http://ict.usc.edu/pubs/Outcomes%20From%20Two%20Forms%20of%20Training%20for%20First-Responder%20Competency%20in%20Cholinergic%20Crisis%20Management.pdf},
  doi       = {10.7205/MILMED-D-14-00290},
  issn      = {0026-4075, 1930-613X},
  year      = {2015},
  date      = {2015-04-01},
  journal   = {Military Medicine},
  volume    = {180},
  number    = {4},
  pages     = {468--474},
  abstract  = {Military and civilian first responders must be able to recognize and effectively manage mass disaster casualties. Clinical management of injuries resulting from nerve agents provides different challenges for first responders than those of conventional weapons. We evaluated the impact of a mixed-methods training program on competency acquisition in cholinergic crisis clinical management using multimedia with either live animal or patient actor examples, and hands-on practice using SimMan3G mannequin simulators. A purposively selected sample of 204 civilian and military first responders who had not previously completed nerve agent training were assessed pre- and post-training for knowledge, performance, self-efficacy, and affective state. We conducted analysis of variance with repeated measures; statistical significance p $<$ 0.05. Both groups had significant performance improvement across all assessment dimensions: knowledge $>$ 20%, performance $>$ 50%, self-efficacy $>$ 34%, and affective state $>$ 15%. There were no significant differences between the live animal and patient actor groups. These findings could aid in the specification of training for first-responder personnel in military and civilian service. Although less comprehensive than U.S. Army Medical Research Institute of Chemical Defense courses, the training outcomes associated with this easily distributed program demonstrate its value in increasing the competency of first responders in recognizing and managing a mass casualty cholinergic event.},
  keywords  = {DoD, MedVR, UARC},
  pubstate  = {published},
  tppubtype = {article}
}
Highland, Krista B.; Costanzo, Michelle E.; Jovanovic, Tanja; Norrholm, Seth D.; Ndiongue, Rochelle B.; Reinhardt, Brian J.; Rothbaum, Barbara; Rizzo, Albert A.; Roy, Michael J.
Catecholamine responses to virtual combat: implications for post-traumatic stress and dimensions of functioning Journal Article
In: Frontiers in Psychology, vol. 6, 2015, ISSN: 1664-1078.
Abstract | Links | BibTeX | Tags: MedVR
@article{highland_catecholamine_2015,
  author    = {Krista B. Highland and Michelle E. Costanzo and Tanja Jovanovic and Seth D. Norrholm and Rochelle B. Ndiongue and Brian J. Reinhardt and Barbara Rothbaum and Albert A. Rizzo and Michael J. Roy},
  title     = {Catecholamine responses to virtual combat: implications for post-traumatic stress and dimensions of functioning},
  journal   = {Frontiers in Psychology},
  volume    = {6},
  year      = {2015},
  date      = {2015-03-01},
  doi       = {10.3389/fpsyg.2015.00256},
  issn      = {1664-1078},
  url       = {http://ict.usc.edu/pubs/Catecholamine%20responses%20to%20virtual%20combat%20-%20implications%20for%20post-traumatic%20stress%20and%20dimensions%20of%20functioning.pdf},
  abstract  = {Posttraumatic stress disorder (PTSD) symptoms can result in functional impairment among service members (SMs), even in those without a clinical diagnosis. The variability in outcomes may be related to underlying catecholamine mechanisms. Individuals with PTSD tend to have elevated basal catecholamine levels, though less is known regarding catecholamine responses to trauma-related stimuli. We assessed whether catecholamine responses to a virtual combat environment impact the relationship between PTSD symptom clusters and elements of functioning. Eighty-seven clinically healthy SMs, within 2 months after deployment to Iraq or Afghanistan, completed self-report measures, viewed virtual-reality (VR) combat sequences, and had sequential blood draws. Norepinephrine responses to VR combat exposure moderated the relationship between avoidance symptoms and scales of functioning including physical functioning, physical-role functioning, and vitality. Among those with high levels of avoidance, norepinephrine change was inversely associated with functional status, whereas a positive correlation was observed for those with low levels of avoidance. Our findings represent a novel use of a virtual environment to display combat-related stimuli to returning SMs to elucidate mind-body connections inherent in their responses. The insight gained improves our understanding of post-deployment symptoms and quality of life in SMs and may facilitate enhancements in treatment. Further research is needed to validate these findings in other populations and to define the implications for treatment effectiveness.},
  keywords  = {MedVR},
  pubstate  = {published},
  tppubtype = {article}
}
}
Talbot, Thomas
COMRADE: Methods for Adaptive Competency Management and Just-in-Time Clinical Acumen Augmentation Journal Article
In: MedSim Magazine, pp. 26–28, 2015.
Abstract | Links | BibTeX | Tags: MedVR, UARC
@article{talbot_comrade_2015,
  title     = {COMRADE: Methods for Adaptive Competency Management and Just-in-Time Clinical Acumen Augmentation},
  author    = {Thomas Talbot},
  url       = {http://ict.usc.edu/pubs/COMRADE%20-%20Methods%20for%20Adaptive%20Competency%20Management%20and%20Just-in-Time%20Clinical%20Acumen%20Augmentation.pdf},
  year      = {2015},
  date      = {2015-01-01},
  journal   = {MedSim Magazine},
  pages     = {26--28},
  abstract  = {Dr. Thomas Talbot shares ideas for enhancing the electronic medical Record to act as a didactic tool to support physician competency.},
  keywords  = {MedVR, UARC},
  pubstate  = {published},
  tppubtype = {article}
}
2014
Rizzo, Albert; Scherer, Stefan; DeVault, David; Gratch, Jonathan; Artstein, Ron; Hartholt, Arno; Lucas, Gale; Marsella, Stacy; Morbini, Fabrizio; Nazarian, Angela; Stratou, Giota; Traum, David; Wood, Rachel; Boberg, Jill; Morency, Louis-Philippe
Detection and Computational Analysis of Psychological Signals Using a Virtual Human Interviewing Agent Proceedings Article
In: Proceedings of ICDVRAT 2014, International Journal of Disability and Human Development, Gothenburg, Sweden, 2014.
Abstract | Links | BibTeX | Tags: MedVR, Social Simulation, UARC, Virtual Humans
@inproceedings{rizzo_detection_2014,
  author    = {Albert Rizzo and Stefan Scherer and David DeVault and Jonathan Gratch and Ron Artstein and Arno Hartholt and Gale Lucas and Stacy Marsella and Fabrizio Morbini and Angela Nazarian and Giota Stratou and David Traum and Rachel Wood and Jill Boberg and Louis-Philippe Morency},
  title     = {Detection and Computational Analysis of Psychological Signals Using a Virtual Human Interviewing Agent},
  booktitle = {Proceedings of ICDVRAT 2014},
  publisher = {International Journal of Disability and Human Development},
  address   = {Gothenburg, Sweden},
  year      = {2014},
  date      = {2014-12-01},
  url       = {http://ict.usc.edu/pubs/Detection%20and%20Computational%20Analysis%20of%20Psychological%20Signals%20Using%20a%20Virtual%20Human%20Interviewing%20Agent.pdf},
  abstract  = {It has long been recognized that facial expressions, body posture/gestures and vocal parameters play an important role in human communication and the implicit signalling of emotion. Recent advances in low cost computer vision and behavioral sensing technologies can now be applied to the process of making meaningful inferences as to user state when a person interacts with a computational device. Effective use of this additive information could serve to promote human interaction with virtual human (VH) agents that may enhance diagnostic assessment. This paper will focus on our current research in these areas within the DARPA-funded “Detection and Computational Analysis of Psychological Signals” project, with specific attention to the SimSensei application use case. SimSensei is a virtual human interaction platform that is able to sense and interpret real-time audiovisual behavioral signals from users interacting with the system. It is specifically designed for health care support and leverages years of virtual human research and development at USC-ICT. The platform enables an engaging face-to-face interaction where the virtual human automatically reacts to the state and inferred intent of the user through analysis of behavioral signals gleaned from facial expressions, body gestures and vocal parameters. Akin to how non-verbal behavioral signals have an impact on human to human interaction and communication, SimSensei aims to capture and infer from user non-verbal communication to improve engagement between a VH and a user. The system can also quantify and interpret sensed behavioral signals.},
  keywords  = {MedVR, Social Simulation, UARC, Virtual Humans},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Beidel, Deborah; Neer, Sandra; Bowers, Clint; Frueh, B; Rizzo, Albert
Using Virtual Reality as Part of an Intensive Treatment Program for PTSD Proceedings Article
In: Proceedings of I/ITSEC 2014, pp. 1–10, Orlando, Florida, 2014.
Abstract | Links | BibTeX | Tags: MedVR
@inproceedings{beidel_using_2014,
  title     = {Using Virtual Reality as Part of an Intensive Treatment Program for PTSD},
  author    = {Deborah Beidel and Sandra Neer and Clint Bowers and B Frueh and Albert Rizzo},
  url       = {http://ict.usc.edu/pubs/Using%20Virtual%20Reality%20as%20Part%20of%20an%20Intensive%20Treatment%20Program%20for%20PTSD.pdf},
  year      = {2014},
  date      = {2014-12-01},
  booktitle = {Proceedings of I/ITSEC 2014},
  pages     = {1--10},
  address   = {Orlando, Florida},
  abstract  = {Up to 18.5% of veterans returning from OIF/OEF are diagnosed with posttraumatic stress disorder (PTSD). In addition to symptoms of anxiety (intrusive thoughts, re-experiencing, hyperarousal, and avoidance), PTSD can result in social maladjustment, poor quality of life, and medical problems. Other emotional problems include guilt, anger, and unemployment, impulsive or violent behavior, and family discord. Many veterans seeking treatment for PTSD also seek disability compensation for debilitating occupational impairment. There are few administrative or research data to indicate veterans are recovering from PTSD. Exposure therapy, a form of behavior therapy, alleviates anxiety symptoms, but may not address the anger, depression and social impairment that accompanies this disorder. In this presentation, we will discuss an intensive treatment program, known as Trauma Management Therapy (TMT), which combines individual virtual reality (VR) assisted exposure therapy with group social and emotional rehabilitation skills training, delivered in a 3 week format. The presentation will demonstrate the VR environment (Virtual Iraq), will discuss how often/successfully various VR elements are integrated into a comprehensive treatment program, and the adaptability of the program for active duty military personnel, as well as veterans. We will discuss the format of the intensive program as well as factors such as compliance and drop-out rates, comparing these important clinical variables to more traditional outpatient treatment programs. Additionally, we will address common clinical concerns regarding the use of VR exposure therapy for individuals suffering from PTSD.},
  keywords  = {MedVR},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Andreatta, Pamela; Klotz, Jessica; Madsen, James M.; Hurst, Charles G.; Talbot, Thomas
Assessment instrument validation for critical clinical competencies - pediatricneonatal intubation and cholinergic crisis management Proceedings Article
In: Proceedings of Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2014, Orlando, FL, 2014.
Abstract | Links | BibTeX | Tags: DoD, MedVR, UARC
@inproceedings{andreatta_assessment_2014,
title = {Assessment instrument validation for critical clinical competencies - pediatricneonatal intubation and cholinergic crisis management},
author = {Pamela Andreatta and Jessica Klotz and James M. Madsen and Charles G. Hurst and Thomas Talbot},
url = {http://ict.usc.edu/pubs/Assessment%20instrument%20validation%20for%20critical%20clinical%20competencies%20-%20pediatricneonatal%20intubation%20and%20cholinergic%20crisis%20management.pdf},
year = {2014},
date = {2014-12-01},
booktitle = {Proceedings of Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2014},
address = {Orlando, FL},
abstract = {Military and civilian first-responders must be able to recognize and effectively manage casualties that necessitate immediate application of critical clinical competencies. Two examples of these critical competencies are the clinical management of injuries resulting from nerve agents and difficult intubation, especially for pediatric or neonatal patients. The opportunity to learn and practice the necessary skills for these rare, but urgent, situations is complicated by the limited ability to replicate essential situational factors that influence performance in the applied clinical environment. Simulation-based training may resolve some of these challenges, however it is imperative that evidence be captured to document the achievement of performance competencies in the training environment that transfer to applied clinical care. The purpose of this study was to establish psychometric characteristics for competency assessment instruments associated with two such critical competencies: management of cholinergic crisis and pediatric-neonatal intubation. Methods: To inform the development of assessment instruments, we conducted comprehensive task analyses across each performance domain (knowledge, performance). Expert review confirmed content validity. Construct validity was established using the instruments to differentiate between the performance abilities of practitioners with variable experience (novice through expert). Purposively selected firstresponder subjects for pediatric-neonatal intubation (N=214) and cholinergic crisis management (N=123) were stratified by level of experience performing the requisite clinical competencies. All subjects completed knowledge and performance assessments. Reliability was established using test-retest (Pearson correlation) and internal consistency (Cronbach’s alpha) for knowledge and performance assessments. 
Results: Significantly higher scores for subjects with greater levels of experience, compared to those with less experience established construct validity for each assessment instrument (p < .01). Significant correlations between test-retest outcomes indicated measurement reliability p < .01. Cronbach’s alpha for knowledge and performance scores demonstrated excellent internal consistency. Conclusions: Psychometric evidence establishes the value of assessment for identifying and remedying critical competency performance gaps.},
keywords = {DoD, MedVR, UARC},
pubstate = {published},
tppubtype = {inproceedings}
}
Klotz, Jessica; Madsen, James M.; Hurst, Charles G.; Talbot, Thomas
Training Effects for First-responder Competency in Cholinergic Crisis Management Proceedings Article
In: Proceedings of Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2014, Orlando, FL, 2014.
Abstract | Links | BibTeX | Tags: DoD, MedVR, UARC
@inproceedings{klotz_training_2014,
title = {Training Effects for First-responder Competency in Cholinergic Crisis Management},
author = {Jessica Klotz and James M. Madsen and Charles G. Hurst and Thomas Talbot},
url = {http://ict.usc.edu/pubs/Training%20Effects%20for%20First-responder%20Competency%20in%20Cholinergic%20Crisis%20Management.pdf},
year = {2014},
date = {2014-12-01},
booktitle = {Proceedings of Interservice/Industry Training, Simulation, and Education Conference (I/ITSEC) 2014},
address = {Orlando, FL},
abstract = {Military and civilian first-responders must be able to recognize and effectively manage mass disaster casualties. Clinical management of injuries resulting from nerve agents provides different challenges for first responders than those of conventional weapons. We evaluated the impact of a mixed-methods training program on competency acquisition in cholinergic crisis clinical management. Methods: We developed a multimedia and simulation-based training program based on the more comprehensive USAMRICD courses. The training program was designed to provide first-responders with the necessary abilities to recognize and manage a mass casualty cholinergic crisis event. Training included a learner controlled multimedia iPad app and hands-on instruction using SimMan3G™ mannequin simulators. We evaluated the impact of the training through a purposively selected sample of 204 civilian and military first responders who had not previously completed either of the referenced USAMRICD courses. We assessed knowledge, performance, affect, and self-efficacy measures pre- and post-training using previously validated assessment instruments. We calculated results using analysis of variance with repeated measures, and with statistical significance set at p < .05. Results: Analyses demonstrated a significant improvement (p = .000) across all domains (knowledge, performance, self-efficacy, and affect). Knowledge scores increased from 60% to 81% correct. Performance scores increased from 16% to 68% correct. Self-efficacy scores increased from 51% to 87% confidence in ability to effectively manage a cholinergic crisis event. Affect scores increased from 75% to 81% personal comfort during procedures. Conclusions: These findings could aid in the selection of instructional methodologies available to a broad community of first-responder personnel in military and civilian service. 
Although less comprehensive than the USAMRICD courses, training outcomes associated with this easily distributed instruction set demonstrated its value in increasing the competency of first responders in recognizing and managing a mass casualty cholinergic event. Retention outcomes are in process.},
keywords = {DoD, MedVR, UARC},
pubstate = {published},
tppubtype = {inproceedings}
}
Venek, Verena; Scherer, Stefan; Morency, Louis-Philippe; Rizzo, Albert; Pestian, John
ADOLESCENT SUICIDAL RISK ASSESSMENT IN CLINICIAN-PATIENT INTERACTION: A STUDY OF VERBAL AND ACOUSTIC BEHAVIORS Proceedings Article
In: Spoken Language Technology Workshop (SLT), 2014 IEEE, pp. 277–282, IEEE, South Lake Tahoe, NV, 2014, ISBN: 978-1-4799-7129-9.
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@inproceedings{venek_adolescent_2014,
title = {Adolescent Suicidal Risk Assessment in Clinician-Patient Interaction: A Study of Verbal and Acoustic Behaviors},
author = {Verena Venek and Stefan Scherer and Louis-Philippe Morency and Albert Rizzo and John Pestian},
url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=7078587},
doi = {10.1109/SLT.2014.7078587},
isbn = {978-1-4799-7129-9},
year = {2014},
date = {2014-12-01},
booktitle = {Spoken Language Technology Workshop (SLT), 2014 IEEE},
pages = {277–282},
publisher = {IEEE},
address = {South Lake Tahoe, NV},
abstract = {Suicide among adolescents is a major public health problem: it is the third leading cause of death in the US for ages 13-18. Up to now, there is no objective ways to assess the suicidal risk, i.e. whether a patient is non-suicidal, suicidal re-attempter (i.e. repeater) or suicidal non-repeater (i.e. individuals with one suicide attempt or showing signs of suicidal gestures or ideation). Therefore, features of the conversation including verbal information and nonverbal acoustic information were investigated from 60 audio-recorded interviews of 30 suicidal (13 repeaters and 17 non-repeaters) and 30 non-suicidal adolescents interviewed by a social worker. The interaction between clinician and patients was statistically analyzed to reveal differences between suicidal vs. non-suicidal adolescents and to investigate suicidal repeaters' behaviors in comparison to suicidal non-repeaters. By using a hierarchical ensemble classifier we were able to successfully discriminate non-suicidal patients, suicidal repeaters and suicidal non-repeaters.},
keywords = {MedVR, UARC, Virtual Humans},
pubstate = {published},
tppubtype = {inproceedings}
}
Safir, Marilyn P.; Wallach, Helene S.; Rizzo, Albert "Skip" (Ed.)
Future Directions in Post-Traumatic Stress Disorder Book
Springer US, Boston, MA, 2014, ISBN: 978-1-4899-7521-8 978-1-4899-7522-5.
Abstract | Links | BibTeX | Tags: MedVR
@book{safir_future_2014,
  title     = {Future Directions in Post-Traumatic Stress Disorder},
  editor    = {Marilyn P. Safir and Helene S. Wallach and Albert "Skip" Rizzo},
  url       = {http://link.springer.com/10.1007/978-1-4899-7522-5},
  isbn      = {978-1-4899-7521-8 978-1-4899-7522-5},
  year      = {2014},
  date      = {2014-11-01},
  publisher = {Springer US},
  address   = {Boston, MA},
  abstract  = {Ours is an era of increasing tension, both global and local. And not surprisingly, PTSD is recognized not only in combat veterans and active military personnel, but also disaster and assault survivors across the demographic spectrum. As current events from mass shootings to the debate over trigger warnings keep the issue in the public eye, the disorder remains a steady concern among researchers and practitioners. Future Directions in Post-Traumatic Stress Disorder presents findings and ideas with the potential to influence both our conceptualization of the condition and the techniques used to address it. A multidisciplinary panel of experts offers new analyses of risk and resilience factors, individual and group approaches to prevention, the evolving process of diagnosis, and effective treatment and delivery. Chapters on treatment allow readers to compare widely-used prolonged exposure and VR methods with innovative applications of cognitive processing therapy and interpersonal therapy. And an especially compelling contribution surveys empirically-based programs relating to what for many is the emblematic trauma of our time, the events of September 11, 2001. Included in the coverage: •Predictors of vulnerability to PTSD: neurobiological and genetic risk factors. •Early intervention: is prevention better than cure? •The functional neuroanatomy of PTSD. •The development of evidence-based treatment for PTSD. •Enhancing exposure therapy using D-Cycloserine (DCS). •PLUS: a case example as seen through five therapeutic perspectives. While millions experience trauma, relatively few develop chronic PTSD. Future Directions in Post-Traumatic Stress Disorder is a practical and proactive reference for the health and clinical psychologists, sociologists, psychiatrists, and primary care physicians dedicated to further decreasing those numbers.},
  keywords  = {MedVR},
  pubstate  = {published},
  tppubtype = {book}
}
Reger, Greg M.; Rizzo, Albert A.; Gahm, Gregory A.
Initial Development and Dissemination of Virtual Reality Exposure Therapy for Combat-Related PTSD Book Section
In: Safir, Marilyn P.; Wallach, Helene S.; Rizzo, Albert "Skip" (Ed.): Future Directions in Post-Traumatic Stress Disorder, pp. 289–302, Springer US, Boston, MA, 2014, ISBN: 978-1-4899-7521-8 978-1-4899-7522-5.
Abstract | Links | BibTeX | Tags: DoD, MedVR
@incollection{reger_initial_2014,
  title     = {Initial Development and Dissemination of Virtual Reality Exposure Therapy for Combat-Related PTSD},
  author    = {Greg M. Reger and Albert A. Rizzo and Gregory A. Gahm},
  editor    = {Marilyn P. Safir and Helene S. Wallach and Albert "Skip" Rizzo},
  url       = {http://link.springer.com/10.1007/978-1-4899-7522-5_15},
  isbn      = {978-1-4899-7521-8 978-1-4899-7522-5},
  year      = {2014},
  date      = {2014-11-01},
  booktitle = {Future Directions in Post-Traumatic Stress Disorder},
  pages     = {289–302},
  publisher = {Springer US},
  address   = {Boston, MA},
  abstract  = {Military personnel are at risk for the development of posttraumatic stress disorder. Although effective treatments are available, the need for improved treatment efficacy and less stigmatizing approaches to treatment have resulted in the evolution of virtual reality exposure therapy. This chapter reviews the development and dissemination efforts of a virtual reality system supporting exposure therapy for deployment-related posttraumatic stress disorder. Specifically, the chapter will review the work done to incorporate the feedback of military personnel into the early development of a Virtual Iraq/Afghanistan system and also reviews efforts to disseminate this treatment to military and Veteran behavioral health researchers and providers.},
  keywords  = {DoD, MedVR},
  pubstate  = {published},
  tppubtype = {incollection}
}
Scherer, Stefan; Stratou, Giota; Lucas, Gale; Mahmoud, Marwa; Boberg, Jill; Gratch, Jonathan; Rizzo, Albert (Skip); Morency, Louis-Philippe
Automatic audiovisual behavior descriptors for psychological disorder analysis Journal Article
In: Image and Vision Computing Journal, vol. 32, no. 10, pp. 648–658, 2014, ISSN: 02628856.
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@article{scherer_automatic_2014,
title = {Automatic audiovisual behavior descriptors for psychological disorder analysis},
author = {Stefan Scherer and Giota Stratou and Gale Lucas and Marwa Mahmoud and Jill Boberg and Jonathan Gratch and Albert (Skip) Rizzo and Louis-Philippe Morency},
url = {http://linkinghub.elsevier.com/retrieve/pii/S0262885614001000},
doi = {10.1016/j.imavis.2014.06.001},
issn = {0262-8856},
year = {2014},
date = {2014-10-01},
journal = {Image and Vision Computing Journal},
volume = {32},
number = {10},
pages = {648–658},
abstract = {We investigate the capabilities of automatic audiovisual nonverbal behavior descriptors to identify indicators of psychological disorders such as depression, anxiety, and post-traumatic stress disorder. Due to strong correlations between these disorders as measured with standard self-assessment questionnaires in this study, we focus our investigations in particular on a generic distress measure as identified using factor analysis. Within this work, we seek to confirm and enrich present state of the art, predominantly based on qualitative manual annotations, with automatic quantitative behavior descriptors. We propose a number of nonverbal behavior descriptors that can be automatically estimated from audiovisual signals. Such automatic behavior descriptors could be used to support healthcare providers with quantified and objective observations that could ultimately improve clinical assessment. We evaluate our work on the dataset called the Distress Assessment Interview Corpus (DAIC) which comprises dyadic interactions between a confederate interviewer and a paid participant. Our evaluation on this dataset shows correlation of our automatic behavior descriptors with the derived general distress measure. Our analysis also includes a deeper study of self-adaptor and fidgeting behaviors based on detailed annotations of where these behaviors occur.},
keywords = {MedVR, UARC, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
Yeh, Shih-Ching; Wang, Jin-Liang; Wang, Chin-Yeh; Lin, Po-Han; Chen, Gwo-Dong; Rizzo, Albert
Motion controllers for learners to manipulate and interact with 3D objects for mental rotation training Journal Article
In: British Journal of Educational Technology, vol. 45, no. 4, pp. 666–675, 2014, ISSN: 00071013.
Abstract | Links | BibTeX | Tags: MedVR
@article{yeh_motion_2014,
title = {Motion controllers for learners to manipulate and interact with 3D objects for mental rotation training: Motion controllers for mental rotation training},
author = {Shih-Ching Yeh and Jin-Liang Wang and Chin-Yeh Wang and Po-Han Lin and Gwo-Dong Chen and Albert Rizzo},
url = {http://ict.usc.edu/pubs/Motion%20controllers%20for%20learners%20to%20manipulate%20and%20interact%20with%203D%20objects%20for%20mental%20rotation%20training.pdf},
doi = {10.1111/bjet.12059},
issn = {0007-1013},
year = {2014},
date = {2014-07-01},
journal = {British Journal of Educational Technology},
volume = {45},
number = {4},
pages = {666–675},
abstract = {Mental rotation is an important spatial processing ability and an important element in intelligence tests. However, the majority of past attempts at training mental rotation have used paper-and-pencil tests or digital images. This study proposes an innovative mental rotation training approach using magnetic motion controllers to allow learners to manipulate and interact with three-dimensional (3D) objects. Stereovision allows learners to perceive the spatial geometric form of a 3D object. This approach allows learners to perceive 3D objects in space through stereovision and make mental rotation visible from each intrinsic and invisible mental rotation step using motion-interaction methods. This study examines the effects of user training performance and perceptions. The results indicate that the proposed approach can improve user mental rotation ability effectively. Learners expressed high degrees of concentration toward the mechanism that included direct control and immediate feedback. The results also suggest that female testers perceive greater degrees of playfulness toward the mechanism and improve more through training than male testers.},
keywords = {MedVR},
pubstate = {published},
tppubtype = {article}
}
Rizzo, Albert; Hartholt, Arno; Grimani, Mario; Leeds, Andrew; Liewer, Matt
Virtual Reality Exposure Therapy for Combat-Related Posttraumatic Stress Disorder Journal Article
In: IEEE Computer Society, vol. 47, Issue 7, no. 7, pp. 31–37, 2014.
Abstract | Links | BibTeX | Tags: MedVR, UARC, Virtual Humans
@article{rizzo_virtual_2014,
title = {Virtual Reality Exposure Therapy for Combat-Related Posttraumatic Stress Disorder},
author = {Albert Rizzo and Arno Hartholt and Mario Grimani and Andrew Leeds and Matt Liewer},
url = {http://ict.usc.edu/pubs/Virtual%20Reality%20Exposure%20Therapy%20for%20Treating%20Combat-Related%20PTSD.pdf},
year = {2014},
date = {2014-07-01},
journal = {IEEE Computer Society},
internal-note = {NOTE(review): journal value looks like the publisher rather than the journal title (likely the magazine "Computer") -- verify against the publication},
volume = {47},
number = {7},
pages = {31–37},
abstract = {Virtual reality (VR) technology is rapidly evolving to support prolonged exposure (PE) therapy, a proven treatment for combat-related posttraumatic stress disorder. Building on the successful 2007 Virtual Iraq/Afghanistan VRET system, a team of behavioral scientists, software engineers, and virtual artists has created Bravemind, a flexible VR system that offers significantly enhanced PE treatment possibilities. The first Web extra at http://youtu.be/EiYg-kMNMtQ is a video demonstration of an original early virtual reality exposure therapy (VRET) prototype that shows a small section of an Iraqi city with a landing helicopter (2004). The second Web extra at http://youtu.be/_cS-ynWZmeQ is a video demonstration of virtual reality exposure therapy (VRET) that simulates driving a Humvee in a rural part of Iraq, showcasing several encounters, including IED and road-side attacks (2007). The third Web extra at http://youtu.be/78QXX_F4mc8 is a video demonstration of virtual reality exposure therapy (VRET) that simulates an overview of several Iraqi city areas (2007). The fourth Web extra at http://youtu.be/_AnixslkVLU is a video demonstration of virtual reality exposure therapy (VRET) that simulates a patrol entering interior buildings in Iraq (2007). The fifth Web extra at http://youtu.be/S22aQ-DqKKU is a video demonstration of an original virtual reality exposure therapy (VRET) tablet interface that allows the clinician to change virtual reality settings and trigger encounters (2007). The sixth Web extra at http://youtu.be/C-fspuLo4vw is a video demonstration of the Bravemind virtual reality exposure therapy (VRET) prototype showing a variety of driving and dismounted scenarios with encounters in Iraq and Afghanistan (2013). The sixth Web extra at http://youtu.be/HSPDomDAigg is a video collection of Iraqi and Afghanistan virtual reality exposure therapy (VRET) scenarios within the Bravemind prototype (2013).},
keywords = {MedVR, UARC, Virtual Humans},
pubstate = {published},
tppubtype = {article}
}
Gordon, Andrew; Core, Mark; Kang, Sin-Hwa; Wang, Catherine; Wienberg, Christopher
Civilian Analogs of Army Tasks: Supporting Pedagogical Storytelling Across Domains Journal Article
In: Proceedings of the 11th International Conference of the Learning Sciences, 2014.
Abstract | Links | BibTeX | Tags: Learning Sciences, MedVR, The Narrative Group, UARC
@article{gordon_civilian_2014,
title = {Civilian Analogs of Army Tasks: Supporting Pedagogical Storytelling Across Domains},
author = {Andrew Gordon and Mark Core and Sin-Hwa Kang and Catherine Wang and Christopher Wienberg},
url = {http://ict.usc.edu/pubs/Civilian%20Analogs%20of%20Army%20Tasks%20-%20Supporting%20Pedagogical%20Storytelling%20Across%20Domains.pdf},
year = {2014},
date = {2014-06-01},
journal = {Proceedings of the 11th International Conference of the Learning Sciences},
internal-note = {NOTE(review): conference proceedings stored in the journal field of an article entry; an inproceedings entry with booktitle would be more appropriate -- confirm before changing, since tppubtype mirrors the current type},
abstract = {Storytelling is the most basic means by which people learn from the experiences of others. Advances in educational technologies offer new opportunities and experiences for learners, but risk losing the natural forms of pedagogical storytelling afforded by face-to-face teacher-student discussion. In this paper, we present a technology-supported solution to the problem of curating and algorithmically delivering relevant stories to learners in computer-based learning environments. Our approach is to mine public weblogs for textual narratives related to specific activity contexts, both inside and outside the domain of the target skillset. These stories are then linked directly to task representations in the learner model of an intelligent tutoring system, and delivered to learners along with other tutoring guidance. We demonstrate our approach to curating stories by creating collections of narratives that are analogous to tactical tasks of the U.S. Army, and evaluate the difficulty of incorporating these stories into intelligent tutoring systems.},
keywords = {Learning Sciences, MedVR, The Narrative Group, UARC},
pubstate = {published},
tppubtype = {article}
}
Krum, David M.; Kang, Sin-Hwa; Bolas, Mark
Virtual Coaches over Mobile Video Proceedings Article
In: Proceedings of International Conference on Computer Animation and Social Agents (CASA), 2014.
Abstract | Links | BibTeX | Tags: MedVR, MxR, UARC
@inproceedings{krum_virtual_2014,
title = {Virtual Coaches over Mobile Video},
author = {David M. Krum and Sin-Hwa Kang and Mark Bolas},
url = {http://ict.usc.edu/pubs/Virtual%20Coaches%20over%20Mobile%20Video.pdf},
year = {2014},
date = {2014-05-01},
booktitle = {Proceedings of International Conference on Computer Animation and Social Agents (CASA)},
abstract = {We hypothesize that the context of a smartphone, how a virtual human is presented within a smartphone app, and indeed, the nature of that app, can profoundly affect how the virtual human is perceived by a real human. We believe that virtual humans, presented over video chat services (such as Skype) and delivered using mobile phones, can be an effective way to deliver coaching applications. We propose to build a prototype system that allows virtual humans to initiate and receive Skype calls. This hardware will enable broadcast of the audio and video imagery of a character. Using this platform and a virtual human, we will conduct two user studies. The first study will examine factors involved in making a mobile video based character seem engaging and “real”. This study will examine how character appearance and the artifacts of the communication channel, such as video and audio quality, can affect rapport with a virtual human. The second study will examine ways to maintain a long-term relationship with a character, leveraging the character’s ability to call and interact with a real human over a longer period of time. These studies will help develop design guidelines for presenting virtual humans over mobile video.},
keywords = {MedVR, MxR, UARC},
pubstate = {published},
tppubtype = {inproceedings}
}
2013
Talbot, Thomas
Balancing Physiology, Anatomy and Immersion: How Much Biological Fidelity Is Necessary in a Medical Simulation? Journal Article
In: Military Medicine, vol. 178, pp. 28–36, 2013.
Abstract | Links | BibTeX | Tags: MedVR
@article{talbot_balancing_2013,
  title     = {Balancing Physiology, Anatomy and Immersion: How Much Biological Fidelity Is Necessary in a Medical Simulation?},
  author    = {Thomas Talbot},
  url       = {http://ict.usc.edu/pubs/Balancing%20Physiology,%20Anatomy%20&%20Immersion%20How%20Much%20Biological%20Fidelity%20is%20Necessary%20in%20a%20Medical%20Simulation.pdf},
  year      = {2013},
  date      = {2013-10-01},
  journal   = {Military Medicine},
  volume    = {178},
  pages     = {28–36},
  abstract  = {Physiology and anatomy can be depicted at varying levels of fidelity in a medical simulation or training encounter. Another factor in a medical simulation concerns design features intended to engage the learner through a sense of immersion. Physiology can be simulated by various means including physiology engines, complex state machines, simple state machines, kinetic models, and static readouts. Each approach has advantages in terms of complexity of development and impact on the learner. Such factors are detailed within the article. Various other biological, hardware-based, and virtual models are used in medical training with varying levels of fidelity. For many medical simulation-based educational experiences, low-fidelity approaches are often adequate if not preferable.},
  keywords  = {MedVR},
  pubstate  = {published},
  tppubtype = {article}
}
Krch, D.; Nikelshpur, O.; Lavrador, S.; Chiaravalloti, N. D.; Koenig, Sebastian; Rizzo, Albert
Pilot results from a virtual reality executive function task Proceedings Article
In: International Conference on Virtual Rehabilitation, pp. 15–21, Philadelphia, PA, 2013.
Abstract | Links | BibTeX | Tags: MedVR
@inproceedings{krch_pilot_2013,
  title     = {Pilot results from a virtual reality executive function task},
  author    = {D. Krch and O. Nikelshpur and S. Lavrador and N. D. Chiaravalloti and Sebastian Koenig and Albert Rizzo},
  url       = {http://ict.usc.edu/pubs/Pilot%20results%20from%20a%20virtual%20reality%20executive%20function%20task.pdf},
  year      = {2013},
  date      = {2013-08-01},
  booktitle = {International Conference on Virtual Rehabilitation},
  pages     = {15–21},
  address   = {Philadelphia, PA},
  abstract  = {Impairments in executive functions (EF) negatively impact the capacity for independent living, damaging personal autonomy, and diminishing quality of life. Virtual environments offer an ecologically valid way to evaluate a person’s ability to carry out tasks that depend on EFs. The purpose of this pilot study was to evaluate the feasibility of a virtual reality office task, the Assessim Office (AO), in persons with Multiple Sclerosis and Traumatic Brain Injury, to evaluate performance of patient groups relative to each other and to healthy controls on the AO, and to explore the relationship between patient’s performance on AO tasks and neuropsychological measures of EF.},
  keywords  = {MedVR},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Koenig, Sebastian; Ardanza, Aitor; Cortes, Camilo; Alessandro, De Mauro; Lange, Belinda
Introduction to Low-Cost Motion-Tracking for Virtual Rehabilitation Book Section
In: Pons, Jose L.; Torricelli, Diego (Ed.): Emerging Therapies in Neurorehabilitation, vol. 4, pp. 287–303, 2013, ISBN: 978-3-642-38555-1.
Abstract | Links | BibTeX | Tags: MedVR
@incollection{koenig_introduction_2013,
title = {Introduction to Low-Cost Motion-Tracking for Virtual Rehabilitation},
author = {Sebastian Koenig and Aitor Ardanza and Camilo Cortes and De Mauro Alessandro and Belinda Lange},
editor = {Jose L. Pons and Diego Torricelli},
url = {http://ict.usc.edu/pubs/Introduction%20to%20Low-Cost%20Motion-Tracking%20for%20Virtual%20Rehabilitation.pdf},
isbn = {978-3-642-38555-1},
year = {2013},
date = {2013-08-01},
booktitle = {Emerging Therapies in Neurorehabilitation},
volume = {4},
pages = {287–303},
series = {Biosystems & Biorobotics},
abstract = {Low-cost motion sensors have seen tremendous increase in popularity in the past few years. Accelerometers, gyroscopes or cameras can be found in most available smart phones and gaming controllers. The Apple iPhone, Nintendo Wii and the PlayStation EyeToy are just a few examples where such technology is used to provide a more natural interaction for the user. Depth-sensing cameras by companies such as Microsoft, PrimeSense and Asus can enhance the user experience even further by enabling full-body interaction. This chapter will specifically discuss the use of the Microsoft Kinect depth-sensing camera (Kinect) for rehabilitation of patients with motor disabilities. In addition, examples will be provided of how the Kinect can be used with off-the-shelf computer games or utilized in conjunction with modern game development tools such as the game engine Unity. The examples will outline concepts and required resources in order to enable the reader to use low-cost depth-sensing cameras for rehabilitation.},
keywords = {MedVR},
pubstate = {published},
tppubtype = {incollection}
}
Ortiz-Catalan, Max; Nijenhuis, Sharon; Ambrosch, Kurt; Bovend'Eerdt, Thamar; Koenig, Sebastian; Lange, Belinda
Virtual Reality Book Section
In: Pons, Jose L.; Torricelli, Diego (Ed.): Emerging Therapies in Neurorehabilitation, vol. 4, pp. 287–303, 2013, ISBN: 978-3-642-38555-1.
Abstract | Links | BibTeX | Tags: MedVR
@incollection{ortiz-catalan_virtual_2013,
title = {Virtual Reality},
author = {Max Ortiz-Catalan and Sharon Nijenhuis and Kurt Ambrosch and Thamar Bovend'Eerdt and Sebastian Koenig and Belinda Lange},
editor = {Jose L. Pons and Diego Torricelli},
url = {http://ict.usc.edu/pubs/Virtual%20Reality.pdf},
isbn = {978-3-642-38555-1},
year = {2013},
date = {2013-08-01},
booktitle = {Emerging Therapies in Neurorehabilitation},
volume = {4},
pages = {287–303},
internal-note = {NOTE(review): volume and pages are identical to the koenig_introduction_2013 chapter of the same book -- one of the two page ranges is probably an export copy error; verify against the publisher},
series = {Biosystems & Biorobotics},
abstract = {This chapter provides an overview on the use of Virtual Reality (VR) in rehabilitation with respect to recent neuroscience and physical therapy reviews of individuals with motor impairments. A wide range of technologies have been employed to provide rehabilitation supported by VR. Several studies have found evidence of the benefits of VR rehabilitation technologies. However, support for their efficacy is still limited due to the lack of generalizable results and the uncoordinated effort of many individual, heterogeneous studies that have been conducted. Although VR has clear potential as a rehabilitation tool to improve treatment outcomes, future trials need to take into account the individual perspective of each patient group and consolidate research methodologies across trials to allow for stronger conclusions across the heterogeneous field of neurorehabilitation.},
keywords = {MedVR},
pubstate = {published},
tppubtype = {incollection}
}