Publications
2024
Aris, Timothy; Ustun, Volkan; Kumar, Rajay
Training Reinforcement Learning Agents to React to an Ambush for Military Simulations Journal Article
In: FLAIRS, vol. 37, 2024, ISSN: 2334-0762.
Abstract | Links | BibTeX | Tags: Simulation, VR
@article{aris_training_2024,
title = {Training Reinforcement Learning Agents to React to an Ambush for Military Simulations},
author = {Timothy Aris and Volkan Ustun and Rajay Kumar},
url = {https://journals.flvc.org/FLAIRS/article/view/135578},
doi = {10.32473/flairs.37.1.135578},
issn = {2334-0762},
year = {2024},
date = {2024-05-01},
urldate = {2024-08-13},
journal = {FLAIRS},
volume = {37},
abstract = {There is a need for realistic Opposing Forces (OPFOR) behavior in military training simulations. Current training simulations generally only have simple, non-adaptive behaviors, requiring human instructors to play the role of OPFOR in any complicated scenario. This poster addresses this need by focusing on a specific scenario: training reinforcement learning agents to react to an ambush. It proposes a novel way to check for occlusion algorithmically. It shows vector fields showing the agent’s actions through the course of a training run. It shows that a single agent switching between multiple goals is possible, at least in a simplified environment. Such an approach could reduce the need to develop different agents for different scenarios. Finally, it shows a competent agent trained on a simplified React to Ambush scenario, demonstrating the plausibility of a scaled-up version.},
keywords = {Simulation, VR},
pubstate = {published},
tppubtype = {article}
}
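The Aris et al. abstract above mentions an algorithmic occlusion check but does not describe how it works. Purely as a generic illustration of one common way to test line of sight on a 2D occupancy grid, and not the paper's method, a Bresenham walk between agent and target can flag any blocking cell along the way; the grid layout and function names below are assumptions.

```python
# Illustrative only: a generic grid-based line-of-sight test, NOT the
# occlusion algorithm from the FLAIRS paper (which is not described here).
# Assumes a 2D occupancy grid indexed as grid[y][x], True = blocking cell.

def line_of_sight(grid, start, goal):
    """Return True if no blocking cell lies on the Bresenham line
    between start=(x0, y0) and goal=(x1, y1), endpoints excluded."""
    x0, y0 = start
    x1, y1 = goal
    dx, dy = abs(x1 - x0), abs(y1 - y0)
    sx = 1 if x0 < x1 else -1
    sy = 1 if y0 < y1 else -1
    err = dx - dy
    x, y = x0, y0
    while (x, y) != (x1, y1):
        if (x, y) != (x0, y0) and grid[y][x]:
            return False  # an obstacle occludes the target
        e2 = 2 * err
        if e2 > -dy:
            err -= dy
            x += sx
        if e2 < dx:
            err += dx
            y += sy
    return True

# Example: a 5x5 grid with a single blocking cell on the diagonal.
grid = [[False] * 5 for _ in range(5)]
grid[2][2] = True
print(line_of_sight(grid, (0, 0), (4, 4)))  # False: cell (2, 2) occludes
print(line_of_sight(grid, (0, 4), (4, 4)))  # True: clear along the top row
```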
Rizzo, Albert Skip; Hartholt, Arno; Mozgai, Sharon
Settling the Score: Virtual Reality as a Tool to Enhance Trauma-Focused Therapy for PTSD Book Section
In: Rich, Grant J.; Kumar, V. K.; Farley, Frank H. (Ed.): Handbook of Media Psychology, pp. 187–213, Springer Nature Switzerland, Cham, 2024, ISBN: 978-3-031-56536-6 978-3-031-56537-3.
Links | BibTeX | Tags: DTIC, MedVR, Simulation, VR
@incollection{rich_settling_2024,
title = {Settling the Score: Virtual Reality as a Tool to Enhance Trauma-Focused Therapy for PTSD},
author = {Albert Skip Rizzo and Arno Hartholt and Sharon Mozgai},
editor = {Grant J. Rich and V. K. Kumar and Frank H. Farley},
url = {https://link.springer.com/10.1007/978-3-031-56537-3_14},
doi = {10.1007/978-3-031-56537-3_14},
isbn = {978-3-031-56536-6 978-3-031-56537-3},
year = {2024},
date = {2024-04-01},
urldate = {2024-06-18},
booktitle = {Handbook of Media Psychology},
pages = {187–213},
publisher = {Springer Nature Switzerland},
address = {Cham},
keywords = {DTIC, MedVR, Simulation, VR},
pubstate = {published},
tppubtype = {incollection}
}
Rodrigues, Patrick B.; Becerik-Gerber, Burcin; Soibelman, Lucio; Lucas, Gale M.; Roll, Shawn C.
Virtual Environment for Studying the Effects of Operational and Environmental Sounds on Teleoperated Demolition Proceedings Article
In: Computing in Civil Engineering 2023, pp. 54–61, American Society of Civil Engineers, Corvallis, Oregon, 2024, ISBN: 978-0-7844-8523-1.
Links | BibTeX | Tags: DTIC, Virtual Humans, VR
@inproceedings{rodrigues_virtual_2024,
title = {Virtual Environment for Studying the Effects of Operational and Environmental Sounds on Teleoperated Demolition},
author = {Patrick B. Rodrigues and Burcin Becerik-Gerber and Lucio Soibelman and Gale M. Lucas and Shawn C. Roll},
url = {https://ascelibrary.org/doi/10.1061/9780784485231.007},
doi = {10.1061/9780784485231.007},
isbn = {978-0-7844-8523-1},
year = {2024},
date = {2024-01-01},
urldate = {2024-04-16},
booktitle = {Computing in Civil Engineering 2023},
pages = {54–61},
publisher = {American Society of Civil Engineers},
address = {Corvallis, Oregon},
keywords = {DTIC, Virtual Humans, VR},
pubstate = {published},
tppubtype = {inproceedings}
}
Liu, Ruying; Becerik-Gerber, Burçin; Lucas, Gale M.; Busta, Kelly
Development of a VR Training Platform for Active Shooter Incident Preparedness in Healthcare Environments via a Stakeholder-Engaged Process Proceedings Article
In: Computing in Civil Engineering 2023, pp. 45–53, American Society of Civil Engineers, Corvallis, Oregon, 2024, ISBN: 978-0-7844-8523-1.
Links | BibTeX | Tags: Virtual Humans, VR
@inproceedings{liu_development_2024,
title = {Development of a VR Training Platform for Active Shooter Incident Preparedness in Healthcare Environments via a Stakeholder-Engaged Process},
author = {Ruying Liu and Burçin Becerik-Gerber and Gale M. Lucas and Kelly Busta},
url = {https://ascelibrary.org/doi/10.1061/9780784485231.006},
doi = {10.1061/9780784485231.006},
isbn = {978-0-7844-8523-1},
year = {2024},
date = {2024-01-01},
urldate = {2024-04-16},
booktitle = {Computing in Civil Engineering 2023},
pages = {45–53},
publisher = {American Society of Civil Engineers},
address = {Corvallis, Oregon},
keywords = {Virtual Humans, VR},
pubstate = {published},
tppubtype = {inproceedings}
}
2023
Junghaenel, Doerte U.; Schneider, Stefan; Lucas, Gale; Boberg, Jill; Weinstein, Faye M.; Richeimer, Steven H.; Stone, Arthur A.; Lumley, Mark A.
Virtual Human–Delivered Interviews for Patients With Chronic Pain: Feasibility, Acceptability, and a Pilot Randomized Trial of Standard Medical, Psychosocial, and Educational Interviews Journal Article
In: Psychosom Med, vol. 85, no. 7, pp. 627–638, 2023, ISSN: 1534-7796, 0033-3174.
Abstract | Links | BibTeX | Tags: MedVR, VR
@article{junghaenel_virtual_2023,
title = {Virtual Human–Delivered Interviews for Patients With Chronic Pain: Feasibility, Acceptability, and a Pilot Randomized Trial of Standard Medical, Psychosocial, and Educational Interviews},
author = {Doerte U. Junghaenel and Stefan Schneider and Gale Lucas and Jill Boberg and Faye M. Weinstein and Steven H. Richeimer and Arthur A. Stone and Mark A. Lumley},
url = {https://journals.lww.com/10.1097/PSY.0000000000001228},
doi = {10.1097/PSY.0000000000001228},
issn = {1534-7796, 0033-3174},
year = {2023},
date = {2023-09-01},
urldate = {2024-07-11},
journal = {Psychosom Med},
volume = {85},
number = {7},
pages = {627–638},
abstract = {Objective: Seminal advances in virtual human (VH) technology have introduced highly interactive, computer-animated VH interviewers. Their utility for aiding in chronic pain care is unknown. We developed three interactive telehealth VH interviews—a standard pain-focused, a psychosocial risk factor, and a pain psychology and neuroscience educational interview. We then conducted a preliminary investigation of their feasibility, acceptability, and efficacy. We also experimentally compared a human and a computer-generated VH voice. Methods: Patients (N = 94},
keywords = {MedVR, VR},
pubstate = {published},
tppubtype = {article}
}
2022
Adami, Pooya; Rodrigues, Patrick B.; Woods, Peter J.; Becerik-Gerber, Burcin; Soibelman, Lucio; Copur-Gencturk, Yasemin; Lucas, Gale
Impact of VR-Based Training on Human–Robot Interaction for Remote Operating Construction Robots Journal Article
In: J. Comput. Civ. Eng., vol. 36, no. 3, pp. 04022006, 2022, ISSN: 0887-3801, 1943-5487.
Links | BibTeX | Tags: DTIC, UARC, Virtual Humans, VR
@article{adami_impact_2022,
title = {Impact of VR-Based Training on Human–Robot Interaction for Remote Operating Construction Robots},
author = {Pooya Adami and Patrick B. Rodrigues and Peter J. Woods and Burcin Becerik-Gerber and Lucio Soibelman and Yasemin Copur-Gencturk and Gale Lucas},
url = {https://ascelibrary.org/doi/10.1061/%28ASCE%29CP.1943-5487.0001016},
doi = {10.1061/(ASCE)CP.1943-5487.0001016},
issn = {0887-3801, 1943-5487},
year = {2022},
date = {2022-05-01},
urldate = {2022-09-23},
journal = {J. Comput. Civ. Eng.},
volume = {36},
number = {3},
pages = {04022006},
keywords = {DTIC, UARC, Virtual Humans, VR},
pubstate = {published},
tppubtype = {article}
}
Schweitzer, Julie B.; Rizzo, Albert “Skip”
Virtual Reality and ADHD: Clinical Assessment and Treatment in the Metaverse Journal Article
In: The ADHD Report, vol. 30, no. 3, pp. 1–9, 2022, ISSN: 1065-8025.
Links | BibTeX | Tags: MedVR, VR
@article{schweitzer_virtual_2022,
title = {Virtual Reality and ADHD: Clinical Assessment and Treatment in the Metaverse},
author = {Julie B. Schweitzer and Albert “Skip” Rizzo},
url = {https://guilfordjournals.com/doi/abs/10.1521/adhd.2022.30.3.1},
doi = {10.1521/adhd.2022.30.3.1},
issn = {1065-8025},
year = {2022},
date = {2022-05-01},
urldate = {2022-09-13},
journal = {The ADHD Report},
volume = {30},
number = {3},
pages = {1–9},
keywords = {MedVR, VR},
pubstate = {published},
tppubtype = {article}
}
Talbot, Thomas Brett; Chinara, Chinmay
Open Medical Gesture: An Open-Source Experiment in Naturalistic Physical Interactions for Mixed and Virtual Reality Simulations Proceedings Article
In: 2022.
Abstract | Links | BibTeX | Tags: DTIC, MedVR, MR, VR
@inproceedings{brett_talbot_open_2022,
title = {Open Medical Gesture: An Open-Source Experiment in Naturalistic Physical Interactions for Mixed and Virtual Reality Simulations},
author = {Thomas Brett Talbot and Chinmay Chinara},
url = {https://openaccess.cms-conferences.org/#/publications/book/978-1-958651-26-1/article/978-1-958651-26-1_0},
doi = {10.54941/ahfe1002054},
year = {2022},
date = {2022-01-01},
urldate = {2022-09-13},
abstract = {Mixed (MR) and Virtual Reality (VR) simulations are hampered by requirements for hand controllers or attempts to perseverate in use of two-dimensional computer interface paradigms from the 1980s. From our efforts to produce more naturalistic interactions for combat medic training for the military, we have developed an open-source toolkit that enables direct hand controlled responsive interactions that is sensor independent and can function with depth sensing cameras, webcams or sensory gloves. From this research and review of current literature, we have discerned several best approaches for hand-based human computer interactions which provide intuitive, responsive, useful, and low frustration experiences for VR users. The center of an effective gesture system is a universal hand model that can map to inputs from several different kinds of sensors rather than depending on a specific commercial product. Parts of the hand are effectors in simulation space with a physics-based model. Therefore, translational and rotational forces from the hands will impact physical objects in VR which varies based on the mass of the virtual objects. We incorporate computer code w/ objects, calling them “Smart Objects”, which allows such objects to have movement properties and collision detection for expected manipulation. Examples of smart objects include scissors, a ball, a turning knob, a moving lever, or a human figure with moving limbs. Articulation points contain collision detectors and code to assist in expected hand actions. We include a library of more than 40 Smart Objects in the toolkit. Thus, is it possible to throw a ball, hit that ball with a bat, cut a bandage, turn on a ventilator or to lift and inspect a human arm.We mediate the interaction of the hands with virtual objects. Hands often violate the rules of a virtual world simply by passing through objects. One must interpret user intent. This can be achieved by introducing stickiness of the hands to objects. If the human’s hands overshoot an object, we place the hand onto that object’s surface unless the hand passes the object by a significant distance. We also make hands and fingers contact an object according to the object’s contours and do not allow fingers to sink into the interior of an object. Haptics, or a sense of physical resistance and tactile sensation from contacting physical objects is a supremely difficult technical challenge and is an expensive pursuit. Our approach ignores true haptics, but we have experimented with an alternative approach, called audio tactile synesthesia where we substitute the sensation of touch for that of sound. The idea is to associate parts of each hand with a tone of a specific frequency upon contacting objects. The attack rate of the sound envelope varies with the velocity of contact and hardness of the object being ‘touched’. Such sounds can feel softer or harder depending on the nature of ‘touch’ being experienced. This substitution technique can provide tactile feedback through indirect, yet still naturalistic means. The artificial intelligence (AI) technique to determine discrete hand gestures and motions within the physical space is a special form of AI called Long Short Term Memory (LSTM). LSTM allows much faster and flexible recognition than other machine learning approaches. LSTM is particularly effective with points in motion. Latency of recognition is very low. 
In addition to LSTM, we employ other synthetic vision & object recognition AI to the discrimination of real-world objects. This allows for methods to conduct virtual simulations. For example, it is possible to pick up a virtual syringe and inject a medication into a virtual patient through hand motions. We track the hand points to contact with the virtual syringe. We also detect when the hand is compressing the syringe plunger. We could also use virtual medications & instruments on human actors or manikins, not just on virtual objects. With object recognition AI, we can place a syringe on a tray in the physical world. The human user can pick up the syringe and use it on a virtual patient. Thus, we are able to blend physical and virtual simulation together seamlessly in a highly intuitive and naturalistic manner.The techniques and technologies explained here represent a baseline capability whereby interacting in mixed and virtual reality can now be much more natural and intuitive than it has ever been. We have now passed a threshold where we can do away with game controllers and magnetic trackers for VR. This advancement will contribute to greater adoption of VR solutions. To foster this, our team has committed to freely sharing these technologies for all purposes and at no cost as an open-source tool. We encourage the scientific, research, educational and medical communities to adopt these resources and determine their effectiveness and utilize these tools and practices to grow the body of useful VR applications.},
keywords = {DTIC, MedVR, MR, VR},
pubstate = {published},
tppubtype = {inproceedings}
}
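The Talbot and Chinara abstract above credits a Long Short-Term Memory (LSTM) network with recognizing hand gestures from tracked points in motion. The toolkit's actual model is not shown in this listing; the sketch below is only a minimal PyTorch illustration of that general idea, with the landmark count, hidden size, and number of gesture classes all assumed.

```python
# Minimal sketch (assumed details, not the Open Medical Gesture code):
# classify a gesture from a sequence of 3D hand-landmark frames with an LSTM.
import torch
import torch.nn as nn

class GestureLSTM(nn.Module):
    def __init__(self, n_landmarks=21, hidden=64, n_gestures=10):
        super().__init__()
        # each frame flattens 21 (x, y, z) landmarks into one feature vector
        self.lstm = nn.LSTM(input_size=n_landmarks * 3,
                            hidden_size=hidden, batch_first=True)
        self.head = nn.Linear(hidden, n_gestures)

    def forward(self, frames):           # frames: (batch, time, landmarks*3)
        _, (h_n, _) = self.lstm(frames)  # h_n: (1, batch, hidden)
        return self.head(h_n[-1])        # logits: (batch, n_gestures)

# Example: a batch of 8 sequences, 30 frames each, 21 landmarks per frame.
model = GestureLSTM()
x = torch.randn(8, 30, 21 * 3)
logits = model(x)
pred = logits.argmax(dim=1)              # predicted gesture index per sequence
print(pred.shape)                        # torch.Size([8])
```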
Stokes, Jared D.; Rizzo, Albert; Geng, Joy J.; Schweitzer, Julie B.
Measuring Attentional Distraction in Children With ADHD Using Virtual Reality Technology With Eye-Tracking Journal Article
In: Frontiers in Virtual Reality, vol. 3, 2022, ISSN: 2673-4192.
Abstract | Links | BibTeX | Tags: MedVR, VR
@article{stokes_measuring_2022,
title = {Measuring Attentional Distraction in Children With ADHD Using Virtual Reality Technology With Eye-Tracking},
author = {Jared D. Stokes and Albert Rizzo and Joy J. Geng and Julie B. Schweitzer},
url = {https://www.frontiersin.org/articles/10.3389/frvir.2022.855895},
issn = {2673-4192},
year = {2022},
date = {2022-01-01},
urldate = {2022-09-13},
journal = {Frontiers in Virtual Reality},
volume = {3},
abstract = {Objective: Distractions inordinately impair attention in children with Attention-Deficit Hyperactivity Disorder (ADHD) but examining this behavior under real-life conditions poses a challenge for researchers and clinicians. Virtual reality (VR) technologies may mitigate the limitations of traditional laboratory methods by providing a more ecologically relevant experience. The use of eye-tracking measures to assess attentional functioning in a VR context in ADHD is novel. In this proof of principle project, we evaluate the temporal dynamics of distraction via eye-tracking measures in a VR classroom setting with 20 children diagnosed with ADHD between 8 and 12 years of age.Method: We recorded continuous eye movements while participants performed math, Stroop, and continuous performance test (CPT) tasks with a series of “real-world” classroom distractors presented. We analyzed the impact of the distractors on rates of on-task performance and on-task, eye-gaze (i.e., looking at a classroom whiteboard) versus off-task eye-gaze (i.e., looking away from the whiteboard).Results: We found that while children did not always look at distractors themselves for long periods of time, the presence of a distractor disrupted on-task gaze at task-relevant whiteboard stimuli and lowered rates of task performance. This suggests that children with attention deficits may have a hard time returning to tasks once those tasks are interrupted, even if the distractor itself does not hold attention. Eye-tracking measures within the VR context can reveal rich information about attentional disruption.Conclusions: Leveraging virtual reality technology in combination with eye-tracking measures is well-suited to advance the understanding of mechanisms underlying attentional impairment in naturalistic settings. Assessment within these immersive and well-controlled simulated environments provides new options for increasing our understanding of distractibility and its potential impact on the development of interventions for children with ADHD.},
keywords = {MedVR, VR},
pubstate = {published},
tppubtype = {article}
}
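The Stokes et al. abstract above compares on-task gaze (at the classroom whiteboard) with off-task gaze around distractor events. As an illustration only, and not the study's actual analysis pipeline, the helper below computes the fraction of gaze samples that remain on the whiteboard in a fixed window after each distractor onset; the sample format and window length are assumptions.

```python
# Illustration only (assumed data format, not the study's analysis code):
# fraction of gaze samples that stay on the whiteboard in the window
# following each distractor onset.
def on_task_fraction(gaze_samples, distractor_onsets, window_s=5.0):
    """gaze_samples: list of (timestamp_s, target) where target is a label
    such as 'whiteboard' or 'distractor'; distractor_onsets: times in s."""
    fractions = []
    for onset in distractor_onsets:
        in_window = [s for s in gaze_samples
                     if onset <= s[0] < onset + window_s]
        if not in_window:
            continue
        on_task = sum(1 for _, target in in_window if target == "whiteboard")
        fractions.append(on_task / len(in_window))
    return fractions

samples = [(0.0, "whiteboard"), (0.5, "whiteboard"), (1.0, "distractor"),
           (1.5, "distractor"), (2.0, "whiteboard"), (6.0, "whiteboard")]
print(on_task_fraction(samples, distractor_onsets=[0.0]))  # [0.6]
```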
Davis, Matt; Nye, Benjamin; Sinatra, Gale; Swartout, William; Sjöberg, Molly; Porter, Molly; Nelson, David; Kennedy, Alana; Herrick, Imogen; Weeks, Danaan DeNeve; Lindsey, Emily
Designing scientifically-grounded paleoart for augmented reality at La Brea Tar Pits Journal Article
In: Palaeontologia Electronica, 2022, ISSN: 1935-3952, 1094-8074.
Links | BibTeX | Tags: AR, MxR, VR
@article{davis_designing_2022,
title = {Designing scientifically-grounded paleoart for augmented reality at La Brea Tar Pits},
author = {Matt Davis and Benjamin Nye and Gale Sinatra and William Swartout and Molly Sjöberg and Molly Porter and David Nelson and Alana Kennedy and Imogen Herrick and Danaan DeNeve Weeks and Emily Lindsey},
url = {https://palaeo-electronica.org/content/2022/3524-la-brea-tar-pits-paleoart},
doi = {10.26879/1191},
issn = {1935-3952, 1094-8074},
year = {2022},
date = {2022-01-01},
urldate = {2022-09-15},
journal = {Palaeontologia Electronica},
keywords = {AR, MxR, VR},
pubstate = {published},
tppubtype = {article}
}
2021
Ionescu, Alina; Daele, Tom Van; Rizzo, Albert; Blair, Carolyn; Best, Paul
360° Videos for Immersive Mental Health Interventions: a Systematic Review Journal Article
In: J. technol. behav. sci., vol. 6, no. 4, pp. 631–651, 2021, ISSN: 2366-5963.
Abstract | Links | BibTeX | Tags: MedVR, VR
@article{ionescu_360_2021,
title = {360° Videos for Immersive Mental Health Interventions: a Systematic Review},
author = {Alina Ionescu and Tom Van Daele and Albert Rizzo and Carolyn Blair and Paul Best},
url = {https://doi.org/10.1007/s41347-021-00221-7},
doi = {10.1007/s41347-021-00221-7},
issn = {2366-5963},
year = {2021},
date = {2021-12-01},
urldate = {2022-09-13},
journal = {J. technol. behav. sci.},
volume = {6},
number = {4},
pages = {631–651},
abstract = {Virtual reality is increasingly recognized as a powerful method for clinical interventions in the mental health field, but has yet to achieve mainstream adoption in routine mental healthcare settings. A similar, yet slightly different technology, immersive 360° videos might have the potential to cover this gap, by requiring both lower costs and less technical skills to construct and operate such virtual environments. This systematic review therefore aims to identify, evaluate, and summarize mental health interventions using immersive 360° videos to support an understanding of their implementation in daily clinical practice. The quality of the 14 selected studies was evaluated using a critical appraisal tool, addressing populations with clinical levels of psychopathological symptoms, somatic conditions associated with psychological implications, and other at-risk groups. Immersive 360° videos successfully increased users’ feelings of presence, given their realistic features, and therefore yielded positive outcomes in clinical interventions where presence is considered as an essential precondition. Because the technical skills required to create immersive 360° video footage are fairly limited, most of the interventions using this approach have been created by mental health researchers or clinicians themselves. Immersive 360° videos are still in an early phase of implementation as a tool for clinical interventions for mental health, resulting in high heterogeneity in focus, procedures, and research designs. An important next step for making use of this technology may therefore involve the creation of standardized procedures, as a means to increase the quality of research and evidence-based interventions.},
keywords = {MedVR, VR},
pubstate = {published},
tppubtype = {article}
}
Mozgai, Sharon; Leeds, Andrew; Kwok, David; Fast, Ed; Rizzo, Albert Skip; Hartholt, Arno
Building BRAVEMIND Vietnam: User-Centered Design for Virtual Reality Exposure Therapy Proceedings Article
In: 2021 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR), pp. 247–250, 2021.
Abstract | Links | BibTeX | Tags: DTIC, MedVR, VHTL, VR
@inproceedings{mozgai_building_2021,
title = {Building BRAVEMIND Vietnam: User-Centered Design for Virtual Reality Exposure Therapy},
author = {Sharon Mozgai and Andrew Leeds and David Kwok and Ed Fast and Albert Skip Rizzo and Arno Hartholt},
doi = {10.1109/AIVR52153.2021.00056},
year = {2021},
date = {2021-11-01},
booktitle = {2021 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR)},
pages = {247–250},
abstract = {There has been a recent resurgence of Vietnam-era Veterans seeking Veteran Affairs (VA) services for post-traumatic stress disorder (PTSD). Multiple studies have reported positive outcomes using Virtual Reality Exposure Therapy (VRET) with Iraq/Afghanistan Veteran PTSD patients, but there have been fewer investigations into the acceptability and feasibility of VRET with older Veterans. We have extended an established VRET system, BRAVEMIND, to include Vietnam relevant content following a user-centered iterative design methodology. This paper will present the BRAVEMIND Vietnam VRET system, including setup, content, and technical architecture along with the findings of an initial focus group with Vietnam Veterans. These Veterans rated this system to be acceptable as a treatment tool for combat-related PTSD and provided valuable first-person accounts of their time in Vietnam to help guide the evolution of the VR content.},
keywords = {DTIC, MedVR, VHTL, VR},
pubstate = {published},
tppubtype = {inproceedings}
}
Adami, Pooya; Rodrigues, Patrick B.; Woods, Peter J.; Becerik-Gerber, Burcin; Soibelman, Lucio; Copur-Gencturk, Yasemin; Lucas, Gale
Effectiveness of VR-based training on improving construction workers’ knowledge, skills, and safety behavior in robotic teleoperation Journal Article
In: Advanced Engineering Informatics, vol. 50, pp. 101431, 2021, ISSN: 1474-0346.
Links | BibTeX | Tags: DTIC, Learning Sciences, UARC, VR
@article{adami_effectiveness_2021,
title = {Effectiveness of VR-based training on improving construction workers’ knowledge, skills, and safety behavior in robotic teleoperation},
author = {Pooya Adami and Patrick B. Rodrigues and Peter J. Woods and Burcin Becerik-Gerber and Lucio Soibelman and Yasemin Copur-Gencturk and Gale Lucas},
url = {https://linkinghub.elsevier.com/retrieve/pii/S147403462100183X},
doi = {10.1016/j.aei.2021.101431},
issn = {1474-0346},
year = {2021},
date = {2021-10-01},
urldate = {2022-09-26},
journal = {Advanced Engineering Informatics},
volume = {50},
pages = {101431},
keywords = {DTIC, Learning Sciences, UARC, VR},
pubstate = {published},
tppubtype = {article}
}
Rizzo, Albert “Skip”; Goodwin, Grace J.; Vito, Alyssa N. De; Bell, Joshua D.
Recent advances in virtual reality and psychology: Introduction to the special issue. Journal Article
In: Translational Issues in Psychological Science, vol. 7, no. 3, pp. 213–217, 2021, ISSN: 2332-2179, 2332-2136.
Links | BibTeX | Tags: DTIC, MedVR, VR
@article{rizzo_recent_2021,
title = {Recent advances in virtual reality and psychology: Introduction to the special issue.},
author = {Albert “Skip” Rizzo and Grace J. Goodwin and Alyssa N. De Vito and Joshua D. Bell},
url = {http://doi.apa.org/getdoi.cfm?doi=10.1037/tps0000316},
doi = {10.1037/tps0000316},
issn = {2332-2179, 2332-2136},
year = {2021},
date = {2021-09-01},
urldate = {2022-09-13},
journal = {Translational Issues in Psychological Science},
volume = {7},
number = {3},
pages = {213–217},
keywords = {DTIC, MedVR, VR},
pubstate = {published},
tppubtype = {article}
}
Rizzo, Albert; et al.
Normative Data for a Next Generation Virtual Classroom for Attention Assessment in Children with ADHD and Beyond! Proceedings Article
In: Proceedings of the 13th International Conference on Disability, Virtual Reality and Associated Technologies (ICDVRAT 2021), Serpa, Portugal, 2021.
Links | BibTeX | Tags: MedVR, Virtual Humans, VR
@inproceedings{a_rizzo_et_al_normative_2021,
title = {Normative Data for a Next Generation Virtual Classroom for Attention Assessment in Children with ADHD and Beyond!},
author = {et al A Rizzo},
url = {http://studio.hei-lab.ulusofona.pt/archive/},
year = {2021},
date = {2021-09-01},
booktitle = {Proceedings of the 13th International Conference on Disability, Virtual Reality and Associated Technologies (ICDVRAT 2021)},
address = {Serpa, Portugal},
keywords = {MedVR, Virtual Humans, VR},
pubstate = {published},
tppubtype = {inproceedings}
}
Awada, Mohamad; Zhu, Runhe; Becerik-Gerber, Burcin; Lucas, Gale; Southers, Erroll
An integrated emotional and physiological assessment for VR-based active shooter incident experiments Journal Article
In: Advanced Engineering Informatics, vol. 47, pp. 101227, 2021, ISSN: 1474-0346.
Links | BibTeX | Tags: DTIC, VR
@article{awada_integrated_2021,
title = {An integrated emotional and physiological assessment for VR-based active shooter incident experiments},
author = {Mohamad Awada and Runhe Zhu and Burcin Becerik-Gerber and Gale Lucas and Erroll Southers},
url = {https://linkinghub.elsevier.com/retrieve/pii/S1474034620301968},
doi = {10.1016/j.aei.2020.101227},
issn = {1474-0346},
year = {2021},
date = {2021-01-01},
urldate = {2022-10-24},
journal = {Advanced Engineering Informatics},
volume = {47},
pages = {101227},
keywords = {DTIC, VR},
pubstate = {published},
tppubtype = {article}
}
Bell, Benjamin; Bennett, Winston “Wink”; Kelsey, Elaine; Nye, Benjamin
Attention and Engagement in Virtual Environments: Measuring the Unobservable Proceedings Article
In: 2021.
Links | BibTeX | Tags: AR, DTIC, Machine Learning, UARC, VR
@inproceedings{bell_attention_2021,
title = {Attention and Engagement in Virtual Environments: Measuring the Unobservable},
author = {Benjamin Bell and Winston “Wink” Bennett and Elaine Kelsey and Benjamin Nye},
url = {https://www.xcdsystem.com/iitsec/proceedings/index.cfm?Year=2021&AbID=95758&CID=862#View},
year = {2021},
date = {2021-01-01},
keywords = {AR, DTIC, Machine Learning, UARC, VR},
pubstate = {published},
tppubtype = {inproceedings}
}
0000
Hartholt, Arno; Mozgai, Sharon
From Combat to COVID-19 – Managing the Impact of Trauma Using Virtual Reality Journal Article
In: pp. 35, 0000.
Abstract | BibTeX | Tags: DTIC, MedVR, Virtual Humans, VR
@article{hartholt_combat_nodate,
title = {From Combat to COVID-19 – Managing the Impact of Trauma Using Virtual Reality},
author = {Arno Hartholt and Sharon Mozgai},
pages = {35},
abstract = {Research has documented the efficacy of clinical applications that leverage Virtual Reality (VR) for assessment and treatment purposes across a wide range of domains, including pain, phobias, and posttraumatic stress disorder (PTSD). As the field of Clinical VR matures, it is important to review its origins and examine how these initial explorations have progressed, what gaps remain, and what opportunities the community can pursue. We do this by reflecting on our personal scientific journey against the backdrop of the field in general. In particular, this paper discusses how a clinical research program that was initially designed to deliver trauma-focused VR exposure therapy (VRET) for combat-related PTSD has been evolved to expand its impact and address a wider range of trauma sources. Such trauma sources include sexual trauma and the needs of first responders and healthcare professionals serving on the frontlines of the COVID-19 pandemic. We provide an overview of the field and its general trends, discuss the genesis of our research agenda and its current status, and summarize upcoming opportunities, together with common challenges and lessons learned.},
keywords = {DTIC, MedVR, Virtual Humans, VR},
pubstate = {published},
tppubtype = {article}
}
Mozgai, Sharon; Rizzo, Albert A; Hartholt, Arno
Persuasive Technology for Suicide Prevention: A Virtual Human mHealth Application Proceedings Article
In: 0000.
Abstract | BibTeX | Tags: Virtual Humans, VR
@inproceedings{mozgai_persuasive_nodate,
title = {Persuasive Technology for Suicide Prevention: A Virtual Human mHealth Application},
author = {Sharon Mozgai and Albert A Rizzo and Arno Hartholt},
abstract = {We are demoing Battle Buddy, an mHealth application designed to support access to physical and mental wellness content as well as safety planning for U.S. military veterans. This virtual human interface will collect multimodal data through passive sensors native to popular wearables (e.g., Apple Watch) and deliver adaptive multimedia content specifically tailored to the user in the interdependent domains of physical, cognitive, and emotional health. Battle Buddy can deliver health interventions matched to the individual user via novel adaptive logic-based algorithms while employing various behavior change techniques (e.g., goal-setting, barrier identification, rewards, modeling, etc.). All interactions were specifically designed to engage and motivate by employing the persuasive strategies of (1) personalization, (2) self-monitoring, (3) tunneling, (4) suggestion, and (5) expertise.},
keywords = {Virtual Humans, VR},
pubstate = {published},
tppubtype = {inproceedings}
}