Publications
Search
Azmandian, Mahdi; Yahata, Rhys; Bolas, Mark; Suma, Evan
An Enhanced Steering Algorithm for Redirected Walking in Virtual Environments Proceedings Article
In: IEEE Virtual Reality 2014, pp. 65–66, 2014.
@inproceedings{azmandian_enhanced_2014,
  title = {An Enhanced Steering Algorithm for Redirected Walking in Virtual Environments},
  author = {Mahdi Azmandian and Rhys Yahata and Mark Bolas and Evan Suma},
  url = {http://ict.usc.edu/pubs/An%20Enhanced%20Steering%20Algorithm%20for%20Redirected%20Walking%20in%20Virtual%20Environments.pdf},
  year = {2014},
  date = {2014-03-01},
  booktitle = {IEEE Virtual Reality 2014},
  pages = {65--66},
  abstract = {Redirected walking techniques enable natural locomotion through immersive virtual environments that are considerably larger than the available real world walking space. However, the most effective strategy for steering the user remains an open question, as most previously presented algorithms simply redirect toward the center of the physical space. In this work, we present a theoretical framework that plans a walking path through a virtual environment and calculates the parameters for combining translation, rotation, and curvature gains such that the user can traverse a series of defined waypoints efficiently based on a utility function. This function minimizes the number of overt reorientations to avoid introducing potential breaks in presence. A notable advantage of this approach is that it leverages knowledge of the layout of both the physical and virtual environments to enhance the steering strategy.},
  keywords = {},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Jones, Andrew; Nagano, Koki; Liu, Jing; Busch, Jay; Yu, Xueming; Bolas, Mark; Debevec, Paul
Interpolating vertical parallax for an autostereoscopic three-dimensional projector array Journal Article
In: Journal of Electronic Imaging, vol. 23, no. 1, 2014, ISSN: 1017-9909.
@article{jones_interpolating_2014,
  author    = {Andrew Jones and Koki Nagano and Jing Liu and Jay Busch and Xueming Yu and Mark Bolas and Paul Debevec},
  title     = {Interpolating vertical parallax for an autostereoscopic three-dimensional projector array},
  journal   = {Journal of Electronic Imaging},
  volume    = {23},
  number    = {1},
  year      = {2014},
  date      = {2014-03-01},
  issn      = {1017-9909},
  doi       = {10.1117/1.JEI.23.1.011005},
  url       = {http://electronicimaging.spiedigitallibrary.org/article.aspx?doi=10.1117/1.JEI.23.1.011005},
  abstract  = {We present a technique for achieving tracked vertical parallax for multiple users using a variety of autostereoscopic projector array setups, including front- and rear-projection and curved display surfaces. This hybrid parallax approach allows for immediate horizontal parallax as viewers move left and right and tracked parallax as they move up and down, allowing cues such as three-dimensional (3-D) perspective and eye contact to be conveyed faithfully. We use a low-cost RGB-depth sensor to simultaneously track multiple viewer head positions in 3-D space, and we interactively update the imagery sent to the array so that imagery directed to each viewer appears from a consistent and correct vertical perspective. Unlike previous work, we do not assume that the imagery sent to each projector in the array is rendered from a single vertical perspective. This lets us apply hybrid parallax to displays where a single projector forms parts of multiple viewers’ imagery. Thus, each individual projected image is rendered with multiple centers of projection, and might show an object from above on the left and from below on the right. We demonstrate this technique using a dense horizontal array of pico-projectors aimed into an anisotropic vertical diffusion screen, yielding 1.5 deg angular resolution over 110 deg field of view. To create a seamless viewing experience for multiple viewers, we smoothly interpolate the set of viewer heights and distances on a per-vertex basis across the array’s field of view, reducing image distortion, cross talk, and artifacts from tracking errors.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Thomas, Jerald; Bashyal, Raghav; Goldstein, Samantha; Suma, Evan
MuVR: A Multi-user Virtual Reality Platform Proceedings Article
In: IEEE Virtual Reality 2014, pp. 115–116, IEEE, Minneapolis, Minnesota, 2014.
@inproceedings{thomas_muvr_2014,
  title = {MuVR: A Multi-user Virtual Reality Platform},
  author = {Jerald Thomas and Raghav Bashyal and Samantha Goldstein and Evan Suma},
  url = {http://ict.usc.edu/pubs/MuVR%20-%20A%20Multi-user%20Virtual%20Reality%20Platform.pdf},
  year = {2014},
  date = {2014-03-01},
  booktitle = {IEEE Virtual Reality 2014},
  pages = {115--116},
  publisher = {IEEE},
  address = {Minneapolis, Minnesota},
  abstract = {Consumer adoption of virtual reality technology has historically been held back by poor accessibility, the lack of intuitive multi-user capabilities, dependence on external infrastructure for rendering and tracking, and the amount of time and effort required to enter virtual reality systems. This poster presents the current status of our work creating MuVR, a Multi-User Virtual Reality platform that seeks to overcome these hindrances. The MuVR project comprises four main goals: scalable and easy to use multi-user capabilities, portable and self-contained hardware, a rapidly deployable system, and ready accessibility to others. We provide a description of the platform we developed to address these goals and discuss potential directions for future work.},
  keywords = {},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Shapiro, Ari; Feng, Andrew; Wang, Ruizhe; Medioni, Gerard; Bolas, Mark; Suma, Evan A.
Automatic Acquisition and Animation of Virtual Avatars Proceedings Article
In: Virtual Reality (VR), 2014 IEEE, pp. 185–186, IEEE, Minneapolis, Minnesota, 2014, ISBN: 978-1-4799-2871-2.
@inproceedings{shapiro_automatic_2014,
  title = {Automatic Acquisition and Animation of Virtual Avatars},
  author = {Ari Shapiro and Andrew Feng and Ruizhe Wang and Gerard Medioni and Mark Bolas and Evan A. Suma},
  url = {http://ict.usc.edu/pubs/Automatic%20acquisition%20and%20animation%20of%20virtual%20avatars.pdf},
  doi = {10.1109/VR.2014.6802113},
  isbn = {978-1-4799-2871-2},
  year = {2014},
  date = {2014-03-01},
  booktitle = {Virtual Reality (VR), 2014 IEEE},
  pages = {185--186},
  publisher = {IEEE},
  address = {Minneapolis, Minnesota},
  abstract = {The USC Institute for Creative Technologies will demonstrate a pipeline for automatic reconstruction and animation of lifelike 3D avatars acquired by rotating the user's body in front of a single Microsoft Kinect sensor. Based on a fusion of state-of-the-art techniques in computer vision, graphics, and animation, this approach can produce a fully rigged character model suitable for real-time virtual environments in less than four minutes.},
  keywords = {},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Gratch, Jonathan; Kang, Sin-Hwa; Wang, Ning
Using Social Agents to Explore Theories of Rapport and Emotional Resonance Book Section
In: Social Emotions in Nature and Artifact, pp. 181 –195, 2014.
@incollection{gratch_using_2014,
  title = {Using Social Agents to Explore Theories of Rapport and Emotional Resonance},
  author = {Jonathan Gratch and Sin-Hwa Kang and Ning Wang},
  url = {http://www.oxfordscholarship.com/view/10.1093/acprof:oso/9780195387643.001.0001/acprof-9780195387643-chapter-12},
  year = {2014},
  date = {2014-01-01},
  booktitle = {Social Emotions in Nature and Artifact},
  publisher = {Oxford University Press},
  pages = {181--195},
  abstract = {We discuss several technical challenges that must be overcome before realizing this vision. More importantly, success depends not on simply overcoming these challenges, but demonstrating that such interactivity has measurable and desirable consequences for human-computer interaction. In this chapter, we describe the Rapport Agent, an interactive agent and methodological tool designed to Emotions unfold in with bewildering complexity in face-to-face social interactions. Building computer programs that can engage people in this unfolding emotional dance is a fascinating prospect with potentially profound practical and scientific consequences. Computer agents that engage people in this manner could enhance our understanding of this fundamental social process and, more practically, have dramatic implications investigate the role of nonverbal patterning in human-computer and computer-mediated interaction. We outline a series of laboratory studies and resulting findings that give insight into how nonverbal patterns of behavior can influence both subjective perceptions (such as feelings of rapport or embarrassment) and behavioural outcomes (such as speech fluency or intimate self-disclosure).},
  internal-note = {NOTE(review): abstract text appears scrambled by extraction (sentences interleaved mid-clause) — verify against the published chapter},
  keywords = {},
  pubstate = {published},
  tppubtype = {incollection}
}
Huang, Yu-Jen; Bolas, Mark; Suma, Evan
Fusing Depth, Color, and Skeleton Data for Enhanced Real-Time Hand Segmentation Proceedings Article
In: ACM Symposium on Spatial User Interaction, 2013.
@inproceedings{huang_fusing_2013,
  author    = {Yu-Jen Huang and Mark Bolas and Evan Suma},
  title     = {Fusing Depth, Color, and Skeleton Data for Enhanced Real-Time Hand Segmentation},
  booktitle = {ACM Symposium on Spatial User Interaction},
  year      = {2013},
  date      = {2013-07-01},
  url       = {http://ict.usc.edu/pubs/Fusing%20Depth,%20Color,%20and%20Skeleton%20Data%20for%20Enhanced%20Real-Time%20Hand%20Segmentation.pdf},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Suma, Evan; Krum, David M.; Lange, Belinda; Koenig, Sebastian; Rizzo, Albert; Bolas, Mark
Adapting user interfaces for gestural interaction with the flexible action and articulated skeleton toolkit Journal Article
In: Computers & Graphics, vol. 37, no. 3, pp. 193–201, 2013.
@article{suma_adapting_2013,
  title = {Adapting user interfaces for gestural interaction with the flexible action and articulated skeleton toolkit},
  author = {Evan Suma and David M. Krum and Belinda Lange and Sebastian Koenig and Albert Rizzo and Mark Bolas},
  url = {http://ict.usc.edu/pubs/Adapting%20user%20interfaces%20for%20gestural%20interaction%20with%20the%20%EF%AC%82exible%20action%20and%20articulated%20skeleton%20toolkit.pdf},
  year = {2013},
  date = {2013-05-01},
  journal = {Computers \& Graphics},
  volume = {37},
  number = {3},
  pages = {193--201},
  abstract = {We present the Flexible Action and Articulated Skeleton Toolkit (FAAST), a middleware software framework for integrating full-body interaction with virtual environments, video games, and other user interfaces. This toolkit provides a complete end-to-end solution that includes a graphical user interface for custom gesture creation, sensor configuration, skeletal tracking, action recognition, and a variety of output mechanisms to control third party applications, allowing virtually any PC application to be repurposed for gestural control even if it does not explicitly support input from motion sensors. To facilitate intuitive and transparent gesture design, we define a syntax for representing human gestures using rule sets that correspond to the basic spatial and temporal components of an action. These individual rules form primitives that, although conceptually simple on their own, can be combined both simultaneously and in sequence to form sophisticated gestural interactions. In addition to presenting the system architecture and our approach for representing and designing gestural interactions, we also describe two case studies that evaluated the use of FAAST for controlling first-person video games and improving the accessibility of computing interfaces for individuals with motor impairments. Thus, this work represents an important step toward making gestural interaction more accessible for practitioners, researchers, and hobbyists alike.},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
Suma, Evan; Krum, David M.; Bolas, Mark
Redirected Walking in Mixed Reality Training Applications Book Section
In: Human Walking in Virtual Environments: Perception, Technology, and Applications, Springer, 2013, ISBN: 1-4419-8431-3.
@incollection{suma_redirected_2013,
  title = {Redirected Walking in Mixed Reality Training Applications},
  author = {Evan Suma and David M. Krum and Mark Bolas},
  url = {http://www.amazon.com/Human-Walking-Virtual-Environments-Applications/dp/1441984313/ref=sr_1_1},
  isbn = {1-4419-8431-3},
  year = {2013},
  date = {2013-05-01},
  booktitle = {Human Walking in Virtual Environments: Perception, Technology, and Applications},
  publisher = {Springer},
  abstract = {To create effective immersive training experiences, it is important to provide intuitive interfaces that allow users to move around and interact with virtual content in a manner that replicates real world experiences. However, natural loco- motion remains an implementation challenge because the dimensions of the phys- ical tracking space restrict the size of the virtual environment that users can walk through. To relax these limitations, redirected walking techniques may be employed to enable walking through immersive virtual environments that are substantially larger than the physical tracking area. In this chapter, we present practical design considerations for employing redirected walking in immersive training applications and recent research evaluating the impact on spatial orientation. Additionally, we also describe an alternative implementation of redirection that is more appropriate for mixed reality environments. Finally, we discuss challenges and future directions for research in redirected walking with the goal of transitioning these techniques into practical training simulators.},
  keywords = {},
  pubstate = {published},
  tppubtype = {incollection}
}
Jones, J. Adam; II, J. Edward Swan; Bolas, Mark
Peripheral Stimulation and its Effect on Perceived Spatial Scale in Virtual Environments Journal Article
In: IEEE Transactions on Visualization and Computer Graphics, vol. 19, no. 4, pp. 701–710, 2013.
@article{jones_peripheral_2013,
  title = {Peripheral Stimulation and its Effect on Perceived Spatial Scale in Virtual Environments},
  author = {J. Adam Jones and J. Edward Swan II and Mark Bolas},
  url = {http://ict.usc.edu/pubs/Peripheral%20Stimulation%20and%20its%20Effect%20on%20Perceived%20Spatial%20Scale%20in%20Virtual%20Environments.pdf},
  doi = {10.1109/TVCG.2013.37},
  year = {2013},
  date = {2013-04-01},
  journal = {IEEE Transactions on Visualization and Computer Graphics},
  volume = {19},
  number = {4},
  pages = {701--710},
  abstract = {The following series of experiments explore the effect of static peripheral stimulation on the perception of distance and spatial scale in a typical head-mounted virtual environment. It was found that applying constant white light in an observer’s far periphery enabled the observer to more accurately judge distances using blind walking. An effect of similar magnitude was also found when observers estimated the size of a virtual space using a visual scale task. The presence of the effect across multiple psychophysical tasks provided confidence that a perceptual change was, in fact, being invoked by the addition of the peripheral stimulation. These results were also compared to observer performance in a very large field of view virtual environment and in the real world. The subsequent findings raise the possibility that distance judgments in virtual environments might be considerably more similar to those in the real world than previous work has suggested.},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
Finklestein, Samantha; Barnes, Tiffany; Wartell, Zachary; Suma, Evan
Evaluation of the Exertion and Motivation Factors of a Virtual Reality Exercise Game for Children with Autism Proceedings Article
In: Workshop on Virtual and Augmented Assistive Technology, Orlando, FL, 2013.
@inproceedings{finklestein_evaluation_2013,
  author    = {Samantha Finklestein and Tiffany Barnes and Zachary Wartell and Evan Suma},
  title     = {Evaluation of the Exertion and Motivation Factors of a Virtual Reality Exercise Game for Children with Autism},
  booktitle = {Workshop on Virtual and Augmented Assistive Technology},
  address   = {Orlando, FL},
  year      = {2013},
  date      = {2013-03-01},
  url       = {http://ict.usc.edu/pubs/Evaluation%20of%20the%20Exertion%20and%20Motivation%20Factors%20of%20a%20Virtual%20Reality%20Exercise%20Game%20for%20Children%20with%20Autism.pdf},
  abstract  = {Children with autism experience significant positive behavioral and health benefits from exercise, though many of these children tend to lead sedentary lifestyles. Video games that incorporate physical activity, known as exergames, may help to motivate such children to engage in vigorous exercise, thus leading to more healthy lifestyles and reducing the likelihood of obesity. In this paper, we present a study of physical activity and motivation level for ten children with autism as they played an immersive virtual reality exergame that involved fast-paced full-body movement. Our results showed that most children, including non-verbal participants, were able to achieve vigorous activity levels, with several of them maintaining very high levels of exertion. Furthermore, the children reported high levels of enjoyment and indicated they would exercise more often if such games were routinely available. These encouraging findings suggest that exergames are a promising way to empower the families of children with autism with tools to help improve their child’s health and quality of life.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Vasylevska, Khrystyna; Kaufmann, Hannes; Bolas, Mark; Suma, Evan
Flexible Spaces: Dynamic Layout Generation for Infinite Walking in Virtual Environments Proceedings Article
In: IEEE Symposium on 3D User Interfaces, Orlando, FL, 2013.
@inproceedings{vasylevska_flexible_2013,
  author    = {Khrystyna Vasylevska and Hannes Kaufmann and Mark Bolas and Evan Suma},
  title     = {Flexible Spaces: Dynamic Layout Generation for Infinite Walking in Virtual Environments},
  booktitle = {IEEE Symposium on 3D User Interfaces},
  address   = {Orlando, FL},
  year      = {2013},
  date      = {2013-03-01},
  url       = {http://ict.usc.edu/pubs/Flexible%20Spaces-%20Dynamic%20Layout%20Generation%20for%20Infinite%20Walking%20in%20Virtual%20Environments.pdf},
  abstract  = {Redirected walking techniques enable natural locomotion through immersive virtual environments (VEs) that are larger than the real world workspace. Most existing techniques rely upon manipulating the mapping between physical and virtual motions while the layout of the environment remains constant. However, if the primary focus of the experience is on the virtual world’s content, rather than on its spatial layout, then the goal of redirected walking can be achieved through an entirely different strategy. In this paper, we introduce flexible spaces – a novel redirection technique that enables infinite real walking in virtual environments that do not require replication of real world layouts. Flexible spaces overcome the limitations and generalize the use of overlapping (impossible) spaces and change blindness by employing procedural layout generation. Our approach allows VE designers to focus on the content of the virtual world independent of the implementation details imposed by real walking, thereby making spatial manipulation techniques more practical for use in a variety of application domains.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Vasylevska, Khrystyna; Kaufmann, Hannes; Bolas, Mark; Suma, Evan
Flexible Spaces: A Virtual Step Outside of Reality Proceedings Article
In: IEEE Virtual Reality, Orlando, FL, 2013.
@inproceedings{vasylevska_flexible_2013-1,
  author    = {Khrystyna Vasylevska and Hannes Kaufmann and Mark Bolas and Evan Suma},
  title     = {Flexible Spaces: A Virtual Step Outside of Reality},
  booktitle = {IEEE Virtual Reality},
  address   = {Orlando, FL},
  year      = {2013},
  date      = {2013-03-01},
  url       = {http://ict.usc.edu/pubs/Flexible%20Spaces-%20A%20Virtual%20Step%20Outside%20of%20Reality.pdf},
  abstract  = {In this paper we introduce the concept of flexible spaces – a novel redirection technique that generalizes the use of overlapping (impossible) spaces and change blindness in an algorithm for dynamic layout generation. Flexible spaces is an impossible environment that violates the real world constancy in favor of providing the experience of seamless, unrestricted natural walking over a large-scale virtual environment (VE).},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Krum, David M.; Suma, Evan; Bolas, Mark
Spatial Misregistration of Virtual Human Audio: Implications of the Precedence Effect Proceedings Article
In: The 12th International Conference on Intelligent Virtual Agents (IVA), Santa Cruz, CA, 2012.
@inproceedings{krum_spatial_2012-1,
  title = {Spatial Misregistration of Virtual Human Audio: Implications of the Precedence Effect},
  author = {David M. Krum and Evan Suma and Mark Bolas},
  url = {http://ict.usc.edu/pubs/Spatial%20Misregistration%20of%20Virtual%20Human%20Audio-%20Implications%20of%20the%20Precedence%20Effect.pdf},
  year = {2012},
  date = {2012-09-01},
  booktitle = {The 12th International Conference on Intelligent Virtual Agents (IVA)},
  address = {Santa Cruz, CA},
  abstract = {Virtual humans are often presented as mixed reality characters projected onto screens that are blended into a physical setting. Stereo loudspeakers to the left and right of the screen are typically used for virtual human audio. Unfortunately, stereo pairs can produce an effect known as precedence, which causes users standing close to a particular loudspeaker to perceive a collapse of the stereo sound to that singular loudspeaker. We studied if this effect might degrade the presentation of a virtual character, or if this would be prevented by the ventriloquism effect. Our results demonstrate that from viewing distances common to virtual human scenarios, a movement equivalent to a single stride can induce a stereo collapse, creating conflicting perceived locations of the virtual human’s voice. Users also expressed a preference for a sound source collocated with the virtual human’s mouth rather than a stereo pair. These results provide several design implications for virtual human display systems.},
  internal-note = {NOTE(review): exact field-for-field duplicate of entry krum_spatial_2012 under a different key — delete one of the two and update any citations},
  keywords = {},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Krum, David M.; Suma, Evan; Bolas, Mark
Spatial Misregistration of Virtual Human Audio: Implications of the Precedence Effect Proceedings Article
In: The 12th International Conference on Intelligent Virtual Agents (IVA), Santa Cruz, CA, 2012.
@inproceedings{krum_spatial_2012,
  author    = {David M. Krum and Evan Suma and Mark Bolas},
  title     = {Spatial Misregistration of Virtual Human Audio: Implications of the Precedence Effect},
  booktitle = {The 12th International Conference on Intelligent Virtual Agents (IVA)},
  address   = {Santa Cruz, CA},
  year      = {2012},
  date      = {2012-09-01},
  url       = {http://ict.usc.edu/pubs/Spatial%20Misregistration%20of%20Virtual%20Human%20Audio-%20Implications%20of%20the%20Precedence%20Effect.pdf},
  abstract  = {Virtual humans are often presented as mixed reality characters projected onto screens that are blended into a physical setting. Stereo loudspeakers to the left and right of the screen are typically used for virtual human audio. Unfortunately, stereo pairs can produce an effect known as precedence, which causes users standing close to a particular loudspeaker to perceive a collapse of the stereo sound to that singular loudspeaker. We studied if this effect might degrade the presentation of a virtual character, or if this would be prevented by the ventriloquism effect. Our results demonstrate that from viewing distances common to virtual human scenarios, a movement equivalent to a single stride can induce a stereo collapse, creating conflicting perceived locations of the virtual human’s voice. Users also expressed a preference for a sound source collocated with the virtual human’s mouth rather than a stereo pair. These results provide several design implications for virtual human display systems.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Debevec, Paul; Graham, Paul; Busch, Jay; Bolas, Mark
A Cell Phone Based Platform for Facial Performance Capture Proceedings Article
In: International Conference and Exhibition on Computer Graphics and Interactive Techniques (SIGGRAPH), Los Angeles, CA, 2012.
@inproceedings{debevec_cell_2012,
  title = {A Cell Phone Based Platform for Facial Performance Capture},
  author = {Paul Debevec and Paul Graham and Jay Busch and Mark Bolas},
  url = {http://ict.usc.edu/pubs/A%20Cell%20Phone%20Based%20Platform%20for%20Facial%20Performance%20Capture.pdf},
  year = {2012},
  date = {2012-08-01},
  booktitle = {International Conference and Exhibition on Computer Graphics and Interactive Techniques (SIGGRAPH)},
  address = {Los Angeles, CA},
  abstract = {We demonstrate a novel light probe which can estimate the full dynamic range of a scene with multiple bright light sources. It places diffuse strips between mirrored spherical quadrants, effectively co-locating diffuse and mirrored probes to record the full dynamic range of illumination in a single exposure. From this image, we estimate the intensity of multiple saturated light sources by solving a linear system.},
  internal-note = {NOTE(review): this abstract is byte-identical to the one in graham_single-shot_2012 (A Single-Shot Light Probe) and does not match this title — likely a copy-paste error; replace with the correct abstract from the paper},
  keywords = {},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Jones, J. Adam; II, J. Edward Swan; Singh, Gurjot; Reddy, Sujan; Moser, Kenneth; Hua, Chunya; Ellis, Stephen R.
Improvements in Visually Directed Walking in Virtual Environments Cannot be Explained by Changes in Gait Alone Proceedings Article
In: ACM Symposium in Applied Perception (SAP), pp. 11–16, Los Angeles, CA, 2012.
@inproceedings{jones_improvements_2012,
  title = {Improvements in Visually Directed Walking in Virtual Environments Cannot be Explained by Changes in Gait Alone},
  author = {J. Adam Jones and J. Edward Swan II and Gurjot Singh and Sujan Reddy and Kenneth Moser and Chunya Hua and Stephen R. Ellis},
  url = {http://ict.usc.edu/pubs/Improvements%20in%20Visually%20Directed%20Walking%20in%20Virtual%20Environments%20Cannot%20be%20Explained%20by%20Changes%20in%20Gait%20Alone.pdf},
  year = {2012},
  date = {2012-08-01},
  booktitle = {ACM Symposium in Applied Perception (SAP)},
  pages = {11--16},
  address = {Los Angeles, CA},
  abstract = {A previous study indicated that peripheral visual information strongly affects the judgment of egocentric distances for users of immersive virtual environments. The experiment described in this document aimed to investigate if these effects could be explained in terms of changes in gait caused by visual information in the extreme periphery. Three conditions with varying degrees of peripheral occlusion were tested and participants’ walking characteristics measured. The results indicate that the improvements in distance judgments, as peripheral information increases, can only partially be explained in terms of gait modification, but likely involve both changes in the characteristics of gait and other spatial or movement parameters.},
  keywords = {},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Graham, Paul; Busch, Jay; Bolas, Mark; Debevec, Paul
A Single-Shot Light Probe Proceedings Article
In: International Conference and Exhibition on Computer Graphics and Interactive Techniques (SIGGRAPH), Los Angeles, CA, 2012.
@inproceedings{graham_single-shot_2012,
  author    = {Paul Graham and Jay Busch and Mark Bolas and Paul Debevec},
  title     = {A Single-Shot Light Probe},
  booktitle = {International Conference and Exhibition on Computer Graphics and Interactive Techniques (SIGGRAPH)},
  address   = {Los Angeles, CA},
  year      = {2012},
  date      = {2012-08-01},
  url       = {http://ict.usc.edu/pubs/A%20Single-Shot%20Light%20Probe.pdf},
  abstract  = {We demonstrate a novel light probe which can estimate the full dynamic range of a scene with multiple bright light sources. It places diffuse strips between mirrored spherical quadrants, effectively co-locating diffuse and mirrored probes to record the full dynamic range of illumination in a single exposure. From this image, we estimate the intensity of multiple saturated light sources by solving a linear system.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Jones, J. Adam; Suma, Evan; Krum, David M.; Bolas, Mark
Comparability of Narrow and Wide Field-Of-View Head-Mounted Displays for Medium-Field Distance Judgments Proceedings Article
In: ACM Symposium in Applied Perception (SAP), 2012.
@inproceedings{jones_comparability_2012,
  title = {Comparability of Narrow and Wide Field-Of-View Head-Mounted Displays for Medium-Field Distance Judgments},
  author = {J. Adam Jones and Evan Suma and David M. Krum and Mark Bolas},
  url = {http://ict.usc.edu/pubs/Comparability%20of%20Narrow%20and%20Wide%20Field-Of-View%20Head-Mounted%20Displays%20for%20Medium-Field%20Distance%20Judgments.pdf},
  year = {2012},
  date = {2012-08-01},
  booktitle = {ACM Symposium in Applied Perception (SAP)},
  abstract = {As wider field-of-view displays become more common, the question arises as to whether or not data collected on these displays are comparable to those collected with smaller field-of-view displays. This document describes a pilot study that aimed to address these concerns by comparing medium-field distance judgments in a 60° FOV display, a 150° FOV display, and a simulated 60° FOV within the 150° FOV display. The results indicate that participants performed similarly in both the actual and simulated 60° FOV displays. On average, participants in the 150° FOV display improved distance judgments by 13\% over the 60° FOV displays.},
  keywords = {},
  pubstate = {published},
  tppubtype = {inproceedings}
}
Suma, Evan; Lange, Belinda; Rizzo, Albert; Krum, David M.; Bolas, Mark
FAAST-R: Defining a Core Mechanic for Designing Gestural Interfaces Proceedings Article
In: The Dimensions of CHI: Touching and Designing 3D User Interfaces (3DCHI), Austin, TX, 2012.
@inproceedings{suma_faast-r_2012,
  author    = {Evan Suma and Belinda Lange and Albert Rizzo and David M. Krum and Mark Bolas},
  title     = {FAAST-R: Defining a Core Mechanic for Designing Gestural Interfaces},
  booktitle = {The Dimensions of CHI: Touching and Designing 3D User Interfaces (3DCHI)},
  address   = {Austin, TX},
  year      = {2012},
  date      = {2012-05-01},
  url       = {http://ict.usc.edu/pubs/FAAST-R-%20Defining%20a%20Core%20Mechanic%20for%20Designing%20Gestural%20Interfaces.pdf},
  abstract  = {We present a syntax for representing human gestures using rule sets that correspond to the basic spatial and temporal components of an action. These individual rules form primitives that, although conceptually simple on their own, can be combined both simultaneously and in sequence to form sophisticated gestural interactions. Along with a graphical user interface for custom gesture creation, this approach was incorporated into the Flexible Action and Articulated Skeleton Toolkit as a recognition module (FAAST-R). This toolkit can either be used to facilitate the development of motion-based user interfaces or to repurpose existing closed-source applications and games by mapping body motions to keyboard and mouse events. Thus, this work represents an important step towards making gestural interaction more accessible for practitioners, researchers, and hobbyists alike.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Lange, Belinda; Koenig, Sebastian; Chang, Chien-Yen; McConnel, Eric; Suma, Evan; Bolas, Mark; Rizzo, Albert
Designing informed game-based rehabilitation tasks leveraging advances in virtual reality Journal Article
In: Disability and Rehabilitation, 2012.
@article{lange_designing_2012,
  title = {Designing informed game-based rehabilitation tasks leveraging advances in virtual reality},
  author = {Belinda Lange and Sebastian Koenig and Chien-Yen Chang and Eric McConnel and Evan Suma and Mark Bolas and Albert Rizzo},
  url = {http://ict.usc.edu/pubs/Designing%20informed%20game-bases%20rehabilitation%20tasks%20leveraging%20advances%20in%20virtual%20reality.pdf},
  year = {2012},
  date = {2012-04-01},
  journal = {Disability and Rehabilitation},
  abstract = {This paper details a brief history and rationale for the use of virtual reality (VR) technology for clinical research and intervention, and then focuses on game-based VR applications in the area of rehabilitation. An analysis of the match between rehabilitation task requirements and the assets available with VR technology is presented. Key messages and implications: Low-cost camera-based systems capable of tracking user behavior at sufficient levels for game-based virtual rehabilitation activities are currently available for in- home use. Authoring software is now being developed that aims to provide clinicians with a usable toolkit for leveraging this technology. This will facilitate informed professional input on software design, development and application to ensure safe and effective use in the rehabilitation context. Conclusion: The field of rehabilitation generally stands to benefit from the continual advances in VR technology, concomitant system cost reductions and an expanding clinical research literature and knowledge base. Home-based activity within VR systems that are low-cost, easy to deploy and maintain, and meet the requirements for “good” interactive rehabilitation tasks could radically improve users’ access to care, adherence to prescribed training and subsequently enhance functional activity in everyday life in clinical populations.},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
Filter
Sorry, no publications matched your criteria.