Publications


Demonstrations / Posters
Rompapas, Damien Constantine; Campbell, James; Barnes, Eleanor; Fraser, Jack Douglas; Twynham, Bradley; Pham, Xuan Tien; Hien, Nguyen Thu; Lugtenberg, Geert; Yoshinari, Nishiki; Akkad, Sarah Al; Taylor, Andrew Gavin; Billinghurst, Mark
RockemBot Boxing: Facilitating Long-Distance Real-Time Collaborative Interactions with Limited Hand Tracking Volumes Presentation
30.11.2019.
@misc{Rompapas2018b,
  title         = {{RockemBot} Boxing: Facilitating Long-Distance Real-Time Collaborative Interactions with Limited Hand Tracking Volumes},
  author        = {Damien Constantine Rompapas and James Campbell and Eleanor Barnes and Jack Douglas Fraser and Bradley Twynham and Xuan Tien Pham and Nguyen Thu Hien and Geert Lugtenberg and Nishiki Yoshinari and Sarah {Al Akkad} and Andrew Gavin Taylor and Mark Billinghurst},
  url           = {https://beer-labs.net/wp-content/uploads/2021/12/RockemBot_Boxing__ISMAR_Submission.pdf},
  year          = {2020},
  abstract      = {This demonstration showcases a boxing game that facilitates interactions between two users over a larger-than-arms reach distance. In RockemBot boxing, users stand two meters apart, and use virtual fists as a means of knocking the opposing player’s virtual head in an intense matchup. By first re-mapping the user’s hand tracked input to a virtual model, and representing the user’s in the collaborative space as a semi-attached avatar, we allow real-time high fidelity interactions.},
  keywords      = {Augmented Reality, Human Computer Interaction, Games and Entertainment Design},
  pubstate      = {published},
  tppubtype     = {presentation},
  internal-note = {date/urldate were the invalid value 2020-00-00 and have been dropped (exact presentation date unknown -- TODO confirm). Abstract phrase "representing the user's in the collaborative space" looks like a dropped word (hands?) -- verify against the PDF.}
}
Keywords: Augmented Reality, Human Computer Interaction, Games and Entertainment Design
Reddy, G S Rajshekar; Rompapas, Damien Constantine
VisuoTouch: Enabling Haptic Feedback in Augmented Reality through Visual Cues Presentation
30.11.2019.
@misc{Rompapas2018e,
  title         = {{VisuoTouch}: Enabling Haptic Feedback in Augmented Reality through Visual Cues},
  author        = {G S Rajshekar Reddy and Damien Constantine Rompapas},
  url           = {https://beer-labs.net/wp-content/uploads/2021/12/VisuoTouch__Enabling_Haptic_Feedback_in_Augmented_Reality_through_Visual_Cues.pdf},
  year          = {2020},
  abstract      = {The rapid advancements in Augmented Reality (AR) have recently included hand tracking frameworks that allow a system to understand the placement of a user’s hand in virtual space, allowing for hand interactions with AR content. However, the lack of haptic feedback leaves the user confounded on whether or not their hand has successfully collided with the virtual content. Furthermore, in poke-to-select interactions, the user is unaware that they have triggered the selection process. In this demo, we showcase VisuoTouch, a system that enables the semblance of haptic feedback by providing visual cues. The cue illuminates the spot where the finger collides with the object. If the user continues to push through, a virtual finger is visualised as bending against the object, following real-world physics. We hope that by demonstrating this interesting approach, we can facilitate further exploration in the effectiveness and usefulness of these visual-haptic cues.},
  keywords      = {},
  pubstate      = {published},
  tppubtype     = {presentation},
  internal-note = {date/urldate were the invalid value 2020-00-00 and have been dropped (exact presentation date unknown -- TODO confirm).}
}
Conferences/Short Papers
Rompapas, Damien Constantine; Campbell, James; Ta, Vincent; Cassinelli, Alvaro
Project Ariel: An Open Source Augmented Reality Headset for Industrial Applications Conference
2021.
@inproceedings{Rompapas2018d,
  title         = {Project {Ariel}: An Open Source Augmented Reality Headset for Industrial Applications},
  author        = {Damien Constantine Rompapas and James Campbell and Vincent Ta and Alvaro Cassinelli},
  url           = {https://dl.acm.org/doi/abs/10.1145/3460418.3479359
https://beer-labs.net/wp-content/uploads/2021/12/Project_Ariel__An_Open_Source_Augmented_Reality_Headset_for_Industrial_Applications.pdf},
  doi           = {10.1145/3460418.3479359},
  booktitle     = {Adjunct Proceedings of the 2021 ACM International Joint Conference on Pervasive and Ubiquitous Computing and Proceedings of the 2021 ACM International Symposium on Wearable Computers},
  year          = {2021},
  date          = {2021-09-21},
  urldate       = {2021-09-21},
  abstract      = {Some of the biggest challenges in applying Augmented Reality (AR) technologies to the industry floor are in the form factor, and safety requirements of the head worn display. This includes alleviating issues such as peripheral view occlusion, and adaptation to personal protective equipment. In this work we present the design of Project Ariel, an Open Source 3D printable display specifically designed for use in industrial environments. It is our hope that with this technology, the average tradesman can utilize the powerful visualizations AR has to offer, significantly improving their daily work flow.},
  keywords      = {Augmented Reality, Headset Design, Optical See-Through, Open Source},
  pubstate      = {published},
  tppubtype     = {conference},
  internal-note = {The original url field had the two URLs run together with no separator; split, and the bare DOI extracted into its own field. booktitle inferred from DOI 10.1145/3460418.3479359 (UbiComp/ISWC '21 Adjunct) -- verify.}
}
KEYWORDS
Augmented Reality; Headset Design; Optical See-Through; Open Source;
Rompapas, Damien Constantine; Quiros, Daniel Flores; Rodda, Charlton; Brown, Bryan Christopher; Zerkin, Noah Benjamin; Cassinelli, Alvaro
Project Esky: an Open Source Software Framework for High Fidelity Extended Reality Conference
2021.
@inproceedings{Rompapas2018c,
  title         = {Project {Esky}: an Open Source Software Framework for High Fidelity Extended Reality},
  author        = {Damien Constantine Rompapas and Daniel Flores Quiros and Charlton Rodda and Bryan Christopher Brown and Noah Benjamin Zerkin and Alvaro Cassinelli},
  url           = {https://beer-labs.net/wp-content/uploads/2021/12/Esky__ISMAR_Submission.pdf},
  year          = {2021},
  date          = {2021-05-08},
  urldate       = {2021-05-08},
  abstract      = {This demonstration showcases a complete Open-Source Augmented Reality (AR) modular platform capable of high fidelity natural handinteractions with virtual content, high field of view, and spatial mapping for environment interactions. We do this via several live desktop demonstrations. Finally, included in this demonstration is a completed open source schematic, allowing anyone interested in utilizing our proposed platform to engage with high fidelity AR. It is our hope that the work described in this demo will be a stepping stone towards bringing high-fidelity AR content to researchers and commodity users alike.},
  keywords      = {Augmented Reality, High Fidelity, Collaborative Augmented Reality, Open Source Platforms},
  pubstate      = {published},
  tppubtype     = {conference},
  internal-note = {booktitle is missing; the PDF filename suggests an IEEE ISMAR 2021 submission -- TODO confirm venue and add booktitle. "handinteractions" in the abstract looks like a lost space from PDF extraction -- verify against the PDF before editing the quoted text.}
}
Keywords: Augmented Reality, High Fidelity, Collaborative Augmented Reality, Open Source Platforms
Rompapas, Damien; Sandor, Christian; Plopski, Alexander; Daniel Saakes, Dong Hyeok Yun; Taketomi, Takafumi; Kato, Hirokazu
HoloRoyale: A Large Scale High Fidelity Augmented Reality Game Conference
2018, ISBN: 978-1-4503-5949-8/18/10.
@inproceedings{Rompapas2018,
  title         = {{HoloRoyale}: A Large Scale High Fidelity Augmented Reality Game},
  author        = {Damien Rompapas and Christian Sandor and Alexander Plopski and Daniel Saakes and Dong Hyeok Yun and Takafumi Taketomi and Hirokazu Kato},
  url           = {https://beer-labs.net/wp-content/uploads/2021/12/HoloRoyale___UIST.pdf},
  doi           = {10.1145/3266037.3271637},
  isbn          = {978-1-4503-5949-8},
  year          = {2018},
  date          = {2018-10-11},
  urldate       = {2018-10-11},
  abstract      = {Recent years saw an explosion in Augmented Reality (AR) experiences for consumers. These experiences can be classified based on the scale of the interactive area (room vs city/global scale), or the fidelity of the experience (high vs low) [4]. Experiences that target large areas, such as campus or world scale [7, 6], commonly have only rudimentary interactions with the physical world, and suffer from registration errors and jitter. We classify these experiences as large scale and low fidelity. On the other hand, various room sized experiences [5, 8] feature realistic interaction of virtual content with the real world. We classify these experiences as small scale and high fidelity.
Our work is the first to explore the domain of large scale high fidelity (LSHF) AR experiences. We build upon the small scale high fidelity capabilities of the Microsoft HoloLens to allow LSHF interactions. We demonstrate the capabilities of our system with a game specifically designed for LSHF interactions, handling many challenges and limitations unique to the domain of LSHF AR through the game design.
Our contributions are twofold: The lessons learned during the design and development of a system capable of LSHF AR interactions. Identification of a set of reusable game elements specific to LSHF AR, including mechanisms for addressing spatio-temporal inconsistencies and crowd control. We believe our contributions will be fully applicable not only to games, but all LSHF AR experiences.},
  keywords      = {},
  pubstate      = {published},
  tppubtype     = {conference},
  internal-note = {Fixed: comma between "Daniel Saakes" and "Dong Hyeok Yun" replaced with "and" (it made BibTeX parse them as one name); ACM copyright suffix "/18/10" stripped from isbn; pasted "INTRODUCTION" section heading removed from the abstract. "HoloRoyale" spelling taken from the thesis abstract elsewhere in this file. booktitle (UIST '18 Adjunct, per the DOI) still missing -- TODO confirm and add.}
}
Recent years saw an explosion in Augmented Reality (AR) experiences for consumers. These experiences can be classified based on the scale of the interactive area (room vs city/global scale) , or the fidelity of the experience (high vs low) [4]. Experiences that target large areas, such as campus or world scale [7, 6], commonly have only rudimentary interactions with the physical world, and suffer from registration errors and jitter. We classify these experiences as large scale and low fidelity. On the other hand, various room sized experiences [5, 8] feature realistic interaction of virtual content with the real world. We classify these experiences as small scale and high fidelity.
Our work is the first to explore the domain of large scale high fidelity (LSHF) AR experiences. We build upon the small scale high fidelity capabilities of the Microsoft HoloLens to allow LSHF interactions. We demonstrate the capabilities of our system with a game specifically designed for LSHF
interactions, handling many challenges and limitations unique to the domain of LSHF AR through the game design.
Our contributions are twofold:
The lessons learned during the design and development of a system capable of LSHF AR interactions.
Identification of a set of reusable game elements specific to LSHF AR, including mechanisms for addressing spatio-temporal inconsistencies and crowd control. We believe our contributions will be fully applicable not only to games, but all LSHF AR experiences.
Journals
Rompapas, Damien Constantine; Rovira, Aitor; Plopski, Alexander; Sandor, Christian; Taketomi, Takefumi; Kato, Hirokazu
EyeAR: Refocusable Augmented Reality Content through Eye Measurements Journal Article
In: Multimodal Technologies and Interaction, 1 (4), article 22, 2017.
@article{Rompapas2017,
  title         = {{EyeAR}: Refocusable Augmented Reality Content through Eye Measurements},
  author        = {Damien Constantine Rompapas and Aitor Rovira and Alexander Plopski and Christian Sandor and Takefumi Taketomi and Hirokazu Kato},
  url           = {https://beer-labs.net/wp-content/uploads/2022/01/mti-01-00022-v2.pdf},
  doi           = {10.3390/mti1040022},
  year          = {2017},
  date          = {2017-09-26},
  urldate       = {2017-09-26},
  journal       = {Multimodal Technologies and Interaction},
  volume        = {1},
  number        = {4},
  pages         = {22},
  abstract      = {Augmented Reality (AR) superimposes computer graphics (CG) onto a user’s view of the real world. A key quality problem in this field is to achieve coherence between reality and CG when the user’s eyes refocus or change pupil size. We designed and evaluated a display that improves coherence by measuring the user’s eye state and continuously adapting CG accordingly. Our tabletop prototype emulates an Optical See-Through Head-Mounted Display, a common AR display device. In our evaluation, participants observed three pillars at different depths. We then challenged them to identify a virtual pillar among the three while freely refocusing their eyes. Results show that our design significantly improved realism. Compared to Light Field Displays, our design aims to simplify display-optics while providing similar quality. We could only partially achieve this goal. We discuss the lessons we learned and how we plan to overcome the remaining challenges. The experimental protocol from our evaluation is useful for display developers as it can be used to measure the coherence of a display.},
  keywords      = {},
  pubstate      = {published},
  tppubtype     = {article},
  internal-note = {Resolver prefix stripped from doi. Journal name, issue number (4) and article number (22, was misfiled as number=22/pages=9) corrected per DOI 10.3390/mti1040022 -- verify against the publisher page.}
}
PhD Thesis
Rompapas, Damien Constantine
Designing for Large Scale, High Fidelity, Collaborative Augmented Reality Experiences PhD Thesis
2019.
@phdthesis{nokey,
  title         = {Designing for Large Scale, High Fidelity, Collaborative Augmented Reality Experiences},
  author        = {Damien Constantine Rompapas},
  school        = {Nara Institute of Science and Technology},
  url           = {https://beer-labs.net/wp-content/uploads/2022/01/dthesis_Damien_Rompapas.pdf
https://www.youtube.com/watch?v=sn1MhIA0OTo},
  year          = {2019},
  date          = {2019-11-01},
  abstract      = {In recent years, there has been an increasing amount of Collaborative Augmented Reality (CAR) experiences, classifiable by the deployed scale and the fidelity of the experience. In this thesis, I first explore the LSHF CAR design space, drawing on technical implementations and design aspects from AR and video games. I then create and implement a software architecture that improves the accuracy of synchronized poses between multiple users. Finally, I apply my target experience and technical implementation to the explored design space. A core design component of HoloRoyale is the use of visual repellers as user redirection elements to guide players away from undesired areas. To evaluate the effectiveness of the employed visual repellers in a LSHF CAR context I conducted a user study, deploying HoloRoyale in a 12,500 m2 area. The results from the user study suggest that visual repellers are effective user redirection elements that do not significantly impact the user’s overall immersion. Finally this thesis focuses on the visual consistency component of fidelity, expanding on EyeAR: refocusable content on Optical See-Through Head Mounted Displays (OST-HMDs) by evaluating the fidelity of refocusable content displayed on a single plane OST-HMD via a modified Turing Test. The results from the evaluation show that refocusable content improves the fidelity of OST-HMD experiences. This work is the first to explore the domain of LSHF CAR and provides insight into designing experiences in other AR domains.},
  keywords      = {},
  pubstate      = {published},
  tppubtype     = {phdthesis},
  internal-note = {Citation key "nokey" is a placeholder; kept to avoid breaking existing citations -- consider renaming to Rompapas2019phd. school inferred from the co-author affiliations of the author's other papers (NAIST) -- confirm. Abstract had hard mid-sentence line wraps (joined), "Touring Test" (-> Turing Test), "via. a" (-> via a), and "12.500m2" (-> 12,500 m2).}
}
fidelity of the experience. In this thesis, I first explore the LSHF CAR design space, drawing on technical implementations and design aspects from AR and
video games. I then create and implement a software architecture that improves the accuracy of synchronized poses between multiple users. Finally, I apply my
target experience and technical implementation to the explored design space. A core design component of HoloRoyale is the use of visual repellers as user
redirection elements to guide players away from undesired areas. To evaluate the effectiveness of the employed visual repellers in a LSHF CAR context I
conducted a user study, deploying HoloRoyale in a 12,500 m2 area. The results from the user study suggest that visual repellers are effective user redirection
elements that do not significantly impact the user’s overall immersion. Finally this thesis focuses on the visual consistency component of fidelity, expanding on
EyeAR: refocusable content on Optical See-Through Head Mounted Displays (OST-HMDs) by evaluating the fidelity of refocusable content displayed on a
single plane OST-HMD via a modified Turing Test. The results from the evaluation show that refocusable content improves the fidelity of OST-HMD
experiences. This work is the first to explore the domain of LSHF CAR and provides insight into designing experiences in other AR domains.