Papers! Prototypes! Year 1 of ViAjeRo…

It has not been an easy first year for ViAjeRo – the world has been turned upside down by the Covid-19 pandemic, and some of the fantastic research we wanted to pursue in cars and planes has had to be postponed. Despite these setbacks, however, we have been making exciting progress towards some of ViAjeRo’s key aims! New people have joined the project, including Daniel Pires de Sá Medeiros, who has come on board as an RA looking at passenger interaction techniques. On the papers front, we’ve published on the key challenges in passenger MR [1], workspaces suited to confined spaces [2] (see below, presented at ACM UIST), the feasibility of neurostimulation [3, 4], ethical challenges in mixed reality [5], and auditory mixed reality [6, 7].

We’ve given talks about the project to the likes of the Waterkant festival, Audi, and BBC R&D, and have seen great interest in the concept of passenger mixed reality. Development-wise, here’s a sneak peek of our in-car platform being tested in Glasgow, complete with accurate position and orientation tracking of the vehicle…

On the neurostimulation side, Gang Li and Frank Pollick have been building the platform needed to explore the physiological signals that might indicate the onset of motion sickness…

And on the lab side, we recently took delivery of a RotoVR chair, which will enable us to explore motion sickness from the safety of a lab environment!

We hope that in the coming year society can start to get back to normal and beat Covid-19, and we’ll be pursuing more passenger MR research so that, when travel resumes, people can make the most of their travel time!

[1] [pdf] [doi] M. McGill, J. Williamson, A. Ng, F. Pollick, and S. Brewster, “Challenges in passenger use of mixed reality headsets in cars and other transportation,” Virtual Reality, 2019.
[Bibtex]
@article{enlighten205513,
month = {December},
title = {Challenges in passenger use of mixed reality headsets in cars and other transportation},
author = {Mark McGill and Julie Williamson and Alexander Ng and Frank Pollick and Stephen Brewster},
publisher = {Springer},
year = {2019},
note = {This research was funded in part by the EPSRC IAA (303740)
and ESRC IAA (77563/1) joint project ``CarVR: Immersion in the Journey''. This project also received funding from the European Research
Council (ERC) under the European Union's Horizon 2020 research
and innovation programme (Grant Agreement No. 835197 - ViAjeRo).},
journal = {Virtual Reality},
url = {http://eprints.gla.ac.uk/205513/},
abstract = {This paper examines key challenges in supporting passenger use of augmented and virtual reality headsets in transit. These headsets will allow passengers to break free from the restraints of physical displays placed in constrained environments such as cars, trains and planes. Moreover, they have the potential to allow passengers to make better use of their time by making travel more productive and enjoyable, supporting both privacy and immersion. However, there are significant barriers to headset usage by passengers in transit contexts. These barriers range from impediments that would entirely prevent safe usage and function (e.g. motion sickness) to those that might impair their adoption (e.g. social acceptability). We identify the key challenges that need to be overcome and discuss the necessary resolutions and research required to facilitate adoption and realize the potential advantages of using mixed reality headsets in transit.},
doi = {10.1007/s10055-019-00420-x}
}
[2] [pdf] [doi] M. McGill, A. Kehoe, E. Freeman, and S. Brewster, “Expanding the bounds of seated virtual workspaces,” ACM Trans. Comput.-Hum. Interact., vol. 27, iss. 3, 2020.
[Bibtex]
@article{10.1145/3380959,
author = {McGill, Mark and Kehoe, Aidan and Freeman, Euan and Brewster, Stephen},
title = {Expanding the Bounds of Seated Virtual Workspaces},
year = {2020},
issue_date = {June 2020},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
volume = {27},
number = {3},
issn = {1073-0516},
url = {https://doi.org/10.1145/3380959},
doi = {10.1145/3380959},
abstract = {Mixed Reality (MR), Augmented Reality (AR) and Virtual Reality (VR) headsets can improve upon existing physical multi-display environments by rendering large, ergonomic virtual display spaces whenever and wherever they are needed. However, given the physical and ergonomic limitations of neck movement, users may need assistance to view these display spaces comfortably. Through two studies, we developed new ways of minimising the physical effort and discomfort of viewing such display spaces. We first explored how the mapping between gaze angle and display position could be manipulated, helping users view wider display spaces than currently possible within an acceptable and comfortable range of neck movement. We then compared our implicit control of display position based on head orientation against explicit user control, finding significant benefits in terms of user preference, workload and comfort for implicit control. Our novel techniques create new opportunities for productive work by leveraging MR headsets to create interactive wide virtual workspaces with improved comfort and usability. These workspaces are flexible and can be used on-the-go, e.g., to improve remote working or make better use of commuter journeys.},
journal = {ACM Trans. Comput.-Hum. Interact.},
month = may,
articleno = {13},
numpages = {40},
keywords = {Virtual reality, displays, virtual displays, productivity, mixed reality, augmented reality, workspaces, rotational gain, virtual desktops, multi-monitor, display space}
}
[3] [pdf] [doi] G. Li, M. McGill, S. Brewster, and F. Pollick, “A review of electrostimulation-based cybersickness mitigations,” in 2020 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR), 2020.
[Bibtex]
@inproceedings{li2020review,
title={A Review of Electrostimulation-based Cybersickness Mitigations},
author={Li, Gang and McGill, Mark and Brewster, Stephen and Pollick, Frank},
year={2020},
url={https://eprints.gla.ac.uk/224088/},
booktitle={2020 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR)},
doi={10.1109/AIVR50618.2020.00034}}
[4] [pdf] [doi] (Honorable Mention Award) G. Li, M. Varela, A. Francisco Habib, Q. Zhang, M. McGill, S. Brewster, and F. Pollick, “Exploring the feasibility of mitigating VR-HMD-induced cybersickness using cathodal transcranial direct current stimulation,” in 2020 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR), 2020.
[Bibtex]
@inproceedings{li2020feasibility,
title={Exploring the Feasibility of Mitigating VR-HMD-Induced Cybersickness Using Cathodal Transcranial Direct Current Stimulation},
author={Li, Gang and Macía Varela and Francisco, Habib, A and Zhang, Q and McGill, Mark and Brewster, Stephen and Pollick, Frank},
year={2020},
url={https://eprints.gla.ac.uk/224089/},
booktitle={2020 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR)},
doi={10.1109/AIVR50618.2020.00030},
url={https://eprints.gla.ac.uk/224089/3/224089.pdf},
honorable = {yep}
}
[5] [pdf] [doi] J. Gugenheimer, M. McGill, S. Huron, C. Mai, J. Williamson, and M. Nebeling, “Exploring potentially abusive ethical, social and political implications of mixed reality research in HCI,” in Extended Abstracts of the 2020 CHI Conference on Human Factors in Computing Systems, New York, NY, USA, 2020, pp. 1–8.
[Bibtex]
@inproceedings{10.1145/3334480.3375180,
author = {Gugenheimer, Jan and McGill, Mark and Huron, Samuel and Mai, Christian and Williamson, Julie and Nebeling, Michael},
title = {Exploring Potentially Abusive Ethical, Social and Political Implications of Mixed Reality Research in HCI},
year = {2020},
isbn = {9781450368193},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
url = {https://doi.org/10.1145/3334480.3375180},
doi = {10.1145/3334480.3375180},
abstract = {In recent years, Mixed Reality (MR) headsets have increasingly made advances in terms of capability, affordability and end-user adoption, slowly becoming everyday technology. HCI research typically explores positive aspects of these technologies, focusing on interaction, presence and immersive experiences. However, such technological advances and paradigm shifts often fail to consider the "dark patterns", with potential abusive scenarios, made possible by new technologies (cf. smartphone addiction, social media anxiety disorder). While these topics are getting recent attention in related fields and with the general population, this workshop is aimed at starting an active exploration of abusive, ethical, social and political scenarios of MR research inside the HCI community. With an HCI lens, workshop participants will engage in critical reviews of emerging MR technologies and applications and develop a joint research agenda to address them.},
booktitle = {Extended Abstracts of the 2020 CHI Conference on Human Factors in Computing Systems},
pages = {1–8},
numpages = {8},
keywords = {abuse, mixed reality, design fiction, ethics},
location = {Honolulu, HI, USA},
series = {CHI EA '20}
}
[6] [pdf] [doi] M. McGill, S. Brewster, D. McGookin, and G. Wilson, “Acoustic transparency and the changing soundscape of auditory mixed reality,” in Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems, New York, NY, USA, 2020, pp. 1–16.
[Bibtex]
@inproceedings{enlighten208325,
author = {McGill, Mark and Brewster, Stephen and McGookin, David and Wilson, Graham},
title = {Acoustic Transparency and the Changing Soundscape of Auditory Mixed Reality},
year = {2020},
isbn = {9781450367080},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
url = {http://eprints.gla.ac.uk/208325/},
doi = {10.1145/3313831.3376702},
abstract = {Auditory headsets capable of actively or passively intermixing both real and virtual sounds are in-part acoustically transparent. This paper explores the consequences of acoustic transparency, both on the perception of virtual audio content, given the presence of a real-world auditory backdrop, and more broadly in facilitating a wearable, personal, private, always-available soundspace. We experimentally compare passive acoustically transparent, and active noise cancelling, orientation-tracked auditory headsets across a range of content types, both indoors and outdoors for validity. Our results show differences in terms of presence, realness and externalization for select content types. Via interviews and a survey, we discuss attitudes toward acoustic transparency (e.g. being perceived as safer), the potential shifts in audio usage that might be precipitated by adoption, and reflect on how such headsets and experiences fit within the area of Mixed Reality.},
booktitle = {Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems},
pages = {1–16},
numpages = {16},
keywords = {acoustic transparency, audio, mixed reality},
location = {Honolulu, HI, USA},
series = {CHI '20}
}
[7] [pdf] [doi] M. McGill, F. Mathis, M. Khamis, and J. Williamson, “Augmenting TV viewing using acoustically transparent auditory headsets,” in ACM International Conference on Interactive Media Experiences, New York, NY, USA, 2020, pp. 34–44.
[Bibtex]
@inproceedings{10.1145/3391614.3393650,
author = {McGill, Mark and Mathis, Florian and Khamis, Mohamed and Williamson, Julie},
title = {Augmenting TV Viewing Using Acoustically Transparent Auditory Headsets},
year = {2020},
isbn = {9781450379762},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
url = {https://doi.org/10.1145/3391614.3393650},
doi = {10.1145/3391614.3393650},
abstract = {This paper explores how acoustically transparent auditory headsets can improve TV viewing by intermixing headset and TV audio, facilitating personal, private auditory enhancements and augmentations of TV content whilst minimizing occlusion of the sounds of reality. We evaluate the impact of synchronously mirroring select audio channels from the 5.1 mix (dialogue, environmental sounds, and the full mix), and selectively augmenting TV viewing with additional speech (e.g. Audio Description, Directors Commentary, and Alternate Language). For TV content, auditory headsets enable better spatialization and more immersive, enjoyable viewing; the intermixing of TV and headset audio creates unique listening experiences; and private augmentations offer new ways to (re)watch content with others. Finally, we reflect on how these headsets might facilitate more immersive augmented TV viewing experiences within reach of consumers.},
booktitle = {ACM International Conference on Interactive Media Experiences},
pages = {34–44},
numpages = {11},
keywords = {Augmented Reality, TV, Mixed Reality, Audio},
location = {Cornella, Barcelona, Spain},
series = {IMX '20}
}