Publications
Below is a searchable list of publications by the projects of the Priority Program.
Saad, Alia; Winterhalter, Verena; Strauss, Marvin; Schneegass, Stefan
“I Feel More Worried About My Privacy” Public Perceptions of Biometric Traces in Everyday Interactions Proceedings Article
In: Proceedings of the Extended Abstracts of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722813.
Abstract | Links | BibTeX | Tags: behavioral biometrics, biometric traces, privacy risks, usable privacy, user awareness
@inproceedings{10.1145/3772363.3798601,
title = {``I Feel More Worried About My Privacy'' Public Perceptions of Biometric Traces in Everyday Interactions},
author = {Alia Saad and Verena Winterhalter and Marvin Strauss and Stefan Schneegass},
url = {https://doi.org/10.1145/3772363.3798601},
doi = {10.1145/3772363.3798601},
isbn = {9798400722813},
year = {2026},
date = {2026-01-01},
booktitle = {Proceedings of the Extended Abstracts of the 2026 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '26},
abstract = {People leave behind biometric traces through everyday interactions, often without fully understanding their implications. While biometric technologies increasingly rely on subtle behavioral and interaction-based signals, little is known about how people perceive these traces in daily life. We present findings from an exploratory online study (N = 120) that used short, scenario-based videos to illustrate situations in which biometric traces may be inadvertently exposed, including fingerprints, gait, thermal residues, and interaction patterns in virtual environments. We examine which traces people recognize, how concerned they are about potential misuse, and how brief exposure to such scenarios shapes privacy perception. Results show that awareness and concern frequently diverge. Participants were familiar with visible, well-known biometrics, yet less aware of emerging or interaction-borne traces. Importantly, exposure to the scenarios prompted several participants to reconsider their privacy assumptions.},
keywords = {behavioral biometrics, biometric traces, privacy risks, usable privacy, user awareness},
pubstate = {published},
tppubtype = {inproceedings}
}
Pfützenreuter, Niklas; Liebers, Carina; Goedicke, David; Degraen, Donald; Gruenefeld, Uwe; Schneegass, Stefan
Eye Want It All! Investigating Eye Tracking as Implicit Support for Generative Inpainting Proceedings Article
In: Proceedings of the Extended Abstracts of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722813.
Abstract | Links | BibTeX | Tags: eye tracking, Generative Artificial Intelligence, Image Generation
@inproceedings{10.1145/3772363.3799314,
  title     = {Eye Want It All! Investigating Eye Tracking as Implicit Support for Generative Inpainting},
  author    = {Niklas Pfützenreuter and Carina Liebers and David Goedicke and Donald Degraen and Uwe Gruenefeld and Stefan Schneegass},
  url       = {https://doi.org/10.1145/3772363.3799314},
  doi       = {10.1145/3772363.3799314},
  isbn      = {9798400722813},
  year      = {2026},
  date      = {2026-01-01},
  booktitle = {Proceedings of the Extended Abstracts of the 2026 CHI Conference on Human Factors in Computing Systems},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  series    = {CHI EA '26},
  abstract  = {Users often struggle to use Generative Artificial Intelligence (GenAI) models to generate a desired image, as controlling them solely with prompts is difficult. Current solutions to this problem, such as adding conditional controls, require users to provide explicit input, which can be tedious. To avoid depending on additional explicit input, this paper explores what implicit gaze behavior tells about user intentions when viewing generated images. In our user study (N = 16), we evaluated the correlation between gaze behavior and user annotations, showing that users looked longer at areas they wanted to regenerate. While our research is the first step, we believe our work can pave the way for incorporating implicit user input into interactions with GenAI systems.},
  keywords  = {eye tracking, Generative Artificial Intelligence, Image Generation},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Sabnis, Nihar; Zenner, André; Løvaas, Erik Peralta; Weiss, Marco; Bianchi, Andrea; Strohmeier, Paul
Connected Material Experiences using Bimanual Vibrotactile Crosstalk in Virtual Reality Proceedings Article
In: Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722783.
Abstract | Links | BibTeX | Tags: bimanual vibrotactile feedback, Consumer VR, material perception, motion-coupled vibrations, virtual reality
@inproceedings{10.1145/3772318.3790767,
title = {Connected Material Experiences using Bimanual Vibrotactile Crosstalk in Virtual Reality},
author = {Nihar Sabnis and André Zenner and Erik Peralta Løvaas and Marco Weiss and Andrea Bianchi and Paul Strohmeier},
url = {https://doi.org/10.1145/3772318.3790767},
doi = {10.1145/3772318.3790767},
isbn = {9798400722783},
year = {2026},
date = {2026-01-01},
booktitle = {Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI '26},
abstract = {Perceiving material properties such as elasticity, flexibility, and torsion is inherently bimanual, as we rely on the relative motion of our hands to form a unified sense of materiality. Yet, most vibrotactile material rendering approaches are limited to a single hand or finger. While prior work has explored bimanual haptic interfaces, most depend on specialized hardware for specific interactions. In this paper, we demonstrate design strategies to support bimanual material exploration through motion-coupled vibrotactile feedback. Our technique introduces variable crosstalk between the controllers’ vibration to evoke connectedness, making two unconnected devices feel as though they manipulate a single object. The technique generalizes motion-coupled feedback approaches beyond previous single-point explorations. Through two user studies, we show that this approach (1) significantly enhances perceived connectedness and (2) conveys distinct material qualities such as elasticity and torsion. Finally, we present Dvihastundefinedya, an authoring tool for designing connected bimanual experiences in virtual reality.},
internal-note = {abstract contains a garbled tool name ("Dvihastundefinedya" -- a Unicode character was lost in export); TODO restore the correct name from the published paper},
keywords = {bimanual vibrotactile feedback, Consumer VR, material perception, motion-coupled vibrations, virtual reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Karpashevich, Pavel; Höök, Kristina; Bardzell, Jeffrey
Inside the Mirror, Wearing My own Body: Why UX Should Engage Monstrous Experiences Proceedings Article
In: Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722783.
Abstract | Links | BibTeX | Tags: empirical studies, interaction design, monstrous experiences, user experience design, wearable computers
@inproceedings{10.1145/3772318.3790753,
title = {Inside the Mirror, Wearing My own Body: Why {UX} Should Engage Monstrous Experiences},
author = {Pavel Karpashevich and Kristina Höök and Jeffrey Bardzell},
url = {https://doi.org/10.1145/3772318.3790753},
doi = {10.1145/3772318.3790753},
isbn = {9798400722783},
year = {2026},
date = {2026-01-01},
booktitle = {Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI '26},
abstract = {While engaging with four different wearable systems, we unexpectedly encountered felt experiences that resisted articulation and defied conventional classification. They were neither pleasant nor unpleasant, and yet both; neither comforting nor frightening, and yet both; neither recognizably human-like nor machinic, and yet both. Such ambiguous experiences might have gone unnoticed had we not attended to their somatic, felt dimensions. Existing user experience frameworks offered little guidance in making sense of these phenomena. However, through the lens of monster theory, these paradoxical experiences began to reveal their structure and significance. Drawing on concepts such as fusion, fission, massification, and incompleteness, we analyze and interpret the unexpected monstrous experiences arising from interacting with wearable systems. We argue that such experiences deserve a place in interaction design: not only for the enduring fascination of the monster, but also for its power to disrupt simplistic schemas, enrich design possibilities, and illuminate cultural shifts.},
keywords = {empirical studies, interaction design, monstrous experiences, user experience design, wearable computers},
pubstate = {published},
tppubtype = {inproceedings}
}
Riemer, Martin; Valletta, Elisa; Halbhuber, David; Bogon, Johanna
Anticipating Physical Processes in VR: Environment Type and Scale Alter Temporal Expectations Proceedings Article
In: Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722783.
Abstract | Links | BibTeX | Tags: gravity, mental imagery, physical processes, space-time interaction, spatial scale, time perception, virtual reality
@inproceedings{10.1145/3772318.3791767,
title = {Anticipating Physical Processes in {VR}: Environment Type and Scale Alter Temporal Expectations},
author = {Martin Riemer and Elisa Valletta and David Halbhuber and Johanna Bogon},
url = {https://doi.org/10.1145/3772318.3791767},
doi = {10.1145/3772318.3791767},
isbn = {9798400722783},
year = {2026},
date = {2026-01-01},
booktitle = {Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI '26},
abstract = {Accurate temporal expectations support interaction in virtual reality (VR), yet it remains unclear whether the internal models that guide such expectations in the real world transfer unchanged to immersive VR. We report two experiments examining expected durations of gravity-driven motion across real and virtual environments. In Experiment 1, participants imagined a ball rolling down ramps in a physical lab, a 1:1 VR replica, and an up-scaled VR room and produced the time the imagined process would take. Results revealed systematic distortions: durations were underestimated in VR relative to the physical lab, and larger virtual spaces elicited longer durations. Experiment 2 assessed whether participants incorporated gravity laws into their simulations. Although gravitational acceleration was consistently underestimated, it was incorporated in both real and virtual environments. Our findings show that VR and its spatial scale bias temporal expectations, with implications for the design of temporally coherent and physically plausible VR experiences.},
keywords = {gravity, mental imagery, physical processes, space-time interaction, spatial scale, time perception, virtual reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Kalus, Alexander; Wolf, Katrin; Yildiran, Sümeyye R.; Kocur, Martin
Exploring the Time Course of the Proteus Effect: Effects of Avatar Age and Embodiment Time on Walking in Virtual Reality Proceedings Article
In: Proceedings of the Extended Abstracts of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722813.
Abstract | Links | BibTeX | Tags: Age, Avatar, Embodiment, Proteus Effect, virtual reality
@inproceedings{10.1145/3772363.3799291,
title = {Exploring the Time Course of the {Proteus} Effect: Effects of Avatar Age and Embodiment Time on Walking in Virtual Reality},
author = {Alexander Kalus and Katrin Wolf and Sümeyye R. Yildiran and Martin Kocur},
url = {https://doi.org/10.1145/3772363.3799291},
doi = {10.1145/3772363.3799291},
isbn = {9798400722813},
year = {2026},
date = {2026-01-01},
booktitle = {Proceedings of the Extended Abstracts of the 2026 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI EA '26},
abstract = {Avatar appearance can influence users’ behaviour within Virtual Reality (VR), a phenomenon known as the Proteus effect. Prior work suggests that walking behavior after VR exposure is affected by the previously embodied avatar’s apparent age. However, little is known about how such effects unfold during ongoing avatar embodiment in VR. We conducted a study where 32 full-body tracked participants embodied young- and old-looking avatars and repeatedly completed a walking route in VR. Results show that participants walked significantly slower when embodying old-looking avatars. Presence and body ownership increased over time. Interestingly, embodiment duration did not significantly affect the magnitude of the Proteus effect on walking speed, with descriptive differences remaining largely stable. These results suggest that the behavioral impact of avatar age persists without substantial change over a 15 to 20-minute VR session. Our findings contribute to a deeper understanding of avatar age as a design parameter in VR.},
keywords = {Age, Avatar, Embodiment, Proteus Effect, virtual reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Ma, Yong; Zhang, Xuesong; Zhang, Xuedong; Bartłomiejczyk, Natalia; Je, Seungwoo; Holzer, Adrian; Fjeld, Morten; Butz, Andreas Martin
Beyond Words: Measuring User Experience through Speech Analysis in Voice User Interfaces Proceedings Article
In: Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722783.
Abstract | Links | BibTeX | Tags: Voice user interfaces; user experience; speech analytics; paralinguistics; implicit UX sensing.
@inproceedings{10.1145/3772318.3791747,
title = {Beyond Words: Measuring User Experience through Speech Analysis in Voice User Interfaces},
author = {Yong Ma and Xuesong Zhang and Xuedong Zhang and Natalia Bartłomiejczyk and Seungwoo Je and Adrian Holzer and Morten Fjeld and Andreas Martin Butz},
url = {https://doi.org/10.1145/3772318.3791747},
doi = {10.1145/3772318.3791747},
isbn = {9798400722783},
year = {2026},
date = {2026-01-01},
booktitle = {Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI '26},
abstract = {Voice assistants (VAs) are typically evaluated through task performance metrics and self-report questionnaires, but people’s voices themselves carry rich paralinguistic cues that reveal affect, effort, and interaction breakdowns. We present a within-subjects study (N=49) that systematically compared three VA personas across three usage scenarios to investigate whether speech-derived audio features can serve as a proxy for user experience (UX). Participants’ speech was analyzed for temporal, spectral, and linguistic markers, alongside standardized UX measures, brief mood and stress ratings, and a post-study questionnaire. We found correlations between specific speech features and self-reported satisfaction and experience. Furthermore, a machine learning model trained on speech features achieved promising accuracy in classifying UX levels, indicating that this might be a reasonable alternative to self-report instruments. Our findings establish speech as a viable, real-time signal for implicitly measuring UX and point toward adaptive VUIs that respond dynamically to emotional and usability-related vocal cues.},
keywords = {Voice user interfaces, user experience, speech analytics, paralinguistics, implicit UX sensing},
pubstate = {published},
tppubtype = {inproceedings}
}
Stellmacher, Carolin; Dratzidis, Leon Tristan; Zenner, André; Wald, Iddo Yehoshua; Schöning, Johannes; Rogers, Yvonne; Degraen, Donald; Colley, Mark
Understanding How Mobile Interactions Shape Grasp and Contact Patterns Beyond the Touchscreen Proceedings Article
In: Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722783.
Abstract | Links | BibTeX | Tags: Beyond the Touchscreen, Contact Pattern, Grasp, Mobile Phone, Smartphone
@inproceedings{10.1145/3772318.3790565,
  title     = {Understanding How Mobile Interactions Shape Grasp and Contact Patterns Beyond the Touchscreen},
  author    = {Carolin Stellmacher and Leon Tristan Dratzidis and André Zenner and Iddo Yehoshua Wald and Johannes Schöning and Yvonne Rogers and Donald Degraen and Mark Colley},
  url       = {https://doi.org/10.1145/3772318.3790565},
  doi       = {10.1145/3772318.3790565},
  isbn      = {9798400722783},
  year      = {2026},
  date      = {2026-01-01},
  booktitle = {Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  series    = {CHI '26},
  abstract  = {The way users hold a smartphone depends on the interaction task, yet little is known about the fingers’ engagement with the device’s surfaces beyond the touchscreen. Such an understanding not only opens up opportunities for novel on- and off-screen interactions, but also the device’s possible physical affordances. We present a study (N=23) that examines the hands’ physical engagement with the smartphone beyond the touchscreen across nine mobile interactions. Grasps were annotated from photographs, and contact regions were captured using residual heat traces from grasping the device. Our findings show that fingers and palms adopt a variety of support roles and postures when engaging with the smartphone’s back and side edges. The hand-contact maps reveal distinct patterns, differing in contact frequency and placement. This work contributes an empirical characterisation of hands’ back and edge engagement, highlighting design opportunities for future smartphone usage extending beyond the touchscreen.},
  keywords  = {Beyond the Touchscreen, Contact Pattern, Grasp, Mobile Phone, Smartphone},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Keppel, Jonas; Prochazka, Marvin; Lewin, Stefan; Stroehnisch, Markus; Strauss, Marvin; Zenner, André; Degraen, Donald; Matviienko, Andrii; Schneegass, Stefan
Determining Perception Thresholds for Real and Virtual Inclinations While Cycling in Virtual Reality Proceedings Article
In: Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2026, ISBN: 9798400722783.
Abstract | Links | BibTeX | Tags: Biking, Exergames, Inclination, Indoor Cycling, Perception, Sports, Thresholds, virtual reality
@inproceedings{10.1145/3772318.3791538,
title = {Determining Perception Thresholds for Real and Virtual Inclinations While Cycling in Virtual Reality},
author = {Jonas Keppel and Marvin Prochazka and Stefan Lewin and Markus Stroehnisch and Marvin Strauss and André Zenner and Donald Degraen and Andrii Matviienko and Stefan Schneegass},
url = {https://doi.org/10.1145/3772318.3791538},
doi = {10.1145/3772318.3791538},
isbn = {9798400722783},
year = {2026},
date = {2026-01-01},
booktitle = {Proceedings of the 2026 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
series = {CHI '26},
abstract = {In virtual reality (VR) experiences, mismatches between reality and virtuality are usually undesirable, as they can disrupt immersion and induce cybersickness. However, when carefully controlled, they may expand the design space of VR. This research investigates perceptual detection thresholds for mismatches between real and virtual inclinations during cycling in VR. Using a custom simulation},
internal-note = {abstract is truncated mid-sentence ("Using a custom simulation"); TODO complete from the ACM Digital Library record},
keywords = {Biking, Exergames, Inclination, Indoor Cycling, Perception, Sports, Thresholds, virtual reality},
pubstate = {published},
tppubtype = {inproceedings}
}
Chamunorwa, Michael; Müller, Heiko; Boll, Susanne
Discovering the Potential of Living Room Objects as Alternative Smart Home Controllers: An Exploration of Secondary Affordances Journal Article
In: Proc. ACM Hum.-Comput. Interact., vol. 9, no. 8, pp. 70–95, 2025, ISSN: 2573-0142.
Abstract | Links | BibTeX | Tags:
@article{Chamunorwa2025,
  title     = {Discovering the Potential of Living Room Objects as Alternative Smart Home Controllers: An Exploration of Secondary Affordances},
  author    = {Michael Chamunorwa and Heiko Müller and Susanne Boll},
  doi       = {10.1145/3773061},
  issn      = {2573-0142},
  year      = {2025},
  date      = {2025-11-13},
  journal   = {Proc. ACM Hum.-Comput. Interact.},
  volume    = {9},
  number    = {8},
  pages     = {70--95},
  publisher = {Association for Computing Machinery (ACM)},
  abstract  = {Smart home appliances are becoming more popular, yet their user interfaces (UI) often lack integration into living spaces and daily practices. To facilitate more natural interactions, reduce clutter and promote better integration in our living spaces, we propose embedding controls into everyday objects.
This paper outlines our approach in three steps: identifying suitable household objects through ethnographic research, gathering interaction ideas via user gesture elicitation, and evaluating the intuitiveness of conceptual interfaces through a Wizard-of-Oz study.
Our findings suggest that an object's primary function has less influence on its potential as a smart home controller. Instead, its location, physical characteristics, and intuitive affordances play a more significant role in shaping its potential. Designers should, therefore, focus on making these affordances easily discoverable by concurrently considering the object's physical properties, function, and interaction context. },
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
This paper outlines our approach in three steps: identifying suitable household objects through ethnographic research, gathering interaction ideas via user gesture elicitation, and evaluating the intuitiveness of conceptual interfaces through a Wizard-of-Oz study.
Our findings suggest that an object’s primary function has less influence on its potential as a smart home controller. Instead, its location, physical characteristics, and intuitive affordances play a more significant role in shaping its potential. Designers should, therefore, focus on making these affordances easily discoverable by concurrently considering the object’s physical properties, function, and interaction context.
Weiss, Yannick; Villa, Steeven; Ziarko, Moritz; Müller, Florian
Manipulating Stiffness Perception of Compliant Objects While Pinching in Virtual Reality
2025.
@inproceedings{Weiss2025,
title = {Manipulating Stiffness Perception of Compliant Objects While Pinching in Virtual Reality},
author = {Yannick Weiss and Steeven Villa and Moritz Ziarko and Florian Müller},
doi = {10.1145/3756884.3765988},
year = {2025},
date = {2025-11-12},
pages = {1--11},
publisher = {ACM},
internal-note = {entry type was empty ("@{...}", invalid BibTeX); set to inproceedings based on the ACM DOI, pages, and publisher -- booktitle still missing, TODO add from the ACM DL},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Hein, Ilka; Ullrich, Daniel; Bakirova, Galiiabanu; Diefenbach, Sarah
What kind of technology transparency do users appreciate? Comparison of textual and graphic cues in app design Journal Article
In: i-com, vol. 24, no. 2, pp. 457–471, 2025, ISSN: 2196-6826.
Abstract | Links | BibTeX | Tags:
@article{Hein2025b,
title = {What kind of technology transparency do users appreciate? Comparison of textual and graphic cues in app design},
author = {Ilka Hein and Daniel Ullrich and Galiiabanu Bakirova and Sarah Diefenbach},
doi = {10.1515/icom-2025-0018},
issn = {2196-6826},
year = {2025},
date = {2025-09-25},
journal = {i-com},
volume = {24},
number = {2},
pages = {457--471},
publisher = {Walter de Gruyter GmbH},
internal-note = {journal name inferred from the DOI prefix (10.1515/icom-...) and ISSN 2196-6826 -- TODO confirm},
abstract = {App recommendations and data visualizations such as weather forecasts, navigation aids, or sleep tracking graphs play an increasingly important role in daily decisions. However, the apps’ underlying functioning often remains opaque, possibly resulting in a suboptimal user experience or inadequate reliance on recommendations. To approach design solutions for this, the paper investigates the effects of textual and graphic transparency cues on users’ mental model accuracy, user experience, and explanation satisfaction, using the example of a weather and a sleep tracking app. An online experiment with 293 participants showed that textual transparency cues (i.e., verbal explanations) led to higher felt and objectively measured mental model accuracy than graphic transparency cues (i.e., data visualizations). Textual cues were also more satisfying than graphic cues but did not result in significantly different ratings of user experience. Moreover, differences between textual and graphic cues in subjective mental model accuracy and explanation satisfaction were stronger for the weather than the sleep tracking app, implying context-specific differences in the impact of transparency cues. The results and limitations are discussed and linked to the challenge of finding a sweet spot for technology transparency design.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Liebers, Carina; Ertas, Metehan; Pfützenreuter, Niklas; Auda, Jonas; Gruenefeld, Uwe; Schneegass, Stefan
AI to the Rescue: Supporting Manual Annotation for Dataset Creation
2025.
@inproceedings{Liebers2025,
title = {{AI} to the Rescue: Supporting Manual Annotation for Dataset Creation},
author = {Carina Liebers and Metehan Ertas and Niklas Pfützenreuter and Jonas Auda and Uwe Gruenefeld and Stefan Schneegass},
doi = {10.1145/3743049.3743086},
year = {2025},
date = {2025-08-30},
pages = {91--98},
publisher = {ACM},
internal-note = {entry type was empty ("@{...}", invalid BibTeX); set to inproceedings based on the ACM DOI, pages, and publisher -- booktitle still missing, TODO add from the ACM DL},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Oechsner, Carl; Leusmann, Jan; Welsch, Robin; Butz, Andreas Martin; Mayer, Sven
Influence of Perceived Danger on Proxemics in Human-Robot Object Handovers
2025.
@inproceedings{Oechsner2025,
title = {Influence of Perceived Danger on Proxemics in Human-Robot Object Handovers},
author = {Carl Oechsner and Jan Leusmann and Robin Welsch and Andreas Martin Butz and Sven Mayer},
doi = {10.1145/3743049.3743064},
year = {2025},
date = {2025-08-30},
pages = {111--120},
publisher = {ACM},
internal-note = {entry type was empty ("@{...}", invalid BibTeX); set to inproceedings based on the ACM DOI, pages, and publisher -- booktitle still missing, TODO add from the ACM DL},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Nowak, Oliver; Müller, Erik; Sahabi, Sarah; Borchers, Jan
Towards Textile User Interface Design Guidelines for Eyes-Free Use
2025.
@inproceedings{Nowak2025,
title = {Towards Textile User Interface Design Guidelines for Eyes-Free Use},
author = {Oliver Nowak and Erik Müller and Sarah Sahabi and Jan Borchers},
doi = {10.1145/3715668.3736377},
year = {2025},
date = {2025-07-05},
pages = {312--317},
publisher = {ACM},
internal-note = {entry type was empty ("@{...}", invalid BibTeX); set to inproceedings based on the ACM DOI, pages, and publisher -- booktitle still missing, TODO add from the ACM DL},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Hein, Ilka; Diefenbach, Sarah
In: International Journal of Human–Computer Interaction, pp. 1–18, 2025, ISSN: 1532-7590.
@article{Hein2025,
  title     = {Towards a Comprehensive View on Technology Transparency: A Cross-Technology Investigation of Psychological and Design Factors Around Users’ Transparency Need and Perception},
  author    = {Ilka Hein and Sarah Diefenbach},
  doi       = {10.1080/10447318.2025.2502983},
  issn      = {1532-7590},
  year      = {2025},
  date      = {2025-06-03},
  journal   = {International Journal of Human–Computer Interaction},
  pages     = {1--18},
  publisher = {Informa UK Limited},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Kalus, Alexander; Lanzinger, Michelle; Rolny, Laurin; Strasser, Benedikt; Wolf, Katrin; Henze, Niels; Bogon., Johanna
Heavy Looks, Slower Moves: Effects of Physical and Visual Object Weight on Pointing in Virtual Reality Proceedings Article
In: Extended Abstracts of the CHI Conference on Human Factors in Computing Systems (CHI EA ’25), ACM, 2025.
@inproceedings{kalus-chi25ea,
title = {Heavy Looks, Slower Moves: Effects of Physical and Visual Object Weight on Pointing in Virtual Reality},
author = {Alexander Kalus and Michelle Lanzinger and Laurin Rolny and Benedikt Strasser and Katrin Wolf and Niels Henze and Johanna Bogon},
doi = {10.1145/3706599.3720211},
year = {2025},
date = {2025-04-25},
booktitle = {Extended Abstracts of the CHI Conference on Human Factors in Computing Systems (CHI EA '25)},
publisher = {ACM},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Diefenbach, Sarah; Riehle, Anna; Jannott, Hannah; Vornhagen, Joëlle-Sophie; Stoll, Johannes; Markhoff, Lea; Terzi, Pia
Psychological needs related to civil inattention: A qualitative and quantitative view on public encounters Journal Article
In: British Journal of Social Psychology, vol. 64, no. 1, pp. e12828, 2025.
BibTeX | Tags:
@article{diefenbach2025psychological,
  title     = {Psychological needs related to civil inattention: A qualitative and quantitative view on public encounters},
  author    = {Sarah Diefenbach and Anna Riehle and Hannah Jannott and Joëlle-Sophie Vornhagen and Johannes Stoll and Lea Markhoff and Pia Terzi},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {British Journal of Social Psychology},
  volume    = {64},
  number    = {1},
  pages     = {e12828},
  publisher = {Wiley Online Library},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Gol, Reyhaneh Sabbagh; Valkov, Dimitar; Linsen, Lars
XMTC: Explainable Early Classification of Multivariate Time Series in Reach-to-Grasp Hand Kinematics Journal Article
In: arXiv preprint arXiv:2502.04398, 2025.
BibTeX | Tags:
@article{gol2025xmtc,
title = {{XMTC}: Explainable Early Classification of Multivariate Time Series in Reach-to-Grasp Hand Kinematics},
author = {Reyhaneh Sabbagh Gol and Dimitar Valkov and Lars Linsen},
year = {2025},
date = {2025-01-01},
journal = {arXiv preprint arXiv:2502.04398},
eprint = {2502.04398},
eprinttype = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Uhde, Alarith; Dreyer, Lianara; Hassenzahl, Marc
The Witness Experience Inventory Journal Article
In: Interacting with Computers, pp. iwaf010, 2025, ISSN: 1873-7951.
Abstract | Links | BibTeX | Tags:
@article{10.1093/iwc/iwaf010,
  title     = {The Witness Experience Inventory},
  author    = {Alarith Uhde and Lianara Dreyer and Marc Hassenzahl},
  url       = {https://doi.org/10.1093/iwc/iwaf010},
  doi       = {10.1093/iwc/iwaf010},
  issn      = {1873-7951},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Interacting with Computers},
  pages     = {iwaf010},
  abstract  = {Interactions with technology are part of social life, for example in cafés, trains, or parks. This social situatedness not only changes how users experience these interactions. It also influences the situated experiences for other co-located people (“witnesses”). However, despite a large body of research on user experiences, the relation between an interaction and witness experiences, and ways to design for them, remain underexplored. To address this gap, this paper introduces the “Witness Experience Inventory”, a research tool grounded in social-interpretivist theories, that offers a pragmatic approach to study how interactions with technology affect witness experiences. Based on an analysis of eight interactive technologies, we illustrate how the Witness Experience Inventory can inform the design of socially situated interactions with technology to avoid negative and create more positive witness experiences. We provide guidelines for applications of the Witness Experience Inventory in future research and its adaptable coding template. Both build on experiences from our own research, but give future researchers and practitioners the flexibility to adapt the tool to the social settings they study.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}