publications([{ "lang": "en", "type_publi": "icolcomlec", "doi": "https://doi.org/10.1145/3500866.3516371", "title": "µGlyph: a Microgesture Notation", "url": "https://hal.science/hal-04026125", "abstract": "In the active field of hand microgestures, microgesture descriptions are typically expressed informally and are accompanied by images, leading to ambiguities and contradictions. An important step in moving the field forward is a rigorous basis for precisely describing, comparing, and analyzing microgestures. Towards this goal, we propose µGlyph, a hybrid notation based on a vocabulary of events inspired by finger biomechanics. First, we investigate the expressiveness of µGlyph by building a database of 118 microgestures extracted from the literature. Second, we experimentally explore the usability of µGlyph. Participants correctly read and wrote µGlyph descriptions 90% of the time, as compared to 46% for conventional descriptions. Third we present tools that promote µGlyph usage, including a visual editor with LaTeX export. We finally describe how µGlyph can guide research on designing, developing, and evaluating microgesture interaction. 
Results demonstrate the strong potential of µGlyph to establish a common ground for microgesture research.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2023, "uri": "http://iihm.imag.fr/publication/CGN23a/", "pages": "3:1-13", "bibtype": "inproceedings", "id": 948, "abbr": "CGN23a", "address": "Hamburg, Germany", "date": "2023-04-23", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems" }, { "lang": "en", "publisher": "IEEE", "doi": "https://doi.org/10.1109/ISMAR59233.2023.00095", "title": "3D Selection in Mixed Reality: Designing a Two-Phase Technique To Reduce Fatigue", "url": "https://hal.science/hal-04297966", "abstract": "Mid-air pointing is widely used for 3D selection in Mixed Reality but leads to arm fatigue. In a first exploratory experiment we study a two-phase design and compare modalities for each phase: mid-air gestures, eye-gaze and microgestures. Results suggest that eye-gaze and microgestures are good candidates to reduce fatigue and improve interaction speed. We therefore propose two 3D selection techniques: Look&MidAir and Look&Micro. Both techniques include a first phase during which users control a cone directed along their eye-gaze. Using the flexion of their non-dominant hand index finger, users pre-select the objects intersecting this cone. If several objects are pre-selected, a disambiguation phase is performed using direct mid-air touch for Look&MidAir or thumb to finger microgestures for Look&Micro. In a second study, we compare both techniques to the standard raycasting technique. Results show that Look&MidAir and Look&Micro perform similarly. However they are 55% faster, perceived easier to use and are less tiring than the baseline. 
We discuss how the two techniques could be combined for greater flexibility and for object manipulation after selection.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2023, "uri": "http://iihm.imag.fr/publication/CGN23b/", "pages": "800-809", "bibtype": "inproceedings", "id": 955, "abbr": "CGN23b", "address": "Sydney (Australia), Australia", "date": "2023-10-16", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "type_publi": "icolcomlec" }, { "lang": "en", "type_publi": "icolcomlec", "doi": "https://doi.org/10.1145/3604272", "title": "Studying the Visual Representation of Microgestures", "url": "https://hal.science/hal-04193374", "abstract": "The representations of microgestures are essential for researchers presenting their results through academic papers and system designers proposing tutorials to novice users. However, those representations remain disparate and inconsistent. As a first attempt to investigate how to best graphically represent microgestures, we created 21 designs, each depicting static and dynamic versions of 4 commonly used microgestures (tap, swipe, flex and hold). We first studied these designs in a quantitative online experiment with 45 participants. We then conducted a qualitative laboratory experiment in Augmented Reality with 16 participants. Based on the results, we provide design guidelines on which elements of a microgesture should be represented and how. In particular, it is recommended to represent the actuator and the trajectory of a microgesture. 
Also, although preferred by users, dynamic representations are not considered better than their static counterparts for depicting a microgesture and do not necessarily result in a better user recognition.", "authors": { "1": { "first_name": "Vincent", "last_name": "Lambert" }, "2": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "3": { "first_name": "Alix", "last_name": "Goguey" }, "4": { "first_name": "Sylvain", "last_name": "Malacria" }, "5": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2023, "uri": "http://iihm.imag.fr/publication/LCG+23a/", "id": 961, "bibtype": "inproceedings", "abbr": "LCG+23a", "address": "Athens, Greece", "date": "2023-09-25", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "ACM International Conference on Mobile Human-Computer Interaction (MobileHCI 2023)" }, { "lang": "fr", "type_publi": "these", "title": "Understanding and designing microgesture interaction", "url": "https://hal.science/tel-04359801", "abstract": "Over the last three decades, some of the objects we use in our daily life have gradually become computers. Our habits are changing with these mutations, and it is now not uncommon that we interact with these computers while performing other tasks, e.g. checking our GPS position on our smartwatch while biking. Over the last ten years, a new interaction modality has emerged to meet these needs, hand microgestures. Hand microgestures, simplified to microgestures, are fast and subtle movements of the fingers. They enable interaction in parallel with a main task, as they are quick and can be performed while holding an object. However, as it is a recent modality, the field of research still lacks structure and sometimes coherence. For instance, there is no convention for naming or describing microgestures, which can lead to terminological inconsistencies between different studies. 
Moreover, the literature focuses mainly on how to build systems to sense and recognize microgestures. Thus, few studies examine the expected properties of microgestures, such as speed or low impact on physical fatigue in certain contexts of use. As a result, this thesis focuses on the study of microgestures, from their description to their application in a specific field, i.e. Augmented Reality (AR), as well as their sensing and recognition. Our scientific approach is comprised of three steps. In the first step, we focus on the space of possibilities. After a literature review to highlight the diversity of microgestures and terminological issues, we present μGlyph, a notation to describe microgestures. Next, we present a user study to understand the constraints imposed when holding an object on the feasibility of microgestures. The results of this study were used to create a set of three rules to determine the feasibility of microgestures in different contexts, i.e. different grasps. For ease of use, we reused μGlyph to provide a visual description of these rules. Finally, we study different ways of making a set of microgestures compatible with many contexts, i.e. that each microgesture in the set is feasible in all contexts. With the space of possibilities defined, we focus on the design of systems for sensing and recognizing microgestures. After a review of such systems in the literature, we present our easily reproducible sensing systems that we implemented, resulting in two gloves. We then present a user study on the impact of wearing these gloves on the feasibility of microgestures. Our results suggest that our gloves have little impact on the feasibility of microgestures. Next, we present a more comprehensive system that recognizes both microgestures and contexts. Our studies on recognition rates suggest that our system is usable for microgesture detection, with a recognition rate of 94%, but needs to be improved for context recognition, with a rate of 80%. 
Finally, we present a proof-of-concept of a modular glove and a recognition system based on μGlyph to enable the unification of microgesture sensing systems. Our final step is then dedicated to interaction techniques based on microgestures. We focus on the properties of microgestures for 3D selection in AR. We have designed two 3D selection techniques based on eye-gaze and microgestures for interaction with low fatigue. Our results suggest that the combination of eye-gaze and microgesture enables fast interaction while minimizing fatigue, compared to the commonly used virtual pointer. We conclude with an extension of our techniques to integrate 3D object manipulation in AR.", "year": 2023, "uri": "http://iihm.imag.fr/publication/C23a/", "bibtype": "phdthesis", "abbr": "C23a", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" } }, "date": "2023-12-18", "type": "Thèses et habilitations", "id": 959 }, { "lang": "fr", "type_publi": "icolcomlec", "doi": "https://doi.org/None", "title": "µGlyph: a Graphical Notation to Describe Microgestures", "url": "https://hal.archives-ouvertes.fr/hal-03655062", "abstract": "Hand microgestures define a promising modality for rapid and eye-free interaction while holding or not an object. Studied in many contexts, e.g. in virtual/augmented reality, there is no consensual definition of a microgesture, nor a notation to accurately describe a microgesture. The absence of a reference framework leads to ambiguities in the naming or description of microgestures. We propose µGlyph, a graphical notation to precisely describe hand microgestures with different levels of abstraction. This notation is based on a vocabulary of elementary events from the biomechanics of the hand. Each event is associated with a context of execution as well as optional characteristics such as the finger that makes the micromovement. 
We study the descriptive power of the µGlyph notation by positioning it with respect to the existing design axes and by describing the most common microgestures of the literature.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2022, "uri": "http://iihm.imag.fr/publication/CGN22a/", "id": 941, "bibtype": "inproceedings", "abbr": "CGN22a", "address": "Namur, Belgium", "date": "2022-04-05", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "33ème conférence internationale francophone sur l’Interaction Humain-Machine (IHM'22)" }, { "lang": "en", "publisher": "Société Informatique de France", "type_publi": "autre", "title": "Projet ANR (2015-2018) « Autour du plan 2D »", "url": "https://hal.archives-ouvertes.fr/hal-03655986", "journal": "1024 : Bulletin de la Société Informatique de France", "year": 2022, "number": 19, "uri": "http://iihm.imag.fr/publication/CCC+22a/", "bibtype": "unpublished", "abbr": "CCC+22a", "authors": { "1": { "first_name": "Julien", "last_name": "Castet" }, "2": { "first_name": "Florent", "last_name": "Cabric" }, "3": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "4": { "first_name": "Dominique", "last_name": "Cunin" }, "5": { "first_name": "Emmanuel", "last_name": "Dubois" }, "6": { "first_name": "Elio", "last_name": "Keddisseh" }, "7": { "first_name": "Yann", "last_name": "Laurillau" }, "8": { "first_name": "Laurence", "last_name": "Nigay" }, "9": { "first_name": "Michael", "last_name": "Ortega" }, "10": { "first_name": "Gary", "last_name": "Perelman" }, "11": { "first_name": "Carole", "last_name": "Plasson" }, "12": { "first_name": "Mathieu", "last_name": "Raynal" }, "13": { "first_name": "Houssem", "last_name": "Saidi" }, "14": { "first_name": "Marcos", "last_name": "Serrano" } }, "date": "2022-04-01", 
"type": "Autres publications", "id": 933 }]);