publications([{ "lang": "fr", "type_publi": "icolcomlec", "doi": "https://doi.org/None", "title": "Microgesture Interaction in Context: demonstrations of the ANR MIC project", "url": "https://hal.science/hal-05311866", "abstract": "We present demonstrations from the ANR MIC project. MIC aims at studying and promoting microgesture-based interaction by putting it into practice in real use situations. The demonstrations show interaction techniques based on microgestures, or on the combination of microgestures with another modality including haptic feedback, as well as mechanisms that support the discoverability and learnability of microgestures. The demonstrations illustrate three different contexts of use: 1) Augmented/Virtual Reality, because microgesture interaction does not require holding any external device and is less physically demanding than mid-air interaction. 2) Car driving, because microgestures can be performed in parallel with other tasks: they require only a few seconds and a single hand. 3) Eyes-free interaction (e.g. for users with visual impairments), because users can perform microgestures by relying on proprioception only, without looking at their hand.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Aurélien", "last_name": "Conil" }, "3": { "first_name": "Alix", "last_name": "Goguey" }, "4": { "first_name": "Vincent", "last_name": "Lambert" }, "5": { "first_name": "Laurence", "last_name": "Nigay" }, "6": { "first_name": "Charles", "last_name": "Bailly" }, "7": { "first_name": "Julien", "last_name": "Castet" }, "8": { "first_name": "Michael", "last_name": "Ortega" }, "9": { "first_name": "Zoé", "last_name": "Lacroux" }, "10": { "first_name": "Céline", "last_name": "Lemercier" }, "11": { "first_name": "Pierre-Vincent", "last_name": "Paubel" }, "12": { "first_name": "Sandra", "last_name": "Bardot" }, "13": { "first_name": "Christophe", "last_name": "Jouffrais" }, "14": { "first_name": "Suliac", "last_name": "Lavenant" }, "15": { "first_name": "Sylvain", "last_name": "Malacria" }, "16": { "first_name": "Thomas", "last_name": "Pietrzak" } }, "year": 2025, "uri": "http://iihm.imag.fr/publication/CCG+25a/", "id": 1000, "bibtype": "inproceedings", "abbr": "CCG+25a", "address": "Toulouse, France", "date": "2025-11-03", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "IHM'25 - 36e Conférence Internationale Francophone sur l'Interaction Humain-Machine" }, { "lang": "en", "type_publi": "icolcomlec", "doi": "https://doi.org/10.1145/3689050.3707690", "title": "The not-so-masochist teapot", "url": "https://hal.science/hal-04854127", "abstract": "The not-so-masochist teapot challenges the human capacity to go beyond the first impression. At first, the not-so-masochist teapot appears unusable, with its spout above the handle. Yet, if participants go beyond this first impression and start to make tea, the spout rotates to a usable location, i.e. opposite the handle, just in time for the tea to be ready. 
With the not-so-masochist teapot, we question the unusability of objects and the capacity to go beyond first impressions.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Jasper", "last_name": "Flügge" }, "3": { "first_name": "Eric", "last_name": "Chaffangeon" }, "4": { "first_name": "Katrin", "last_name": "Wolf" } }, "year": 2025, "uri": "http://iihm.imag.fr/publication/CFC+25a/", "id": 996, "bibtype": "inproceedings", "abbr": "CFC+25a", "address": "Bordeaux, France", "date": "2025-02-04", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Nineteenth International Conference on Tangible, Embedded, and Embodied Interaction" }, { "lang": "fr", "type_publi": "icolcomlec", "doi": "https://doi.org/10.1145/3765712.3765726", "title": "Sensitive Pen: An Open-Source And Low-Cost Digital Pen For Diagnosing Children With Dysgraphia", "url": "https://hal.science/hal-05294824", "abstract": "Handwriting is a complex motor activity. Handwriting disorders, known as dysgraphia, have a considerable impact on an individual’s academic and professional success. To facilitate the diagnosis of dysgraphia, we propose an open-source and low-cost digital pen, called Sensitive Pen. In a first study, we evaluate its ease of use, usefulness and acceptability with psychomotor therapists. In a second study, we test the Sensitive Pen with children aged 6-10 to assess its ability to diagnose dysgraphia using machine learning. Our results show that psychomotor therapists would be ready and interested in using such a tool. Moreover, on a sample of 20 children, using the pen’s kinematic and angle data, we obtained a true positive rate (i.e. correctly identified dysgraphic children) of 100%, and an overall accuracy of 65%.", "authors": { "1": { "first_name": "Ana", "last_name": "Phelippeau" }, "2": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "3": { "first_name": "Adrien", "last_name": "Husson" }, "4": { "first_name": "Joël", "last_name": "Chevrier" } }, "year": 2025, "uri": "http://iihm.imag.fr/publication/PCH+25a/", "id": 997, "bibtype": "inproceedings", "abbr": "PCH+25a", "address": "Toulouse, France", "date": "2025-11-03", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "IHM'25 - 36e Conférence Internationale Francophone sur l'Interaction Humain-Machine" }, { "lang": "fr", "type_publi": "icolcomlec", "doi": "https://doi.org/10.1145/3765712.3765723", "title": "Couplage et contrôle de points de vue en réalité mixte collaborative : la notation graphique (Point de Vue)* - PV*", "url": "https://hal.science/hal-05294806", "abstract": "Mixed Reality (MR) is increasingly being studied for synchronous remote collaboration in various fields, from industrial maintenance to education. It allows combining virtual reality, augmented reality, and 2D interfaces to visualize and share real and virtual content. The notion of coupling of collaborative activities, central to studies of Computer-Supported Cooperative Work (CSCW) applications, takes various forms with MR due to its 3D and immersive characteristics, as well as the heterogeneity of interactive technologies. To cope with this diversity, we propose a graphical notation, PV*, that precisely describes the coupling relationships between multiple viewpoints on physical or virtual views, as well as their control by collaborators. 
We study the descriptive power of PV* by describing several collaborative situations from the literature, and we discuss its generative power by exploring several design possibilities described with PV*.", "authors": { "1": { "first_name": "Thomaz", "last_name": "Fèvre" }, "2": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "3": { "first_name": "Cédric", "last_name": "Fleury" }, "4": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2025, "uri": "http://iihm.imag.fr/publication/FCF+25a/", "id": 998, "bibtype": "inproceedings", "abbr": "FCF+25a", "address": "Toulouse, France", "date": "2025-11-04", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "IHM'25 - 36e Conférence Internationale Francophone sur l'Interaction Humain-Machine" }, { "lang": "fr", "type_publi": "icolcomlec", "doi": "https://doi.org/None", "title": "Viewpoint control techniques for mixed reality collaboration generated with the (Point of View)* - PV* graphical notation", "url": "https://hal.science/hal-05312071", "abstract": "Mixed Reality (MR) is increasingly being used for synchronous remote collaboration in various fields, from industrial maintenance to education. It allows combining virtual reality, augmented reality, and 2D interfaces to visualize and share real and virtual content. The notion of coupling of collaborative activities, central to studies of Computer-Supported Cooperative Work (CSCW) applications, takes various forms with MR due to its 3D and immersive characteristics, as well as the heterogeneity of interactive technologies. To cope with this diversity, we have proposed a graphical notation, PV*, that precisely describes the coupling relationships between multiple viewpoints on physical or virtual views, as well as their control by collaborators. In this demonstration, we present different viewpoint control techniques designed with PV*, illustrating the generative power of our notation.", "authors": { "1": { "first_name": "Thomaz", "last_name": "Fèvre" }, "2": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "3": { "first_name": "Cédric", "last_name": "Fleury" }, "4": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2025, "uri": "http://iihm.imag.fr/publication/FCF+25b/", "id": 999, "bibtype": "inproceedings", "abbr": "FCF+25b", "address": "Toulouse, France", "date": "2025-11-03", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "IHM'25 : 36e Conférence Internationale Francophone sur l'Interaction Humain-Machine" }, { "lang": "en", "publisher": "Elsevier", "type_publi": "irevcomlec", "bibtype": "article", "title": "Microgesture + Grasp: A journey from human capabilities to interaction with microgestures", "url": "https://hal.science/hal-04801105", "abstract": "Microgestures, i.e. fast and subtle finger movements, have shown a high potential for ubiquitous interaction. However, work to date either focuses on grasp contexts (holding an object) or on the free-hand context (no held object). These two contexts influence the feasibility of microgestures. Researchers have created sets of microgestures feasible across the entire taxonomy of everyday grasps. However, those sets include a limited number of microgestures compared to those for the free-hand context, for which microgestures are distinguished according to fine characteristics such as the part of the finger being touched or the number of fingers used. 
We present the first study on microgesture feasibility across free-hand and grasp contexts. We also study, for the first time, the use of a finer characteristic of a microgesture in grasp contexts: surface area. Then, we present a set of rules to determine the feasibility of a microgesture in a given context without the need for time-consuming feasibility studies. In both studies, some microgestures were not feasible across all considered contexts. We therefore explore different ways of defining a set of microgestures compatible with both free-hand and grasp contexts.", "year": 2025, "uri": "http://iihm.imag.fr/publication/CGN25a/", "id": 972, "volume": 195, "abbr": "CGN25a", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "date": "2025-01-01", "type": "Revues internationales avec comité de lecture", "journal": "International Journal of Human-Computer Studies" }, { "lang": "fr", "type_publi": "icolcomlec", "doi": "https://doi.org/None", "title": "µPoly: a Toolkit to Design Microgesture Interaction", "url": "https://hal.science/hal-04499957", "abstract": "Numerous microgesture recognition systems have been proposed. These systems differ in shape, sensor types and recognition algorithms. However, in the absence of a microgesture event standard and a toolkit for microgesture interaction, it is difficult for an interaction designer to easily and quickly test different recognition systems and microgesture sets. To address this problem, we propose μPoly, a toolkit based on μGlyph, a notation for describing microgestures.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Aurélien", "last_name": "Conil" }, "3": { "first_name": "Alix", "last_name": "Goguey" }, "4": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2024, "uri": "http://iihm.imag.fr/publication/CCG+24a/", "id": 987, "bibtype": "inproceedings", "abbr": "CCG+24a", "address": "Paris, France", "date": "2024-03-25", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "IHM'24 - 35e Conférence Internationale Francophone sur l'Interaction Humain-Machine" }, { "lang": "en", "type_publi": "icolcomlec", "doi": "https://doi.org/10.1145/3500866.3516371", "title": "µGlyph: a Microgesture Notation", "url": "https://hal.science/hal-04026125", "abstract": "In the active field of hand microgestures, microgesture descriptions are typically expressed informally and are accompanied by images, leading to ambiguities and contradictions. An important step in moving the field forward is a rigorous basis for precisely describing, comparing, and analyzing microgestures. Towards this goal, we propose µGlyph, a hybrid notation based on a vocabulary of events inspired by finger biomechanics. First, we investigate the expressiveness of µGlyph by building a database of 118 microgestures extracted from the literature. Second, we experimentally explore the usability of µGlyph. Participants correctly read and wrote µGlyph descriptions 90% of the time, as compared to 46% for conventional descriptions. Third, we present tools that promote µGlyph usage, including a visual editor with LaTeX export. We finally describe how µGlyph can guide research on designing, developing, and evaluating microgesture interaction. 
Results demonstrate the strong potential of µGlyph to establish a common ground for microgesture research.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2023, "uri": "http://iihm.imag.fr/publication/CGN23a/", "pages": "3:1-13", "bibtype": "inproceedings", "id": 948, "abbr": "CGN23a", "address": "Hamburg, Germany", "date": "2023-04-23", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems" }, { "lang": "en", "publisher": "IEEE", "doi": "https://doi.org/10.1109/ISMAR59233.2023.00095", "title": "3D Selection in Mixed Reality: Designing a Two-Phase Technique To Reduce Fatigue", "url": "https://hal.science/hal-04297966", "abstract": "Mid-air pointing is widely used for 3D selection in Mixed Reality but leads to arm fatigue. In a first exploratory experiment, we study a two-phase design and compare modalities for each phase: mid-air gestures, eye-gaze and microgestures. Results suggest that eye-gaze and microgestures are good candidates to reduce fatigue and improve interaction speed. We therefore propose two 3D selection techniques: Look&MidAir and Look&Micro. Both techniques include a first phase during which users control a cone directed along their eye-gaze. Using the flexion of their non-dominant hand index finger, users pre-select the objects intersecting this cone. If several objects are pre-selected, a disambiguation phase is performed using direct mid-air touch for Look&MidAir or thumb-to-finger microgestures for Look&Micro. In a second study, we compare both techniques to the standard raycasting technique. Results show that Look&MidAir and Look&Micro perform similarly. However, they are 55% faster, perceived as easier to use, and less tiring than the baseline. We discuss how the two techniques could be combined for greater flexibility and for object manipulation after selection.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2023, "uri": "http://iihm.imag.fr/publication/CGN23b/", "pages": "800-809", "bibtype": "inproceedings", "id": 955, "abbr": "CGN23b", "address": "Sydney, Australia", "date": "2023-10-16", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "2023 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "type_publi": "icolcomlec" }, { "lang": "en", "type_publi": "icolcomlec", "doi": "https://doi.org/10.1145/3604272", "title": "Studying the Visual Representation of Microgestures", "url": "https://hal.science/hal-04193374", "abstract": "The representations of microgestures are essential for researchers presenting their results through academic papers and for system designers proposing tutorials to novice users. However, these representations remain disparate and inconsistent. As a first attempt to investigate how to best graphically represent microgestures, we created 21 designs, each depicting static and dynamic versions of 4 commonly used microgestures (tap, swipe, flex and hold). We first studied these designs in a quantitative online experiment with 45 participants. 
We then conducted a qualitative laboratory experiment in Augmented Reality with 16 participants. Based on the results, we provide design guidelines on which elements of a microgesture should be represented and how. In particular, we recommend representing the actuator and the trajectory of a microgesture. Also, although preferred by users, dynamic representations are not considered better than their static counterparts for depicting a microgesture and do not necessarily result in better user recognition.", "authors": { "1": { "first_name": "Vincent", "last_name": "Lambert" }, "2": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "3": { "first_name": "Alix", "last_name": "Goguey" }, "4": { "first_name": "Sylvain", "last_name": "Malacria" }, "5": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2023, "uri": "http://iihm.imag.fr/publication/LCG+23a/", "id": 961, "bibtype": "inproceedings", "abbr": "LCG+23a", "address": "Athens, Greece", "date": "2023-09-25", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "ACM International Conference on Mobile Human-Computer Interaction (MobileHCI 2023)" }, { "lang": "fr", "type_publi": "these", "title": "Understanding and designing microgesture interaction", "url": "https://hal.science/tel-04359801", "abstract": "Over the last three decades, some of the objects we use in our daily lives have gradually become computers. Our habits are changing with these mutations, and it is now not uncommon to interact with these computers while performing other tasks, e.g. checking our GPS position on our smartwatch while biking. Over the last ten years, a new interaction modality has emerged to meet these needs: hand microgestures. Hand microgestures, hereafter simply microgestures, are fast and subtle movements of the fingers. They enable interaction in parallel with a main task, as they are quick and can be performed while holding an object. However, as it is a recent modality, the field of research still lacks structure and sometimes coherence. For instance, there is no convention for naming or describing microgestures, which can lead to terminological inconsistencies between studies. Moreover, the literature focuses mainly on how to build systems to sense and recognize microgestures. Thus, few studies examine the expected properties of microgestures, such as speed or low impact on physical fatigue in certain contexts of use. As a result, this thesis focuses on the study of microgestures, from their description to their application in a specific field, i.e. Augmented Reality (AR), as well as their sensing and recognition. Our scientific approach comprises three steps. In the first step, we focus on the space of possibilities. After a literature review highlighting the diversity of microgestures and terminological issues, we present μGlyph, a notation to describe microgestures. Next, we present a user study to understand the constraints that holding an object imposes on the feasibility of microgestures. The results of this study were used to create a set of three rules to determine the feasibility of microgestures in different contexts, i.e. different grasps. For ease of use, we reused μGlyph to provide a visual description of these rules. Finally, we study different ways of making a set of microgestures compatible with many contexts, i.e. 
that each microgesture in the set is feasible in all contexts. With the space of possibilities defined, we focus on the design of systems for sensing and recognizing microgestures. After a review of such systems in the literature, we present the easily reproducible sensing systems we implemented, resulting in two gloves. We then present a user study on the impact of wearing these gloves on the feasibility of microgestures. Our results suggest that our gloves have little impact on the feasibility of microgestures. Next, we present a more comprehensive system that recognizes both microgestures and contexts. Our studies on recognition rates suggest that our system is usable for microgesture detection, with a recognition rate of 94%, but needs to be improved for context recognition, with a rate of 80%. Finally, we present a proof-of-concept of a modular glove and a recognition system based on μGlyph to enable the unification of microgesture sensing systems. Our final step is dedicated to interaction techniques based on microgestures. We focus on the properties of microgestures for 3D selection in AR. We designed two 3D selection techniques based on eye-gaze and microgestures for low-fatigue interaction. Our results suggest that the combination of eye-gaze and microgestures enables fast interaction while minimizing fatigue, compared to the commonly used virtual pointer. We conclude with an extension of our techniques to integrate 3D object manipulation in AR.", "year": 2023, "uri": "http://iihm.imag.fr/publication/C23a/", "bibtype": "phdthesis", "abbr": "C23a", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" } }, "date": "2023-12-18", "type": "Thèses et habilitations", "id": 959 }, { "lang": "fr", "type_publi": "icolcomlec", "doi": "https://doi.org/None", "title": "µGlyph: a Graphical Notation to Describe Microgestures", "url": "https://hal.archives-ouvertes.fr/hal-03655062", "abstract": "Hand microgestures define a promising modality for rapid and eyes-free interaction, whether or not an object is held. Although studied in many contexts, e.g. in virtual/augmented reality, microgestures have neither a consensual definition nor a notation to describe them accurately. The absence of a reference framework leads to ambiguities in the naming or description of microgestures. We propose µGlyph, a graphical notation to precisely describe hand microgestures at different levels of abstraction. This notation is based on a vocabulary of elementary events from the biomechanics of the hand. Each event is associated with a context of execution as well as optional characteristics such as the finger that performs the micromovement. 
We study the descriptive power of the µGlyph notation by positioning it with respect to existing design axes and by describing the most common microgestures in the literature.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2022, "uri": "http://iihm.imag.fr/publication/CGN22a/", "id": 941, "bibtype": "inproceedings", "abbr": "CGN22a", "address": "Namur, Belgium", "date": "2022-04-05", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "33ème conférence internationale francophone sur l’Interaction Humain-Machine (IHM'22)" }, { "lang": "en", "publisher": "Société Informatique de France", "type_publi": "autre", "title": "Projet ANR (2015-2018) « Autour du plan 2D »", "url": "https://hal.archives-ouvertes.fr/hal-03655986", "journal": "1024 : Bulletin de la Société Informatique de France", "year": 2022, "number": 19, "uri": "http://iihm.imag.fr/publication/CCC+22a/", "bibtype": "unpublished", "abbr": "CCC+22a", "authors": { "1": { "first_name": "Julien", "last_name": "Castet" }, "2": { "first_name": "Florent", "last_name": "Cabric" }, "3": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "4": { "first_name": "Dominique", "last_name": "Cunin" }, "5": { "first_name": "Emmanuel", "last_name": "Dubois" }, "6": { "first_name": "Elio", "last_name": "Keddisseh" }, "7": { "first_name": "Yann", "last_name": "Laurillau" }, "8": { "first_name": "Laurence", "last_name": "Nigay" }, "9": { "first_name": "Michael", "last_name": "Ortega" }, "10": { "first_name": "Gary", "last_name": "Perelman" }, "11": { "first_name": "Carole", "last_name": "Plasson" }, "12": { "first_name": "Mathieu", "last_name": "Raynal" }, "13": { "first_name": "Houssem", "last_name": "Saidi" }, "14": { "first_name": "Marcos", "last_name": "Serrano" } }, "date": "2022-04-01", "type": "Autres publications", "id": 933 }]);