// JSONP payload: invokes the page-supplied `publications` callback with an
// array of publication records (HAL/IIHM export).
// Fixes applied during review:
//  - Raw (unescaped) newlines inside the "abstract" string literals were a
//    syntax error in both JSON and JavaScript; the wrapped lines are rejoined.
//  - "doi": "https://doi.org/None" was a Python `None` leaking into the URL
//    template at export time; those entries now carry `null` (no DOI known).
//  - Typo "demontrations" -> "demonstrations" in the first abstract.
publications([
  {
    "lang": "fr",
    "type_publi": "icolcomlec",
    "doi": null,
    "title": "Microgesture Interaction in Context: demonstrations of the ANR MIC project Interaction par microgeste en contexte : démonstrations du projet ANR MIC",
    "url": "https://hal.science/hal-05311866",
    "abstract": "We present demonstrations from the ANR MIC project. MIC aims at studying and promoting microgesture-based interaction by putting it in practice in use situations. The demonstrations show interaction techniques based on microgestures or on the combination of microgestures with another modality including haptic feedback as well as mechanisms that support discoverability and learnability of microgestures. The demonstrations illustrate three different contexts of use: 1) Augmented/Virtual Reality because microgesture interaction does not require us to hold any external device and is less physically demanding than mid-air interaction. 2) Car driving because microgestures may be performed in parallel with other tasks, they only require a few seconds and only one hand. 3) Eyes-free interaction (i.e. users with visual impairments) because users can perform microgestures by relying on proprioception only, without looking at their hand.",
    "authors": {
      "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" },
      "2": { "first_name": "Aurélien", "last_name": "Conil" },
      "3": { "first_name": "Alix", "last_name": "Goguey" },
      "4": { "first_name": "Vincent", "last_name": "Lambert" },
      "5": { "first_name": "Laurence", "last_name": "Nigay" },
      "6": { "first_name": "Charles", "last_name": "Bailly" },
      "7": { "first_name": "Julien", "last_name": "Castet" },
      "8": { "first_name": "Michael", "last_name": "Ortega" },
      "9": { "first_name": "Zoé", "last_name": "Lacroux" },
      "10": { "first_name": "Céline", "last_name": "Lemercier" },
      "11": { "first_name": "Pierre-Vincent", "last_name": "Paubel" },
      "12": { "first_name": "Sandra", "last_name": "Bardot" },
      "13": { "first_name": "Christophe", "last_name": "Jouffrais" },
      "14": { "first_name": "Suliac", "last_name": "Lavenant" },
      "15": { "first_name": "Sylvain", "last_name": "Malacria" },
      "16": { "first_name": "Thomas", "last_name": "Pietrzak" }
    },
    "year": 2025,
    "uri": "http://iihm.imag.fr/publication/CCG+25a/",
    "id": 1000,
    "bibtype": "inproceedings",
    "abbr": "CCG+25a",
    "address": "Toulouse, France",
    "date": "2025-11-03",
    "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet",
    "booktitle": "IHM'25 - 36e Conférence Internationale Francophone sur l'Interaction Humain-Machine"
  },
  {
    "lang": "en",
    "type_publi": "icolcomlec",
    "doi": null,
    "title": "Studying the Perception of Vibrotactile Haptic Cues on the Finger, Hand and Forearm for Representing Microgestures",
    "url": "https://inria.hal.science/hal-04680841",
    "abstract": "We explore the use of vibrotactile haptic cues for representing microgestures. We built a four-axes haptic device for providing vibrotactile cues mapped to all four fingers. We also designed six patterns, inspired by six most commonly studied microgestures. The patterns can be played independently on each axis of the device. We ran an experiment with 36 participants testing three different device locations (fingers, back of the hand, and forearm) for pattern and axis recognition. For all three device locations, participants interpreted the patterns with similar accuracy. We also found that they were better at distinguishing the axes when the device is placed on the fingers. Hand and Forearm device locations remain suitable alternatives but involve a greater trade-off between recognition rate and expressiveness. We report the recognition rates obtained for the different patterns, axes and their combinations per device location. These results per device location are important, as constraints of various kinds, such as hardware, context of use and user activities, influence device location. We discuss this choice of device location by improving literature microgesture-based scenarios with haptic feedback or feedforward.",
    "authors": {
      "1": { "first_name": "Suliac", "last_name": "Lavenant" },
      "2": { "first_name": "Alix", "last_name": "Goguey" },
      "3": { "first_name": "Sylvain", "last_name": "Malacria" },
      "4": { "first_name": "Laurence", "last_name": "Nigay" },
      "5": { "first_name": "Thomas", "last_name": "Pietrzak" }
    },
    "year": 2024,
    "uri": "http://iihm.imag.fr/publication/LGM+24a/",
    "id": 978,
    "bibtype": "inproceedings",
    "abbr": "LGM+24a",
    "address": "Bellevue, WA, United States",
    "date": "2024-10-21",
    "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet",
    "booktitle": "Proceedings of the IEEE International Symposium on Mixed and Augmented Reality (ISMAR 2024)"
  },
  {
    "lang": "en",
    "publisher": "ACM",
    "doi": "https://doi.org/10.1145/3447526.3472038",
    "title": "Understanding User Strategies When Touching Arbitrary Shaped Objects",
    "url": "https://hal.inria.fr/hal-03272566",
    "abstract": "We investigate how users touch arbitrary shapes. First, we performed semi-structured interviews with a fifteen-shape set as prop to identify touch strategies. Results reveal four main potential touch strategies, from which we devised nine mathematical candidate models. We investigate the ability of these models to predict human behaviour in a controlled experiment. We found that the center of a shape's bounding box best approximates a user's target location when touching arbitrary shapes. Our findings not only invite designers to use a larger variety of shapes, but can also be used to design touch interaction adapted to user behaviour using our model. As an example, they are likely to be valuable for the creation of applications exposing shapes of various complexities, like drawing applications.",
    "authors": {
      "1": { "first_name": "Quentin", "last_name": "Roy" },
      "2": { "first_name": "Simon", "last_name": "Perrault" },
      "3": { "first_name": "Katherine", "last_name": "Fennedy" },
      "4": { "first_name": "Thomas", "last_name": "Pietrzak" },
      "5": { "first_name": "Anne", "last_name": "Roudaut" }
    },
    "year": 2021,
    "uri": "http://iihm.imag.fr/publication/RPF+21a/",
    "pages": "9:11",
    "bibtype": "inproceedings",
    "id": 937,
    "abbr": "RPF+21a",
    "address": "Toulouse, France",
    "date": "2021-09-27",
    "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet",
    "booktitle": "Proceedings of the ACM International Conference on Mobile Human-Computer Interaction (MobileHCI 2021)",
    "type_publi": "icolcomlec"
  }
]);