publications([{ "lang": "fr", "type_publi": "icolcomlec", "doi": "https://doi.org/None", "title": "µGlyph: a Graphical Notation to Describe Microgestures", "url": "https://hal.archives-ouvertes.fr/hal-03655062", "abstract": "Hand microgestures define a promising modality for rapid and eye-free interaction while holding or not an object. Studied in many contexts, e.g. in virtual/augmented reality, there is no consensual definition of a microgesture, nor a notation to accurately describe a microgesture. The absence of a reference framework leads to ambiguities in the naming or description of microgestures. We propose µGlyph, a graphical notation to precisely describe hand microgestures with different levels of abstraction. This notation is based on a vocabulary of elementary events from the biomechanics of the hand. Each event is associated with a context of execution as well as optional characteristics such as the finger that makes the micromovement. We study the descriptive power of the µGlyph notation by positioning it with respect to the existing design axes and by describing the most common microgestures of the literature.", "authors": { "1": { "first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2022, "uri": "http://iihm.imag.fr/publication/CGN22a/", "id": 941, "bibtype": "inproceedings", "abbr": "CGN22a", "address": "Namur, Belgium", "date": "2022-04-05", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "33ème conférence internationale francophone sur l’Interaction Humain-Machine (IHM'22)" }, { "lang": "en", "publisher": "ACM", "doi": "https://doi.org/10.1145/3536221.3556589", "title": "Keep in Touch: Combining Touch Interaction with Thumb-to-Finger µGestures for People with Visual Impairment", "url": "https://hal.archives-ouvertes.fr/hal-03778999", "abstract": "We 
present a set of 8 thumb-to-finger microgestures (TTF μGestures) that can be used as an additional modality to enrich touch interaction in eyes-free situations. TTF μGestures possess characteristics especially suited for people with visual impairment (PVI). They have never been studied specifically for PVI to improve accessibility of touchscreen devices. We studied a set of 33 common TTF μGestures to determine which are feasible and usable without seeing while the index is touching a surface. We found that the constrained position of the hand and the absence of vision prevent participants from being able to efficiently target a specific phalanx. Thus, we propose a set of 8 TTF μGestures (6 taps, 2 swipes) balancing resiliency (i.e., low error-rate) and expressivity (i.e., number of possible inputs): as a dimension combined with the touch modality, it would realistically multiply the touch command space by eight. Within our set of 8 TTF μGestures, we chose a subset of 4 μGestures (2 taps and 2 swipes) and implemented an exploration scenario of an audio-tactile map with a raised-line overlay on a touchscreen and tested it with 7 PVI. 
Their feedback was positive on the potential benefits of TTF μGestures in enhancing the touch modality and supporting PVI interaction with touchscreen devices", "authors": { "1": { "first_name": "Gauthier", "last_name": "Faisandaz" }, "2": { "first_name": "Alix", "last_name": "Goguey" }, "3": { "first_name": "Christophe", "last_name": "Jouffrais" }, "4": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2022, "uri": "http://iihm.imag.fr/publication/FGJ+22a/", "pages": "105–116", "bibtype": "inproceedings", "id": 945, "abbr": "FGJ+22a", "address": "Bengaluru (Bangalore), India", "date": "2022-11-07", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "24th ACM International Conference on Multimodal Interaction (ICMI 2022)", "type_publi": "icolcomlec" }, { "lang": "en", "publisher": "IEEE", "doi": "https://doi.org/10.1109/ISMAR55827.2022.00062", "title": "Selection Techniques for 3D Extended Desktop Workstation with AR HMD", "url": "https://hal.science/hal-03928037", "abstract": "Extending a standard desktop workstation (i.e. a screen, a mouse, a keyboard) with virtual scenes displayed on an Augmented Reality Head-Mounted Display (AR HMD) offers many identified advantages including limited physical space requirements, very large and flexible display spaces, and 3D stereoscopic views. While the technologies become more mainstream, the remaining open question is how to interact with such hybrid workstations that combine 2D views displayed on a physical monitor and 3D views displayed on a HoloLens. For a selection task, we compared mouse-based interaction (standard for 2D desktop workstations) and direct touch interaction in mid-air (standard for 3D AR) while considering different positions of the 3D scene according to a physical monitor. 
To extend mouse-based selection to 3D views, we experimentally explored different interaction metaphors where the mouse cursor moves either on a horizontal or a vertical plane in a 3D virtual scene. To check for ecological validity of our results, we conducted an additional study focusing on interaction with a 2D/3D Gapminder dataset visualization. The results show 1) that the mouse-based interaction, as compared to direct touch interaction in mid-air, is easy and efficient, 2) that using a vertical plane placed in front of the 3D virtual scene to mimic the double screen metaphor outperforms other interaction techniques and 3) that flexibility is required to allow users to choose the selection techniques and to position the 3D virtual scene relative to the physical monitor. Based on these results, we derive interaction design guidelines for hybrid workstations.", "authors": { "1": { "first_name": "Carole", "last_name": "Plasson" }, "2": { "first_name": "Renaud", "last_name": "Blanch" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2022, "uri": "http://iihm.imag.fr/publication/PBN22a/", "pages": "460-469", "bibtype": "inproceedings", "id": 947, "abbr": "PBN22a", "address": "Singapore, Singapore", "date": "2022-10-17", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "2022 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)", "type_publi": "icolcomlec" }, { "lang": "en", "publisher": "Société Informatique de France", "type_publi": "autre", "title": "Projet ANR (2015-2018) « Autour du plan 2D »", "url": "https://hal.archives-ouvertes.fr/hal-03655986", "journal": "1024 : Bulletin de la Société Informatique de France", "year": 2022, "number": 19, "uri": "http://iihm.imag.fr/publication/CCC+22a/", "bibtype": "unpublished", "abbr": "CCC+22a", "authors": { "1": { "first_name": "Julien", "last_name": "Castet" }, "2": { "first_name": "Florent", "last_name": "Cabric" }, "3": { 
"first_name": "Adrien", "last_name": "Chaffangeon Caillet" }, "4": { "first_name": "Dominique", "last_name": "Cunin" }, "5": { "first_name": "Emmanuel", "last_name": "Dubois" }, "6": { "first_name": "Elio", "last_name": "Keddisseh" }, "7": { "first_name": "Yann", "last_name": "Laurillau" }, "8": { "first_name": "Laurence", "last_name": "Nigay" }, "9": { "first_name": "Michael", "last_name": "Ortega" }, "10": { "first_name": "Gary", "last_name": "Perelman" }, "11": { "first_name": "Carole", "last_name": "Plasson" }, "12": { "first_name": "Mathieu", "last_name": "Raynal" }, "13": { "first_name": "Houssem", "last_name": "Saidi" }, "14": { "first_name": "Marcos", "last_name": "Serrano" } }, "date": "2022-04-01", "type": "Autres publications", "id": 933 }, { "lang": "en", "publisher": "Springer International Publishing", "doi": "https://doi.org/10.1007/978-3-030-98388-8_18", "title": "Teaching HCI Engineering: Four Case Studies", "url": "https://hal.archives-ouvertes.fr/hal-03641764", "abstract": "The paper presents the work carried out at the HCI Engineering Education workshop, organised by IFIP working groups 2.7/13.4 and 13.1. It describes four case studies of projects and exercises used in Human-Computer Interaction Engineering courses. We propose a common framework for presenting the case studies and describe the four case studies in detail. We then draw conclusions on the differences between the presented case studies that highlight the diversity and multidisciplinary aspects to be taught in a Human-Computer Interaction Engineering course. 
As future work, we plan to create a repository of case studies as a resource for teachers.", "year": 2022, "uri": "http://iihm.imag.fr/publication/CCM+22a/", "pages": "195-210", "bibtype": "unpublished", "id": 943, "abbr": "CCM+22a", "authors": { "1": { "first_name": "Sybille", "last_name": "Caffiau" }, "2": { "first_name": "José", "last_name": "Campos" }, "3": { "first_name": "Célia", "last_name": "Martinie" }, "4": { "first_name": "Laurence", "last_name": "Nigay" }, "5": { "first_name": "Philippe", "last_name": "Palanque" }, "6": { "first_name": "Lucio Davide", "last_name": "Spano" } }, "date": "2022-03-20", "type": "Autres publications", "booktitle": "Sense, Feel, Design : INTERACT 2021 IFIP TC 13 Workshops, Bari, Italy, August 30 – September 3, 2021, Revised Selected Papers", "type_publi": "autre" }, { "lang": "en", "bibtype": "unpublished", "type_publi": "autre", "title": "InSarViz, an open source interactive visualization tool for satellite SAR interferometry", "url": "https://express.converia.de/frontend/index.php?page_id=22746&additions_conferenceschedule_action=detail&additions_conferenceschedule_controller=paperList&pid=67245&hash=7db2286d651fb8f341cfa30b2afdbf5", "abstract": "Satellite SAR interferometry (InSAR) is a well-established technique in Earth Observation that is able to monitor ground displacement with a high precision (up to mm/year), combining high spatial resolution (up to a few m) and large coverage capabilities (up to continental scale) with a temporal resolution from a few days to a few weeks. It is used to study a wide range of phenomena (e.g. 
earthquakes, landslides, permafrost, volcanoes, glaciers dynamics, subsidence, building and infrastructure deformation, etc.).\r\n\r\nFor several reasons (data availability, non-intuitive radar image geometry, complexity of the processing, etc.), InSAR has long remained a niche technology and few free open-source tools have been dedicated to it compared to the widely-used multi-purpose optical imagery. Most tools are focused on data processing (e.g. ROI_PAC, DORIS, GMTSAR, StaMPS, ISCE, NSBAS, OTB, SNAP, LICSBAS), but very few are tailored to the specific visualization needs of the different InSAR products (interferograms, network of interferograms, datacube of InSAR time-series). Similarly, generic remote-sensing or GIS software like QGIS are also limited when used with InSAR data. Some visualization tools with dedicated InSAR functionality like the pioneer MDX software (provided by the Jet Propulsion Lab, https://software.nasa.gov/software/NPO-35238-1) were designed to visualize a single radar image or interferogram, but not large datasets. The ESA SNAP toolbox also offers nice additional features to switch from radar to ground geometry.\r\n\r\nHowever, new spatial missions, like the Sentinel-1 mission of the European program COPERNICUS with a systematic background acquisition strategy and an open data policy, provide unprecedented access to massive SAR data sets. Those new datasets allow to generate a network of thousands of interferograms over a same area, from which time-series analysis results in spatio-temporal data cube: a layer of this data cube is a 2D map that contains the displacement of each pixel of an image relative to the same pixel in the reference date image. A typical data cube size is 4000x6000x200, where 4000x6000 are the spatial dimensions (pixels) and 200 is a typical number of images taken since the beginning of the mission (2014). 
The aforementioned tools are not suited to manage such large and multifaceted datasets.\r\nIn particular, fluid and interactive data visualization of large, multidimensional datasets is non-trivial. If data cube visualization is a more generic problem and an active research topic in EO and beyond, some specifics of InSAR (radar geometry, wrapped phase, relative measurement in space and in time, multiple types of products useful for interpretation…) call for a new, dedicated visualization tool.\r\nWe started the InSARviz project with a survey of expert users in the French InSAR community covering different application domains (earthquake, volcano, landslides), and we identified a strong need for an application that allows to navigate interactively in spatio-temporal data cubes.\r\n\r\nSome of the requirements for the tools are generic (e.g., handling of big dataset, flexibility with respect to the input formats, smooth and user-driven navigation along the cube dimensions) and other more specific (relative comparison between points at different location, selection of a set of pixels and the simultaneous visualization of their behavior in both time and space, visualization of the data in radar and ground geometries…)\r\n\r\nTo meet those needs we designed the InSARViz application with the following characteristics:\r\n- A standalone application that takes advantage of the hardware (i.e. GPU, SSD hard drive, capability to run on cluster as a standalone application). We choose the Python language for its well-known advantages (interpreted language, readable, large community) and we use QT for the graphical user interface and OpenGL for the hardware graphical acceleration.\r\n- Using the GDAL library to load the data. This will allow to handle all the input formats that are managed by GDAL (e.g. GeoTIFF). 
Moreover, we designed a plug-in strategy that allows users to easily manage their own custom data formats.\r\n- We take advantage of Python/QT/OpenGL stack that ensures efficient user interaction with the data. For example, the temporal displacement profile of a point is drawn on the fly while the mouse is hovering over the corresponding pixel. The “on the fly” feature allows the user to identify points of interest. The user can then enter another mode in which they can select a set of points. The application will then draw the temporal profiles of the selected points, allowing a comparison of their behavior in time. This feature can be used when studying earthquakes as users can select points across a fault, allowing to have a general view of the behavior of the phenomenon at different places and times.\r\n- Multiple windows design allows the user to visualize at the same time data in radar geometry and in standard map projection, and also to localize a zoomed-in area on the global map. A layer management system is provided to quickly access files and their metadata.\r\n- Visualization tools commonly use aggregation methods (like e.g. smoothing, averaging, clustering) to drastically accelerate image display, but they thus induce observation and interpretation biases that are detrimental to the user. To avoid those biases, the tool focuses on keeping true to the original data and allowing the user to customize the rendering manually (colorscale, outliers selection, level-of-detail)\r\nIn our road map, we also plan to develop a new functionality to visualize interactively a network of interferograms.\r\n\r\nWe plan to demonstrate the capabilities of the InSARviz tool during the symposium.\r\n \r\nThe InSARviz project was supported by CNES, focused on SENTINEL1, and CNRS. 
", "authors": { "1": { "first_name": "Margaux", "last_name": "Mouchené" }, "2": { "first_name": "Renaud", "last_name": "Blanch" }, "3": { "first_name": "Erwan", "last_name": "Pathier" }, "4": { "first_name": "Franck", "last_name": "Thollard" } }, "year": 2022, "uri": "http://iihm.imag.fr/publication/MBP+22a/", "id": 946, "note": "Poster présenté à :\r\n", "abbr": "MBP+22a", "address": "Bonn, Germany", "date": "2022-05-23", "document": "http://iihm.imag.fr/publs/2022/2022_ESA_LPS.pdf", "type": "Autres publications", "booktitle": "European Spatial Agency Living Planet Symposium" }, { "lang": "en", "publisher": "Springer International Publishing", "doi": "https://doi.org/10.1007/978-3-030-98388-8_24", "title": "Teaching Human-Computer Interaction in the Software Engineering Master's Degree Program of the University Grenoble Alpes", "url": "https://hal.archives-ouvertes.fr/hal-03685075", "abstract": "The training of the Master's degree in software engineering of the University Grenoble Alpes covers foundational courseware in computer science (programming, complexity, database, networks, interactive systems) during the first year and more advanced engineering courses (in terms of cloud computing, large-scale data management, architecture, program testing and verification) during the second year. This paper focuses on two HCI courses as part of this curriculum in software engineering, and describes the content and the pedagogical approach we implemented for teaching HCI to computer science students. 
The paper explains why the authors adopt a tool-based approach for the first-year course on engineering HCI and a project-based approach with experimental evaluation for the second-year course on advanced interaction including multimodality.", "year": 2022, "uri": "http://iihm.imag.fr/publication/CN22a/", "pages": "270-278", "bibtype": "unpublished", "id": 944, "abbr": "CN22a", "authors": { "1": { "first_name": "Sybille", "last_name": "Caffiau" }, "2": { "first_name": "Laurence", "last_name": "Nigay" } }, "date": "2022-03-20", "type": "Autres publications", "booktitle": "INTERACT 2021: Sense, Feel, Design", "type_publi": "autre" }]);