publications([{ "lang": "en", "publisher": "IEEE Computer Society", "doi": "http://dx.doi.org/10.1109/ISMAR-AMH.2012.6483986", "title": "Interactions and systems for augmenting a live dance performance", "abstract": "The context of this work is to develop, adapt and integrate augmented reality related tools to enhance the emotion involved in cultural performances. Part of the work was dedicated to augmenting a stage in a live performance, with dance as an application case. In this paper, we present a milestone of this work, an augmented dance show that brings together several tools and technologies that were developed over the project's lifetime. This is the result of mixing an artistic process with scientific research and development. This augmented show brings to stage issues from the research fields of Human-Machine Interaction (HMI) and Augmented Reality (AR). Virtual elements are added on stage (visual and audio) and the dancer is able to interact with them in real-time, using different interaction techniques. The originality of this work is threefold. Firstly, we propose a set of movement-based interaction techniques that can be used independently on stage or in another context. In this set, some techniques are direct, while others go through a high level of abstraction. Namely, we performed movement-based emotion recognition on the dancer, and used the recognized emotions to generate emotional music pieces and emotional poses for a humanoid robot. Secondly, those interaction techniques rely on various interconnected systems that can be reassembled. We hence propose an integrated, interactive system for augmenting a live performance, a context where system failure is not tolerated. The final system can be adapted following the artist's preferences. Finally, those systems were validated through an on field experiment - the show itself - after which we gathered and analyzed the feedback from both the audience and the choreographer.", "authors": { "1": { "first_name": "Alexis", "last_name": "Clay" }, "2": { "first_name": "Nadine", "last_name": "Couture" }, "3": { "first_name": "Laurence", "last_name": "Nigay" }, "4": { "first_name": "Jean-Baptiste", "last_name": "de la Rivière" }, "5": { "first_name": "Jean-Claude", "last_name": "Martin" }, "6": { "first_name": "Matthieu", "last_name": "Courgeon" }, "7": { "first_name": "Myriam", "last_name": "Desainte-Catherine" }, "8": { "first_name": "Emmanuel", "last_name": "Orvain" }, "9": { "first_name": "Vincent", "last_name": "Girondel" }, "10": { "first_name": "Gaël", "last_name": "Domemger" } }, "year": 2012, "uri": "http://iihm.imag.fr/publication/CCN+12a/", "pages": "29-38", "bibtype": "inproceedings", "id": 606, "abbr": "CCN+12a", "address": "Atlanta, USA", "date": "2012-11-05", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "11th IEEE International Symposium on Mixes and Augmented Reality, ISMAR 2012, Atlanta USA, November 5-8 2012", "type_publi": "icolcomlec" }, { "lang": "fr", "publisher": "ACM New York, NY, USA", "type_publi": "colcomlec", "title": "eMotion : un outil pour personnaliser la reconnaissance d’émotions ", "abstract": "L’expression émotionnelle d’un individu est influencée par sa personnalité bien sûr, mais également par sa position, les contraintes qui s’exercent sur lui ou l’agencement de son environnement de travail. L’expression émotionnelle par le mouvement est particulièrement sensible à ces divers facteurs. Il est donc nécessaire de pouvoir paramétrer la reconnaissance d’émotions d’un sujet selon les particularités de l’environnement de travail évalué. Les systèmes actuels de reconnaissance d’émotions ne permettent pas une telle adaptabilité. Dans cet article nous présentons l’outil logiciel eMotion pour la reconnaissance d’émotions par le mouvement. eMotion présente des interfaces permettant à un évaluateur de paramétrer l’extraction de caractéristiques émotionnellement pertinentes à ses besoins.", "authors": { "1": { "first_name": "Alexis", "last_name": "Clay" }, "2": { "first_name": "Nadine", "last_name": "Couture" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2010, "uri": "http://iihm.imag.fr/publication/CCN10a/", "pages": "59-66", "bibtype": "inproceedings", "id": 514, "abbr": "CCN10a", "address": "Biarritz, France", "date": "2010-06-14", "document": "http://iihm.imag.fr/publs/2010/ERGOIA-2010-ClayCoutureNigay.pdf", "type": "Conférences nationales avec comité de lecture sur texte complet", "booktitle": "Actes de la conférence ERGO'IA 2010" }, { "lang": "fr", "publisher": "ACM New York, NY, USA", "type_publi": "colcomlec", "title": "Reconnaissance d'Emotions : un Point de Vue Interaction Multimodale ", "abstract": "Le domaine de la reconnaissance d'émotions atteint un stade de maturité où commence à émerger un besoin en termes d’ingénierie et en particulier de modèles de conception. Partant de cette constatation, nous proposons d’exploiter les résultats obtenus en ingénierie de l’interaction multimodale pour les appliquer et les adapter à la reconnaissance d’émotions, une émotion étant intrinséquement multimodale. En particulier, nous adaptons la définition d’une modalité d’interaction au cas des modalités mises en jeu lors de la reconnaissance passive d’émotions. Une modalité définie, nous étudions ensuite les relations entre modalités en nous appuyant sur les propriétés CARE de l’interaction multimodale. Nous soulignons le pouvoir génératif de CARE ainsi que les apports de notre modèle en ingénierie de la reconnaissance d’émotions. \r\n", "authors": { "1": { "first_name": "Alexis", "last_name": "Clay" }, "2": { "first_name": "Nadine", "last_name": "Couture" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2010, "uri": "http://iihm.imag.fr/publication/CCN10b/", "pages": "157-160", "bibtype": "inproceedings", "id": 523, "abbr": "CCN10b", "address": "Luxembourg", "date": "2010-09-20", "document": "http://iihm.imag.fr/publs/2010/IHM2010-Clay-Couture-Nigay.pdf", "type": "Conférences nationales avec comité de lecture sur texte complet", "booktitle": "Actes de la 22ème Conférence francophone sur l’Interaction Homme-Machine (IHM’2010, Luxembourg, Septembre 2010)" }, { "lang": "en", "publisher": "IEEE", "type_publi": "icolcomlec", "title": "Engineering affective computing: A unifying software architecture", "abstract": "In the field of affective computing, one of the most exciting motivations is to enable a computer to sense users' emotions. To achieve this goal an interactive application has to incorporate emotional sensitivity. Following an engineering approach, the key point is then to define a unifying software architecture that allows any interactive system to become emotionally sensitive. Most research focus on identifying and validating interpretation systems and/or emotional characteristics from different modalities. However, there is little focus on modeling generic software architecture for emotion recognition. Therefore, we propose an integrative approach and define such a generic software architecture based on the grounding theory of multimodality. We state that emotion recognition should be multimodal and serve as a tool for interaction. As such, we use results on multimodality in interactive applications to propose the emotion branch, a component-based architecture model for emotion recognition systems that integrates itself within general models for interactive systems. The emotion branch unifies existing emotion recognition applications architectures following the usual three-level schema: capturing signals from sensors, extracting and analyzing emotionally-relevant characteristics from the obtained data and interpreting these characteristics into an emotion. We illustrate the feasibility and the advantages of the emotion branch with a test case that we developed for gesture-based emotion recognition. \r\n", "authors": { "1": { "first_name": "Alexis", "last_name": "Clay" }, "2": { "first_name": "Nadine", "last_name": "Couture" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2009, "uri": "http://iihm.imag.fr/publication/CCN09a/", "pages": "1-6", "bibtype": "inproceedings", "id": 519, "editor": "IEEE", "address": "Amsterdam, The Netherlands", "date": "2009-09-10", "document": "http://iihm.imag.fr/publs/2009/ACII-2009ClayCoutureNigay.pdf", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "booktitle": "Proceedings of ACII’09, the 3rd International Conference on Affective Computing and Intelligent Interaction, Amsterdam, The Netherlands, Spetember 10-12, 2009, IEEE", "abbr": "CCN09a" }, { "lang": "en", "publisher": "ASME", "type_publi": "colloque", "title": "Towards an Architecture Model for Emotion Recognition in Interactive Systems: Application to a Ballet Dance Show", "abstract": "In the context of the very dynamic and challenging domain of affective computing, we adopt a software engineering point of view on emotion recognition in interactive systems. Our goal is threefold: first, developing an architecture model for emotion recognition. This architecture model emphasizes multimodality and reusability. Second, developing a prototype based on this architecture model. For this prototype we focus on gesture-based emotion recognition. And third, using this prototype for augmenting a ballet dance show. We hence describe an overview of our work so far, from the design of a flexible and multimodal emotion recognition architecture model, to a presentation of a gesture-based emotion recognition prototype based on this model, to a prototype that augments a ballet stage, taking emotions as inputs.", "authors": { "1": { "first_name": "Alexis", "last_name": "Clay" }, "2": { "first_name": "Nadine", "last_name": "Couture" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "year": 2009, "uri": "http://iihm.imag.fr/publication/CCN09b/", "pages": "19-24", "bibtype": "inproceedings", "id": 530, "abbr": "CCN09b", "address": "Chalon-sur-Saône, France", "date": "2009-02-25", "document": "http://iihm.imag.fr/publs/2009/WINVR2009.pdf", "type": "Autres conférences et colloques avec actes", "booktitle": "Proceedings of WinVR'09, the ASME/AFM 2009 World Conference on Innovative Virtual Reality - World Conference on Innovative Virtual Reality (WinVR'09)" }, { "bibtype": "inproceedings", "type_publi": "icolcomlec", "title": "Emotion capture based on body postures and movements", "booktitle": "Conference Proceedings of TIGERA 2007", "year": 2007, "uri": "http://iihm.imag.fr/publication/CCN07a/", "note": "27 pages.", "abbr": "CCN07a", "authors": { "1": { "first_name": "Alexis", "last_name": "Clay" }, "2": { "first_name": "Nadine", "last_name": "Couture" }, "3": { "first_name": "Laurence", "last_name": "Nigay" } }, "date": "2007-03-12", "document": "http://iihm.imag.fr/publs/2007/TIGERA2007.pdf", "type": "Conférences internationales de large diffusion avec comité de lecture sur texte complet", "id": 471 }]);